diff --git a/.evergreen/combine-coverage.sh b/.evergreen/combine-coverage.sh index 92d2f1f1f8..36266c1842 100755 --- a/.evergreen/combine-coverage.sh +++ b/.evergreen/combine-coverage.sh @@ -3,12 +3,11 @@ # Coverage combine merges (and removes) all the coverage files and # generates a new .coverage file in the current directory. -set -o xtrace # Write all commands first to stderr -set -o errexit # Exit the script with error if any of the commands fail +set -eu . .evergreen/utils.sh -if [ -z "$PYTHON_BINARY" ]; then +if [ -z "${PYTHON_BINARY:-}" ]; then PYTHON_BINARY=$(find_python3) fi diff --git a/.evergreen/config.yml b/.evergreen/config.yml index 5c0e2983ea..91fa442775 100644 --- a/.evergreen/config.yml +++ b/.evergreen/config.yml @@ -25,632 +25,13 @@ timeout: binary: ls -la include: + - filename: .evergreen/generated_configs/functions.yml - filename: .evergreen/generated_configs/tasks.yml - filename: .evergreen/generated_configs/variants.yml -functions: - "fetch source": - # Executes clone and applies the submitted patch, if any - - command: git.get_project - params: - directory: "src" - # Applies the subitted patch, if any - # Deprecated. Should be removed. But still needed for certain agents (ZAP) - - command: git.apply_patch - - "setup system": - # Make an evergreen expansion file with dynamic values - - command: subprocess.exec - params: - include_expansions_in_env: ["is_patch", "project", "version_id", "AUTH", "SSL", "test_encryption", "test_encryption_pyopenssl", "test_crypt_shared", "test_pyopenssl", "SETDEFAULTENCODING", "test_loadbalancer", "test_serverless", "SKIP_CSOT_TESTS", "MONGODB_STARTED", "DISABLE_TEST_COMMANDS", "GREEN_FRAMEWORK", "NO_EXT", "COVERAGE", "COMPRESSORS", "TEST_SUITES", "MONGODB_API_VERSION", "skip_crypt_shared", "VERSION", "TOPOLOGY", "STORAGE_ENGINE", "ORCHESTRATION_FILE", "REQUIRE_API_VERSION", "LOAD_BALANCER", "skip_web_identity_auth_test", "skip_ECS_auth_test"] - binary: bash - working_dir: "src" - args: - - .evergreen/scripts/configure-env.sh - # Load the expansion file to make an evergreen variable with the current unique version - - command: expansions.update - params: - file: src/expansion.yml - - command: subprocess.exec - params: - include_expansions_in_env: ["PROJECT_DIRECTORY", "DRIVERS_TOOLS"] - binary: bash - args: - - src/.evergreen/scripts/prepare-resources.sh - # Run drivers-evergreen-tools system setup - - command: subprocess.exec - params: - include_expansions_in_env: ["PROJECT_DIRECTORY", "DRIVERS_TOOLS"] - binary: bash - args: - - ${DRIVERS_TOOLS}/.evergreen/setup.sh - - "upload coverage" : - - command: ec2.assume_role - params: - role_arn: ${assume_role_arn} - - command: s3.put - params: - aws_key: ${AWS_ACCESS_KEY_ID} - aws_secret: ${AWS_SECRET_ACCESS_KEY} - aws_session_token: ${AWS_SESSION_TOKEN} - local_file: src/.coverage - optional: true - # Upload the coverage report for all tasks in a single build to the same directory. 
- remote_file: coverage/${revision}/${version_id}/coverage/coverage.${build_variant}.${task_name} - bucket: ${bucket_name} - permissions: public-read - content_type: text/html - display_name: "Raw Coverage Report" - - "download and merge coverage" : - - command: ec2.assume_role - params: - role_arn: ${assume_role_arn} - - command: subprocess.exec - params: - silent: true - binary: bash - working_dir: "src" - include_expansions_in_env: ["AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN"] - args: - - .evergreen/scripts/download-and-merge-coverage.sh - - ${bucket_name} - - ${revision} - - ${version_id} - - command: subprocess.exec - params: - working_dir: "src" - binary: bash - args: - - .evergreen/combine-coverage.sh - # Upload the resulting html coverage report. - - command: subprocess.exec - params: - silent: true - binary: bash - working_dir: "src" - include_expansions_in_env: ["AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN"] - args: - - .evergreen/scripts/upload-coverage-report.sh - - ${bucket_name} - - ${revision} - - ${version_id} - # Attach the index.html with s3.put so it shows up in the Evergreen UI. - - command: s3.put - params: - aws_key: ${AWS_ACCESS_KEY_ID} - aws_secret: ${AWS_SECRET_ACCESS_KEY} - aws_session_token: ${AWS_SESSION_TOKEN} - local_file: src/htmlcov/index.html - remote_file: coverage/${revision}/${version_id}/htmlcov/index.html - bucket: ${bucket_name} - permissions: public-read - content_type: text/html - display_name: "Coverage Report HTML" - - - "upload mo artifacts": - - command: ec2.assume_role - params: - role_arn: ${assume_role_arn} - - command: archive.targz_pack - params: - target: "mongo-coredumps.tgz" - source_dir: "./" - include: - - "./**.core" - - "./**.mdmp" # Windows: minidumps - - command: s3.put - params: - aws_key: ${AWS_ACCESS_KEY_ID} - aws_secret: ${AWS_SECRET_ACCESS_KEY} - aws_session_token: ${AWS_SESSION_TOKEN} - local_file: mongo-coredumps.tgz - remote_file: ${build_variant}/${revision}/${version_id}/${build_id}/coredumps/${task_id}-${execution}-mongodb-coredumps.tar.gz - bucket: ${bucket_name} - permissions: public-read - content_type: ${content_type|application/gzip} - display_name: Core Dumps - Execution - optional: true - - command: s3.put - params: - aws_key: ${AWS_ACCESS_KEY_ID} - aws_secret: ${AWS_SECRET_ACCESS_KEY} - aws_session_token: ${AWS_SESSION_TOKEN} - local_file: ${DRIVERS_TOOLS}/.evergreen/test_logs.tar.gz - remote_file: ${build_variant}/${revision}/${version_id}/${build_id}/logs/${task_id}-${execution}-drivers-tools-logs.tar.gz - bucket: ${bucket_name} - permissions: public-read - content_type: ${content_type|application/x-gzip} - display_name: "drivers-tools-logs.tar.gz" - - "upload working dir": - - command: ec2.assume_role - params: - role_arn: ${assume_role_arn} - - command: archive.targz_pack - params: - target: "working-dir.tar.gz" - source_dir: ${PROJECT_DIRECTORY}/ - include: - - "./**" - - command: s3.put - params: - aws_key: ${AWS_ACCESS_KEY_ID} - aws_secret: ${AWS_SECRET_ACCESS_KEY} - aws_session_token: ${AWS_SESSION_TOKEN} - local_file: working-dir.tar.gz - remote_file: ${build_variant}/${revision}/${version_id}/${build_id}/artifacts/${task_id}-${execution}-working-dir.tar.gz - bucket: ${bucket_name} - permissions: public-read - content_type: ${content_type|application/x-gzip} - display_name: "working-dir.tar.gz" - - command: archive.targz_pack - params: - target: "drivers-dir.tar.gz" - source_dir: ${DRIVERS_TOOLS} - include: - - "./**" - exclude_files: - # Windows cannot read the 
mongod *.lock files because they are locked. - - "*.lock" - - command: s3.put - params: - aws_key: ${AWS_ACCESS_KEY_ID} - aws_secret: ${AWS_SECRET_ACCESS_KEY} - aws_session_token: ${AWS_SESSION_TOKEN} - local_file: drivers-dir.tar.gz - remote_file: ${build_variant}/${revision}/${version_id}/${build_id}/artifacts/${task_id}-${execution}-drivers-dir.tar.gz - bucket: ${bucket_name} - permissions: public-read - content_type: ${content_type|application/x-gzip} - display_name: "drivers-dir.tar.gz" - - "upload test results": - - command: attach.results - params: - file_location: "${DRIVERS_TOOLS}/results.json" - - command: attach.xunit_results - params: - file: "src/xunit-results/TEST-*.xml" - - "bootstrap mongo-orchestration": - - command: subprocess.exec - params: - binary: bash - include_expansions_in_env: ["VERSION", "TOPOLOGY", "AUTH", "SSL", "ORCHESTRATION_FILE", "LOAD_BALANCER"] - args: - - src/.evergreen/scripts/run-with-env.sh - - src/.evergreen/scripts/bootstrap-mongo-orchestration.sh - - command: expansions.update - params: - file: mo-expansion.yml - - command: expansions.update - params: - updates: - - key: MONGODB_STARTED - value: "1" - - "bootstrap data lake": - - command: subprocess.exec - type: setup - params: - binary: bash - args: - - ${DRIVERS_TOOLS}/.evergreen/atlas_data_lake/pull-mongohouse-image.sh - - command: subprocess.exec - type: setup - params: - binary: bash - args: - - ${DRIVERS_TOOLS}/.evergreen/atlas_data_lake/run-mongohouse-image.sh - - "stop mongo-orchestration": - - command: subprocess.exec - params: - binary: bash - args: - - ${DRIVERS_TOOLS}/.evergreen/stop-orchestration.sh - - "run mod_wsgi tests": - - command: subprocess.exec - type: test - params: - include_expansions_in_env: [MOD_WSGI_VERSION, MOD_WSGI_EMBEDDED, "PYTHON_BINARY"] - working_dir: "src" - binary: bash - args: - - .evergreen/scripts/run-with-env.sh - - .evergreen/scripts/run-mod-wsgi-tests.sh - - "run mockupdb tests": - - command: subprocess.exec - type: test - params: - include_expansions_in_env: ["PYTHON_BINARY"] - working_dir: "src" - binary: bash - args: - - .evergreen/scripts/run-with-env.sh - - .evergreen/scripts/run-mockupdb-tests.sh - - "run doctests": - - command: subprocess.exec - type: test - params: - include_expansions_in_env: [ "PYTHON_BINARY" ] - working_dir: "src" - binary: bash - args: - - .evergreen/scripts/run-with-env.sh - - .evergreen/scripts/run-doctests.sh - - "run tests": - - command: subprocess.exec - params: - include_expansions_in_env: ["TEST_DATA_LAKE", "PYTHON_BINARY", "AUTH", "SSL", "TEST_INDEX_MANAGEMENT", "CRYPT_SHARED_LIB_PATH", "test_encryption", "test_encryption_pyopenssl", "test_crypt_shared", "test_pyopenssl", "test_loadbalancer", "test_serverless", "ORCHESTRATION_FILE"] - binary: bash - working_dir: "src" - args: - - .evergreen/scripts/setup-tests.sh - - command: subprocess.exec - params: - working_dir: "src" - binary: bash - background: true - include_expansions_in_env: ["AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN"] - args: - - .evergreen/scripts/run-with-env.sh - - .evergreen/scripts/setup-encryption.sh - - command: subprocess.exec - type: test - params: - working_dir: "src" - binary: bash - include_expansions_in_env: ["AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN", "PYTHON_BINARY", "TEST_DATA_LAKE", "TEST_INDEX_MANAGEMENT", "CRYPT_SHARED_LIB_PATH", "SINGLE_MONGOS_LB_URI", "MULTI_MONGOS_LB_URI", "TEST_SUITES"] - args: - - .evergreen/scripts/run-with-env.sh - - .evergreen/scripts/run-tests.sh - - "run enterprise auth 
tests": - - command: subprocess.exec - type: test - params: - binary: bash - working_dir: "src" - include_expansions_in_env: ["AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN", "PYTHON_BINARY"] - args: - - .evergreen/scripts/run-with-env.sh - - .evergreen/scripts/run-enterprise-auth-tests.sh - - "run atlas tests": - - command: subprocess.exec - type: test - params: - binary: bash - include_expansions_in_env: ["AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN", "PYTHON_BINARY"] - working_dir: "src" - args: - - .evergreen/scripts/run-with-env.sh - - .evergreen/scripts/run-atlas-tests.sh - - "get aws auth secrets": - - command: subprocess.exec - type: test - params: - include_expansions_in_env: ["AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN"] - binary: bash - args: - - ${DRIVERS_TOOLS}/.evergreen/auth_aws/setup-secrets.sh - - "run aws auth test with regular aws credentials": - - command: subprocess.exec - params: - include_expansions_in_env: ["TEST_DATA_LAKE", "TEST_INDEX_MANAGEMENT", "CRYPT_SHARED_LIB_PATH", "test_encryption", "test_encryption_pyopenssl", "test_crypt_shared", "test_pyopenssl", "test_loadbalancer", "test_serverless", "ORCHESTRATION_FILE"] - binary: bash - working_dir: "src" - args: - - .evergreen/scripts/setup-tests.sh - - command: subprocess.exec - type: test - params: - include_expansions_in_env: ["DRIVERS_TOOLS", "skip_EC2_auth_test"] - binary: bash - working_dir: "src" - args: - - .evergreen/scripts/run-with-env.sh - - .evergreen/scripts/run-mongodb-aws-test.sh - - regular - - "run aws auth test with assume role credentials": - - command: subprocess.exec - params: - include_expansions_in_env: [ "TEST_DATA_LAKE", "TEST_INDEX_MANAGEMENT", "CRYPT_SHARED_LIB_PATH", "test_encryption", "test_encryption_pyopenssl", "test_crypt_shared", "test_pyopenssl", "test_loadbalancer", "test_serverless", "ORCHESTRATION_FILE" ] - binary: bash - working_dir: "src" - args: - - .evergreen/scripts/setup-tests.sh - - command: subprocess.exec - type: test - params: - include_expansions_in_env: ["DRIVERS_TOOLS", "skip_EC2_auth_test"] - binary: bash - working_dir: "src" - args: - - .evergreen/scripts/run-with-env.sh - - .evergreen/scripts/run-mongodb-aws-test.sh - - assume-role - - "run aws auth test with aws EC2 credentials": - - command: subprocess.exec - params: - include_expansions_in_env: [ "TEST_DATA_LAKE", "TEST_INDEX_MANAGEMENT", "CRYPT_SHARED_LIB_PATH", "test_encryption", "test_encryption_pyopenssl", "test_crypt_shared", "test_pyopenssl", "test_loadbalancer", "test_serverless", "ORCHESTRATION_FILE" ] - binary: bash - working_dir: "src" - args: - - .evergreen/scripts/setup-tests.sh - - command: subprocess.exec - type: test - params: - include_expansions_in_env: ["DRIVERS_TOOLS", "skip_EC2_auth_test"] - binary: bash - working_dir: "src" - args: - - .evergreen/scripts/run-with-env.sh - - .evergreen/scripts/run-mongodb-aws-test.sh - - ec2 - - "run aws auth test with aws web identity credentials": - - command: subprocess.exec - params: - include_expansions_in_env: [ "TEST_DATA_LAKE", "TEST_INDEX_MANAGEMENT", "CRYPT_SHARED_LIB_PATH", "test_encryption", "test_encryption_pyopenssl", "test_crypt_shared", "test_pyopenssl", "test_loadbalancer", "test_serverless", "ORCHESTRATION_FILE" ] - binary: bash - working_dir: "src" - args: - - .evergreen/scripts/setup-tests.sh - - # Test with and without AWS_ROLE_SESSION_NAME set. 
- - command: subprocess.exec - type: test - params: - include_expansions_in_env: ["DRIVERS_TOOLS", "skip_EC2_auth_test"] - binary: bash - working_dir: "src" - args: - - .evergreen/scripts/run-with-env.sh - - .evergreen/scripts/run-mongodb-aws-test.sh - - web-identity - - command: subprocess.exec - type: test - params: - include_expansions_in_env: [ "DRIVERS_TOOLS", "skip_EC2_auth_test" ] - binary: bash - working_dir: "src" - env: - AWS_ROLE_SESSION_NAME: test - args: - - .evergreen/scripts/run-with-env.sh - - .evergreen/scripts/run-mongodb-aws-test.sh - - web-identity - - "run aws auth test with aws credentials as environment variables": - - command: subprocess.exec - params: - include_expansions_in_env: [ "TEST_DATA_LAKE", "TEST_INDEX_MANAGEMENT", "CRYPT_SHARED_LIB_PATH", "test_encryption", "test_encryption_pyopenssl", "test_crypt_shared", "test_pyopenssl", "test_loadbalancer", "test_serverless", "ORCHESTRATION_FILE" ] - binary: bash - working_dir: "src" - args: - - .evergreen/scripts/setup-tests.sh - - command: subprocess.exec - type: test - params: - include_expansions_in_env: ["DRIVERS_TOOLS", "skip_EC2_auth_test"] - binary: bash - working_dir: "src" - args: - - .evergreen/scripts/run-with-env.sh - - .evergreen/scripts/run-mongodb-aws-test.sh - - env-creds - - "run aws auth test with aws credentials and session token as environment variables": - - command: subprocess.exec - params: - include_expansions_in_env: [ "TEST_DATA_LAKE", "TEST_INDEX_MANAGEMENT", "CRYPT_SHARED_LIB_PATH", "test_encryption", "test_encryption_pyopenssl", "test_crypt_shared", "test_pyopenssl", "test_loadbalancer", "test_serverless", "ORCHESTRATION_FILE" ] - binary: bash - working_dir: "src" - args: - - .evergreen/scripts/setup-tests.sh - - command: subprocess.exec - type: test - params: - include_expansions_in_env: ["DRIVERS_TOOLS", "skip_EC2_auth_test"] - binary: bash - working_dir: "src" - args: - - .evergreen/scripts/run-with-env.sh - - .evergreen/scripts/run-mongodb-aws-test.sh - - session-creds - - "run oidc auth test with test credentials": - - command: subprocess.exec - params: - include_expansions_in_env: [ "TEST_DATA_LAKE", "TEST_INDEX_MANAGEMENT", "CRYPT_SHARED_LIB_PATH", "test_encryption", "test_encryption_pyopenssl", "test_crypt_shared", "test_pyopenssl", "test_loadbalancer", "test_serverless", "ORCHESTRATION_FILE" ] - binary: bash - working_dir: "src" - args: - - .evergreen/scripts/setup-tests.sh - - command: subprocess.exec - type: test - params: - working_dir: "src" - binary: bash - include_expansions_in_env: ["DRIVERS_TOOLS", "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN"] - args: - - .evergreen/run-mongodb-oidc-test.sh - - "run oidc k8s auth test": - - command: subprocess.exec - type: test - params: - binary: bash - working_dir: src - env: - OIDC_ENV: k8s - include_expansions_in_env: ["DRIVERS_TOOLS", "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN", "K8S_VARIANT"] - args: - - ${PROJECT_DIRECTORY}/.evergreen/run-mongodb-oidc-remote-test.sh - - "run aws ECS auth test": - - command: subprocess.exec - type: test - params: - binary: bash - working_dir: "src" - args: - - .evergreen/scripts/run-with-env.sh - - .evergreen/scripts/run-aws-ecs-auth-test.sh - - "cleanup": - - command: subprocess.exec - params: - binary: bash - working_dir: "src" - args: - - .evergreen/scripts/run-with-env.sh - - .evergreen/scripts/cleanup.sh - - "teardown system": - - command: subprocess.exec - params: - binary: bash - working_dir: "src" - args: - # Ensure the instance profile is 
reassigned for aws tests. - - ${DRIVERS_TOOLS}/.evergreen/auth_aws/teardown.sh - - command: subprocess.exec - params: - binary: bash - working_dir: "src" - args: - - ${DRIVERS_TOOLS}/.evergreen/csfle/teardown.sh - - command: subprocess.exec - params: - binary: bash - working_dir: "src" - args: - - ${DRIVERS_TOOLS}/.evergreen/ocsp/teardown.sh - - command: subprocess.exec - params: - binary: bash - working_dir: "src" - args: - - ${DRIVERS_TOOLS}/.evergreen/teardown.sh - - "install dependencies": - - command: subprocess.exec - params: - binary: bash - working_dir: "src" - args: - - .evergreen/scripts/run-with-env.sh - - .evergreen/scripts/install-dependencies.sh - - "assume ec2 role": - - command: ec2.assume_role - params: - role_arn: ${aws_test_secrets_role} - - "setup atlas": - - command: subprocess.exec - params: - binary: bash - include_expansions_in_env: ["task_id", "execution"] - env: - MONGODB_VERSION: "7.0" - LAMBDA_STACK_NAME: dbx-python-lambda - args: - - ${DRIVERS_TOOLS}/.evergreen/atlas/setup-atlas-cluster.sh - - command: expansions.update - params: - file: atlas-expansion.yml - - "run-ocsp-test": - - command: subprocess.exec - params: - include_expansions_in_env: [ "TEST_DATA_LAKE", "TEST_INDEX_MANAGEMENT", "CRYPT_SHARED_LIB_PATH", "test_encryption", "test_encryption_pyopenssl", "test_crypt_shared", "test_pyopenssl", "test_loadbalancer", "test_serverless", "ORCHESTRATION_FILE" ] - binary: bash - working_dir: "src" - args: - - .evergreen/scripts/setup-tests.sh - - command: subprocess.exec - type: test - params: - include_expansions_in_env: ["OCSP_ALGORITHM", "OCSP_TLS_SHOULD_SUCCEED", "PYTHON_BINARY"] - binary: bash - working_dir: "src" - args: - - .evergreen/scripts/run-with-env.sh - - .evergreen/scripts/run-ocsp-test.sh - - "run-ocsp-server": - - command: subprocess.exec - params: - background: true - binary: bash - include_expansions_in_env: [SERVER_TYPE, OCSP_ALGORITHM] - args: - - ${DRIVERS_TOOLS}/.evergreen/ocsp/setup.sh - - "run load-balancer": - - command: subprocess.exec - params: - binary: bash - include_expansions_in_env: ["MONGODB_URI"] - args: - - src/.evergreen/scripts/run-with-env.sh - - src/.evergreen/scripts/run-load-balancer.sh - - command: expansions.update - params: - file: lb-expansion.yml - - "stop load-balancer": - - command: subprocess.exec - params: - binary: bash - args: - - src/.evergreen/scripts/stop-load-balancer.sh - - "teardown atlas": - - command: subprocess.exec - params: - binary: bash - args: - - ${DRIVERS_TOOLS}/.evergreen/atlas/teardown-atlas-cluster.sh - - "run perf tests": - - command: subprocess.exec - type: test - params: - working_dir: "src" - binary: bash - args: - - .evergreen/scripts/run-with-env.sh - - .evergreen/scripts/run-perf-tests.sh - - "attach benchmark test results": - - command: attach.results - params: - file_location: src/report.json - - "send dashboard data": - - command: perf.send - params: - file: src/results.json - pre: - func: "fetch source" - func: "setup system" - - func: "install dependencies" - func: "assume ec2 role" post: @@ -660,1033 +41,24 @@ post: - func: "upload coverage" - func: "upload mo artifacts" - func: "upload test results" - - func: "stop mongo-orchestration" - func: "cleanup" -task_groups: - - name: serverless_task_group - setup_group_can_fail_task: true - setup_group_timeout_secs: 1800 # 30 minutes - setup_group: - - func: "fetch source" - - func: "setup system" - - command: subprocess.exec - params: - binary: bash - env: - VAULT_NAME: ${VAULT_NAME} - args: - - 
${DRIVERS_TOOLS}/.evergreen/serverless/create-instance.sh - teardown_task: - - command: subprocess.exec - params: - binary: bash - args: - - ${DRIVERS_TOOLS}/.evergreen/serverless/delete-instance.sh - - func: "upload test results" - tasks: - - ".serverless" - - - name: testgcpkms_task_group - setup_group_can_fail_task: true - setup_group_timeout_secs: 1800 # 30 minutes - setup_group: - - func: fetch source - - func: setup system - - command: subprocess.exec - params: - binary: bash - args: - - ${DRIVERS_TOOLS}/.evergreen/csfle/gcpkms/create-and-setup-instance.sh - teardown_task: - - command: subprocess.exec - params: - binary: bash - args: - - ${DRIVERS_TOOLS}/.evergreen/csfle/gcpkms/delete-instance.sh - - func: "upload test results" - tasks: - - testgcpkms-task - - - name: testazurekms_task_group - setup_group: - - func: fetch source - - func: setup system - - command: subprocess.exec - params: - binary: bash - env: - AZUREKMS_VMNAME_PREFIX: "PYTHON_DRIVER" - args: - - ${DRIVERS_TOOLS}/.evergreen/csfle/azurekms/create-and-setup-vm.sh - teardown_group: - - command: subprocess.exec - params: - binary: bash - args: - - ${DRIVERS_TOOLS}/.evergreen/csfle/azurekms/delete-vm.sh - - func: "upload test results" - setup_group_can_fail_task: true - teardown_task_can_fail_task: true - setup_group_timeout_secs: 1800 - tasks: - - testazurekms-task - - - name: testazureoidc_task_group - setup_group: - - func: fetch source - - func: setup system - - command: subprocess.exec - params: - binary: bash - env: - AZUREOIDC_VMNAME_PREFIX: "PYTHON_DRIVER" - args: - - ${DRIVERS_TOOLS}/.evergreen/auth_oidc/azure/create-and-setup-vm.sh - teardown_task: - - command: subprocess.exec - params: - binary: bash - args: - - ${DRIVERS_TOOLS}/.evergreen/auth_oidc/azure/delete-vm.sh - setup_group_can_fail_task: true - setup_group_timeout_secs: 1800 - tasks: - - oidc-auth-test-azure - - - name: testgcpoidc_task_group - setup_group: - - func: fetch source - - func: setup system - - command: subprocess.exec - params: - binary: bash - env: - GCPOIDC_VMNAME_PREFIX: "PYTHON_DRIVER" - args: - - ${DRIVERS_TOOLS}/.evergreen/auth_oidc/gcp/setup.sh - teardown_task: - - command: subprocess.exec - params: - binary: bash - args: - - ${DRIVERS_TOOLS}/.evergreen/auth_oidc/gcp/teardown.sh - setup_group_can_fail_task: true - setup_group_timeout_secs: 1800 - tasks: - - oidc-auth-test-gcp - - - name: testk8soidc_task_group - setup_group: - - func: fetch source - - func: setup system - - command: ec2.assume_role - params: - role_arn: ${aws_test_secrets_role} - duration_seconds: 1800 - - command: subprocess.exec - params: - binary: bash - args: - - ${DRIVERS_TOOLS}/.evergreen/auth_oidc/k8s/setup.sh - teardown_task: - - command: subprocess.exec - params: - binary: bash - args: - - ${DRIVERS_TOOLS}/.evergreen/auth_oidc/k8s/teardown.sh - setup_group_can_fail_task: true - setup_group_timeout_secs: 1800 - tasks: - - oidc-auth-test-k8s - - - name: testoidc_task_group - setup_group: - - func: fetch source - - func: setup system - - func: "assume ec2 role" - - command: subprocess.exec - params: - binary: bash - include_expansions_in_env: ["AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN"] - args: - - ${DRIVERS_TOOLS}/.evergreen/auth_oidc/setup.sh - teardown_task: - - command: subprocess.exec - params: - binary: bash - args: - - ${DRIVERS_TOOLS}/.evergreen/auth_oidc/teardown.sh - setup_group_can_fail_task: true - setup_group_timeout_secs: 1800 - tasks: - - oidc-auth-test - - - name: test_aws_lambda_task_group - setup_group: - - func: fetch 
source - - func: setup system - - func: setup atlas - teardown_task: - - func: teardown atlas - setup_group_can_fail_task: true - setup_group_timeout_secs: 1800 - tasks: - - test-aws-lambda-deployed - - - name: test_atlas_task_group_search_indexes - setup_group: - - func: fetch source - - func: setup system - - func: setup atlas - teardown_task: - - func: teardown atlas - setup_group_can_fail_task: true - setup_group_timeout_secs: 1800 - tasks: - - test-search-index-helpers - tasks: - # Wildcard task. Do you need to find out what tools are available and where? - # Throw it here, and execute this task on all buildvariants - - name: getdata - commands: - - command: subprocess.exec - binary: bash - type: test - params: - args: - - src/.evergreen/scripts/run-getdata.sh -# Standard test tasks {{{ - - - name: "mockupdb" - tags: ["mockupdb"] - commands: - - func: "run mockupdb tests" - - - name: "doctests" - tags: ["doctests"] - commands: - - func: "bootstrap mongo-orchestration" - vars: - VERSION: "latest" - TOPOLOGY: "server" - - func: "run doctests" - - - name: "test-serverless" - tags: ["serverless"] - commands: - - func: "run tests" - - - name: "test-enterprise-auth" - tags: ["enterprise-auth"] - commands: - - func: "bootstrap mongo-orchestration" - vars: - VERSION: "latest" - TOPOLOGY: "server" - - func: "assume ec2 role" - - func: "run enterprise auth tests" - - - name: "test-search-index-helpers" - commands: - - func: "bootstrap mongo-orchestration" - vars: - VERSION: "6.0" - TOPOLOGY: "replica_set" - - func: "run tests" - vars: - TEST_INDEX_MANAGEMENT: "1" - - - name: "mod-wsgi-standalone" - tags: ["mod_wsgi"] - commands: - - func: "bootstrap mongo-orchestration" - vars: - VERSION: "latest" - TOPOLOGY: "server" - - func: "run mod_wsgi tests" - - - name: "mod-wsgi-replica-set" - tags: ["mod_wsgi"] - commands: - - func: "bootstrap mongo-orchestration" - vars: - VERSION: "latest" - TOPOLOGY: "replica_set" - - func: "run mod_wsgi tests" - - - name: "mod-wsgi-embedded-mode-standalone" - tags: ["mod_wsgi"] - commands: - - func: "bootstrap mongo-orchestration" - vars: - VERSION: "latest" - TOPOLOGY: "server" - - func: "run mod_wsgi tests" - vars: - MOD_WSGI_EMBEDDED: "1" - - - name: "mod-wsgi-embedded-mode-replica-set" - tags: ["mod_wsgi"] - commands: - - func: "bootstrap mongo-orchestration" - vars: - VERSION: "latest" - TOPOLOGY: "replica_set" - - func: "run mod_wsgi tests" - vars: - MOD_WSGI_EMBEDDED: "1" - - - name: "no-server" - tags: ["no-server"] - commands: - - func: "run tests" - - - name: "atlas-connect" - tags: ["atlas-connect"] - commands: - - func: "assume ec2 role" - - func: "run atlas tests" - - - name: atlas-data-lake-tests - commands: - - func: "bootstrap data lake" - - func: "run tests" - vars: - TEST_DATA_LAKE: "true" - - - name: "test-aws-lambda-deployed" - commands: - - func: "install dependencies" - - command: ec2.assume_role - params: - role_arn: ${LAMBDA_AWS_ROLE_ARN} - duration_seconds: 3600 - - command: subprocess.exec - params: - working_dir: src - binary: bash - add_expansions_to_env: true - args: - - .evergreen/run-deployed-lambda-aws-tests.sh - env: - TEST_LAMBDA_DIRECTORY: ${PROJECT_DIRECTORY}/test/lambda - - - name: test-ocsp-rsa-valid-cert-server-staples - tags: ["ocsp", "ocsp-rsa", "ocsp-staple"] - commands: - - func: run-ocsp-server - vars: - OCSP_ALGORITHM: "rsa" - SERVER_TYPE: "valid" - - func: "bootstrap mongo-orchestration" - vars: - ORCHESTRATION_FILE: "rsa-basic-tls-ocsp-mustStaple.json" - - func: run-ocsp-test - vars: - OCSP_ALGORITHM: "rsa" - 
OCSP_TLS_SHOULD_SUCCEED: "true" - - - name: test-ocsp-rsa-invalid-cert-server-staples - tags: ["ocsp", "ocsp-rsa", "ocsp-staple"] - commands: - - func: run-ocsp-server - vars: - OCSP_ALGORITHM: "rsa" - SERVER_TYPE: "revoked" - - func: "bootstrap mongo-orchestration" - vars: - ORCHESTRATION_FILE: "rsa-basic-tls-ocsp-mustStaple.json" - - func: run-ocsp-test - vars: - OCSP_ALGORITHM: "rsa" - OCSP_TLS_SHOULD_SUCCEED: "false" - - - name: test-ocsp-rsa-valid-cert-server-does-not-staple - tags: ["ocsp", "ocsp-rsa"] - commands: - - func: run-ocsp-server - vars: - OCSP_ALGORITHM: "rsa" - SERVER_TYPE: valid - - func: "bootstrap mongo-orchestration" - vars: - ORCHESTRATION_FILE: "rsa-basic-tls-ocsp-disableStapling.json" - - func: run-ocsp-test - vars: - OCSP_ALGORITHM: "rsa" - OCSP_TLS_SHOULD_SUCCEED: "true" - - - name: test-ocsp-rsa-invalid-cert-server-does-not-staple - tags: ["ocsp", "ocsp-rsa"] - commands: - - func: run-ocsp-server - vars: - OCSP_ALGORITHM: "rsa" - SERVER_TYPE: revoked - - func: "bootstrap mongo-orchestration" - vars: - ORCHESTRATION_FILE: "rsa-basic-tls-ocsp-disableStapling.json" - - func: run-ocsp-test - vars: - OCSP_ALGORITHM: "rsa" - OCSP_TLS_SHOULD_SUCCEED: "false" - - - name: test-ocsp-rsa-soft-fail - tags: ["ocsp", "ocsp-rsa"] - commands: - - func: "bootstrap mongo-orchestration" - vars: - ORCHESTRATION_FILE: "rsa-basic-tls-ocsp-disableStapling.json" - - func: run-ocsp-test - vars: - OCSP_ALGORITHM: "rsa" - OCSP_TLS_SHOULD_SUCCEED: "true" - - - name: test-ocsp-rsa-malicious-invalid-cert-mustStaple-server-does-not-staple - tags: ["ocsp", "ocsp-rsa"] - commands: - - func: run-ocsp-server - vars: - OCSP_ALGORITHM: "rsa" - SERVER_TYPE: revoked - - func: "bootstrap mongo-orchestration" - vars: - ORCHESTRATION_FILE: "rsa-basic-tls-ocsp-mustStaple-disableStapling.json" - - func: run-ocsp-test - vars: - OCSP_ALGORITHM: "rsa" - OCSP_TLS_SHOULD_SUCCEED: "false" - - - name: test-ocsp-rsa-malicious-no-responder-mustStaple-server-does-not-staple - tags: ["ocsp", "ocsp-rsa"] - commands: - - func: "bootstrap mongo-orchestration" - vars: - ORCHESTRATION_FILE: "rsa-basic-tls-ocsp-mustStaple-disableStapling.json" - - func: run-ocsp-test - vars: - OCSP_ALGORITHM: "rsa" - OCSP_TLS_SHOULD_SUCCEED: "false" - - - name: test-ocsp-rsa-delegate-valid-cert-server-staples - tags: ["ocsp", "ocsp-rsa", "ocsp-staple"] - commands: - - func: run-ocsp-server - vars: - OCSP_ALGORITHM: "rsa" - SERVER_TYPE: valid-delegate - - func: "bootstrap mongo-orchestration" - vars: - ORCHESTRATION_FILE: "rsa-basic-tls-ocsp-mustStaple.json" - - func: run-ocsp-test - vars: - OCSP_ALGORITHM: "rsa" - OCSP_TLS_SHOULD_SUCCEED: "true" - - - name: test-ocsp-rsa-delegate-invalid-cert-server-staples - tags: ["ocsp", "ocsp-rsa", "ocsp-staple"] - commands: - - func: run-ocsp-server - vars: - OCSP_ALGORITHM: "rsa" - SERVER_TYPE: revoked-delegate - - func: "bootstrap mongo-orchestration" - vars: - ORCHESTRATION_FILE: "rsa-basic-tls-ocsp-mustStaple.json" - - func: run-ocsp-test - vars: - OCSP_ALGORITHM: "rsa" - OCSP_TLS_SHOULD_SUCCEED: "false" - - - name: test-ocsp-rsa-delegate-valid-cert-server-does-not-staple - tags: ["ocsp", "ocsp-rsa"] - commands: - - func: run-ocsp-server - vars: - OCSP_ALGORITHM: "rsa" - SERVER_TYPE: valid-delegate - - func: "bootstrap mongo-orchestration" - vars: - ORCHESTRATION_FILE: "rsa-basic-tls-ocsp-disableStapling.json" - - func: run-ocsp-test - vars: - OCSP_ALGORITHM: "rsa" - OCSP_TLS_SHOULD_SUCCEED: "true" - - - name: test-ocsp-rsa-delegate-invalid-cert-server-does-not-staple - tags: ["ocsp", "ocsp-rsa"] 
- commands: - - func: run-ocsp-server - vars: - OCSP_ALGORITHM: "rsa" - SERVER_TYPE: revoked-delegate - - func: "bootstrap mongo-orchestration" - vars: - ORCHESTRATION_FILE: "rsa-basic-tls-ocsp-disableStapling.json" - - func: run-ocsp-test - vars: - OCSP_ALGORITHM: "rsa" - OCSP_TLS_SHOULD_SUCCEED: "false" - - - name: test-ocsp-rsa-delegate-malicious-invalid-cert-mustStaple-server-does-not-staple - tags: ["ocsp", "ocsp-rsa"] - commands: - - func: run-ocsp-server - vars: - OCSP_ALGORITHM: "rsa" - SERVER_TYPE: revoked-delegate - - func: "bootstrap mongo-orchestration" - vars: - ORCHESTRATION_FILE: "rsa-basic-tls-ocsp-mustStaple-disableStapling.json" - - func: run-ocsp-test - vars: - OCSP_ALGORITHM: "rsa" - OCSP_TLS_SHOULD_SUCCEED: "false" - - - name: test-ocsp-ecdsa-valid-cert-server-staples - tags: ["ocsp", "ocsp-ecdsa", "ocsp-staple"] - commands: - - func: run-ocsp-server - vars: - OCSP_ALGORITHM: "ecdsa" - SERVER_TYPE: valid - - func: "bootstrap mongo-orchestration" - vars: - ORCHESTRATION_FILE: "ecdsa-basic-tls-ocsp-mustStaple.json" - - func: run-ocsp-test - vars: - OCSP_ALGORITHM: "ecdsa" - OCSP_TLS_SHOULD_SUCCEED: "true" - - - name: test-ocsp-ecdsa-invalid-cert-server-staples - tags: ["ocsp", "ocsp-ecdsa", "ocsp-staple"] - commands: - - func: run-ocsp-server - vars: - OCSP_ALGORITHM: "ecdsa" - SERVER_TYPE: revoked - - func: "bootstrap mongo-orchestration" - vars: - ORCHESTRATION_FILE: "ecdsa-basic-tls-ocsp-mustStaple.json" - - func: run-ocsp-test - vars: - OCSP_ALGORITHM: "ecdsa" - OCSP_TLS_SHOULD_SUCCEED: "false" - - - name: test-ocsp-ecdsa-valid-cert-server-does-not-staple - tags: ["ocsp", "ocsp-ecdsa"] - commands: - - func: run-ocsp-server - vars: - OCSP_ALGORITHM: "ecdsa" - SERVER_TYPE: valid - - func: "bootstrap mongo-orchestration" - vars: - ORCHESTRATION_FILE: "ecdsa-basic-tls-ocsp-disableStapling.json" - - func: run-ocsp-test - vars: - OCSP_ALGORITHM: "ecdsa" - OCSP_TLS_SHOULD_SUCCEED: "true" - - - name: test-ocsp-ecdsa-invalid-cert-server-does-not-staple - tags: ["ocsp", "ocsp-ecdsa"] - commands: - - func: run-ocsp-server - vars: - OCSP_ALGORITHM: "ecdsa" - SERVER_TYPE: revoked - - func: "bootstrap mongo-orchestration" - vars: - ORCHESTRATION_FILE: "ecdsa-basic-tls-ocsp-disableStapling.json" - - func: run-ocsp-test - vars: - OCSP_ALGORITHM: "ecdsa" - OCSP_TLS_SHOULD_SUCCEED: "false" - - - name: test-ocsp-ecdsa-soft-fail - tags: ["ocsp", "ocsp-ecdsa"] - commands: - - func: "bootstrap mongo-orchestration" - vars: - ORCHESTRATION_FILE: "ecdsa-basic-tls-ocsp-disableStapling.json" - - func: run-ocsp-test - vars: - OCSP_ALGORITHM: "ecdsa" - OCSP_TLS_SHOULD_SUCCEED: "true" - - - name: test-ocsp-ecdsa-malicious-invalid-cert-mustStaple-server-does-not-staple - tags: ["ocsp", "ocsp-ecdsa"] - commands: - - func: run-ocsp-server - vars: - OCSP_ALGORITHM: "ecdsa" - SERVER_TYPE: revoked - - func: "bootstrap mongo-orchestration" - vars: - ORCHESTRATION_FILE: "ecdsa-basic-tls-ocsp-mustStaple-disableStapling.json" - - func: run-ocsp-test - vars: - OCSP_ALGORITHM: "ecdsa" - OCSP_TLS_SHOULD_SUCCEED: "false" - - - name: test-ocsp-ecdsa-malicious-no-responder-mustStaple-server-does-not-staple - tags: ["ocsp", "ocsp-ecdsa"] - commands: - - func: "bootstrap mongo-orchestration" - vars: - ORCHESTRATION_FILE: "ecdsa-basic-tls-ocsp-mustStaple-disableStapling.json" - - func: run-ocsp-test - vars: - OCSP_ALGORITHM: "ecdsa" - OCSP_TLS_SHOULD_SUCCEED: "false" - - - name: test-ocsp-ecdsa-delegate-valid-cert-server-staples - tags: ["ocsp", "ocsp-ecdsa", "ocsp-staple"] - commands: - - func: run-ocsp-server - 
vars: - OCSP_ALGORITHM: "ecdsa" - SERVER_TYPE: valid-delegate - - func: "bootstrap mongo-orchestration" - vars: - ORCHESTRATION_FILE: "ecdsa-basic-tls-ocsp-mustStaple.json" - - func: run-ocsp-test - vars: - OCSP_ALGORITHM: "ecdsa" - OCSP_TLS_SHOULD_SUCCEED: "true" - - - name: test-ocsp-ecdsa-delegate-invalid-cert-server-staples - tags: ["ocsp", "ocsp-ecdsa", "ocsp-staple"] - commands: - - func: run-ocsp-server - vars: - OCSP_ALGORITHM: "ecdsa" - SERVER_TYPE: revoked-delegate - - func: "bootstrap mongo-orchestration" - vars: - ORCHESTRATION_FILE: "ecdsa-basic-tls-ocsp-mustStaple.json" - - func: run-ocsp-test - vars: - OCSP_ALGORITHM: "ecdsa" - OCSP_TLS_SHOULD_SUCCEED: "false" - - - name: test-ocsp-ecdsa-delegate-valid-cert-server-does-not-staple - tags: ["ocsp", "ocsp-ecdsa"] - commands: - - func: run-ocsp-server - vars: - OCSP_ALGORITHM: "ecdsa" - SERVER_TYPE: valid-delegate - - func: "bootstrap mongo-orchestration" - vars: - ORCHESTRATION_FILE: "ecdsa-basic-tls-ocsp-disableStapling.json" - - func: run-ocsp-test - vars: - OCSP_ALGORITHM: "ecdsa" - OCSP_TLS_SHOULD_SUCCEED: "true" - - - name: test-ocsp-ecdsa-delegate-invalid-cert-server-does-not-staple - tags: ["ocsp", "ocsp-ecdsa"] - commands: - - func: run-ocsp-server - vars: - OCSP_ALGORITHM: "ecdsa" - SERVER_TYPE: revoked-delegate - - func: "bootstrap mongo-orchestration" - vars: - ORCHESTRATION_FILE: "ecdsa-basic-tls-ocsp-disableStapling.json" - - func: run-ocsp-test - vars: - OCSP_ALGORITHM: "ecdsa" - OCSP_TLS_SHOULD_SUCCEED: "false" - - - name: test-ocsp-ecdsa-delegate-malicious-invalid-cert-mustStaple-server-does-not-staple - tags: ["ocsp", "ocsp-ecdsa"] - commands: - - func: run-ocsp-server - vars: - OCSP_ALGORITHM: "ecdsa" - SERVER_TYPE: valid-delegate - - func: "bootstrap mongo-orchestration" - vars: - ORCHESTRATION_FILE: "ecdsa-basic-tls-ocsp-mustStaple-disableStapling.json" - - func: run-ocsp-test - vars: - OCSP_ALGORITHM: "ecdsa" - OCSP_TLS_SHOULD_SUCCEED: "false" - - - name: "aws-auth-test-4.4" - commands: - - func: "bootstrap mongo-orchestration" - vars: - AUTH: "auth" - ORCHESTRATION_FILE: "auth-aws.json" - TOPOLOGY: "server" - VERSION: "4.4" - - func: "assume ec2 role" - - func: "get aws auth secrets" - - func: "run aws auth test with regular aws credentials" - - func: "run aws auth test with assume role credentials" - - func: "run aws auth test with aws credentials as environment variables" - - func: "run aws auth test with aws credentials and session token as environment variables" - - func: "run aws auth test with aws EC2 credentials" - - func: "run aws auth test with aws web identity credentials" - - func: "run aws ECS auth test" - - - name: "aws-auth-test-5.0" - commands: - - func: "bootstrap mongo-orchestration" - vars: - AUTH: "auth" - ORCHESTRATION_FILE: "auth-aws.json" - TOPOLOGY: "server" - VERSION: "5.0" - - func: "assume ec2 role" - - func: "get aws auth secrets" - - func: "run aws auth test with regular aws credentials" - - func: "run aws auth test with assume role credentials" - - func: "run aws auth test with aws credentials as environment variables" - - func: "run aws auth test with aws credentials and session token as environment variables" - - func: "run aws auth test with aws EC2 credentials" - - func: "run aws auth test with aws web identity credentials" - - func: "run aws ECS auth test" - - - name: "aws-auth-test-6.0" - commands: - - func: "bootstrap mongo-orchestration" - vars: - AUTH: "auth" - ORCHESTRATION_FILE: "auth-aws.json" - TOPOLOGY: "server" - VERSION: "6.0" - - func: "assume ec2 role" - - 
func: "get aws auth secrets" - - func: "run aws auth test with regular aws credentials" - - func: "run aws auth test with assume role credentials" - - func: "run aws auth test with aws credentials as environment variables" - - func: "run aws auth test with aws credentials and session token as environment variables" - - func: "run aws auth test with aws EC2 credentials" - - func: "run aws auth test with aws web identity credentials" - - func: "run aws ECS auth test" - - - name: "aws-auth-test-7.0" - commands: - - func: "bootstrap mongo-orchestration" - vars: - AUTH: "auth" - ORCHESTRATION_FILE: "auth-aws.json" - TOPOLOGY: "server" - VERSION: "7.0" - - func: "assume ec2 role" - - func: "get aws auth secrets" - - func: "run aws auth test with regular aws credentials" - - func: "run aws auth test with assume role credentials" - - func: "run aws auth test with aws credentials as environment variables" - - func: "run aws auth test with aws credentials and session token as environment variables" - - func: "run aws auth test with aws EC2 credentials" - - func: "run aws auth test with aws web identity credentials" - - func: "run aws ECS auth test" - - - name: "aws-auth-test-8.0" - commands: - - func: "bootstrap mongo-orchestration" - vars: - AUTH: "auth" - ORCHESTRATION_FILE: "auth-aws.json" - TOPOLOGY: "server" - VERSION: "8.0" - - func: "assume ec2 role" - - func: "get aws auth secrets" - - func: "run aws auth test with regular aws credentials" - - func: "run aws auth test with assume role credentials" - - func: "run aws auth test with aws credentials as environment variables" - - func: "run aws auth test with aws credentials and session token as environment variables" - - func: "run aws auth test with aws EC2 credentials" - - func: "run aws auth test with aws web identity credentials" - - func: "run aws ECS auth test" - - - name: "aws-auth-test-rapid" - commands: - - func: "bootstrap mongo-orchestration" - vars: - AUTH: "auth" - ORCHESTRATION_FILE: "auth-aws.json" - TOPOLOGY: "server" - VERSION: "rapid" - - func: "assume ec2 role" - - func: "get aws auth secrets" - - func: "run aws auth test with regular aws credentials" - - func: "run aws auth test with assume role credentials" - - func: "run aws auth test with aws credentials as environment variables" - - func: "run aws auth test with aws credentials and session token as environment variables" - - func: "run aws auth test with aws EC2 credentials" - - func: "run aws auth test with aws web identity credentials" - - func: "run aws ECS auth test" - - - name: "aws-auth-test-latest" - commands: - - func: "bootstrap mongo-orchestration" - vars: - AUTH: "auth" - ORCHESTRATION_FILE: "auth-aws.json" - TOPOLOGY: "server" - VERSION: "latest" - - func: "assume ec2 role" - - func: "get aws auth secrets" - - func: "run aws auth test with regular aws credentials" - - func: "run aws auth test with assume role credentials" - - func: "run aws auth test with aws credentials as environment variables" - - func: "run aws auth test with aws credentials and session token as environment variables" - - func: "run aws auth test with aws EC2 credentials" - - func: "run aws auth test with aws web identity credentials" - - func: "run aws ECS auth test" - - - name: "oidc-auth-test" - commands: - - func: "run oidc auth test with test credentials" - - - name: "oidc-auth-test-azure" - commands: - - command: subprocess.exec - type: test - params: - binary: bash - working_dir: src - env: - OIDC_ENV: azure - include_expansions_in_env: ["DRIVERS_TOOLS"] - args: - - 
${PROJECT_DIRECTORY}/.evergreen/run-mongodb-oidc-remote-test.sh - - - name: "oidc-auth-test-gcp" - commands: - - command: subprocess.exec - type: test - params: - binary: bash - working_dir: src - env: - OIDC_ENV: gcp - include_expansions_in_env: ["DRIVERS_TOOLS"] - args: - - ${PROJECT_DIRECTORY}/.evergreen/run-mongodb-oidc-remote-test.sh - - - name: "oidc-auth-test-k8s" - commands: - - func: "run oidc k8s auth test" - vars: - K8S_VARIANT: eks - - func: "run oidc k8s auth test" - vars: - K8S_VARIANT: gke - - func: "run oidc k8s auth test" - vars: - K8S_VARIANT: aks -# }}} - - name: "coverage-report" - tags: ["coverage"] - depends_on: - # BUILD-3165: We can't use "*" (all tasks) and specify "variant". - # Instead list out all coverage tasks using tags. - - name: ".standalone" - variant: ".coverage_tag" - # Run the coverage task even if some tasks fail. - status: "*" - # Run the coverage task even if some tasks are not scheduled in a patch build. - patch_optional: true - - name: ".replica_set" - variant: ".coverage_tag" - status: "*" - patch_optional: true - - name: ".sharded_cluster" - variant: ".coverage_tag" - status: "*" - patch_optional: true - commands: - - func: "download and merge coverage" - - - name: "testgcpkms-task" - commands: - - command: subprocess.exec - type: setup - params: - working_dir: "src" - binary: bash - include_expansions_in_env: ["DRIVERS_TOOLS"] - args: - - .evergreen/run-gcpkms-test.sh - - - name: "testgcpkms-fail-task" - # testgcpkms-fail-task runs in a non-GCE environment. - # It is expected to fail to obtain GCE credentials. - commands: - - func: "bootstrap mongo-orchestration" - vars: - VERSION: "latest" - TOPOLOGY: "server" - - command: subprocess.exec - type: test - params: - include_expansions_in_env: ["PYTHON_BINARY"] - working_dir: "src" - binary: "bash" - args: - - .evergreen/scripts/run-gcpkms-fail-test.sh - - - name: testazurekms-task - commands: + - name: resync_specs + commands: - command: subprocess.exec params: binary: bash - working_dir: src - include_expansions_in_env: ["DRIVERS_TOOLS"] + include_expansions_in_env: [AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, AWS_SESSION_TOKEN] args: - - .evergreen/run-azurekms-test.sh - - - name: testazurekms-fail-task - commands: - - func: fetch source - - func: setup system - - func: "bootstrap mongo-orchestration" - vars: - VERSION: "latest" - TOPOLOGY: "server" - - command: subprocess.exec - type: test - params: - binary: bash + - .evergreen/scripts/resync-all-specs.sh working_dir: src - include_expansions_in_env: ["DRIVERS_TOOLS"] - args: - - .evergreen/run-azurekms-fail-test.sh - - - name: "perf-6.0-standalone" - tags: ["perf"] - commands: - - func: "bootstrap mongo-orchestration" - vars: - VERSION: "v6.0-perf" - TOPOLOGY: "server" - - func: "run perf tests" - - func: "attach benchmark test results" - - func: "send dashboard data" - - - name: "perf-6.0-standalone-ssl" - tags: ["perf"] - commands: - - func: "bootstrap mongo-orchestration" - vars: - VERSION: "v6.0-perf" - TOPOLOGY: "server" - SSL: "ssl" - - func: "run perf tests" - - func: "attach benchmark test results" - - func: "send dashboard data" - - - name: "perf-8.0-standalone" - tags: ["perf"] - commands: - - func: "bootstrap mongo-orchestration" - vars: - VERSION: "8.0" - TOPOLOGY: "server" - - func: "run perf tests" - - func: "attach benchmark test results" - - func: "send dashboard data" - - - name: "check-import-time" - tags: ["pr"] - commands: - - command: subprocess.exec - type: test - params: - binary: bash - working_dir: src - 
include_expansions_in_env: ["PYTHON_BINARY"] - args: - - .evergreen/scripts/check-import-time.sh - - ${revision} - - ${github.amrom.workers.devmit} - - name: "backport-pr" - allowed_requesters: ["commit"] - commands: - - command: subprocess.exec - type: test - params: - binary: bash - args: - - ${DRIVERS_TOOLS}/.evergreen/github_app/backport-pr.sh - - mongodb - - mongo-python-driver - - ${github.amrom.workers.devmit} buildvariants: -- name: "no-server" - display_name: "No server" - run_on: - - rhel84-small - tasks: - - name: "no-server" - -- name: "Coverage Report" - display_name: "Coverage Report" - run_on: - - rhel84-small - tasks: - - name: "coverage-report" - -- name: testkms-variant - display_name: "KMS" - run_on: - - debian11-small - tasks: - - name: testgcpkms_task_group - batchtime: 20160 # Use a batchtime of 14 days as suggested by the CSFLE test README - - testgcpkms-fail-task - - name: testazurekms_task_group - batchtime: 20160 # Use a batchtime of 14 days as suggested by the CSFLE test README - - testazurekms-fail-task - -- name: rhel8-test-lambda - display_name: FaaS Lambda - run_on: rhel87-small - tasks: - - name: test_aws_lambda_task_group - -- name: rhel8-import-time - display_name: Import Time - run_on: rhel87-small - tasks: - - name: "check-import-time" - -- name: backport-pr - display_name: "Backport PR" - run_on: - - rhel8.7-small - tasks: - - name: "backport-pr" - -- name: "perf-tests" - display_name: "Performance Benchmarks" - batchtime: 10080 # 7 days - run_on: rhel90-dbx-perf-large - tasks: - - name: "perf-6.0-standalone" - - name: "perf-6.0-standalone-ssl" - - name: "perf-8.0-standalone" - - # Platform notes - # i386 builds of OpenSSL or Cyrus SASL are not available - # Debian 8.1 only supports MongoDB 3.4+ - # SUSE12 s390x is only supported by MongoDB 3.4+ - # No enterprise build for Archlinux, SSL not available - # RHEL 7.6 and RHEL 8.4 only supports 3.6+. 
- # RHEL 7 only supports 2.6+ - # RHEL 7.1 ppc64le is only supported by MongoDB 3.2+ - # RHEL 7.2 s390x is only supported by MongoDB 3.4+ - # Solaris MongoDB SSL builds are not available - # Darwin MongoDB SSL builds are not available for 2.6 - # SUSE12 x86_64 is only supported by MongoDB 3.2+ - # vim: set et sw=2 ts=2 : + - name: resync_specs + display_name: "Resync Specs" + run_on: rhel80-small + cron: '0 16 * * MON' + patchable: true + tasks: + - name: resync_specs diff --git a/.evergreen/generated_configs/functions.yml b/.evergreen/generated_configs/functions.yml new file mode 100644 index 0000000000..ce95648849 --- /dev/null +++ b/.evergreen/generated_configs/functions.yml @@ -0,0 +1,309 @@ +functions: + # Assume ec2 role + assume ec2 role: + - command: ec2.assume_role + params: + role_arn: ${aws_test_secrets_role} + duration_seconds: 3600 + + # Attach benchmark test results + attach benchmark test results: + - command: attach.results + params: + file_location: src/report.json + + # Cleanup + cleanup: + - command: subprocess.exec + params: + binary: bash + args: + - .evergreen/scripts/cleanup.sh + working_dir: src + type: test + + # Download and merge coverage + download and merge coverage: + - command: ec2.assume_role + params: + role_arn: ${assume_role_arn} + type: setup + - command: subprocess.exec + params: + binary: bash + args: + - .evergreen/scripts/download-and-merge-coverage.sh + - ${bucket_name} + - ${revision} + - ${version_id} + working_dir: src + silent: true + include_expansions_in_env: + - AWS_ACCESS_KEY_ID + - AWS_SECRET_ACCESS_KEY + - AWS_SESSION_TOKEN + type: test + - command: subprocess.exec + params: + binary: bash + args: + - .evergreen/combine-coverage.sh + working_dir: src + type: test + - command: subprocess.exec + params: + binary: bash + args: + - .evergreen/scripts/upload-coverage-report.sh + - ${bucket_name} + - ${revision} + - ${version_id} + working_dir: src + silent: true + include_expansions_in_env: + - AWS_ACCESS_KEY_ID + - AWS_SECRET_ACCESS_KEY + - AWS_SESSION_TOKEN + type: test + - command: s3.put + params: + remote_file: coverage/${revision}/${version_id}/htmlcov/index.html + aws_key: ${AWS_ACCESS_KEY_ID} + aws_secret: ${AWS_SECRET_ACCESS_KEY} + aws_session_token: ${AWS_SESSION_TOKEN} + bucket: ${bucket_name} + local_file: src/htmlcov/index.html + permissions: public-read + content_type: text/html + display_name: Coverage Report HTML + optional: "true" + type: setup + + # Fetch source + fetch source: + - command: git.get_project + params: + directory: src + + # Run server + run server: + - command: subprocess.exec + params: + binary: bash + args: + - .evergreen/just.sh + - run-server + - ${TEST_NAME} + working_dir: src + include_expansions_in_env: + - VERSION + - TOPOLOGY + - AUTH + - SSL + - ORCHESTRATION_FILE + - PYTHON_BINARY + - PYTHON_VERSION + - STORAGE_ENGINE + - REQUIRE_API_VERSION + - DRIVERS_TOOLS + - TEST_CRYPT_SHARED + - AUTH_AWS + - LOAD_BALANCER + - LOCAL_ATLAS + - NO_EXT + type: test + - command: expansions.update + params: + file: ${DRIVERS_TOOLS}/mo-expansion.yml + + # Run tests + run tests: + - command: subprocess.exec + params: + binary: bash + args: + - .evergreen/just.sh + - setup-tests + - ${TEST_NAME} + - ${SUB_TEST_NAME} + working_dir: src + include_expansions_in_env: + - AUTH + - SSL + - AWS_ACCESS_KEY_ID + - AWS_SECRET_ACCESS_KEY + - AWS_SESSION_TOKEN + - COVERAGE + - PYTHON_BINARY + - LIBMONGOCRYPT_URL + - MONGODB_URI + - PYTHON_VERSION + - DISABLE_TEST_COMMANDS + - GREEN_FRAMEWORK + - NO_EXT + - COMPRESSORS + - 
MONGODB_API_VERSION + - REQUIRE_API_VERSION + - DEBUG_LOG + - DISABLE_FLAKY + - ORCHESTRATION_FILE + - OCSP_SERVER_TYPE + - VERSION + - IS_WIN32 + - REQUIRE_FIPS + - TEST_MIN_DEPS + type: test + - command: subprocess.exec + params: + binary: bash + args: + - .evergreen/just.sh + - run-tests + working_dir: src + type: test + + # Send dashboard data + send dashboard data: + - command: subprocess.exec + params: + binary: bash + args: + - .evergreen/scripts/perf-submission-setup.sh + working_dir: src + include_expansions_in_env: + - requester + - revision_order_id + - project_id + - version_id + - build_variant + - parsed_order_id + - task_name + - task_id + - execution + - is_mainline + type: test + - command: expansions.update + params: + file: src/expansion.yml + - command: subprocess.exec + params: + binary: bash + args: + - .evergreen/scripts/perf-submission.sh + working_dir: src + include_expansions_in_env: + - requester + - revision_order_id + - project_id + - version_id + - build_variant + - parsed_order_id + - task_name + - task_id + - execution + - is_mainline + type: test + + # Setup system + setup system: + - command: subprocess.exec + params: + binary: bash + args: + - .evergreen/scripts/setup-system.sh + working_dir: src + include_expansions_in_env: + - is_patch + - project + - version_id + type: test + - command: expansions.update + params: + file: src/expansion.yml + + # Teardown system + teardown system: + - command: subprocess.exec + params: + binary: bash + args: + - .evergreen/just.sh + - teardown-tests + working_dir: src + type: test + - command: subprocess.exec + params: + binary: bash + args: + - ${DRIVERS_TOOLS}/.evergreen/teardown.sh + working_dir: src + type: test + + # Upload coverage + upload coverage: + - command: ec2.assume_role + params: + role_arn: ${assume_role_arn} + type: setup + - command: s3.put + params: + remote_file: coverage/${revision}/${version_id}/coverage/coverage.${build_variant}.${task_name} + aws_key: ${AWS_ACCESS_KEY_ID} + aws_secret: ${AWS_SECRET_ACCESS_KEY} + aws_session_token: ${AWS_SESSION_TOKEN} + bucket: ${bucket_name} + local_file: src/.coverage + permissions: public-read + content_type: text/html + display_name: Raw Coverage Report + optional: "true" + type: setup + + # Upload mo artifacts + upload mo artifacts: + - command: ec2.assume_role + params: + role_arn: ${assume_role_arn} + type: setup + - command: archive.targz_pack + params: + target: mongo-coredumps.tgz + source_dir: ./ + include: + - ./**.core + - ./**.mdmp + - command: s3.put + params: + remote_file: ${build_variant}/${revision}/${version_id}/${build_id}/coredumps/${task_id}-${execution}-mongodb-coredumps.tar.gz + aws_key: ${AWS_ACCESS_KEY_ID} + aws_secret: ${AWS_SECRET_ACCESS_KEY} + aws_session_token: ${AWS_SESSION_TOKEN} + bucket: ${bucket_name} + local_file: mongo-coredumps.tgz + permissions: public-read + content_type: ${content_type|application/x-gzip} + display_name: Core Dumps - Execution + optional: "true" + type: setup + - command: s3.put + params: + remote_file: ${build_variant}/${revision}/${version_id}/${build_id}/logs/${task_id}-${execution}-drivers-tools-logs.tar.gz + aws_key: ${AWS_ACCESS_KEY_ID} + aws_secret: ${AWS_SECRET_ACCESS_KEY} + aws_session_token: ${AWS_SESSION_TOKEN} + bucket: ${bucket_name} + local_file: ${DRIVERS_TOOLS}/.evergreen/test_logs.tar.gz + permissions: public-read + content_type: ${content_type|application/x-gzip} + display_name: drivers-tools-logs.tar.gz + optional: "true" + type: setup + + # Upload test results + upload test results: + 
- command: attach.results + params: + file_location: ${DRIVERS_TOOLS}/results.json + - command: attach.xunit_results + params: + file: src/xunit-results/TEST-*.xml diff --git a/.evergreen/generated_configs/tasks.yml b/.evergreen/generated_configs/tasks.yml index c666c6901a..855cbefef8 100644 --- a/.evergreen/generated_configs/tasks.yml +++ b/.evergreen/generated_configs/tasks.yml @@ -1,4369 +1,5116 @@ tasks: - # Load balancer tests - - name: test-load-balancer-auth-ssl + # Aws lambda tests + - name: test-aws-lambda-deployed commands: - - func: bootstrap mongo-orchestration - vars: - TOPOLOGY: sharded_cluster - AUTH: auth - SSL: ssl - LOAD_BALANCER: "true" - - func: run load-balancer + - func: assume ec2 role - func: run tests vars: - AUTH: auth - SSL: ssl - test_loadbalancer: "true" - tags: [load-balancer, auth, ssl] - - name: test-load-balancer-noauth-ssl + TEST_NAME: aws_lambda + tags: [aws_lambda] + + # Aws tests + - name: test-auth-aws-4.4-regular-python3.10 commands: - - func: bootstrap mongo-orchestration + - func: run server vars: - TOPOLOGY: sharded_cluster - AUTH: noauth - SSL: ssl - LOAD_BALANCER: "true" - - func: run load-balancer + AUTH_AWS: "1" + VERSION: "4.4" + - func: assume ec2 role - func: run tests vars: - AUTH: noauth - SSL: ssl - test_loadbalancer: "true" - tags: [load-balancer, noauth, ssl] - - name: test-load-balancer-noauth-nossl + TEST_NAME: auth_aws + SUB_TEST_NAME: regular + PYTHON_VERSION: "3.10" + tags: [auth-aws, auth-aws-regular] + - name: test-auth-aws-5.0-assume-role-python3.11 commands: - - func: bootstrap mongo-orchestration + - func: run server vars: - TOPOLOGY: sharded_cluster - AUTH: noauth - SSL: nossl - LOAD_BALANCER: "true" - - func: run load-balancer + AUTH_AWS: "1" + VERSION: "5.0" + - func: assume ec2 role - func: run tests vars: - AUTH: noauth - SSL: nossl - test_loadbalancer: "true" - tags: [load-balancer, noauth, nossl] - - # Server tests - - name: test-4.0-standalone-auth-ssl-sync + TEST_NAME: auth_aws + SUB_TEST_NAME: assume-role + PYTHON_VERSION: "3.11" + tags: [auth-aws, auth-aws-assume-role] + - name: test-auth-aws-6.0-ec2-python3.12 commands: - - func: bootstrap mongo-orchestration + - func: run server vars: - VERSION: "4.0" - TOPOLOGY: server - AUTH: auth - SSL: ssl + AUTH_AWS: "1" + VERSION: "6.0" + - func: assume ec2 role - func: run tests vars: - AUTH: auth - SSL: ssl - SYNC: sync - TEST_SUITES: default - tags: - - "4.0" - - standalone - - auth - - ssl - - sync - - name: test-4.0-standalone-auth-ssl-async + TEST_NAME: auth_aws + SUB_TEST_NAME: ec2 + PYTHON_VERSION: "3.12" + tags: [auth-aws, auth-aws-ec2] + - name: test-auth-aws-7.0-env-creds-python3.13 commands: - - func: bootstrap mongo-orchestration + - func: run server vars: - VERSION: "4.0" - TOPOLOGY: server - AUTH: auth - SSL: ssl + AUTH_AWS: "1" + VERSION: "7.0" + - func: assume ec2 role - func: run tests vars: - AUTH: auth - SSL: ssl - SYNC: async - TEST_SUITES: default_async - tags: - - "4.0" - - standalone - - auth - - ssl - - async - - name: test-4.0-standalone-auth-ssl-sync_async + TEST_NAME: auth_aws + SUB_TEST_NAME: env-creds + PYTHON_VERSION: "3.13" + tags: [auth-aws, auth-aws-env-creds] + - name: test-auth-aws-8.0-session-creds-python3.14t commands: - - func: bootstrap mongo-orchestration + - func: run server vars: - VERSION: "4.0" - TOPOLOGY: server - AUTH: auth - SSL: ssl + AUTH_AWS: "1" + VERSION: "8.0" + - func: assume ec2 role - func: run tests vars: - AUTH: auth - SSL: ssl - SYNC: sync_async - TEST_SUITES: "" - tags: - - "4.0" - - standalone - - auth - - ssl - - 
sync_async - - name: test-4.0-standalone-noauth-ssl-sync + TEST_NAME: auth_aws + SUB_TEST_NAME: session-creds + PYTHON_VERSION: 3.14t + tags: [auth-aws, auth-aws-session-creds, free-threaded] + - name: test-auth-aws-rapid-web-identity-python3.14 commands: - - func: bootstrap mongo-orchestration + - func: run server vars: - VERSION: "4.0" - TOPOLOGY: server - AUTH: noauth - SSL: ssl + AUTH_AWS: "1" + VERSION: rapid + - func: assume ec2 role - func: run tests vars: - AUTH: noauth - SSL: ssl - SYNC: sync - TEST_SUITES: default - tags: - - "4.0" - - standalone - - noauth - - ssl - - sync - - name: test-4.0-standalone-noauth-ssl-async + TEST_NAME: auth_aws + SUB_TEST_NAME: web-identity + PYTHON_VERSION: "3.14" + tags: [auth-aws, auth-aws-web-identity] + - name: test-auth-aws-rapid-web-identity-session-name-python3.14 commands: - - func: bootstrap mongo-orchestration + - func: run server vars: - VERSION: "4.0" - TOPOLOGY: server - AUTH: noauth - SSL: ssl + AUTH_AWS: "1" + VERSION: rapid + - func: assume ec2 role - func: run tests vars: - AUTH: noauth - SSL: ssl - SYNC: async - TEST_SUITES: default_async - tags: - - "4.0" - - standalone - - noauth - - ssl - - async - - name: test-4.0-standalone-noauth-ssl-sync_async + TEST_NAME: auth_aws + SUB_TEST_NAME: web-identity + AWS_ROLE_SESSION_NAME: test + PYTHON_VERSION: "3.14" + tags: [auth-aws, auth-aws-web-identity] + - name: test-auth-aws-latest-ecs-python3.10 commands: - - func: bootstrap mongo-orchestration + - func: run server vars: - VERSION: "4.0" - TOPOLOGY: server - AUTH: noauth - SSL: ssl + AUTH_AWS: "1" + VERSION: latest + - func: assume ec2 role - func: run tests vars: - AUTH: noauth - SSL: ssl - SYNC: sync_async - TEST_SUITES: "" - tags: - - "4.0" - - standalone - - noauth - - ssl - - sync_async - - name: test-4.0-standalone-noauth-nossl-sync + TEST_NAME: auth_aws + SUB_TEST_NAME: ecs + PYTHON_VERSION: "3.10" + tags: [auth-aws, auth-aws-ecs] + + # Backport pr tests + - name: backport-pr + commands: + - func: assume ec2 role + - command: subprocess.exec + params: + binary: bash + args: + - ${DRIVERS_TOOLS}/.evergreen/github_app/backport-pr.sh + - mongodb + - mongo-python-driver + - ${github_commit} + working_dir: src + include_expansions_in_env: + - AWS_ACCESS_KEY_ID + - AWS_SECRET_ACCESS_KEY + - AWS_SESSION_TOKEN + type: test + + # Coverage report tests + - name: coverage-report + commands: + - func: download and merge coverage + depends_on: [{ name: .server-version, variant: .coverage_tag, status: "*", patch_optional: true }] + tags: [coverage, pr] + + # Getdata tests + - name: getdata + commands: + - command: subprocess.exec + params: + binary: bash + args: + - .evergreen/scripts/run-getdata.sh + working_dir: src + type: test + + # Import time tests + - name: check-import-time + commands: + - command: subprocess.exec + params: + binary: bash + args: + - .evergreen/scripts/check-import-time.sh + - ${revision} + - ${github_commit} + working_dir: src + type: test + tags: [pr] + + # Kms tests + - name: test-gcpkms commands: - - func: bootstrap mongo-orchestration + - func: run tests vars: - VERSION: "4.0" - TOPOLOGY: server - AUTH: noauth - SSL: nossl + TEST_NAME: kms + SUB_TEST_NAME: gcp + tags: [] + - name: test-gcpkms-fail + commands: + - func: run server - func: run tests vars: - AUTH: noauth - SSL: nossl - SYNC: sync - TEST_SUITES: default - tags: - - "4.0" - - standalone - - noauth - - nossl - - sync - - name: test-4.0-standalone-noauth-nossl-async + TEST_NAME: kms + SUB_TEST_NAME: gcp-fail + tags: [pr] + -
name: test-azurekms commands: - - func: bootstrap mongo-orchestration + - func: run tests vars: - VERSION: "4.0" - TOPOLOGY: server - AUTH: noauth - SSL: nossl + TEST_NAME: kms + SUB_TEST_NAME: azure + tags: [] + - name: test-azurekms-fail + commands: + - func: run server - func: run tests vars: - AUTH: noauth - SSL: nossl - SYNC: async - TEST_SUITES: default_async - tags: - - "4.0" - - standalone - - noauth - - nossl - - async - - name: test-4.0-standalone-noauth-nossl-sync_async + TEST_NAME: kms + SUB_TEST_NAME: azure-fail + tags: [pr] + + # Min deps tests + - name: test-min-deps-python3.10-sync-noauth-nossl-standalone commands: - - func: bootstrap mongo-orchestration + - func: run server vars: - VERSION: "4.0" - TOPOLOGY: server AUTH: noauth SSL: nossl + TOPOLOGY: standalone - func: run tests vars: AUTH: noauth SSL: nossl - SYNC: sync_async - TEST_SUITES: "" - tags: - - "4.0" - - standalone - - noauth - - nossl - - sync_async - - name: test-4.4-standalone-auth-ssl-sync + TOPOLOGY: standalone + TEST_MIN_DEPS: "1" + tags: [test-min-deps, standalone-noauth-nossl] + - name: test-min-deps-python3.10-sync-noauth-ssl-replica-set commands: - - func: bootstrap mongo-orchestration + - func: run server vars: - VERSION: "4.4" - TOPOLOGY: server - AUTH: auth + AUTH: noauth SSL: ssl + TOPOLOGY: replica_set - func: run tests vars: - AUTH: auth + AUTH: noauth SSL: ssl - SYNC: sync - TEST_SUITES: default - tags: - - "4.4" - - standalone - - auth - - ssl - - sync - - name: test-4.4-standalone-auth-ssl-async + TOPOLOGY: replica_set + TEST_MIN_DEPS: "1" + tags: [test-min-deps, replica_set-noauth-ssl] + - name: test-min-deps-python3.10-sync-auth-ssl-sharded-cluster commands: - - func: bootstrap mongo-orchestration + - func: run server vars: - VERSION: "4.4" - TOPOLOGY: server AUTH: auth SSL: ssl + TOPOLOGY: sharded_cluster - func: run tests vars: AUTH: auth SSL: ssl - SYNC: async - TEST_SUITES: default_async - tags: - - "4.4" - - standalone - - auth - - ssl - - async - - name: test-4.4-standalone-auth-ssl-sync_async + TOPOLOGY: sharded_cluster + TEST_MIN_DEPS: "1" + tags: [test-min-deps, sharded_cluster-auth-ssl] + + # Mod wsgi tests + - name: mod-wsgi-replica-set-python3.10 commands: - - func: bootstrap mongo-orchestration + - func: run server vars: - VERSION: "4.4" - TOPOLOGY: server - AUTH: auth - SSL: ssl + TOPOLOGY: replica_set + PYTHON_VERSION: "3.10" - func: run tests vars: - AUTH: auth - SSL: ssl - SYNC: sync_async - TEST_SUITES: "" - tags: - - "4.4" - - standalone - - auth - - ssl - - sync_async - - name: test-4.4-standalone-noauth-ssl-sync + TEST_NAME: mod_wsgi + SUB_TEST_NAME: standalone + PYTHON_VERSION: "3.10" + tags: [mod_wsgi, pr] + - name: mod-wsgi-embedded-mode-replica-set-python3.11 commands: - - func: bootstrap mongo-orchestration + - func: run server vars: - VERSION: "4.4" - TOPOLOGY: server - AUTH: noauth - SSL: ssl + TOPOLOGY: replica_set + PYTHON_VERSION: "3.11" - func: run tests vars: - AUTH: noauth - SSL: ssl - SYNC: sync - TEST_SUITES: default - tags: - - "4.4" - - standalone - - noauth - - ssl - - sync - - name: test-4.4-standalone-noauth-ssl-async + TEST_NAME: mod_wsgi + SUB_TEST_NAME: embedded + PYTHON_VERSION: "3.11" + tags: [mod_wsgi, pr] + - name: mod-wsgi-replica-set-python3.12 commands: - - func: bootstrap mongo-orchestration + - func: run server vars: - VERSION: "4.4" - TOPOLOGY: server - AUTH: noauth - SSL: ssl + TOPOLOGY: replica_set + PYTHON_VERSION: "3.12" - func: run tests vars: - AUTH: noauth - SSL: ssl - SYNC: async - TEST_SUITES: default_async - tags: - - "4.4" - - 
standalone - - noauth - - ssl - - async - - name: test-4.4-standalone-noauth-ssl-sync_async + TEST_NAME: mod_wsgi + SUB_TEST_NAME: standalone + PYTHON_VERSION: "3.12" + tags: [mod_wsgi, pr] + - name: mod-wsgi-embedded-mode-replica-set-python3.13 commands: - - func: bootstrap mongo-orchestration + - func: run server vars: - VERSION: "4.4" - TOPOLOGY: server - AUTH: noauth - SSL: ssl + TOPOLOGY: replica_set + PYTHON_VERSION: "3.13" - func: run tests vars: - AUTH: noauth - SSL: ssl - SYNC: sync_async - TEST_SUITES: "" - tags: - - "4.4" - - standalone - - noauth - - ssl - - sync_async - - name: test-4.4-standalone-noauth-nossl-sync + TEST_NAME: mod_wsgi + SUB_TEST_NAME: embedded + PYTHON_VERSION: "3.13" + tags: [mod_wsgi, pr] + - name: mod-wsgi-embedded-mode-replica-set-python3.14 commands: - - func: bootstrap mongo-orchestration + - func: run server vars: - VERSION: "4.4" - TOPOLOGY: server - AUTH: noauth - SSL: nossl + TOPOLOGY: replica_set + PYTHON_VERSION: "3.14" - func: run tests vars: - AUTH: noauth - SSL: nossl - SYNC: sync - TEST_SUITES: default - tags: - - "4.4" - - standalone - - noauth - - nossl - - sync - - name: test-4.4-standalone-noauth-nossl-async + TEST_NAME: mod_wsgi + SUB_TEST_NAME: embedded + PYTHON_VERSION: "3.14" + tags: [mod_wsgi, pr] + + # No orchestration tests + - name: test-no-orchestration-python3.10 commands: - - func: bootstrap mongo-orchestration + - func: assume ec2 role + - func: run tests vars: - VERSION: "4.4" - TOPOLOGY: server - AUTH: noauth - SSL: nossl + PYTHON_VERSION: "3.10" + tags: [test-no-orchestration, python-3.10] + - name: test-no-orchestration-python3.14 + commands: + - func: assume ec2 role - func: run tests vars: - AUTH: noauth - SSL: nossl - SYNC: async - TEST_SUITES: default_async - tags: - - "4.4" - - standalone - - noauth - - nossl - - async - - name: test-4.4-standalone-noauth-nossl-sync_async + PYTHON_VERSION: "3.14" + tags: [test-no-orchestration, python-3.14] + - name: test-no-orchestration-pypy3.10 commands: - - func: bootstrap mongo-orchestration + - func: assume ec2 role + - func: run tests + vars: + PYTHON_VERSION: pypy3.10 + tags: [test-no-orchestration, python-pypy3.10] + + # No toolchain tests + - name: test-no-toolchain-sync-noauth-nossl-standalone + commands: + - func: run server vars: - VERSION: "4.4" - TOPOLOGY: server AUTH: noauth SSL: nossl + TOPOLOGY: standalone - func: run tests vars: AUTH: noauth SSL: nossl - SYNC: sync_async - TEST_SUITES: "" - tags: - - "4.4" - - standalone - - noauth - - nossl - - sync_async - - name: test-5.0-standalone-auth-ssl-sync + TOPOLOGY: standalone + TEST_NAME: default_sync + tags: [test-no-toolchain, standalone-noauth-nossl] + - name: test-no-toolchain-async-noauth-ssl-replica-set commands: - - func: bootstrap mongo-orchestration + - func: run server vars: - VERSION: "5.0" - TOPOLOGY: server - AUTH: auth + AUTH: noauth SSL: ssl + TOPOLOGY: replica_set - func: run tests vars: - AUTH: auth + AUTH: noauth SSL: ssl - SYNC: sync - TEST_SUITES: default - tags: - - "5.0" - - standalone - - auth - - ssl - - sync - - name: test-5.0-standalone-auth-ssl-async + TOPOLOGY: replica_set + TEST_NAME: default_async + tags: [test-no-toolchain, replica_set-noauth-ssl] + - name: test-no-toolchain-sync-auth-ssl-sharded-cluster commands: - - func: bootstrap mongo-orchestration + - func: run server vars: - VERSION: "5.0" - TOPOLOGY: server AUTH: auth SSL: ssl + TOPOLOGY: sharded_cluster - func: run tests vars: AUTH: auth SSL: ssl - SYNC: async - TEST_SUITES: default_async - tags: - - "5.0" - - standalone - - auth 
- - ssl - - async - - name: test-5.0-standalone-auth-ssl-sync_async + TOPOLOGY: sharded_cluster + TEST_NAME: default_sync + tags: [test-no-toolchain, sharded_cluster-auth-ssl] + + # Ocsp tests + - name: test-ocsp-ecdsa-valid-cert-server-does-not-staple-v4.4-python3.10 commands: - - func: bootstrap mongo-orchestration - vars: - VERSION: "5.0" - TOPOLOGY: server - AUTH: auth - SSL: ssl - func: run tests vars: - AUTH: auth - SSL: ssl - SYNC: sync_async - TEST_SUITES: "" - tags: - - "5.0" - - standalone - - auth - - ssl - - sync_async - - name: test-5.0-standalone-noauth-ssl-sync + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: valid + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "4.4" + tags: [ocsp, ocsp-ecdsa, "4.4"] + - name: test-ocsp-ecdsa-valid-cert-server-does-not-staple-v5.0-python3.10 commands: - - func: bootstrap mongo-orchestration + - func: run tests vars: + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: valid + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" VERSION: "5.0" - TOPOLOGY: server - AUTH: noauth - SSL: ssl + tags: [ocsp, ocsp-ecdsa, "5.0"] + - name: test-ocsp-ecdsa-valid-cert-server-does-not-staple-v6.0-python3.10 + commands: - func: run tests vars: - AUTH: noauth - SSL: ssl - SYNC: sync - TEST_SUITES: default - tags: - - "5.0" - - standalone - - noauth - - ssl - - sync - - name: test-5.0-standalone-noauth-ssl-async + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: valid + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "6.0" + tags: [ocsp, ocsp-ecdsa, "6.0"] + - name: test-ocsp-ecdsa-valid-cert-server-does-not-staple-v7.0-python3.10 commands: - - func: bootstrap mongo-orchestration + - func: run tests vars: - VERSION: "5.0" - TOPOLOGY: server - AUTH: noauth - SSL: ssl + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: valid + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "7.0" + tags: [ocsp, ocsp-ecdsa, "7.0"] + - name: test-ocsp-ecdsa-valid-cert-server-does-not-staple-v8.0-python3.10 + commands: - func: run tests vars: - AUTH: noauth - SSL: ssl - SYNC: async - TEST_SUITES: default_async - tags: - - "5.0" - - standalone - - noauth - - ssl - - async - - name: test-5.0-standalone-noauth-ssl-sync_async + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: valid + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "8.0" + tags: [ocsp, ocsp-ecdsa, "8.0"] + - name: test-ocsp-ecdsa-valid-cert-server-does-not-staple-rapid-python3.10 commands: - - func: bootstrap mongo-orchestration + - func: run tests vars: - VERSION: "5.0" - TOPOLOGY: server - AUTH: noauth - SSL: ssl + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: valid + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: rapid + tags: [ocsp, ocsp-ecdsa, rapid] + - name: test-ocsp-ecdsa-valid-cert-server-does-not-staple-latest-python3.14 + commands: - func: run tests vars: - AUTH: noauth - SSL: ssl - SYNC: sync_async - TEST_SUITES: "" - tags: - - "5.0" - - standalone - - noauth - - ssl - - sync_async - - name: test-5.0-standalone-noauth-nossl-sync + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: valid + TEST_NAME: ocsp + PYTHON_VERSION: "3.14" + VERSION: latest + tags: [ocsp, ocsp-ecdsa, latest] + - name: test-ocsp-ecdsa-invalid-cert-server-does-not-staple-v4.4-python3.10 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-disableStapling.json + 
OCSP_SERVER_TYPE: revoked + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "4.4" + tags: [ocsp, ocsp-ecdsa, "4.4"] + - name: test-ocsp-ecdsa-invalid-cert-server-does-not-staple-v5.0-python3.10 commands: - - func: bootstrap mongo-orchestration + - func: run tests vars: + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: revoked + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" VERSION: "5.0" - TOPOLOGY: server - AUTH: noauth - SSL: nossl + tags: [ocsp, ocsp-ecdsa, "5.0"] + - name: test-ocsp-ecdsa-invalid-cert-server-does-not-staple-v6.0-python3.10 + commands: - func: run tests vars: - AUTH: noauth - SSL: nossl - SYNC: sync - TEST_SUITES: default - tags: - - "5.0" - - standalone - - noauth - - nossl - - sync - - name: test-5.0-standalone-noauth-nossl-async + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: revoked + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "6.0" + tags: [ocsp, ocsp-ecdsa, "6.0"] + - name: test-ocsp-ecdsa-invalid-cert-server-does-not-staple-v7.0-python3.10 commands: - - func: bootstrap mongo-orchestration + - func: run tests vars: - VERSION: "5.0" - TOPOLOGY: server - AUTH: noauth - SSL: nossl + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: revoked + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "7.0" + tags: [ocsp, ocsp-ecdsa, "7.0"] + - name: test-ocsp-ecdsa-invalid-cert-server-does-not-staple-v8.0-python3.10 + commands: - func: run tests vars: - AUTH: noauth - SSL: nossl - SYNC: async - TEST_SUITES: default_async - tags: - - "5.0" - - standalone - - noauth - - nossl - - async - - name: test-5.0-standalone-noauth-nossl-sync_async + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: revoked + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "8.0" + tags: [ocsp, ocsp-ecdsa, "8.0"] + - name: test-ocsp-ecdsa-invalid-cert-server-does-not-staple-rapid-python3.10 commands: - - func: bootstrap mongo-orchestration + - func: run tests vars: - VERSION: "5.0" - TOPOLOGY: server - AUTH: noauth - SSL: nossl + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: revoked + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: rapid + tags: [ocsp, ocsp-ecdsa, rapid] + - name: test-ocsp-ecdsa-invalid-cert-server-does-not-staple-latest-python3.14 + commands: - func: run tests vars: - AUTH: noauth - SSL: nossl - SYNC: sync_async - TEST_SUITES: "" - tags: - - "5.0" - - standalone - - noauth - - nossl - - sync_async - - name: test-6.0-standalone-auth-ssl-sync + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: revoked + TEST_NAME: ocsp + PYTHON_VERSION: "3.14" + VERSION: latest + tags: [ocsp, ocsp-ecdsa, latest] + - name: test-ocsp-ecdsa-delegate-valid-cert-server-does-not-staple-v4.4-python3.10 commands: - - func: bootstrap mongo-orchestration + - func: run tests vars: - VERSION: "6.0" - TOPOLOGY: server - AUTH: auth - SSL: ssl + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: valid-delegate + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "4.4" + tags: [ocsp, ocsp-ecdsa, "4.4"] + - name: test-ocsp-ecdsa-delegate-valid-cert-server-does-not-staple-v5.0-python3.10 + commands: - func: run tests vars: - AUTH: auth - SSL: ssl - SYNC: sync - TEST_SUITES: default - tags: - - "6.0" - - standalone - - auth - - ssl - - sync - - name: test-6.0-standalone-auth-ssl-async + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: 
valid-delegate + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "5.0" + tags: [ocsp, ocsp-ecdsa, "5.0"] + - name: test-ocsp-ecdsa-delegate-valid-cert-server-does-not-staple-v6.0-python3.10 commands: - - func: bootstrap mongo-orchestration + - func: run tests vars: + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: valid-delegate + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" VERSION: "6.0" - TOPOLOGY: server - AUTH: auth - SSL: ssl + tags: [ocsp, ocsp-ecdsa, "6.0"] + - name: test-ocsp-ecdsa-delegate-valid-cert-server-does-not-staple-v7.0-python3.10 + commands: - func: run tests vars: - AUTH: auth - SSL: ssl - SYNC: async - TEST_SUITES: default_async - tags: - - "6.0" - - standalone - - auth - - ssl - - async - - name: test-6.0-standalone-auth-ssl-sync_async + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: valid-delegate + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "7.0" + tags: [ocsp, ocsp-ecdsa, "7.0"] + - name: test-ocsp-ecdsa-delegate-valid-cert-server-does-not-staple-v8.0-python3.10 commands: - - func: bootstrap mongo-orchestration - vars: - VERSION: "6.0" - TOPOLOGY: server - AUTH: auth - SSL: ssl - func: run tests vars: - AUTH: auth - SSL: ssl - SYNC: sync_async - TEST_SUITES: "" - tags: - - "6.0" - - standalone - - auth - - ssl - - sync_async - - name: test-6.0-standalone-noauth-ssl-sync + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: valid-delegate + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "8.0" + tags: [ocsp, ocsp-ecdsa, "8.0"] + - name: test-ocsp-ecdsa-delegate-valid-cert-server-does-not-staple-rapid-python3.10 commands: - - func: bootstrap mongo-orchestration + - func: run tests vars: - VERSION: "6.0" - TOPOLOGY: server - AUTH: noauth - SSL: ssl + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: valid-delegate + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: rapid + tags: [ocsp, ocsp-ecdsa, rapid] + - name: test-ocsp-ecdsa-delegate-valid-cert-server-does-not-staple-latest-python3.14 + commands: - func: run tests vars: - AUTH: noauth - SSL: ssl - SYNC: sync - TEST_SUITES: default - tags: - - "6.0" - - standalone - - noauth - - ssl - - sync - - name: test-6.0-standalone-noauth-ssl-async + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: valid-delegate + TEST_NAME: ocsp + PYTHON_VERSION: "3.14" + VERSION: latest + tags: [ocsp, ocsp-ecdsa, latest] + - name: test-ocsp-ecdsa-delegate-invalid-cert-server-does-not-staple-v4.4-python3.10 commands: - - func: bootstrap mongo-orchestration + - func: run tests vars: - VERSION: "6.0" - TOPOLOGY: server - AUTH: noauth - SSL: ssl + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: revoked-delegate + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "4.4" + tags: [ocsp, ocsp-ecdsa, "4.4"] + - name: test-ocsp-ecdsa-delegate-invalid-cert-server-does-not-staple-v5.0-python3.10 + commands: - func: run tests vars: - AUTH: noauth - SSL: ssl - SYNC: async - TEST_SUITES: default_async - tags: - - "6.0" - - standalone - - noauth - - ssl - - async - - name: test-6.0-standalone-noauth-ssl-sync_async + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: revoked-delegate + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "5.0" + tags: [ocsp, ocsp-ecdsa, "5.0"] + - name: test-ocsp-ecdsa-delegate-invalid-cert-server-does-not-staple-v6.0-python3.10 commands: - - func: bootstrap mongo-orchestration + - 
func: run tests vars: + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: revoked-delegate + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" VERSION: "6.0" - TOPOLOGY: server - AUTH: noauth - SSL: ssl + tags: [ocsp, ocsp-ecdsa, "6.0"] + - name: test-ocsp-ecdsa-delegate-invalid-cert-server-does-not-staple-v7.0-python3.10 + commands: - func: run tests vars: - AUTH: noauth - SSL: ssl - SYNC: sync_async - TEST_SUITES: "" - tags: - - "6.0" - - standalone - - noauth - - ssl - - sync_async - - name: test-6.0-standalone-noauth-nossl-sync + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: revoked-delegate + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "7.0" + tags: [ocsp, ocsp-ecdsa, "7.0"] + - name: test-ocsp-ecdsa-delegate-invalid-cert-server-does-not-staple-v8.0-python3.10 commands: - - func: bootstrap mongo-orchestration + - func: run tests vars: - VERSION: "6.0" - TOPOLOGY: server - AUTH: noauth - SSL: nossl + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: revoked-delegate + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "8.0" + tags: [ocsp, ocsp-ecdsa, "8.0"] + - name: test-ocsp-ecdsa-delegate-invalid-cert-server-does-not-staple-rapid-python3.10 + commands: - func: run tests vars: - AUTH: noauth - SSL: nossl - SYNC: sync - TEST_SUITES: default - tags: - - "6.0" - - standalone - - noauth - - nossl - - sync - - name: test-6.0-standalone-noauth-nossl-async + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: revoked-delegate + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: rapid + tags: [ocsp, ocsp-ecdsa, rapid] + - name: test-ocsp-ecdsa-delegate-invalid-cert-server-does-not-staple-latest-python3.14 commands: - - func: bootstrap mongo-orchestration + - func: run tests vars: - VERSION: "6.0" - TOPOLOGY: server - AUTH: noauth - SSL: nossl + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: revoked-delegate + TEST_NAME: ocsp + PYTHON_VERSION: "3.14" + VERSION: latest + tags: [ocsp, ocsp-ecdsa, latest] + - name: test-ocsp-ecdsa-soft-fail-v4.4-python3.10 + commands: - func: run tests vars: - AUTH: noauth - SSL: nossl - SYNC: async - TEST_SUITES: default_async - tags: - - "6.0" - - standalone - - noauth - - nossl - - async - - name: test-6.0-standalone-noauth-nossl-sync_async + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: no-responder + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "4.4" + tags: [ocsp, ocsp-ecdsa, "4.4"] + - name: test-ocsp-ecdsa-soft-fail-v5.0-python3.10 commands: - - func: bootstrap mongo-orchestration + - func: run tests vars: - VERSION: "6.0" - TOPOLOGY: server - AUTH: noauth - SSL: nossl + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: no-responder + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "5.0" + tags: [ocsp, ocsp-ecdsa, "5.0"] + - name: test-ocsp-ecdsa-soft-fail-v6.0-python3.10 + commands: - func: run tests vars: - AUTH: noauth - SSL: nossl - SYNC: sync_async - TEST_SUITES: "" - tags: - - "6.0" - - standalone - - noauth - - nossl - - sync_async - - name: test-7.0-standalone-auth-ssl-sync + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: no-responder + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "6.0" + tags: [ocsp, ocsp-ecdsa, "6.0"] + - name: test-ocsp-ecdsa-soft-fail-v7.0-python3.10 commands: - - func: bootstrap mongo-orchestration + - func: run tests vars: + 
ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: no-responder + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" VERSION: "7.0" - TOPOLOGY: server - AUTH: auth - SSL: ssl + tags: [ocsp, ocsp-ecdsa, "7.0"] + - name: test-ocsp-ecdsa-soft-fail-v8.0-python3.10 + commands: - func: run tests vars: - AUTH: auth - SSL: ssl - SYNC: sync - TEST_SUITES: default - tags: - - "7.0" - - standalone - - auth - - ssl - - sync - - name: test-7.0-standalone-auth-ssl-async + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: no-responder + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "8.0" + tags: [ocsp, ocsp-ecdsa, "8.0"] + - name: test-ocsp-ecdsa-soft-fail-rapid-python3.10 commands: - - func: bootstrap mongo-orchestration - vars: - VERSION: "7.0" - TOPOLOGY: server - AUTH: auth - SSL: ssl - func: run tests vars: - AUTH: auth - SSL: ssl - SYNC: async - TEST_SUITES: default_async - tags: - - "7.0" - - standalone - - auth - - ssl - - async - - name: test-7.0-standalone-auth-ssl-sync_async + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: no-responder + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: rapid + tags: [ocsp, ocsp-ecdsa, rapid] + - name: test-ocsp-ecdsa-soft-fail-latest-python3.14 commands: - - func: bootstrap mongo-orchestration + - func: run tests vars: - VERSION: "7.0" - TOPOLOGY: server - AUTH: auth - SSL: ssl + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: no-responder + TEST_NAME: ocsp + PYTHON_VERSION: "3.14" + VERSION: latest + tags: [ocsp, ocsp-ecdsa, latest] + - name: test-ocsp-ecdsa-valid-cert-server-staples-v4.4-python3.10 + commands: - func: run tests vars: - AUTH: auth - SSL: ssl - SYNC: sync_async - TEST_SUITES: "" + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-mustStaple.json + OCSP_SERVER_TYPE: valid + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "4.4" tags: - - "7.0" - - standalone - - auth - - ssl - - sync_async - - name: test-7.0-standalone-noauth-ssl-sync + - ocsp + - ocsp-ecdsa + - "4.4" + - ocsp-staple + - name: test-ocsp-ecdsa-valid-cert-server-staples-v5.0-python3.10 commands: - - func: bootstrap mongo-orchestration - vars: - VERSION: "7.0" - TOPOLOGY: server - AUTH: noauth - SSL: ssl - func: run tests vars: - AUTH: noauth - SSL: ssl - SYNC: sync - TEST_SUITES: default + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-mustStaple.json + OCSP_SERVER_TYPE: valid + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "5.0" tags: - - "7.0" - - standalone - - noauth - - ssl - - sync - - name: test-7.0-standalone-noauth-ssl-async + - ocsp + - ocsp-ecdsa + - "5.0" + - ocsp-staple + - name: test-ocsp-ecdsa-valid-cert-server-staples-v6.0-python3.10 commands: - - func: bootstrap mongo-orchestration - vars: - VERSION: "7.0" - TOPOLOGY: server - AUTH: noauth - SSL: ssl - func: run tests vars: - AUTH: noauth - SSL: ssl - SYNC: async - TEST_SUITES: default_async + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-mustStaple.json + OCSP_SERVER_TYPE: valid + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "6.0" tags: - - "7.0" - - standalone - - noauth - - ssl - - async - - name: test-7.0-standalone-noauth-ssl-sync_async + - ocsp + - ocsp-ecdsa + - "6.0" + - ocsp-staple + - name: test-ocsp-ecdsa-valid-cert-server-staples-v7.0-python3.10 commands: - - func: bootstrap mongo-orchestration + - func: run tests vars: + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-mustStaple.json + OCSP_SERVER_TYPE: valid + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" VERSION: "7.0" - TOPOLOGY: 
server - AUTH: noauth - SSL: ssl + tags: + - ocsp + - ocsp-ecdsa + - "7.0" + - ocsp-staple + - name: test-ocsp-ecdsa-valid-cert-server-staples-v8.0-python3.10 + commands: - func: run tests vars: - AUTH: noauth - SSL: ssl - SYNC: sync_async - TEST_SUITES: "" + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-mustStaple.json + OCSP_SERVER_TYPE: valid + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "8.0" tags: - - "7.0" - - standalone - - noauth - - ssl - - sync_async - - name: test-7.0-standalone-noauth-nossl-sync + - ocsp + - ocsp-ecdsa + - "8.0" + - ocsp-staple + - name: test-ocsp-ecdsa-valid-cert-server-staples-rapid-python3.10 commands: - - func: bootstrap mongo-orchestration + - func: run tests vars: - VERSION: "7.0" - TOPOLOGY: server - AUTH: noauth - SSL: nossl + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-mustStaple.json + OCSP_SERVER_TYPE: valid + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: rapid + tags: + - ocsp + - ocsp-ecdsa + - rapid + - ocsp-staple + - name: test-ocsp-ecdsa-valid-cert-server-staples-latest-python3.14 + commands: - func: run tests vars: - AUTH: noauth - SSL: nossl - SYNC: sync - TEST_SUITES: default + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-mustStaple.json + OCSP_SERVER_TYPE: valid + TEST_NAME: ocsp + PYTHON_VERSION: "3.14" + VERSION: latest tags: - - "7.0" - - standalone - - noauth - - nossl - - sync - - name: test-7.0-standalone-noauth-nossl-async + - ocsp + - ocsp-ecdsa + - latest + - ocsp-staple + - name: test-ocsp-ecdsa-invalid-cert-server-staples-v4.4-python3.10 commands: - - func: bootstrap mongo-orchestration + - func: run tests vars: - VERSION: "7.0" - TOPOLOGY: server - AUTH: noauth - SSL: nossl + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-mustStaple.json + OCSP_SERVER_TYPE: revoked + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "4.4" + tags: + - ocsp + - ocsp-ecdsa + - "4.4" + - ocsp-staple + - name: test-ocsp-ecdsa-invalid-cert-server-staples-v5.0-python3.10 + commands: - func: run tests vars: - AUTH: noauth - SSL: nossl - SYNC: async - TEST_SUITES: default_async + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-mustStaple.json + OCSP_SERVER_TYPE: revoked + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "5.0" tags: - - "7.0" - - standalone - - noauth - - nossl - - async - - name: test-7.0-standalone-noauth-nossl-sync_async + - ocsp + - ocsp-ecdsa + - "5.0" + - ocsp-staple + - name: test-ocsp-ecdsa-invalid-cert-server-staples-v6.0-python3.10 commands: - - func: bootstrap mongo-orchestration + - func: run tests vars: - VERSION: "7.0" - TOPOLOGY: server - AUTH: noauth - SSL: nossl + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-mustStaple.json + OCSP_SERVER_TYPE: revoked + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "6.0" + tags: + - ocsp + - ocsp-ecdsa + - "6.0" + - ocsp-staple + - name: test-ocsp-ecdsa-invalid-cert-server-staples-v7.0-python3.10 + commands: - func: run tests vars: - AUTH: noauth - SSL: nossl - SYNC: sync_async - TEST_SUITES: "" + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-mustStaple.json + OCSP_SERVER_TYPE: revoked + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "7.0" tags: + - ocsp + - ocsp-ecdsa - "7.0" - - standalone - - noauth - - nossl - - sync_async - - name: test-8.0-standalone-auth-ssl-sync + - ocsp-staple + - name: test-ocsp-ecdsa-invalid-cert-server-staples-v8.0-python3.10 commands: - - func: bootstrap mongo-orchestration - vars: - VERSION: "8.0" - TOPOLOGY: server - AUTH: auth - SSL: ssl - func: run tests vars: - AUTH: auth - SSL: ssl - SYNC: sync - TEST_SUITES: default + ORCHESTRATION_FILE: 
ecdsa-basic-tls-ocsp-mustStaple.json + OCSP_SERVER_TYPE: revoked + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "8.0" tags: + - ocsp + - ocsp-ecdsa - "8.0" - - standalone - - auth - - ssl - - sync - - name: test-8.0-standalone-auth-ssl-async + - ocsp-staple + - name: test-ocsp-ecdsa-invalid-cert-server-staples-rapid-python3.10 commands: - - func: bootstrap mongo-orchestration + - func: run tests vars: - VERSION: "8.0" - TOPOLOGY: server - AUTH: auth - SSL: ssl + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-mustStaple.json + OCSP_SERVER_TYPE: revoked + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: rapid + tags: + - ocsp + - ocsp-ecdsa + - rapid + - ocsp-staple + - name: test-ocsp-ecdsa-invalid-cert-server-staples-latest-python3.14 + commands: - func: run tests vars: - AUTH: auth - SSL: ssl - SYNC: async - TEST_SUITES: default_async + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-mustStaple.json + OCSP_SERVER_TYPE: revoked + TEST_NAME: ocsp + PYTHON_VERSION: "3.14" + VERSION: latest tags: - - "8.0" - - standalone - - auth - - ssl - - async - - name: test-8.0-standalone-auth-ssl-sync_async + - ocsp + - ocsp-ecdsa + - latest + - ocsp-staple + - name: test-ocsp-ecdsa-delegate-valid-cert-server-staples-v4.4-python3.10 commands: - - func: bootstrap mongo-orchestration + - func: run tests vars: - VERSION: "8.0" - TOPOLOGY: server - AUTH: auth - SSL: ssl + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-mustStaple.json + OCSP_SERVER_TYPE: valid-delegate + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "4.4" + tags: + - ocsp + - ocsp-ecdsa + - "4.4" + - ocsp-staple + - name: test-ocsp-ecdsa-delegate-valid-cert-server-staples-v5.0-python3.10 + commands: - func: run tests vars: - AUTH: auth - SSL: ssl - SYNC: sync_async - TEST_SUITES: "" + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-mustStaple.json + OCSP_SERVER_TYPE: valid-delegate + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "5.0" tags: - - "8.0" - - standalone - - auth - - ssl - - sync_async - - name: test-8.0-standalone-noauth-ssl-sync + - ocsp + - ocsp-ecdsa + - "5.0" + - ocsp-staple + - name: test-ocsp-ecdsa-delegate-valid-cert-server-staples-v6.0-python3.10 commands: - - func: bootstrap mongo-orchestration + - func: run tests vars: - VERSION: "8.0" - TOPOLOGY: server - AUTH: noauth - SSL: ssl + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-mustStaple.json + OCSP_SERVER_TYPE: valid-delegate + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "6.0" + tags: + - ocsp + - ocsp-ecdsa + - "6.0" + - ocsp-staple + - name: test-ocsp-ecdsa-delegate-valid-cert-server-staples-v7.0-python3.10 + commands: - func: run tests vars: - AUTH: noauth - SSL: ssl - SYNC: sync - TEST_SUITES: default + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-mustStaple.json + OCSP_SERVER_TYPE: valid-delegate + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "7.0" tags: - - "8.0" - - standalone - - noauth - - ssl - - sync - - name: test-8.0-standalone-noauth-ssl-async + - ocsp + - ocsp-ecdsa + - "7.0" + - ocsp-staple + - name: test-ocsp-ecdsa-delegate-valid-cert-server-staples-v8.0-python3.10 commands: - - func: bootstrap mongo-orchestration + - func: run tests vars: + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-mustStaple.json + OCSP_SERVER_TYPE: valid-delegate + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" VERSION: "8.0" - TOPOLOGY: server - AUTH: noauth - SSL: ssl + tags: + - ocsp + - ocsp-ecdsa + - "8.0" + - ocsp-staple + - name: test-ocsp-ecdsa-delegate-valid-cert-server-staples-rapid-python3.10 + commands: - func: run tests vars: - AUTH: noauth - SSL: ssl - SYNC: async - 
TEST_SUITES: default_async + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-mustStaple.json + OCSP_SERVER_TYPE: valid-delegate + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: rapid tags: - - "8.0" - - standalone - - noauth - - ssl - - async - - name: test-8.0-standalone-noauth-ssl-sync_async + - ocsp + - ocsp-ecdsa + - rapid + - ocsp-staple + - name: test-ocsp-ecdsa-delegate-valid-cert-server-staples-latest-python3.14 commands: - - func: bootstrap mongo-orchestration + - func: run tests vars: - VERSION: "8.0" - TOPOLOGY: server - AUTH: noauth - SSL: ssl + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-mustStaple.json + OCSP_SERVER_TYPE: valid-delegate + TEST_NAME: ocsp + PYTHON_VERSION: "3.14" + VERSION: latest + tags: + - ocsp + - ocsp-ecdsa + - latest + - ocsp-staple + - name: test-ocsp-ecdsa-delegate-invalid-cert-server-staples-v4.4-python3.10 + commands: - func: run tests vars: - AUTH: noauth - SSL: ssl - SYNC: sync_async - TEST_SUITES: "" + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-mustStaple.json + OCSP_SERVER_TYPE: revoked-delegate + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "4.4" tags: - - "8.0" - - standalone - - noauth - - ssl - - sync_async - - name: test-8.0-standalone-noauth-nossl-sync + - ocsp + - ocsp-ecdsa + - "4.4" + - ocsp-staple + - name: test-ocsp-ecdsa-delegate-invalid-cert-server-staples-v5.0-python3.10 commands: - - func: bootstrap mongo-orchestration + - func: run tests vars: - VERSION: "8.0" - TOPOLOGY: server - AUTH: noauth - SSL: nossl + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-mustStaple.json + OCSP_SERVER_TYPE: revoked-delegate + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "5.0" + tags: + - ocsp + - ocsp-ecdsa + - "5.0" + - ocsp-staple + - name: test-ocsp-ecdsa-delegate-invalid-cert-server-staples-v6.0-python3.10 + commands: - func: run tests vars: - AUTH: noauth - SSL: nossl - SYNC: sync - TEST_SUITES: default + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-mustStaple.json + OCSP_SERVER_TYPE: revoked-delegate + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "6.0" tags: - - "8.0" - - standalone - - noauth - - nossl - - sync - - name: test-8.0-standalone-noauth-nossl-async + - ocsp + - ocsp-ecdsa + - "6.0" + - ocsp-staple + - name: test-ocsp-ecdsa-delegate-invalid-cert-server-staples-v7.0-python3.10 commands: - - func: bootstrap mongo-orchestration + - func: run tests vars: - VERSION: "8.0" - TOPOLOGY: server - AUTH: noauth - SSL: nossl + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-mustStaple.json + OCSP_SERVER_TYPE: revoked-delegate + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "7.0" + tags: + - ocsp + - ocsp-ecdsa + - "7.0" + - ocsp-staple + - name: test-ocsp-ecdsa-delegate-invalid-cert-server-staples-v8.0-python3.10 + commands: - func: run tests vars: - AUTH: noauth - SSL: nossl - SYNC: async - TEST_SUITES: default_async + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-mustStaple.json + OCSP_SERVER_TYPE: revoked-delegate + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "8.0" tags: + - ocsp + - ocsp-ecdsa - "8.0" - - standalone - - noauth - - nossl - - async - - name: test-8.0-standalone-noauth-nossl-sync_async + - ocsp-staple + - name: test-ocsp-ecdsa-delegate-invalid-cert-server-staples-rapid-python3.10 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-mustStaple.json + OCSP_SERVER_TYPE: revoked-delegate + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: rapid + tags: + - ocsp + - ocsp-ecdsa + - rapid + - ocsp-staple + - name: test-ocsp-ecdsa-delegate-invalid-cert-server-staples-latest-python3.14 + commands: 
+ - func: run tests + vars: + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-mustStaple.json + OCSP_SERVER_TYPE: revoked-delegate + TEST_NAME: ocsp + PYTHON_VERSION: "3.14" + VERSION: latest + tags: + - ocsp + - ocsp-ecdsa + - latest + - ocsp-staple + - name: test-ocsp-ecdsa-malicious-invalid-cert-muststaple-server-does-not-staple-v4.4-python3.10 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-mustStaple-disableStapling.json + OCSP_SERVER_TYPE: revoked + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "4.4" + tags: [ocsp, ocsp-ecdsa, "4.4"] + - name: test-ocsp-ecdsa-malicious-invalid-cert-muststaple-server-does-not-staple-v5.0-python3.10 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-mustStaple-disableStapling.json + OCSP_SERVER_TYPE: revoked + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "5.0" + tags: [ocsp, ocsp-ecdsa, "5.0"] + - name: test-ocsp-ecdsa-malicious-invalid-cert-muststaple-server-does-not-staple-v6.0-python3.10 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-mustStaple-disableStapling.json + OCSP_SERVER_TYPE: revoked + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "6.0" + tags: [ocsp, ocsp-ecdsa, "6.0"] + - name: test-ocsp-ecdsa-malicious-invalid-cert-muststaple-server-does-not-staple-v7.0-python3.10 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-mustStaple-disableStapling.json + OCSP_SERVER_TYPE: revoked + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "7.0" + tags: [ocsp, ocsp-ecdsa, "7.0"] + - name: test-ocsp-ecdsa-malicious-invalid-cert-muststaple-server-does-not-staple-v8.0-python3.10 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-mustStaple-disableStapling.json + OCSP_SERVER_TYPE: revoked + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "8.0" + tags: [ocsp, ocsp-ecdsa, "8.0"] + - name: test-ocsp-ecdsa-malicious-invalid-cert-muststaple-server-does-not-staple-rapid-python3.10 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-mustStaple-disableStapling.json + OCSP_SERVER_TYPE: revoked + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: rapid + tags: [ocsp, ocsp-ecdsa, rapid] + - name: test-ocsp-ecdsa-malicious-invalid-cert-muststaple-server-does-not-staple-latest-python3.14 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-mustStaple-disableStapling.json + OCSP_SERVER_TYPE: revoked + TEST_NAME: ocsp + PYTHON_VERSION: "3.14" + VERSION: latest + tags: [ocsp, ocsp-ecdsa, latest] + - name: test-ocsp-ecdsa-delegate-malicious-invalid-cert-muststaple-server-does-not-staple-v4.4-python3.10 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-mustStaple-disableStapling.json + OCSP_SERVER_TYPE: revoked-delegate + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "4.4" + tags: [ocsp, ocsp-ecdsa, "4.4"] + - name: test-ocsp-ecdsa-delegate-malicious-invalid-cert-muststaple-server-does-not-staple-v5.0-python3.10 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-mustStaple-disableStapling.json + OCSP_SERVER_TYPE: revoked-delegate + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "5.0" + tags: [ocsp, ocsp-ecdsa, "5.0"] + - name: test-ocsp-ecdsa-delegate-malicious-invalid-cert-muststaple-server-does-not-staple-v6.0-python3.10 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-mustStaple-disableStapling.json + OCSP_SERVER_TYPE: 
revoked-delegate + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "6.0" + tags: [ocsp, ocsp-ecdsa, "6.0"] + - name: test-ocsp-ecdsa-delegate-malicious-invalid-cert-muststaple-server-does-not-staple-v7.0-python3.10 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-mustStaple-disableStapling.json + OCSP_SERVER_TYPE: revoked-delegate + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "7.0" + tags: [ocsp, ocsp-ecdsa, "7.0"] + - name: test-ocsp-ecdsa-delegate-malicious-invalid-cert-muststaple-server-does-not-staple-v8.0-python3.10 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-mustStaple-disableStapling.json + OCSP_SERVER_TYPE: revoked-delegate + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "8.0" + tags: [ocsp, ocsp-ecdsa, "8.0"] + - name: test-ocsp-ecdsa-delegate-malicious-invalid-cert-muststaple-server-does-not-staple-rapid-python3.10 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-mustStaple-disableStapling.json + OCSP_SERVER_TYPE: revoked-delegate + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: rapid + tags: [ocsp, ocsp-ecdsa, rapid] + - name: test-ocsp-ecdsa-delegate-malicious-invalid-cert-muststaple-server-does-not-staple-latest-python3.14 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-mustStaple-disableStapling.json + OCSP_SERVER_TYPE: revoked-delegate + TEST_NAME: ocsp + PYTHON_VERSION: "3.14" + VERSION: latest + tags: [ocsp, ocsp-ecdsa, latest] + - name: test-ocsp-ecdsa-malicious-no-responder-muststaple-server-does-not-staple-v4.4-python3.10 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-mustStaple-disableStapling.json + OCSP_SERVER_TYPE: no-responder + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "4.4" + tags: [ocsp, ocsp-ecdsa, "4.4"] + - name: test-ocsp-ecdsa-malicious-no-responder-muststaple-server-does-not-staple-v5.0-python3.10 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-mustStaple-disableStapling.json + OCSP_SERVER_TYPE: no-responder + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "5.0" + tags: [ocsp, ocsp-ecdsa, "5.0"] + - name: test-ocsp-ecdsa-malicious-no-responder-muststaple-server-does-not-staple-v6.0-python3.10 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-mustStaple-disableStapling.json + OCSP_SERVER_TYPE: no-responder + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "6.0" + tags: [ocsp, ocsp-ecdsa, "6.0"] + - name: test-ocsp-ecdsa-malicious-no-responder-muststaple-server-does-not-staple-v7.0-python3.10 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-mustStaple-disableStapling.json + OCSP_SERVER_TYPE: no-responder + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "7.0" + tags: [ocsp, ocsp-ecdsa, "7.0"] + - name: test-ocsp-ecdsa-malicious-no-responder-muststaple-server-does-not-staple-v8.0-python3.10 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-mustStaple-disableStapling.json + OCSP_SERVER_TYPE: no-responder + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "8.0" + tags: [ocsp, ocsp-ecdsa, "8.0"] + - name: test-ocsp-ecdsa-malicious-no-responder-muststaple-server-does-not-staple-rapid-python3.10 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-mustStaple-disableStapling.json + OCSP_SERVER_TYPE: no-responder + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: rapid + tags: 
[ocsp, ocsp-ecdsa, rapid] + - name: test-ocsp-ecdsa-malicious-no-responder-muststaple-server-does-not-staple-latest-python3.14 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: ecdsa-basic-tls-ocsp-mustStaple-disableStapling.json + OCSP_SERVER_TYPE: no-responder + TEST_NAME: ocsp + PYTHON_VERSION: "3.14" + VERSION: latest + tags: [ocsp, ocsp-ecdsa, latest] + - name: test-ocsp-rsa-valid-cert-server-does-not-staple-v4.4-python3.10 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: rsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: valid + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "4.4" + tags: [ocsp, ocsp-rsa, "4.4"] + - name: test-ocsp-rsa-valid-cert-server-does-not-staple-v5.0-python3.10 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: rsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: valid + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "5.0" + tags: [ocsp, ocsp-rsa, "5.0"] + - name: test-ocsp-rsa-valid-cert-server-does-not-staple-v6.0-python3.10 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: rsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: valid + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "6.0" + tags: [ocsp, ocsp-rsa, "6.0"] + - name: test-ocsp-rsa-valid-cert-server-does-not-staple-v7.0-python3.10 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: rsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: valid + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "7.0" + tags: [ocsp, ocsp-rsa, "7.0"] + - name: test-ocsp-rsa-valid-cert-server-does-not-staple-v8.0-python3.10 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: rsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: valid + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "8.0" + tags: [ocsp, ocsp-rsa, "8.0"] + - name: test-ocsp-rsa-valid-cert-server-does-not-staple-rapid-python3.10 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: rsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: valid + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: rapid + tags: [ocsp, ocsp-rsa, rapid] + - name: test-ocsp-rsa-valid-cert-server-does-not-staple-latest-python3.14 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: rsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: valid + TEST_NAME: ocsp + PYTHON_VERSION: "3.14" + VERSION: latest + tags: [ocsp, ocsp-rsa, latest] + - name: test-ocsp-rsa-invalid-cert-server-does-not-staple-v4.4-python3.10 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: rsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: revoked + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "4.4" + tags: [ocsp, ocsp-rsa, "4.4"] + - name: test-ocsp-rsa-invalid-cert-server-does-not-staple-v5.0-python3.10 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: rsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: revoked + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "5.0" + tags: [ocsp, ocsp-rsa, "5.0"] + - name: test-ocsp-rsa-invalid-cert-server-does-not-staple-v6.0-python3.10 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: rsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: revoked + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "6.0" + tags: [ocsp, ocsp-rsa, "6.0"] + - name: test-ocsp-rsa-invalid-cert-server-does-not-staple-v7.0-python3.10 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: rsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: revoked + TEST_NAME: ocsp + 
PYTHON_VERSION: "3.10" + VERSION: "7.0" + tags: [ocsp, ocsp-rsa, "7.0"] + - name: test-ocsp-rsa-invalid-cert-server-does-not-staple-v8.0-python3.10 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: rsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: revoked + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "8.0" + tags: [ocsp, ocsp-rsa, "8.0"] + - name: test-ocsp-rsa-invalid-cert-server-does-not-staple-rapid-python3.10 commands: - - func: bootstrap mongo-orchestration + - func: run tests + vars: + ORCHESTRATION_FILE: rsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: revoked + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: rapid + tags: [ocsp, ocsp-rsa, rapid] + - name: test-ocsp-rsa-invalid-cert-server-does-not-staple-latest-python3.14 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: rsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: revoked + TEST_NAME: ocsp + PYTHON_VERSION: "3.14" + VERSION: latest + tags: [ocsp, ocsp-rsa, latest] + - name: test-ocsp-rsa-delegate-valid-cert-server-does-not-staple-v4.4-python3.10 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: rsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: valid-delegate + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "4.4" + tags: [ocsp, ocsp-rsa, "4.4"] + - name: test-ocsp-rsa-delegate-valid-cert-server-does-not-staple-v5.0-python3.10 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: rsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: valid-delegate + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "5.0" + tags: [ocsp, ocsp-rsa, "5.0"] + - name: test-ocsp-rsa-delegate-valid-cert-server-does-not-staple-v6.0-python3.10 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: rsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: valid-delegate + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "6.0" + tags: [ocsp, ocsp-rsa, "6.0"] + - name: test-ocsp-rsa-delegate-valid-cert-server-does-not-staple-v7.0-python3.10 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: rsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: valid-delegate + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "7.0" + tags: [ocsp, ocsp-rsa, "7.0"] + - name: test-ocsp-rsa-delegate-valid-cert-server-does-not-staple-v8.0-python3.10 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: rsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: valid-delegate + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "8.0" + tags: [ocsp, ocsp-rsa, "8.0"] + - name: test-ocsp-rsa-delegate-valid-cert-server-does-not-staple-rapid-python3.10 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: rsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: valid-delegate + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: rapid + tags: [ocsp, ocsp-rsa, rapid] + - name: test-ocsp-rsa-delegate-valid-cert-server-does-not-staple-latest-python3.14 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: rsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: valid-delegate + TEST_NAME: ocsp + PYTHON_VERSION: "3.14" + VERSION: latest + tags: [ocsp, ocsp-rsa, latest] + - name: test-ocsp-rsa-delegate-invalid-cert-server-does-not-staple-v4.4-python3.10 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: rsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: revoked-delegate + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "4.4" + tags: [ocsp, ocsp-rsa, "4.4"] + - name: 
test-ocsp-rsa-delegate-invalid-cert-server-does-not-staple-v5.0-python3.10 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: rsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: revoked-delegate + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "5.0" + tags: [ocsp, ocsp-rsa, "5.0"] + - name: test-ocsp-rsa-delegate-invalid-cert-server-does-not-staple-v6.0-python3.10 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: rsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: revoked-delegate + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "6.0" + tags: [ocsp, ocsp-rsa, "6.0"] + - name: test-ocsp-rsa-delegate-invalid-cert-server-does-not-staple-v7.0-python3.10 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: rsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: revoked-delegate + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "7.0" + tags: [ocsp, ocsp-rsa, "7.0"] + - name: test-ocsp-rsa-delegate-invalid-cert-server-does-not-staple-v8.0-python3.10 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: rsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: revoked-delegate + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "8.0" + tags: [ocsp, ocsp-rsa, "8.0"] + - name: test-ocsp-rsa-delegate-invalid-cert-server-does-not-staple-rapid-python3.10 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: rsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: revoked-delegate + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: rapid + tags: [ocsp, ocsp-rsa, rapid] + - name: test-ocsp-rsa-delegate-invalid-cert-server-does-not-staple-latest-python3.14 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: rsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: revoked-delegate + TEST_NAME: ocsp + PYTHON_VERSION: "3.14" + VERSION: latest + tags: [ocsp, ocsp-rsa, latest] + - name: test-ocsp-rsa-soft-fail-v4.4-python3.10 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: rsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: no-responder + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "4.4" + tags: [ocsp, ocsp-rsa, "4.4"] + - name: test-ocsp-rsa-soft-fail-v5.0-python3.10 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: rsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: no-responder + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "5.0" + tags: [ocsp, ocsp-rsa, "5.0"] + - name: test-ocsp-rsa-soft-fail-v6.0-python3.10 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: rsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: no-responder + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "6.0" + tags: [ocsp, ocsp-rsa, "6.0"] + - name: test-ocsp-rsa-soft-fail-v7.0-python3.10 + commands: + - func: run tests + vars: + ORCHESTRATION_FILE: rsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: no-responder + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "7.0" + tags: [ocsp, ocsp-rsa, "7.0"] + - name: test-ocsp-rsa-soft-fail-v8.0-python3.10 + commands: + - func: run tests vars: + ORCHESTRATION_FILE: rsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: no-responder + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" VERSION: "8.0" - TOPOLOGY: server - AUTH: noauth - SSL: nossl + tags: [ocsp, ocsp-rsa, "8.0"] + - name: test-ocsp-rsa-soft-fail-rapid-python3.10 + commands: - func: run tests vars: - AUTH: noauth - SSL: nossl - SYNC: sync_async - TEST_SUITES: "" - tags: - - "8.0" - - standalone - - noauth - - nossl - - sync_async - - name: 
test-rapid-standalone-auth-ssl-sync + ORCHESTRATION_FILE: rsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: no-responder + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: rapid + tags: [ocsp, ocsp-rsa, rapid] + - name: test-ocsp-rsa-soft-fail-latest-python3.14 commands: - - func: bootstrap mongo-orchestration + - func: run tests vars: - VERSION: rapid - TOPOLOGY: server - AUTH: auth - SSL: ssl + ORCHESTRATION_FILE: rsa-basic-tls-ocsp-disableStapling.json + OCSP_SERVER_TYPE: no-responder + TEST_NAME: ocsp + PYTHON_VERSION: "3.14" + VERSION: latest + tags: [ocsp, ocsp-rsa, latest] + - name: test-ocsp-rsa-valid-cert-server-staples-v4.4-python3.10 + commands: - func: run tests vars: - AUTH: auth - SSL: ssl - SYNC: sync - TEST_SUITES: default + ORCHESTRATION_FILE: rsa-basic-tls-ocsp-mustStaple.json + OCSP_SERVER_TYPE: valid + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "4.4" tags: - - rapid - - standalone - - auth - - ssl - - sync - - name: test-rapid-standalone-auth-ssl-async + - ocsp + - ocsp-rsa + - "4.4" + - ocsp-staple + - name: test-ocsp-rsa-valid-cert-server-staples-v5.0-python3.10 commands: - - func: bootstrap mongo-orchestration - vars: - VERSION: rapid - TOPOLOGY: server - AUTH: auth - SSL: ssl - func: run tests vars: - AUTH: auth - SSL: ssl - SYNC: async - TEST_SUITES: default_async + ORCHESTRATION_FILE: rsa-basic-tls-ocsp-mustStaple.json + OCSP_SERVER_TYPE: valid + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "5.0" tags: - - rapid - - standalone - - auth - - ssl - - async - - name: test-rapid-standalone-auth-ssl-sync_async + - ocsp + - ocsp-rsa + - "5.0" + - ocsp-staple + - name: test-ocsp-rsa-valid-cert-server-staples-v6.0-python3.10 commands: - - func: bootstrap mongo-orchestration - vars: - VERSION: rapid - TOPOLOGY: server - AUTH: auth - SSL: ssl - func: run tests vars: - AUTH: auth - SSL: ssl - SYNC: sync_async - TEST_SUITES: "" + ORCHESTRATION_FILE: rsa-basic-tls-ocsp-mustStaple.json + OCSP_SERVER_TYPE: valid + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "6.0" tags: - - rapid - - standalone - - auth - - ssl - - sync_async - - name: test-rapid-standalone-noauth-ssl-sync + - ocsp + - ocsp-rsa + - "6.0" + - ocsp-staple + - name: test-ocsp-rsa-valid-cert-server-staples-v7.0-python3.10 commands: - - func: bootstrap mongo-orchestration - vars: - VERSION: rapid - TOPOLOGY: server - AUTH: noauth - SSL: ssl - func: run tests vars: - AUTH: noauth - SSL: ssl - SYNC: sync - TEST_SUITES: default + ORCHESTRATION_FILE: rsa-basic-tls-ocsp-mustStaple.json + OCSP_SERVER_TYPE: valid + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "7.0" tags: - - rapid - - standalone - - noauth - - ssl - - sync - - name: test-rapid-standalone-noauth-ssl-async + - ocsp + - ocsp-rsa + - "7.0" + - ocsp-staple + - name: test-ocsp-rsa-valid-cert-server-staples-v8.0-python3.10 commands: - - func: bootstrap mongo-orchestration - vars: - VERSION: rapid - TOPOLOGY: server - AUTH: noauth - SSL: ssl - func: run tests vars: - AUTH: noauth - SSL: ssl - SYNC: async - TEST_SUITES: default_async + ORCHESTRATION_FILE: rsa-basic-tls-ocsp-mustStaple.json + OCSP_SERVER_TYPE: valid + TEST_NAME: ocsp + PYTHON_VERSION: "3.10" + VERSION: "8.0" tags: - - rapid - - standalone - - noauth - - ssl - - async - - name: test-rapid-standalone-noauth-ssl-sync_async + - ocsp + - ocsp-rsa + - "8.0" + - ocsp-staple + - name: test-ocsp-rsa-valid-cert-server-staples-rapid-python3.10 commands: - - func: bootstrap mongo-orchestration - vars: - VERSION: rapid - TOPOLOGY: server - AUTH: noauth - SSL: ssl - func: 
         vars:
-          AUTH: noauth
-          SSL: ssl
-          SYNC: sync_async
-          TEST_SUITES: ""
+          ORCHESTRATION_FILE: rsa-basic-tls-ocsp-mustStaple.json
+          OCSP_SERVER_TYPE: valid
+          TEST_NAME: ocsp
+          PYTHON_VERSION: "3.10"
+          VERSION: rapid
     tags:
+      - ocsp
+      - ocsp-rsa
       - rapid
-      - standalone
-      - noauth
-      - ssl
-      - sync_async
-  - name: test-rapid-standalone-noauth-nossl-sync
+      - ocsp-staple
+  - name: test-ocsp-rsa-valid-cert-server-staples-latest-python3.14
     commands:
-      - func: bootstrap mongo-orchestration
-        vars:
-          VERSION: rapid
-          TOPOLOGY: server
-          AUTH: noauth
-          SSL: nossl
       - func: run tests
         vars:
-          AUTH: noauth
-          SSL: nossl
-          SYNC: sync
-          TEST_SUITES: default
+          ORCHESTRATION_FILE: rsa-basic-tls-ocsp-mustStaple.json
+          OCSP_SERVER_TYPE: valid
+          TEST_NAME: ocsp
+          PYTHON_VERSION: "3.14"
+          VERSION: latest
     tags:
-      - rapid
-      - standalone
-      - noauth
-      - nossl
-      - sync
-  - name: test-rapid-standalone-noauth-nossl-async
+      - ocsp
+      - ocsp-rsa
+      - latest
+      - ocsp-staple
+  - name: test-ocsp-rsa-invalid-cert-server-staples-v4.4-python3.10
     commands:
-      - func: bootstrap mongo-orchestration
-        vars:
-          VERSION: rapid
-          TOPOLOGY: server
-          AUTH: noauth
-          SSL: nossl
       - func: run tests
         vars:
-          AUTH: noauth
-          SSL: nossl
-          SYNC: async
-          TEST_SUITES: default_async
+          ORCHESTRATION_FILE: rsa-basic-tls-ocsp-mustStaple.json
+          OCSP_SERVER_TYPE: revoked
+          TEST_NAME: ocsp
+          PYTHON_VERSION: "3.10"
+          VERSION: "4.4"
     tags:
-      - rapid
-      - standalone
-      - noauth
-      - nossl
-      - async
-  - name: test-rapid-standalone-noauth-nossl-sync_async
+      - ocsp
+      - ocsp-rsa
+      - "4.4"
+      - ocsp-staple
+  - name: test-ocsp-rsa-invalid-cert-server-staples-v5.0-python3.10
     commands:
-      - func: bootstrap mongo-orchestration
-        vars:
-          VERSION: rapid
-          TOPOLOGY: server
-          AUTH: noauth
-          SSL: nossl
       - func: run tests
         vars:
-          AUTH: noauth
-          SSL: nossl
-          SYNC: sync_async
-          TEST_SUITES: ""
+          ORCHESTRATION_FILE: rsa-basic-tls-ocsp-mustStaple.json
+          OCSP_SERVER_TYPE: revoked
+          TEST_NAME: ocsp
+          PYTHON_VERSION: "3.10"
+          VERSION: "5.0"
     tags:
-      - rapid
-      - standalone
-      - noauth
-      - nossl
-      - sync_async
-  - name: test-latest-standalone-auth-ssl-sync
+      - ocsp
+      - ocsp-rsa
+      - "5.0"
+      - ocsp-staple
+  - name: test-ocsp-rsa-invalid-cert-server-staples-v6.0-python3.10
     commands:
-      - func: bootstrap mongo-orchestration
-        vars:
-          VERSION: latest
-          TOPOLOGY: server
-          AUTH: auth
-          SSL: ssl
       - func: run tests
         vars:
-          AUTH: auth
-          SSL: ssl
-          SYNC: sync
-          TEST_SUITES: default
+          ORCHESTRATION_FILE: rsa-basic-tls-ocsp-mustStaple.json
+          OCSP_SERVER_TYPE: revoked
+          TEST_NAME: ocsp
+          PYTHON_VERSION: "3.10"
+          VERSION: "6.0"
     tags:
-      - latest
-      - standalone
-      - auth
-      - ssl
-      - sync
-  - name: test-latest-standalone-auth-ssl-async
+      - ocsp
+      - ocsp-rsa
+      - "6.0"
+      - ocsp-staple
+  - name: test-ocsp-rsa-invalid-cert-server-staples-v7.0-python3.10
     commands:
-      - func: bootstrap mongo-orchestration
-        vars:
-          VERSION: latest
-          TOPOLOGY: server
-          AUTH: auth
-          SSL: ssl
       - func: run tests
         vars:
-          AUTH: auth
-          SSL: ssl
-          SYNC: async
-          TEST_SUITES: default_async
+          ORCHESTRATION_FILE: rsa-basic-tls-ocsp-mustStaple.json
+          OCSP_SERVER_TYPE: revoked
+          TEST_NAME: ocsp
+          PYTHON_VERSION: "3.10"
+          VERSION: "7.0"
     tags:
-      - latest
-      - standalone
-      - auth
-      - ssl
-      - async
-  - name: test-latest-standalone-auth-ssl-sync_async
+      - ocsp
+      - ocsp-rsa
+      - "7.0"
+      - ocsp-staple
+  - name: test-ocsp-rsa-invalid-cert-server-staples-v8.0-python3.10
     commands:
-      - func: bootstrap mongo-orchestration
-        vars:
-          VERSION: latest
-          TOPOLOGY: server
-          AUTH: auth
-          SSL: ssl
       - func: run tests
         vars:
-          AUTH: auth
-          SSL: ssl
-          SYNC: sync_async
-          TEST_SUITES: ""
+          ORCHESTRATION_FILE: rsa-basic-tls-ocsp-mustStaple.json
+          OCSP_SERVER_TYPE: revoked
+          TEST_NAME: ocsp
+          PYTHON_VERSION: "3.10"
+          VERSION: "8.0"
     tags:
-      - latest
-      - standalone
-      - auth
-      - ssl
-      - sync_async
-  - name: test-latest-standalone-noauth-ssl-sync
+      - ocsp
+      - ocsp-rsa
+      - "8.0"
+      - ocsp-staple
+  - name: test-ocsp-rsa-invalid-cert-server-staples-rapid-python3.10
     commands:
-      - func: bootstrap mongo-orchestration
-        vars:
-          VERSION: latest
-          TOPOLOGY: server
-          AUTH: noauth
-          SSL: ssl
       - func: run tests
         vars:
-          AUTH: noauth
-          SSL: ssl
-          SYNC: sync
-          TEST_SUITES: default
+          ORCHESTRATION_FILE: rsa-basic-tls-ocsp-mustStaple.json
+          OCSP_SERVER_TYPE: revoked
+          TEST_NAME: ocsp
+          PYTHON_VERSION: "3.10"
+          VERSION: rapid
     tags:
-      - latest
-      - standalone
-      - noauth
-      - ssl
-      - sync
-  - name: test-latest-standalone-noauth-ssl-async
+      - ocsp
+      - ocsp-rsa
+      - rapid
+      - ocsp-staple
+  - name: test-ocsp-rsa-invalid-cert-server-staples-latest-python3.14
     commands:
-      - func: bootstrap mongo-orchestration
-        vars:
-          VERSION: latest
-          TOPOLOGY: server
-          AUTH: noauth
-          SSL: ssl
       - func: run tests
         vars:
-          AUTH: noauth
-          SSL: ssl
-          SYNC: async
-          TEST_SUITES: default_async
+          ORCHESTRATION_FILE: rsa-basic-tls-ocsp-mustStaple.json
+          OCSP_SERVER_TYPE: revoked
+          TEST_NAME: ocsp
+          PYTHON_VERSION: "3.14"
+          VERSION: latest
     tags:
+      - ocsp
+      - ocsp-rsa
       - latest
-      - standalone
-      - noauth
-      - ssl
-      - async
-  - name: test-latest-standalone-noauth-ssl-sync_async
+      - ocsp-staple
+  - name: test-ocsp-rsa-delegate-valid-cert-server-staples-v4.4-python3.10
     commands:
-      - func: bootstrap mongo-orchestration
-        vars:
-          VERSION: latest
-          TOPOLOGY: server
-          AUTH: noauth
-          SSL: ssl
       - func: run tests
         vars:
-          AUTH: noauth
-          SSL: ssl
-          SYNC: sync_async
-          TEST_SUITES: ""
+          ORCHESTRATION_FILE: rsa-basic-tls-ocsp-mustStaple.json
+          OCSP_SERVER_TYPE: valid-delegate
+          TEST_NAME: ocsp
+          PYTHON_VERSION: "3.10"
+          VERSION: "4.4"
     tags:
-      - latest
-      - standalone
-      - noauth
-      - ssl
-      - sync_async
-  - name: test-latest-standalone-noauth-nossl-sync
+      - ocsp
+      - ocsp-rsa
+      - "4.4"
+      - ocsp-staple
+  - name: test-ocsp-rsa-delegate-valid-cert-server-staples-v5.0-python3.10
     commands:
-      - func: bootstrap mongo-orchestration
-        vars:
-          VERSION: latest
-          TOPOLOGY: server
-          AUTH: noauth
-          SSL: nossl
       - func: run tests
         vars:
-          AUTH: noauth
-          SSL: nossl
-          SYNC: sync
-          TEST_SUITES: default
+          ORCHESTRATION_FILE: rsa-basic-tls-ocsp-mustStaple.json
+          OCSP_SERVER_TYPE: valid-delegate
+          TEST_NAME: ocsp
+          PYTHON_VERSION: "3.10"
+          VERSION: "5.0"
     tags:
-      - latest
-      - standalone
-      - noauth
-      - nossl
-      - sync
-  - name: test-latest-standalone-noauth-nossl-async
+      - ocsp
+      - ocsp-rsa
+      - "5.0"
+      - ocsp-staple
+  - name: test-ocsp-rsa-delegate-valid-cert-server-staples-v6.0-python3.10
     commands:
-      - func: bootstrap mongo-orchestration
-        vars:
-          VERSION: latest
-          TOPOLOGY: server
-          AUTH: noauth
-          SSL: nossl
       - func: run tests
         vars:
-          AUTH: noauth
-          SSL: nossl
-          SYNC: async
-          TEST_SUITES: default_async
+          ORCHESTRATION_FILE: rsa-basic-tls-ocsp-mustStaple.json
+          OCSP_SERVER_TYPE: valid-delegate
+          TEST_NAME: ocsp
+          PYTHON_VERSION: "3.10"
+          VERSION: "6.0"
     tags:
-      - latest
-      - standalone
-      - noauth
-      - nossl
-      - async
-  - name: test-latest-standalone-noauth-nossl-sync_async
+      - ocsp
+      - ocsp-rsa
+      - "6.0"
+      - ocsp-staple
+  - name: test-ocsp-rsa-delegate-valid-cert-server-staples-v7.0-python3.10
     commands:
-      - func: bootstrap mongo-orchestration
-        vars:
-          VERSION: latest
-          TOPOLOGY: server
-          AUTH: noauth
-          SSL: nossl
       - func: run tests
         vars:
-          AUTH: noauth
-          SSL: nossl
-          SYNC: sync_async
-          TEST_SUITES: ""
+          ORCHESTRATION_FILE: rsa-basic-tls-ocsp-mustStaple.json
+          OCSP_SERVER_TYPE: valid-delegate
+          TEST_NAME: ocsp
+          PYTHON_VERSION: "3.10"
+          VERSION: "7.0"
     tags:
-      - latest
-      - standalone
-      - noauth
-      - nossl
-      - sync_async
-  - name: test-4.0-replica_set-auth-ssl-sync
+      - ocsp
+      - ocsp-rsa
+      - "7.0"
+      - ocsp-staple
+  - name: test-ocsp-rsa-delegate-valid-cert-server-staples-v8.0-python3.10
     commands:
-      - func: bootstrap mongo-orchestration
-        vars:
-          VERSION: "4.0"
-          TOPOLOGY: replica_set
-          AUTH: auth
-          SSL: ssl
       - func: run tests
         vars:
-          AUTH: auth
-          SSL: ssl
-          SYNC: sync
-          TEST_SUITES: default
+          ORCHESTRATION_FILE: rsa-basic-tls-ocsp-mustStaple.json
+          OCSP_SERVER_TYPE: valid-delegate
+          TEST_NAME: ocsp
+          PYTHON_VERSION: "3.10"
+          VERSION: "8.0"
     tags:
-      - "4.0"
-      - replica_set
-      - auth
-      - ssl
-      - sync
-  - name: test-4.0-replica_set-auth-ssl-async
+      - ocsp
+      - ocsp-rsa
+      - "8.0"
+      - ocsp-staple
+  - name: test-ocsp-rsa-delegate-valid-cert-server-staples-rapid-python3.10
     commands:
-      - func: bootstrap mongo-orchestration
-        vars:
-          VERSION: "4.0"
-          TOPOLOGY: replica_set
-          AUTH: auth
-          SSL: ssl
       - func: run tests
         vars:
-          AUTH: auth
-          SSL: ssl
-          SYNC: async
-          TEST_SUITES: default_async
+          ORCHESTRATION_FILE: rsa-basic-tls-ocsp-mustStaple.json
+          OCSP_SERVER_TYPE: valid-delegate
+          TEST_NAME: ocsp
+          PYTHON_VERSION: "3.10"
+          VERSION: rapid
     tags:
-      - "4.0"
-      - replica_set
-      - auth
-      - ssl
-      - async
-  - name: test-4.0-replica_set-auth-ssl-sync_async
+      - ocsp
+      - ocsp-rsa
+      - rapid
+      - ocsp-staple
+  - name: test-ocsp-rsa-delegate-valid-cert-server-staples-latest-python3.14
     commands:
-      - func: bootstrap mongo-orchestration
-        vars:
-          VERSION: "4.0"
-          TOPOLOGY: replica_set
-          AUTH: auth
-          SSL: ssl
       - func: run tests
         vars:
-          AUTH: auth
-          SSL: ssl
-          SYNC: sync_async
-          TEST_SUITES: ""
+          ORCHESTRATION_FILE: rsa-basic-tls-ocsp-mustStaple.json
+          OCSP_SERVER_TYPE: valid-delegate
+          TEST_NAME: ocsp
+          PYTHON_VERSION: "3.14"
+          VERSION: latest
     tags:
-      - "4.0"
-      - replica_set
-      - auth
-      - ssl
-      - sync_async
-  - name: test-4.0-replica_set-noauth-ssl-sync
+      - ocsp
+      - ocsp-rsa
+      - latest
+      - ocsp-staple
+  - name: test-ocsp-rsa-delegate-invalid-cert-server-staples-v4.4-python3.10
     commands:
-      - func: bootstrap mongo-orchestration
-        vars:
-          VERSION: "4.0"
-          TOPOLOGY: replica_set
-          AUTH: noauth
-          SSL: ssl
       - func: run tests
         vars:
-          AUTH: noauth
-          SSL: ssl
-          SYNC: sync
-          TEST_SUITES: default
+          ORCHESTRATION_FILE: rsa-basic-tls-ocsp-mustStaple.json
+          OCSP_SERVER_TYPE: revoked-delegate
+          TEST_NAME: ocsp
+          PYTHON_VERSION: "3.10"
+          VERSION: "4.4"
     tags:
-      - "4.0"
-      - replica_set
-      - noauth
-      - ssl
-      - sync
-  - name: test-4.0-replica_set-noauth-ssl-async
+      - ocsp
+      - ocsp-rsa
+      - "4.4"
+      - ocsp-staple
+  - name: test-ocsp-rsa-delegate-invalid-cert-server-staples-v5.0-python3.10
     commands:
-      - func: bootstrap mongo-orchestration
-        vars:
-          VERSION: "4.0"
-          TOPOLOGY: replica_set
-          AUTH: noauth
-          SSL: ssl
       - func: run tests
         vars:
-          AUTH: noauth
-          SSL: ssl
-          SYNC: async
-          TEST_SUITES: default_async
+          ORCHESTRATION_FILE: rsa-basic-tls-ocsp-mustStaple.json
+          OCSP_SERVER_TYPE: revoked-delegate
+          TEST_NAME: ocsp
+          PYTHON_VERSION: "3.10"
+          VERSION: "5.0"
     tags:
-      - "4.0"
-      - replica_set
-      - noauth
-      - ssl
-      - async
-  - name: test-4.0-replica_set-noauth-ssl-sync_async
+      - ocsp
+      - ocsp-rsa
+      - "5.0"
+      - ocsp-staple
+  - name: test-ocsp-rsa-delegate-invalid-cert-server-staples-v6.0-python3.10
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run tests
         vars:
-          VERSION: "4.0"
-          TOPOLOGY: replica_set
-          AUTH: noauth
-          SSL: ssl
+          ORCHESTRATION_FILE: rsa-basic-tls-ocsp-mustStaple.json
+          OCSP_SERVER_TYPE: revoked-delegate
+          TEST_NAME: ocsp
+          PYTHON_VERSION: "3.10"
+          VERSION: "6.0"
+    tags:
+      - ocsp
+      - ocsp-rsa
+      - "6.0"
+      - ocsp-staple
+  - name: test-ocsp-rsa-delegate-invalid-cert-server-staples-v7.0-python3.10
+    commands:
       - func: run tests
         vars:
-          AUTH: noauth
-          SSL: ssl
-          SYNC: sync_async
-          TEST_SUITES: ""
+          ORCHESTRATION_FILE: rsa-basic-tls-ocsp-mustStaple.json
+          OCSP_SERVER_TYPE: revoked-delegate
+          TEST_NAME: ocsp
+          PYTHON_VERSION: "3.10"
+          VERSION: "7.0"
     tags:
-      - "4.0"
-      - replica_set
-      - noauth
-      - ssl
-      - sync_async
-  - name: test-4.0-replica_set-noauth-nossl-sync
+      - ocsp
+      - ocsp-rsa
+      - "7.0"
+      - ocsp-staple
+  - name: test-ocsp-rsa-delegate-invalid-cert-server-staples-v8.0-python3.10
     commands:
-      - func: bootstrap mongo-orchestration
-        vars:
-          VERSION: "4.0"
-          TOPOLOGY: replica_set
-          AUTH: noauth
-          SSL: nossl
       - func: run tests
         vars:
-          AUTH: noauth
-          SSL: nossl
-          SYNC: sync
-          TEST_SUITES: default
+          ORCHESTRATION_FILE: rsa-basic-tls-ocsp-mustStaple.json
+          OCSP_SERVER_TYPE: revoked-delegate
+          TEST_NAME: ocsp
+          PYTHON_VERSION: "3.10"
+          VERSION: "8.0"
     tags:
-      - "4.0"
-      - replica_set
-      - noauth
-      - nossl
-      - sync
-  - name: test-4.0-replica_set-noauth-nossl-async
+      - ocsp
+      - ocsp-rsa
+      - "8.0"
+      - ocsp-staple
+  - name: test-ocsp-rsa-delegate-invalid-cert-server-staples-rapid-python3.10
     commands:
-      - func: bootstrap mongo-orchestration
-        vars:
-          VERSION: "4.0"
-          TOPOLOGY: replica_set
-          AUTH: noauth
-          SSL: nossl
       - func: run tests
         vars:
-          AUTH: noauth
-          SSL: nossl
-          SYNC: async
-          TEST_SUITES: default_async
+          ORCHESTRATION_FILE: rsa-basic-tls-ocsp-mustStaple.json
+          OCSP_SERVER_TYPE: revoked-delegate
+          TEST_NAME: ocsp
+          PYTHON_VERSION: "3.10"
+          VERSION: rapid
     tags:
-      - "4.0"
-      - replica_set
-      - noauth
-      - nossl
-      - async
-  - name: test-4.0-replica_set-noauth-nossl-sync_async
+      - ocsp
+      - ocsp-rsa
+      - rapid
+      - ocsp-staple
+  - name: test-ocsp-rsa-delegate-invalid-cert-server-staples-latest-python3.14
     commands:
-      - func: bootstrap mongo-orchestration
-        vars:
-          VERSION: "4.0"
-          TOPOLOGY: replica_set
-          AUTH: noauth
-          SSL: nossl
       - func: run tests
         vars:
-          AUTH: noauth
-          SSL: nossl
-          SYNC: sync_async
-          TEST_SUITES: ""
+          ORCHESTRATION_FILE: rsa-basic-tls-ocsp-mustStaple.json
+          OCSP_SERVER_TYPE: revoked-delegate
+          TEST_NAME: ocsp
+          PYTHON_VERSION: "3.14"
+          VERSION: latest
     tags:
-      - "4.0"
-      - replica_set
-      - noauth
-      - nossl
-      - sync_async
-  - name: test-4.4-replica_set-auth-ssl-sync
+      - ocsp
+      - ocsp-rsa
+      - latest
+      - ocsp-staple
+  - name: test-ocsp-rsa-malicious-invalid-cert-muststaple-server-does-not-staple-v4.4-python3.10
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run tests
         vars:
+          ORCHESTRATION_FILE: rsa-basic-tls-ocsp-mustStaple-disableStapling.json
+          OCSP_SERVER_TYPE: revoked
+          TEST_NAME: ocsp
+          PYTHON_VERSION: "3.10"
           VERSION: "4.4"
-          TOPOLOGY: replica_set
-          AUTH: auth
-          SSL: ssl
+    tags: [ocsp, ocsp-rsa, "4.4"]
+  - name: test-ocsp-rsa-malicious-invalid-cert-muststaple-server-does-not-staple-v5.0-python3.10
+    commands:
       - func: run tests
         vars:
-          AUTH: auth
-          SSL: ssl
-          SYNC: sync
-          TEST_SUITES: default
-    tags:
-      - "4.4"
-      - replica_set
-      - auth
-      - ssl
-      - sync
-  - name: test-4.4-replica_set-auth-ssl-async
+          ORCHESTRATION_FILE: rsa-basic-tls-ocsp-mustStaple-disableStapling.json
+          OCSP_SERVER_TYPE: revoked
+          TEST_NAME: ocsp
+          PYTHON_VERSION: "3.10"
+          VERSION: "5.0"
+    tags: [ocsp, ocsp-rsa, "5.0"]
+  - name: test-ocsp-rsa-malicious-invalid-cert-muststaple-server-does-not-staple-v6.0-python3.10
     commands:
-      - func: bootstrap mongo-orchestration
-        vars:
-          VERSION: "4.4"
-          TOPOLOGY: replica_set
-          AUTH: auth
-          SSL: ssl
       - func: run tests
         vars:
-          AUTH: auth
-          SSL: ssl
-          SYNC: async
-          TEST_SUITES: default_async
-    tags:
-      - "4.4"
-      - replica_set
-      - auth
-      - ssl
-      - async
-  - name: test-4.4-replica_set-auth-ssl-sync_async
+          ORCHESTRATION_FILE: rsa-basic-tls-ocsp-mustStaple-disableStapling.json
+          OCSP_SERVER_TYPE: revoked
+          TEST_NAME: ocsp
+          PYTHON_VERSION: "3.10"
+          VERSION: "6.0"
+    tags: [ocsp, ocsp-rsa, "6.0"]
+  - name: test-ocsp-rsa-malicious-invalid-cert-muststaple-server-does-not-staple-v7.0-python3.10
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run tests
         vars:
-          VERSION: "4.4"
-          TOPOLOGY: replica_set
-          AUTH: auth
-          SSL: ssl
+          ORCHESTRATION_FILE: rsa-basic-tls-ocsp-mustStaple-disableStapling.json
+          OCSP_SERVER_TYPE: revoked
+          TEST_NAME: ocsp
+          PYTHON_VERSION: "3.10"
+          VERSION: "7.0"
+    tags: [ocsp, ocsp-rsa, "7.0"]
+  - name: test-ocsp-rsa-malicious-invalid-cert-muststaple-server-does-not-staple-v8.0-python3.10
+    commands:
       - func: run tests
         vars:
-          AUTH: auth
-          SSL: ssl
-          SYNC: sync_async
-          TEST_SUITES: ""
-    tags:
-      - "4.4"
-      - replica_set
-      - auth
-      - ssl
-      - sync_async
-  - name: test-4.4-replica_set-noauth-ssl-sync
+          ORCHESTRATION_FILE: rsa-basic-tls-ocsp-mustStaple-disableStapling.json
+          OCSP_SERVER_TYPE: revoked
+          TEST_NAME: ocsp
+          PYTHON_VERSION: "3.10"
+          VERSION: "8.0"
+    tags: [ocsp, ocsp-rsa, "8.0"]
+  - name: test-ocsp-rsa-malicious-invalid-cert-muststaple-server-does-not-staple-rapid-python3.10
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run tests
         vars:
-          VERSION: "4.4"
-          TOPOLOGY: replica_set
-          AUTH: noauth
-          SSL: ssl
+          ORCHESTRATION_FILE: rsa-basic-tls-ocsp-mustStaple-disableStapling.json
+          OCSP_SERVER_TYPE: revoked
+          TEST_NAME: ocsp
+          PYTHON_VERSION: "3.10"
+          VERSION: rapid
+    tags: [ocsp, ocsp-rsa, rapid]
+  - name: test-ocsp-rsa-malicious-invalid-cert-muststaple-server-does-not-staple-latest-python3.14
+    commands:
       - func: run tests
         vars:
-          AUTH: noauth
-          SSL: ssl
-          SYNC: sync
-          TEST_SUITES: default
-    tags:
-      - "4.4"
-      - replica_set
-      - noauth
-      - ssl
-      - sync
-  - name: test-4.4-replica_set-noauth-ssl-async
+          ORCHESTRATION_FILE: rsa-basic-tls-ocsp-mustStaple-disableStapling.json
+          OCSP_SERVER_TYPE: revoked
+          TEST_NAME: ocsp
+          PYTHON_VERSION: "3.14"
+          VERSION: latest
+    tags: [ocsp, ocsp-rsa, latest]
+  - name: test-ocsp-rsa-delegate-malicious-invalid-cert-muststaple-server-does-not-staple-v4.4-python3.10
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run tests
         vars:
+          ORCHESTRATION_FILE: rsa-basic-tls-ocsp-mustStaple-disableStapling.json
+          OCSP_SERVER_TYPE: revoked-delegate
+          TEST_NAME: ocsp
+          PYTHON_VERSION: "3.10"
           VERSION: "4.4"
-          TOPOLOGY: replica_set
-          AUTH: noauth
-          SSL: ssl
+    tags: [ocsp, ocsp-rsa, "4.4"]
+  - name: test-ocsp-rsa-delegate-malicious-invalid-cert-muststaple-server-does-not-staple-v5.0-python3.10
+    commands:
       - func: run tests
         vars:
-          AUTH: noauth
-          SSL: ssl
-          SYNC: async
-          TEST_SUITES: default_async
-    tags:
-      - "4.4"
-      - replica_set
-      - noauth
-      - ssl
-      - async
-  - name: test-4.4-replica_set-noauth-ssl-sync_async
+          ORCHESTRATION_FILE: rsa-basic-tls-ocsp-mustStaple-disableStapling.json
+          OCSP_SERVER_TYPE: revoked-delegate
+          TEST_NAME: ocsp
+          PYTHON_VERSION: "3.10"
+          VERSION: "5.0"
+    tags: [ocsp, ocsp-rsa, "5.0"]
+  - name: test-ocsp-rsa-delegate-malicious-invalid-cert-muststaple-server-does-not-staple-v6.0-python3.10
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run tests
         vars:
-          VERSION: "4.4"
-          TOPOLOGY: replica_set
-          AUTH: noauth
-          SSL: ssl
+          ORCHESTRATION_FILE: rsa-basic-tls-ocsp-mustStaple-disableStapling.json
+          OCSP_SERVER_TYPE: revoked-delegate
+          TEST_NAME: ocsp
+          PYTHON_VERSION: "3.10"
+          VERSION: "6.0"
+    tags: [ocsp, ocsp-rsa, "6.0"]
+  - name: test-ocsp-rsa-delegate-malicious-invalid-cert-muststaple-server-does-not-staple-v7.0-python3.10
+    commands:
       - func: run tests
         vars:
-          AUTH: noauth
-          SSL: ssl
-          SYNC: sync_async
-          TEST_SUITES: ""
-    tags:
-      - "4.4"
-      - replica_set
-      - noauth
-      - ssl
-      - sync_async
-  - name: test-4.4-replica_set-noauth-nossl-sync
+          ORCHESTRATION_FILE: rsa-basic-tls-ocsp-mustStaple-disableStapling.json
+          OCSP_SERVER_TYPE: revoked-delegate
+          TEST_NAME: ocsp
+          PYTHON_VERSION: "3.10"
+          VERSION: "7.0"
+    tags: [ocsp, ocsp-rsa, "7.0"]
+  - name: test-ocsp-rsa-delegate-malicious-invalid-cert-muststaple-server-does-not-staple-v8.0-python3.10
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run tests
         vars:
-          VERSION: "4.4"
-          TOPOLOGY: replica_set
-          AUTH: noauth
-          SSL: nossl
+          ORCHESTRATION_FILE: rsa-basic-tls-ocsp-mustStaple-disableStapling.json
+          OCSP_SERVER_TYPE: revoked-delegate
+          TEST_NAME: ocsp
+          PYTHON_VERSION: "3.10"
+          VERSION: "8.0"
+    tags: [ocsp, ocsp-rsa, "8.0"]
+  - name: test-ocsp-rsa-delegate-malicious-invalid-cert-muststaple-server-does-not-staple-rapid-python3.10
+    commands:
       - func: run tests
         vars:
-          AUTH: noauth
-          SSL: nossl
-          SYNC: sync
-          TEST_SUITES: default
-    tags:
-      - "4.4"
-      - replica_set
-      - noauth
-      - nossl
-      - sync
-  - name: test-4.4-replica_set-noauth-nossl-async
+          ORCHESTRATION_FILE: rsa-basic-tls-ocsp-mustStaple-disableStapling.json
+          OCSP_SERVER_TYPE: revoked-delegate
+          TEST_NAME: ocsp
+          PYTHON_VERSION: "3.10"
+          VERSION: rapid
+    tags: [ocsp, ocsp-rsa, rapid]
+  - name: test-ocsp-rsa-delegate-malicious-invalid-cert-muststaple-server-does-not-staple-latest-python3.14
+    commands:
+      - func: run tests
+        vars:
+          ORCHESTRATION_FILE: rsa-basic-tls-ocsp-mustStaple-disableStapling.json
+          OCSP_SERVER_TYPE: revoked-delegate
+          TEST_NAME: ocsp
+          PYTHON_VERSION: "3.14"
+          VERSION: latest
+    tags: [ocsp, ocsp-rsa, latest]
+  - name: test-ocsp-rsa-malicious-no-responder-muststaple-server-does-not-staple-v4.4-python3.10
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run tests
         vars:
+          ORCHESTRATION_FILE: rsa-basic-tls-ocsp-mustStaple-disableStapling.json
+          OCSP_SERVER_TYPE: no-responder
+          TEST_NAME: ocsp
+          PYTHON_VERSION: "3.10"
           VERSION: "4.4"
-          TOPOLOGY: replica_set
-          AUTH: noauth
-          SSL: nossl
+    tags: [ocsp, ocsp-rsa, "4.4"]
+  - name: test-ocsp-rsa-malicious-no-responder-muststaple-server-does-not-staple-v5.0-python3.10
+    commands:
       - func: run tests
         vars:
-          AUTH: noauth
-          SSL: nossl
-          SYNC: async
-          TEST_SUITES: default_async
-    tags:
-      - "4.4"
-      - replica_set
-      - noauth
-      - nossl
-      - async
-  - name: test-4.4-replica_set-noauth-nossl-sync_async
+          ORCHESTRATION_FILE: rsa-basic-tls-ocsp-mustStaple-disableStapling.json
+          OCSP_SERVER_TYPE: no-responder
+          TEST_NAME: ocsp
+          PYTHON_VERSION: "3.10"
+          VERSION: "5.0"
+    tags: [ocsp, ocsp-rsa, "5.0"]
+  - name: test-ocsp-rsa-malicious-no-responder-muststaple-server-does-not-staple-v6.0-python3.10
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run tests
        vars:
-          VERSION: "4.4"
-          TOPOLOGY: replica_set
-          AUTH: noauth
-          SSL: nossl
+          ORCHESTRATION_FILE: rsa-basic-tls-ocsp-mustStaple-disableStapling.json
+          OCSP_SERVER_TYPE: no-responder
+          TEST_NAME: ocsp
+          PYTHON_VERSION: "3.10"
+          VERSION: "6.0"
+    tags: [ocsp, ocsp-rsa, "6.0"]
+  - name: test-ocsp-rsa-malicious-no-responder-muststaple-server-does-not-staple-v7.0-python3.10
+    commands:
       - func: run tests
         vars:
-          AUTH: noauth
-          SSL: nossl
-          SYNC: sync_async
-          TEST_SUITES: ""
-    tags:
-      - "4.4"
-      - replica_set
-      - noauth
-      - nossl
-      - sync_async
-  - name: test-5.0-replica_set-auth-ssl-sync
+          ORCHESTRATION_FILE: rsa-basic-tls-ocsp-mustStaple-disableStapling.json
+          OCSP_SERVER_TYPE: no-responder
+          TEST_NAME: ocsp
+          PYTHON_VERSION: "3.10"
+          VERSION: "7.0"
+    tags: [ocsp, ocsp-rsa, "7.0"]
+  - name: test-ocsp-rsa-malicious-no-responder-muststaple-server-does-not-staple-v8.0-python3.10
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run tests
         vars:
-          VERSION: "5.0"
-          TOPOLOGY: replica_set
-          AUTH: auth
-          SSL: ssl
+          ORCHESTRATION_FILE: rsa-basic-tls-ocsp-mustStaple-disableStapling.json
+          OCSP_SERVER_TYPE: no-responder
+          TEST_NAME: ocsp
+          PYTHON_VERSION: "3.10"
+          VERSION: "8.0"
+    tags: [ocsp, ocsp-rsa, "8.0"]
+  - name: test-ocsp-rsa-malicious-no-responder-muststaple-server-does-not-staple-rapid-python3.10
+    commands:
       - func: run tests
         vars:
-          AUTH: auth
-          SSL: ssl
-          SYNC: sync
-          TEST_SUITES: default
-    tags:
-      - "5.0"
-      - replica_set
-      - auth
-      - ssl
-      - sync
-  - name: test-5.0-replica_set-auth-ssl-async
+          ORCHESTRATION_FILE: rsa-basic-tls-ocsp-mustStaple-disableStapling.json
+          OCSP_SERVER_TYPE: no-responder
+          TEST_NAME: ocsp
+          PYTHON_VERSION: "3.10"
+          VERSION: rapid
+    tags: [ocsp, ocsp-rsa, rapid]
+  - name: test-ocsp-rsa-malicious-no-responder-muststaple-server-does-not-staple-latest-python3.14
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run tests
         vars:
-          VERSION: "5.0"
-          TOPOLOGY: replica_set
-          AUTH: auth
-          SSL: ssl
+          ORCHESTRATION_FILE: rsa-basic-tls-ocsp-mustStaple-disableStapling.json
+          OCSP_SERVER_TYPE: no-responder
+          TEST_NAME: ocsp
+          PYTHON_VERSION: "3.14"
+          VERSION: latest
+    tags: [ocsp, ocsp-rsa, latest]
+
+  # Oidc tests
+  - name: test-auth-oidc-default
+    commands:
       - func: run tests
         vars:
-          AUTH: auth
-          SSL: ssl
-          SYNC: async
-          TEST_SUITES: default_async
-    tags:
-      - "5.0"
-      - replica_set
-      - auth
-      - ssl
-      - async
-  - name: test-5.0-replica_set-auth-ssl-sync_async
+          TEST_NAME: auth_oidc
+          SUB_TEST_NAME: default
+    tags: [auth_oidc]
+  - name: test-auth-oidc-azure
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run tests
         vars:
-          VERSION: "5.0"
-          TOPOLOGY: replica_set
-          AUTH: auth
-          SSL: ssl
+          TEST_NAME: auth_oidc
+          SUB_TEST_NAME: azure
+    tags: [auth_oidc, auth_oidc_remote]
+  - name: test-auth-oidc-gcp
+    commands:
       - func: run tests
         vars:
-          AUTH: auth
-          SSL: ssl
-          SYNC: sync_async
-          TEST_SUITES: ""
-    tags:
-      - "5.0"
-      - replica_set
-      - auth
-      - ssl
-      - sync_async
-  - name: test-5.0-replica_set-noauth-ssl-sync
+          TEST_NAME: auth_oidc
+          SUB_TEST_NAME: gcp
+    tags: [auth_oidc, auth_oidc_remote]
+  - name: test-auth-oidc-eks
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run tests
         vars:
-          VERSION: "5.0"
-          TOPOLOGY: replica_set
-          AUTH: noauth
-          SSL: ssl
+          TEST_NAME: auth_oidc
+          SUB_TEST_NAME: eks
+    tags: [auth_oidc, auth_oidc_remote]
+  - name: test-auth-oidc-aks
+    commands:
       - func: run tests
         vars:
-          AUTH: noauth
-          SSL: ssl
-          SYNC: sync
-          TEST_SUITES: default
-    tags:
-      - "5.0"
-      - replica_set
-      - noauth
-      - ssl
-      - sync
-  - name: test-5.0-replica_set-noauth-ssl-async
+          TEST_NAME: auth_oidc
+          SUB_TEST_NAME: aks
+    tags: [auth_oidc, auth_oidc_remote]
+  - name: test-auth-oidc-gke
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run tests
         vars:
-          VERSION: "5.0"
-          TOPOLOGY: replica_set
-          AUTH: noauth
+          TEST_NAME: auth_oidc
+          SUB_TEST_NAME: gke
+    tags: [auth_oidc, auth_oidc_remote]
+
+  # Perf tests
+  - name: perf-8.0-standalone-ssl
+    commands:
+      - func: run server
+        vars:
+          VERSION: v8.0-perf
           SSL: ssl
       - func: run tests
         vars:
-          AUTH: noauth
-          SSL: ssl
-          SYNC: async
-          TEST_SUITES: default_async
-    tags:
-      - "5.0"
-      - replica_set
-      - noauth
-      - ssl
-      - async
-  - name: test-5.0-replica_set-noauth-ssl-sync_async
+          TEST_NAME: perf
+          SUB_TEST_NAME: sync
+      - func: attach benchmark test results
+      - func: send dashboard data
+    tags: [perf]
+  - name: perf-8.0-standalone-ssl-async
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: "5.0"
-          TOPOLOGY: replica_set
-          AUTH: noauth
+          VERSION: v8.0-perf
           SSL: ssl
       - func: run tests
         vars:
-          AUTH: noauth
-          SSL: ssl
-          SYNC: sync_async
-          TEST_SUITES: ""
-    tags:
-      - "5.0"
-      - replica_set
-      - noauth
-      - ssl
-      - sync_async
-  - name: test-5.0-replica_set-noauth-nossl-sync
+          TEST_NAME: perf
+          SUB_TEST_NAME: async
+      - func: attach benchmark test results
+      - func: send dashboard data
+    tags: [perf]
+  - name: perf-8.0-standalone
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: "5.0"
-          TOPOLOGY: replica_set
-          AUTH: noauth
+          VERSION: v8.0-perf
           SSL: nossl
       - func: run tests
         vars:
-          AUTH: noauth
-          SSL: nossl
-          SYNC: sync
-          TEST_SUITES: default
-    tags:
-      - "5.0"
-      - replica_set
-      - noauth
-      - nossl
-      - sync
-  - name: test-5.0-replica_set-noauth-nossl-async
+          TEST_NAME: perf
+          SUB_TEST_NAME: sync
+      - func: attach benchmark test results
+      - func: send dashboard data
+    tags: [perf]
+  - name: perf-8.0-standalone-async
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: "5.0"
-          TOPOLOGY: replica_set
-          AUTH: noauth
+          VERSION: v8.0-perf
           SSL: nossl
       - func: run tests
         vars:
-          AUTH: noauth
-          SSL: nossl
-          SYNC: async
-          TEST_SUITES: default_async
-    tags:
-      - "5.0"
-      - replica_set
-      - noauth
-      - nossl
-      - async
-  - name: test-5.0-replica_set-noauth-nossl-sync_async
+          TEST_NAME: perf
+          SUB_TEST_NAME: async
+      - func: attach benchmark test results
+      - func: send dashboard data
+    tags: [perf]
+
+  # Search index tests
+  - name: test-search-index-helpers
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: assume ec2 role
+      - func: run server
         vars:
-          VERSION: "5.0"
-          TOPOLOGY: replica_set
-          AUTH: noauth
+          TEST_NAME: search_index
+      - func: run tests
+        vars:
+          TEST_NAME: search_index
+    tags: [search_index]
+
+  # Server version tests
+  - name: test-server-version-python3.14t-async-auth-nossl-replica-set
+    commands:
+      - func: run server
+        vars:
+          AUTH: auth
           SSL: nossl
+          TOPOLOGY: replica_set
       - func: run tests
         vars:
-          AUTH: noauth
+          AUTH: auth
           SSL: nossl
-          SYNC: sync_async
-          TEST_SUITES: ""
+          TOPOLOGY: replica_set
+          PYTHON_VERSION: 3.14t
+          TEST_NAME: default_async
     tags:
-      - "5.0"
-      - replica_set
-      - noauth
-      - nossl
-      - sync_async
-  - name: test-6.0-replica_set-auth-ssl-sync
+      - server-version
+      - python-3.14t
+      - replica_set-auth-nossl
+      - async
+      - free-threaded
+  - name: test-server-version-python3.13-sync-auth-nossl-replica-set-cov
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: "6.0"
-          TOPOLOGY: replica_set
           AUTH: auth
-          SSL: ssl
+          SSL: nossl
+          TOPOLOGY: replica_set
+          COVERAGE: "1"
       - func: run tests
         vars:
           AUTH: auth
-          SSL: ssl
-          SYNC: sync
-          TEST_SUITES: default
+          SSL: nossl
+          TOPOLOGY: replica_set
+          COVERAGE: "1"
+          PYTHON_VERSION: "3.13"
+          TEST_NAME: default_sync
     tags:
-      - "6.0"
-      - replica_set
-      - auth
-      - ssl
+      - server-version
+      - python-3.13
+      - replica_set-auth-nossl
       - sync
-  - name: test-6.0-replica_set-auth-ssl-async
+  - name: test-server-version-python3.12-async-auth-ssl-replica-set-cov
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: "6.0"
-          TOPOLOGY: replica_set
           AUTH: auth
           SSL: ssl
+          TOPOLOGY: replica_set
+          COVERAGE: "1"
       - func: run tests
         vars:
           AUTH: auth
           SSL: ssl
-          SYNC: async
-          TEST_SUITES: default_async
+          TOPOLOGY: replica_set
+          COVERAGE: "1"
+          PYTHON_VERSION: "3.12"
+          TEST_NAME: default_async
     tags:
-      - "6.0"
-      - replica_set
-      - auth
-      - ssl
+      - server-version
+      - python-3.12
+      - replica_set-auth-ssl
       - async
-  - name: test-6.0-replica_set-auth-ssl-sync_async
+  - name: test-server-version-python3.11-sync-auth-ssl-replica-set-cov
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: "6.0"
-          TOPOLOGY: replica_set
           AUTH: auth
           SSL: ssl
+          TOPOLOGY: replica_set
+          COVERAGE: "1"
       - func: run tests
         vars:
           AUTH: auth
           SSL: ssl
-          SYNC: sync_async
-          TEST_SUITES: ""
+          TOPOLOGY: replica_set
+          COVERAGE: "1"
+          PYTHON_VERSION: "3.11"
+          TEST_NAME: default_sync
     tags:
-      - "6.0"
-      - replica_set
-      - auth
-      - ssl
-      - sync_async
-  - name: test-6.0-replica_set-noauth-ssl-sync
+      - server-version
+      - python-3.11
+      - replica_set-auth-ssl
+      - sync
+  - name: test-server-version-python3.11-async-noauth-nossl-replica-set-cov
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: "6.0"
+          AUTH: noauth
+          SSL: nossl
+          TOPOLOGY: replica_set
+          COVERAGE: "1"
+      - func: run tests
+        vars:
+          AUTH: noauth
+          SSL: nossl
           TOPOLOGY: replica_set
+          COVERAGE: "1"
+          PYTHON_VERSION: "3.11"
+          TEST_NAME: default_async
+    tags:
+      - server-version
+      - python-3.11
+      - replica_set-noauth-nossl
+      - async
+      - pr
+  - name: test-server-version-python3.10-sync-noauth-nossl-replica-set-cov
+    commands:
+      - func: run server
+        vars:
           AUTH: noauth
-          SSL: ssl
+          SSL: nossl
+          TOPOLOGY: replica_set
+          COVERAGE: "1"
       - func: run tests
         vars:
           AUTH: noauth
-          SSL: ssl
-          SYNC: sync
-          TEST_SUITES: default
+          SSL: nossl
+          TOPOLOGY: replica_set
+          COVERAGE: "1"
+          PYTHON_VERSION: "3.10"
+          TEST_NAME: default_sync
     tags:
-      - "6.0"
-      - replica_set
-      - noauth
-      - ssl
+      - server-version
+      - python-3.10
+      - replica_set-noauth-nossl
       - sync
-  - name: test-6.0-replica_set-noauth-ssl-async
+      - pr
+  - name: test-server-version-pypy3.10-async-noauth-ssl-replica-set
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: "6.0"
-          TOPOLOGY: replica_set
           AUTH: noauth
           SSL: ssl
+          TOPOLOGY: replica_set
       - func: run tests
         vars:
           AUTH: noauth
           SSL: ssl
-          SYNC: async
-          TEST_SUITES: default_async
+          TOPOLOGY: replica_set
+          PYTHON_VERSION: pypy3.10
+          TEST_NAME: default_async
     tags:
-      - "6.0"
-      - replica_set
-      - noauth
-      - ssl
+      - server-version
+      - python-pypy3.10
+      - replica_set-noauth-ssl
       - async
-  - name: test-6.0-replica_set-noauth-ssl-sync_async
+  - name: test-server-version-python3.14-sync-noauth-ssl-replica-set-cov
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: "6.0"
-          TOPOLOGY: replica_set
           AUTH: noauth
           SSL: ssl
+          TOPOLOGY: replica_set
+          COVERAGE: "1"
       - func: run tests
         vars:
           AUTH: noauth
           SSL: ssl
-          SYNC: sync_async
-          TEST_SUITES: ""
+          TOPOLOGY: replica_set
+          COVERAGE: "1"
+          PYTHON_VERSION: "3.14"
+          TEST_NAME: default_sync
     tags:
-      - "6.0"
-      - replica_set
-      - noauth
-      - ssl
-      - sync_async
-  - name: test-6.0-replica_set-noauth-nossl-sync
+      - server-version
+      - python-3.14
+      - replica_set-noauth-ssl
+      - sync
+  - name: test-server-version-python3.14-async-auth-nossl-sharded-cluster-cov
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: "6.0"
-          TOPOLOGY: replica_set
-          AUTH: noauth
+          AUTH: auth
           SSL: nossl
+          TOPOLOGY: sharded_cluster
+          COVERAGE: "1"
       - func: run tests
         vars:
-          AUTH: noauth
+          AUTH: auth
           SSL: nossl
-          SYNC: sync
-          TEST_SUITES: default
+          TOPOLOGY: sharded_cluster
+          COVERAGE: "1"
+          PYTHON_VERSION: "3.14"
+          TEST_NAME: default_async
     tags:
-      - "6.0"
-      - replica_set
-      - noauth
-      - nossl
-      - sync
-  - name: test-6.0-replica_set-noauth-nossl-async
+      - server-version
+      - python-3.14
+      - sharded_cluster-auth-nossl
+      - async
+  - name: test-server-version-python3.14t-sync-auth-nossl-sharded-cluster
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: "6.0"
-          TOPOLOGY: replica_set
-          AUTH: noauth
+          AUTH: auth
           SSL: nossl
+          TOPOLOGY: sharded_cluster
       - func: run tests
         vars:
-          AUTH: noauth
+          AUTH: auth
           SSL: nossl
-          SYNC: async
-          TEST_SUITES: default_async
+          TOPOLOGY: sharded_cluster
+          PYTHON_VERSION: 3.14t
+          TEST_NAME: default_sync
     tags:
-      - "6.0"
-      - replica_set
-      - noauth
-      - nossl
-      - async
-  - name: test-6.0-replica_set-noauth-nossl-sync_async
+      - server-version
+      - python-3.14t
+      - sharded_cluster-auth-nossl
+      - sync
+      - free-threaded
+  - name: test-server-version-python3.10-async-auth-ssl-sharded-cluster-cov
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: "6.0"
-          TOPOLOGY: replica_set
-          AUTH: noauth
-          SSL: nossl
+          AUTH: auth
+          SSL: ssl
+          TOPOLOGY: sharded_cluster
+          COVERAGE: "1"
       - func: run tests
         vars:
-          AUTH: noauth
-          SSL: nossl
-          SYNC: sync_async
-          TEST_SUITES: ""
+          AUTH: auth
+          SSL: ssl
+          TOPOLOGY: sharded_cluster
+          COVERAGE: "1"
+          PYTHON_VERSION: "3.10"
+          TEST_NAME: default_async
     tags:
-      - "6.0"
-      - replica_set
-      - noauth
-      - nossl
-      - sync_async
-  - name: test-7.0-replica_set-auth-ssl-sync
+      - server-version
+      - python-3.10
+      - sharded_cluster-auth-ssl
+      - async
+      - pr
+  - name: test-server-version-python3.11-async-auth-ssl-sharded-cluster-cov
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: "7.0"
-          TOPOLOGY: replica_set
           AUTH: auth
           SSL: ssl
+          TOPOLOGY: sharded_cluster
+          COVERAGE: "1"
       - func: run tests
         vars:
           AUTH: auth
           SSL: ssl
-          SYNC: sync
-          TEST_SUITES: default
+          TOPOLOGY: sharded_cluster
+          COVERAGE: "1"
+          PYTHON_VERSION: "3.11"
+          TEST_NAME: default_async
     tags:
-      - "7.0"
-      - replica_set
-      - auth
-      - ssl
-      - sync
-  - name: test-7.0-replica_set-auth-ssl-async
+      - server-version
+      - python-3.11
+      - sharded_cluster-auth-ssl
+      - async
+  - name: test-server-version-python3.12-async-auth-ssl-sharded-cluster-cov
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: "7.0"
-          TOPOLOGY: replica_set
           AUTH: auth
           SSL: ssl
+          TOPOLOGY: sharded_cluster
+          COVERAGE: "1"
       - func: run tests
         vars:
           AUTH: auth
           SSL: ssl
-          SYNC: async
-          TEST_SUITES: default_async
+          TOPOLOGY: sharded_cluster
+          COVERAGE: "1"
+          PYTHON_VERSION: "3.12"
+          TEST_NAME: default_async
     tags:
-      - "7.0"
-      - replica_set
-      - auth
-      - ssl
+      - server-version
+      - python-3.12
+      - sharded_cluster-auth-ssl
       - async
-  - name: test-7.0-replica_set-auth-ssl-sync_async
+  - name: test-server-version-python3.13-async-auth-ssl-sharded-cluster-cov
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: "7.0"
-          TOPOLOGY: replica_set
           AUTH: auth
           SSL: ssl
+          TOPOLOGY: sharded_cluster
+          COVERAGE: "1"
       - func: run tests
         vars:
           AUTH: auth
           SSL: ssl
-          SYNC: sync_async
-          TEST_SUITES: ""
+          TOPOLOGY: sharded_cluster
+          COVERAGE: "1"
+          PYTHON_VERSION: "3.13"
+          TEST_NAME: default_async
     tags:
-      - "7.0"
-      - replica_set
-      - auth
-      - ssl
-      - sync_async
-  - name: test-7.0-replica_set-noauth-ssl-sync
+      - server-version
+      - python-3.13
+      - sharded_cluster-auth-ssl
+      - async
+  - name: test-server-version-python3.14-async-auth-ssl-sharded-cluster-cov
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: "7.0"
-          TOPOLOGY: replica_set
-          AUTH: noauth
+          AUTH: auth
           SSL: ssl
+          TOPOLOGY: sharded_cluster
+          COVERAGE: "1"
       - func: run tests
         vars:
-          AUTH: noauth
+          AUTH: auth
           SSL: ssl
-          SYNC: sync
-          TEST_SUITES: default
+          TOPOLOGY: sharded_cluster
+          COVERAGE: "1"
+          PYTHON_VERSION: "3.14"
+          TEST_NAME: default_async
     tags:
-      - "7.0"
-      - replica_set
-      - noauth
-      - ssl
-      - sync
-  - name: test-7.0-replica_set-noauth-ssl-async
+      - server-version
+      - python-3.14
+      - sharded_cluster-auth-ssl
+      - async
+  - name: test-server-version-python3.14t-async-auth-ssl-sharded-cluster
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: "7.0"
-          TOPOLOGY: replica_set
-          AUTH: noauth
+          AUTH: auth
           SSL: ssl
+          TOPOLOGY: sharded_cluster
       - func: run tests
         vars:
-          AUTH: noauth
+          AUTH: auth
           SSL: ssl
-          SYNC: async
-          TEST_SUITES: default_async
+          TOPOLOGY: sharded_cluster
+          PYTHON_VERSION: 3.14t
+          TEST_NAME: default_async
     tags:
-      - "7.0"
-      - replica_set
-      - noauth
-      - ssl
+      - server-version
+      - python-3.14t
+      - sharded_cluster-auth-ssl
       - async
-  - name: test-7.0-replica_set-noauth-ssl-sync_async
+      - free-threaded
+  - name: test-server-version-pypy3.10-async-auth-ssl-sharded-cluster
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: "7.0"
-          TOPOLOGY: replica_set
-          AUTH: noauth
+          AUTH: auth
           SSL: ssl
+          TOPOLOGY: sharded_cluster
       - func: run tests
         vars:
-          AUTH: noauth
+          AUTH: auth
           SSL: ssl
-          SYNC: sync_async
-          TEST_SUITES: ""
+          TOPOLOGY: sharded_cluster
+          PYTHON_VERSION: pypy3.10
+          TEST_NAME: default_async
     tags:
-      - "7.0"
-      - replica_set
-      - noauth
-      - ssl
-      - sync_async
-  - name: test-7.0-replica_set-noauth-nossl-sync
+      - server-version
+      - python-pypy3.10
+      - sharded_cluster-auth-ssl
+      - async
+  - name: test-server-version-python3.10-sync-auth-ssl-sharded-cluster-cov
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: "7.0"
-          TOPOLOGY: replica_set
-          AUTH: noauth
-          SSL: nossl
+          AUTH: auth
+          SSL: ssl
+          TOPOLOGY: sharded_cluster
+          COVERAGE: "1"
       - func: run tests
         vars:
-          AUTH: noauth
-          SSL: nossl
-          SYNC: sync
-          TEST_SUITES: default
+          AUTH: auth
+          SSL: ssl
+          TOPOLOGY: sharded_cluster
+          COVERAGE: "1"
+          PYTHON_VERSION: "3.10"
+          TEST_NAME: default_sync
     tags:
-      - "7.0"
-      - replica_set
-      - noauth
-      - nossl
+      - server-version
+      - python-3.10
+      - sharded_cluster-auth-ssl
       - sync
-  - name: test-7.0-replica_set-noauth-nossl-async
+      - pr
+  - name: test-server-version-python3.11-sync-auth-ssl-sharded-cluster-cov
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: "7.0"
-          TOPOLOGY: replica_set
-          AUTH: noauth
-          SSL: nossl
+          AUTH: auth
+          SSL: ssl
+          TOPOLOGY: sharded_cluster
+          COVERAGE: "1"
       - func: run tests
         vars:
-          AUTH: noauth
-          SSL: nossl
-          SYNC: async
-          TEST_SUITES: default_async
+          AUTH: auth
+          SSL: ssl
+          TOPOLOGY: sharded_cluster
+          COVERAGE: "1"
+          PYTHON_VERSION: "3.11"
+          TEST_NAME: default_sync
     tags:
-      - "7.0"
-      - replica_set
-      - noauth
-      - nossl
-      - async
-  - name: test-7.0-replica_set-noauth-nossl-sync_async
+      - server-version
+      - python-3.11
+      - sharded_cluster-auth-ssl
+      - sync
+  - name: test-server-version-python3.12-sync-auth-ssl-sharded-cluster-cov
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: "7.0"
-          TOPOLOGY: replica_set
-          AUTH: noauth
-          SSL: nossl
+          AUTH: auth
+          SSL: ssl
+          TOPOLOGY: sharded_cluster
+          COVERAGE: "1"
       - func: run tests
         vars:
-          AUTH: noauth
-          SSL: nossl
-          SYNC: sync_async
-          TEST_SUITES: ""
+          AUTH: auth
+          SSL: ssl
+          TOPOLOGY: sharded_cluster
+          COVERAGE: "1"
+          PYTHON_VERSION: "3.12"
+          TEST_NAME: default_sync
     tags:
-      - "7.0"
-      - replica_set
-      - noauth
-      - nossl
-      - sync_async
-  - name: test-8.0-replica_set-auth-ssl-sync
+      - server-version
+      - python-3.12
+      - sharded_cluster-auth-ssl
+      - sync
+  - name: test-server-version-python3.13-sync-auth-ssl-sharded-cluster-cov
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: "8.0"
-          TOPOLOGY: replica_set
           AUTH: auth
           SSL: ssl
+          TOPOLOGY: sharded_cluster
+          COVERAGE: "1"
       - func: run tests
         vars:
           AUTH: auth
           SSL: ssl
-          SYNC: sync
-          TEST_SUITES: default
+          TOPOLOGY: sharded_cluster
+          COVERAGE: "1"
+          PYTHON_VERSION: "3.13"
+          TEST_NAME: default_sync
     tags:
-      - "8.0"
-      - replica_set
-      - auth
-      - ssl
+      - server-version
+      - python-3.13
+      - sharded_cluster-auth-ssl
       - sync
-  - name: test-8.0-replica_set-auth-ssl-async
+  - name: test-server-version-python3.14-sync-auth-ssl-sharded-cluster-cov
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: "8.0"
-          TOPOLOGY: replica_set
           AUTH: auth
           SSL: ssl
+          TOPOLOGY: sharded_cluster
+          COVERAGE: "1"
       - func: run tests
         vars:
           AUTH: auth
           SSL: ssl
-          SYNC: async
-          TEST_SUITES: default_async
+          TOPOLOGY: sharded_cluster
+          COVERAGE: "1"
+          PYTHON_VERSION: "3.14"
+          TEST_NAME: default_sync
     tags:
-      - "8.0"
-      - replica_set
-      - auth
-      - ssl
-      - async
-  - name: test-8.0-replica_set-auth-ssl-sync_async
+      - server-version
+      - python-3.14
+      - sharded_cluster-auth-ssl
+      - sync
+  - name: test-server-version-python3.14t-sync-auth-ssl-sharded-cluster
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: "8.0"
-          TOPOLOGY: replica_set
           AUTH: auth
           SSL: ssl
+          TOPOLOGY: sharded_cluster
       - func: run tests
         vars:
           AUTH: auth
           SSL: ssl
-          SYNC: sync_async
-          TEST_SUITES: ""
+          TOPOLOGY: sharded_cluster
+          PYTHON_VERSION: 3.14t
+          TEST_NAME: default_sync
     tags:
-      - "8.0"
-      - replica_set
-      - auth
-      - ssl
-      - sync_async
-  - name: test-8.0-replica_set-noauth-ssl-sync
+      - server-version
+      - python-3.14t
+      - sharded_cluster-auth-ssl
+      - sync
+      - free-threaded
+  - name: test-server-version-pypy3.10-sync-auth-ssl-sharded-cluster
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: "8.0"
-          TOPOLOGY: replica_set
-          AUTH: noauth
+          AUTH: auth
           SSL: ssl
+          TOPOLOGY: sharded_cluster
       - func: run tests
         vars:
-          AUTH: noauth
+          AUTH: auth
           SSL: ssl
-          SYNC: sync
-          TEST_SUITES: default
+          TOPOLOGY: sharded_cluster
+          PYTHON_VERSION: pypy3.10
+          TEST_NAME: default_sync
     tags:
-      - "8.0"
-      - replica_set
-      - noauth
-      - ssl
+      - server-version
+      - python-pypy3.10
+      - sharded_cluster-auth-ssl
       - sync
-  - name: test-8.0-replica_set-noauth-ssl-async
+  - name: test-server-version-python3.12-async-noauth-nossl-sharded-cluster-cov
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: "8.0"
-          TOPOLOGY: replica_set
           AUTH: noauth
-          SSL: ssl
+          SSL: nossl
+          TOPOLOGY: sharded_cluster
+          COVERAGE: "1"
       - func: run tests
         vars:
           AUTH: noauth
-          SSL: ssl
-          SYNC: async
-          TEST_SUITES: default_async
+          SSL: nossl
+          TOPOLOGY: sharded_cluster
+          COVERAGE: "1"
+          PYTHON_VERSION: "3.12"
+          TEST_NAME: default_async
     tags:
-      - "8.0"
-      - replica_set
-      - noauth
-      - ssl
+      - server-version
+      - python-3.12
+      - sharded_cluster-noauth-nossl
       - async
-  - name: test-8.0-replica_set-noauth-ssl-sync_async
+  - name: test-server-version-python3.11-sync-noauth-nossl-sharded-cluster-cov
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: "8.0"
-          TOPOLOGY: replica_set
           AUTH: noauth
-          SSL: ssl
+          SSL: nossl
+          TOPOLOGY: sharded_cluster
+          COVERAGE: "1"
       - func: run tests
         vars:
           AUTH: noauth
-          SSL: ssl
-          SYNC: sync_async
-          TEST_SUITES: ""
+          SSL: nossl
+          TOPOLOGY: sharded_cluster
+          COVERAGE: "1"
+          PYTHON_VERSION: "3.11"
+          TEST_NAME: default_sync
     tags:
-      - "8.0"
-      - replica_set
-      - noauth
-      - ssl
-      - sync_async
-  - name: test-8.0-replica_set-noauth-nossl-sync
+      - server-version
+      - python-3.11
+      - sharded_cluster-noauth-nossl
+      - sync
+  - name: test-server-version-python3.10-async-noauth-ssl-sharded-cluster-cov
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: "8.0"
-          TOPOLOGY: replica_set
           AUTH: noauth
-          SSL: nossl
+          SSL: ssl
+          TOPOLOGY: sharded_cluster
+          COVERAGE: "1"
       - func: run tests
         vars:
           AUTH: noauth
-          SSL: nossl
-          SYNC: sync
-          TEST_SUITES: default
+          SSL: ssl
+          TOPOLOGY: sharded_cluster
+          COVERAGE: "1"
+          PYTHON_VERSION: "3.10"
+          TEST_NAME: default_async
     tags:
-      - "8.0"
-      - replica_set
-      - noauth
-      - nossl
+      - server-version
+      - python-3.10
+      - sharded_cluster-noauth-ssl
+      - async
+  - name: test-server-version-pypy3.10-sync-noauth-ssl-sharded-cluster
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: "8.0"
-          TOPOLOGY: replica_set
           AUTH: noauth
-          SSL: nossl
+          SSL: ssl
+          TOPOLOGY: sharded_cluster
       - func: run tests
         vars:
           AUTH: noauth
-          SSL: nossl
-          SYNC: async
-          TEST_SUITES: default_async
+          SSL: ssl
+          TOPOLOGY: sharded_cluster
+          PYTHON_VERSION: pypy3.10
+          TEST_NAME: default_sync
     tags:
-      - "8.0"
-      - replica_set
-      - noauth
-      - nossl
-      - async
-  - name: test-8.0-replica_set-noauth-nossl-sync_async
+      - server-version
+      - python-pypy3.10
+      - sharded_cluster-noauth-ssl
+      - sync
+  - name: test-server-version-python3.13-async-auth-nossl-standalone-cov
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: "8.0"
-          TOPOLOGY: replica_set
-          AUTH: noauth
+          AUTH: auth
           SSL: nossl
+          TOPOLOGY: standalone
+          COVERAGE: "1"
       - func: run tests
         vars:
-          AUTH: noauth
+          AUTH: auth
           SSL: nossl
-          SYNC: sync_async
-          TEST_SUITES: ""
+          TOPOLOGY: standalone
+          COVERAGE: "1"
+          PYTHON_VERSION: "3.13"
+          TEST_NAME: default_async
     tags:
-      - "8.0"
-      - replica_set
-      - noauth
-      - nossl
-      - sync_async
-  - name: test-rapid-replica_set-auth-ssl-sync
+      - server-version
+      - python-3.13
+      - standalone-auth-nossl
+      - async
+  - name: test-server-version-python3.12-sync-auth-nossl-standalone-cov
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: rapid
-          TOPOLOGY: replica_set
           AUTH: auth
-          SSL: ssl
+          SSL: nossl
+          TOPOLOGY: standalone
+          COVERAGE: "1"
       - func: run tests
         vars:
           AUTH: auth
-          SSL: ssl
-          SYNC: sync
-          TEST_SUITES: default
+          SSL: nossl
+          TOPOLOGY: standalone
+          COVERAGE: "1"
+          PYTHON_VERSION: "3.12"
+          TEST_NAME: default_sync
     tags:
-      - rapid
-      - replica_set
-      - auth
-      - ssl
+      - server-version
+      - python-3.12
+      - standalone-auth-nossl
       - sync
-  - name: test-rapid-replica_set-auth-ssl-async
+  - name: test-server-version-python3.11-async-auth-ssl-standalone-cov
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: rapid
-          TOPOLOGY: replica_set
           AUTH: auth
           SSL: ssl
+          TOPOLOGY: standalone
+          COVERAGE: "1"
       - func: run tests
         vars:
           AUTH: auth
           SSL: ssl
-          SYNC: async
-          TEST_SUITES: default_async
+          TOPOLOGY: standalone
+          COVERAGE: "1"
+          PYTHON_VERSION: "3.11"
+          TEST_NAME: default_async
     tags:
-      - rapid
-      - replica_set
-      - auth
-      - ssl
+      - server-version
+      - python-3.11
+      - standalone-auth-ssl
       - async
-  - name: test-rapid-replica_set-auth-ssl-sync_async
+  - name: test-server-version-python3.10-sync-auth-ssl-standalone-cov
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: rapid
-          TOPOLOGY: replica_set
           AUTH: auth
           SSL: ssl
+          TOPOLOGY: standalone
+          COVERAGE: "1"
       - func: run tests
         vars:
           AUTH: auth
           SSL: ssl
-          SYNC: sync_async
-          TEST_SUITES: ""
+          TOPOLOGY: standalone
+          COVERAGE: "1"
+          PYTHON_VERSION: "3.10"
+          TEST_NAME: default_sync
     tags:
-      - rapid
-      - replica_set
-      - auth
-      - ssl
-      - sync_async
-  - name: test-rapid-replica_set-noauth-ssl-sync
+      - server-version
+      - python-3.10
+      - standalone-auth-ssl
+      - sync
+  - name: test-server-version-python3.10-async-noauth-nossl-standalone-cov
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: rapid
-          TOPOLOGY: replica_set
           AUTH: noauth
-          SSL: ssl
+          SSL: nossl
+          TOPOLOGY: standalone
+          COVERAGE: "1"
       - func: run tests
         vars:
           AUTH: noauth
-          SSL: ssl
-          SYNC: sync
-          TEST_SUITES: default
+          SSL: nossl
+          TOPOLOGY: standalone
+          COVERAGE: "1"
+          PYTHON_VERSION: "3.10"
+          TEST_NAME: default_async
     tags:
-      - rapid
-      - replica_set
-      - noauth
-      - ssl
-      - sync
-  - name: test-rapid-replica_set-noauth-ssl-async
+      - server-version
+      - python-3.10
+      - standalone-noauth-nossl
+      - async
+      - pr
+  - name: test-server-version-pypy3.10-sync-noauth-nossl-standalone
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: rapid
-          TOPOLOGY: replica_set
           AUTH: noauth
-          SSL: ssl
+          SSL: nossl
+          TOPOLOGY: standalone
       - func: run tests
         vars:
           AUTH: noauth
-          SSL: ssl
-          SYNC: async
-          TEST_SUITES: default_async
+          SSL: nossl
+          TOPOLOGY: standalone
+          PYTHON_VERSION: pypy3.10
+          TEST_NAME: default_sync
     tags:
-      - rapid
-      - replica_set
-      - noauth
-      - ssl
-      - async
-  - name: test-rapid-replica_set-noauth-ssl-sync_async
+      - server-version
+      - python-pypy3.10
+      - standalone-noauth-nossl
+      - sync
+      - pr
+  - name: test-server-version-python3.14-async-noauth-ssl-standalone-cov
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: rapid
-          TOPOLOGY: replica_set
           AUTH: noauth
           SSL: ssl
+          TOPOLOGY: standalone
+          COVERAGE: "1"
       - func: run tests
         vars:
           AUTH: noauth
           SSL: ssl
-          SYNC: sync_async
-          TEST_SUITES: ""
+          TOPOLOGY: standalone
+          COVERAGE: "1"
+          PYTHON_VERSION: "3.14"
+          TEST_NAME: default_async
     tags:
-      - rapid
-      - replica_set
-      - noauth
-      - ssl
-      - sync_async
-  - name: test-rapid-replica_set-noauth-nossl-sync
+      - server-version
+      - python-3.14
+      - standalone-noauth-ssl
+      - async
+  - name: test-server-version-python3.14t-sync-noauth-ssl-standalone
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: rapid
-          TOPOLOGY: replica_set
           AUTH: noauth
-          SSL: nossl
+          SSL: ssl
+          TOPOLOGY: standalone
       - func: run tests
         vars:
           AUTH: noauth
-          SSL: nossl
-          SYNC: sync
-          TEST_SUITES: default
+          SSL: ssl
+          TOPOLOGY: standalone
+          PYTHON_VERSION: 3.14t
+          TEST_NAME: default_sync
     tags:
-      - rapid
-      - replica_set
-      - noauth
-      - nossl
+      - server-version
+      - python-3.14t
+      - standalone-noauth-ssl
       - sync
-  - name: test-rapid-replica_set-noauth-nossl-async
+      - free-threaded
+
+  # Standard tests
+  - name: test-standard-v4.2-python3.11-sync-noauth-ssl-replica-set
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: rapid
-          TOPOLOGY: replica_set
           AUTH: noauth
-          SSL: nossl
+          SSL: ssl
+          TOPOLOGY: replica_set
+          VERSION: "4.2"
       - func: run tests
         vars:
           AUTH: noauth
-          SSL: nossl
-          SYNC: async
-          TEST_SUITES: default_async
-    tags:
-      - rapid
-      - replica_set
-      - noauth
-      - nossl
-      - async
-  - name: test-rapid-replica_set-noauth-nossl-sync_async
+          SSL: ssl
+          TOPOLOGY: replica_set
+          VERSION: "4.2"
+          PYTHON_VERSION: "3.11"
+          TEST_NAME: default_sync
+    tags:
+      - test-standard
+      - server-4.2
+      - python-3.11
+      - replica_set-noauth-ssl
+      - sync
+  - name: test-standard-v4.2-python3.14-sync-noauth-ssl-replica-set
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: rapid
-          TOPOLOGY: replica_set
           AUTH: noauth
-          SSL: nossl
+          SSL: ssl
+          TOPOLOGY: replica_set
+          VERSION: "4.2"
       - func: run tests
         vars:
           AUTH: noauth
-          SSL: nossl
-          SYNC: sync_async
-          TEST_SUITES: ""
-    tags:
-      - rapid
-      - replica_set
-      - noauth
-      - nossl
-      - sync_async
-  - name: test-latest-replica_set-auth-ssl-sync
+          SSL: ssl
+          TOPOLOGY: replica_set
+          VERSION: "4.2"
+          PYTHON_VERSION: "3.14"
+          TEST_NAME: default_sync
+    tags:
+      - test-standard
+      - server-4.2
+      - python-3.14
+      - replica_set-noauth-ssl
+      - sync
+  - name: test-standard-v4.2-python3.12-sync-auth-ssl-sharded-cluster
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: latest
-          TOPOLOGY: replica_set
           AUTH: auth
           SSL: ssl
+          TOPOLOGY: sharded_cluster
+          VERSION: "4.2"
       - func: run tests
         vars:
           AUTH: auth
           SSL: ssl
-          SYNC: sync
-          TEST_SUITES: default
-    tags:
-      - latest
-      - replica_set
-      - auth
-      - ssl
+          TOPOLOGY: sharded_cluster
+          VERSION: "4.2"
+          PYTHON_VERSION: "3.12"
+          TEST_NAME: default_sync
+    tags:
+      - test-standard
+      - server-4.2
+      - python-3.12
+      - sharded_cluster-auth-ssl
       - sync
-  - name: test-latest-replica_set-auth-ssl-async
+  - name: test-standard-v4.2-pypy3.10-sync-auth-ssl-sharded-cluster
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: latest
-          TOPOLOGY: replica_set
           AUTH: auth
           SSL: ssl
+          TOPOLOGY: sharded_cluster
+          VERSION: "4.2"
       - func: run tests
         vars:
           AUTH: auth
           SSL: ssl
-          SYNC: async
-          TEST_SUITES: default_async
-    tags:
-      - latest
-      - replica_set
-      - auth
-      - ssl
-      - async
-  - name: test-latest-replica_set-auth-ssl-sync_async
+          TOPOLOGY: sharded_cluster
+          VERSION: "4.2"
+          PYTHON_VERSION: pypy3.10
+          TEST_NAME: default_sync
+    tags:
+      - test-standard
+      - server-4.2
+      - python-pypy3.10
+      - sharded_cluster-auth-ssl
+      - sync
+      - pypy
+  - name: test-standard-v4.2-python3.10-sync-noauth-nossl-standalone
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: latest
-          TOPOLOGY: replica_set
-          AUTH: auth
-          SSL: ssl
+          AUTH: noauth
+          SSL: nossl
+          TOPOLOGY: standalone
+          VERSION: "4.2"
       - func: run tests
         vars:
-          AUTH: auth
-          SSL: ssl
-          SYNC: sync_async
-          TEST_SUITES: ""
+          AUTH: noauth
+          SSL: nossl
+          TOPOLOGY: standalone
+          VERSION: "4.2"
+          PYTHON_VERSION: "3.10"
+          TEST_NAME: default_sync
     tags:
-      - latest
-      - replica_set
-      - auth
-      - ssl
-      - sync_async
-  - name: test-latest-replica_set-noauth-ssl-sync
+      - test-standard
+      - server-4.2
+      - python-3.10
+      - standalone-noauth-nossl
+      - sync
+  - name: test-standard-v4.2-python3.14t-sync-noauth-nossl-standalone
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: latest
-          TOPOLOGY: replica_set
           AUTH: noauth
-          SSL: ssl
+          SSL: nossl
+          TOPOLOGY: standalone
+          VERSION: "4.2"
       - func: run tests
         vars:
           AUTH: noauth
-          SSL: ssl
-          SYNC: sync
-          TEST_SUITES: default
+          SSL: nossl
+          TOPOLOGY: standalone
+          VERSION: "4.2"
+          PYTHON_VERSION: 3.14t
+          TEST_NAME: default_sync
     tags:
-      - latest
-      - replica_set
-      - noauth
-      - ssl
+      - test-standard
+      - server-4.2
+      - python-3.14t
+      - standalone-noauth-nossl
       - sync
-  - name: test-latest-replica_set-noauth-ssl-async
+      - free-threaded
+  - name: test-standard-v4.4-python3.11-async-noauth-ssl-replica-set
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: latest
-          TOPOLOGY: replica_set
           AUTH: noauth
           SSL: ssl
+          TOPOLOGY: replica_set
+          VERSION: "4.4"
       - func: run tests
         vars:
           AUTH: noauth
           SSL: ssl
-          SYNC: async
-          TEST_SUITES: default_async
+          TOPOLOGY: replica_set
+          VERSION: "4.4"
+          PYTHON_VERSION: "3.11"
+          TEST_NAME: default_async
     tags:
-      - latest
-      - replica_set
-      - noauth
-      - ssl
+      - test-standard
+      - server-4.4
+      - python-3.11
+      - replica_set-noauth-ssl
       - async
-  - name: test-latest-replica_set-noauth-ssl-sync_async
+  - name: test-standard-v4.4-python3.14-async-noauth-ssl-replica-set
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: latest
-          TOPOLOGY: replica_set
           AUTH: noauth
           SSL: ssl
+          TOPOLOGY: replica_set
+          VERSION: "4.4"
       - func: run tests
         vars:
           AUTH: noauth
           SSL: ssl
-          SYNC: sync_async
-          TEST_SUITES: ""
+          TOPOLOGY: replica_set
+          VERSION: "4.4"
+          PYTHON_VERSION: "3.14"
+          TEST_NAME: default_async
     tags:
-      - latest
-      - replica_set
-      - noauth
-      - ssl
-      - sync_async
-  - name: test-latest-replica_set-noauth-nossl-sync
+      - test-standard
+      - server-4.4
+      - python-3.14
+      - replica_set-noauth-ssl
+      - async
+  - name: test-standard-v4.4-python3.12-async-auth-ssl-sharded-cluster
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
+        vars:
+          AUTH: auth
+          SSL: ssl
+          TOPOLOGY: sharded_cluster
+          VERSION: "4.4"
+      - func: run tests
+        vars:
+          AUTH: auth
+          SSL: ssl
+          TOPOLOGY: sharded_cluster
+          VERSION: "4.4"
+          PYTHON_VERSION: "3.12"
+          TEST_NAME: default_async
+    tags:
+      - test-standard
+      - server-4.4
+      - python-3.12
+      - sharded_cluster-auth-ssl
+      - async
+  - name: test-standard-v4.4-pypy3.10-async-auth-ssl-sharded-cluster
+    commands:
+      - func: run server
+        vars:
+          AUTH: auth
+          SSL: ssl
+          TOPOLOGY: sharded_cluster
+          VERSION: "4.4"
+      - func: run tests
+        vars:
+          AUTH: auth
+          SSL: ssl
+          TOPOLOGY: sharded_cluster
+          VERSION: "4.4"
+          PYTHON_VERSION: pypy3.10
+          TEST_NAME: default_async
+    tags:
+      - test-standard
+      - server-4.4
+      - python-pypy3.10
+      - sharded_cluster-auth-ssl
+      - async
+      - pypy
+  - name: test-standard-v4.4-python3.10-async-noauth-nossl-standalone
+    commands:
+      - func: run server
         vars:
-          VERSION: latest
-          TOPOLOGY: replica_set
           AUTH: noauth
           SSL: nossl
+          TOPOLOGY: standalone
+          VERSION: "4.4"
       - func: run tests
         vars:
           AUTH: noauth
           SSL: nossl
-          SYNC: sync
-          TEST_SUITES: default
+          TOPOLOGY: standalone
+          VERSION: "4.4"
+          PYTHON_VERSION: "3.10"
+          TEST_NAME: default_async
     tags:
-      - latest
-      - replica_set
-      - noauth
-      - nossl
-      - sync
-  - name: test-latest-replica_set-noauth-nossl-async
+      - test-standard
+      - server-4.4
+      - python-3.10
+      - standalone-noauth-nossl
+      - async
+  - name: test-standard-v4.4-python3.14t-async-noauth-nossl-standalone
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: latest
-          TOPOLOGY: replica_set
           AUTH: noauth
           SSL: nossl
+          TOPOLOGY: standalone
+          VERSION: "4.4"
       - func: run tests
         vars:
           AUTH: noauth
           SSL: nossl
-          SYNC: async
-          TEST_SUITES: default_async
+          TOPOLOGY: standalone
+          VERSION: "4.4"
+          PYTHON_VERSION: 3.14t
+          TEST_NAME: default_async
     tags:
-      - latest
-      - replica_set
-      - noauth
-      - nossl
+      - test-standard
+      - server-4.4
+      - python-3.14t
+      - standalone-noauth-nossl
       - async
-  - name: test-latest-replica_set-noauth-nossl-sync_async
+      - free-threaded
+  - name: test-standard-v5.0-python3.10-sync-noauth-ssl-replica-set
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: latest
-          TOPOLOGY: replica_set
           AUTH: noauth
-          SSL: nossl
+          SSL: ssl
+          TOPOLOGY: replica_set
+          VERSION: "5.0"
       - func: run tests
         vars:
           AUTH: noauth
-          SSL: nossl
-          SYNC: sync_async
-          TEST_SUITES: ""
+          SSL: ssl
+          TOPOLOGY: replica_set
+          VERSION: "5.0"
+          PYTHON_VERSION: "3.10"
+          TEST_NAME: default_sync
     tags:
-      - latest
-      - replica_set
-      - noauth
-      - nossl
-      - sync_async
-  - name: test-4.0-sharded_cluster-auth-ssl-sync
+      - test-standard
+      - server-5.0
+      - python-3.10
+      - replica_set-noauth-ssl
+      - sync
+  - name: test-standard-v5.0-python3.14t-sync-noauth-ssl-replica-set
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: "4.0"
-          TOPOLOGY: sharded_cluster
-          AUTH: auth
+          AUTH: noauth
           SSL: ssl
+          TOPOLOGY: replica_set
+          VERSION: "5.0"
       - func: run tests
         vars:
-          AUTH: auth
+          AUTH: noauth
           SSL: ssl
-          SYNC: sync
-          TEST_SUITES: default
+          TOPOLOGY: replica_set
+          VERSION: "5.0"
+          PYTHON_VERSION: 3.14t
+          TEST_NAME: default_sync
     tags:
-      - "4.0"
-      - sharded_cluster
-      - auth
-      - ssl
+      - test-standard
+      - server-5.0
+      - python-3.14t
+      - replica_set-noauth-ssl
       - sync
-  - name: test-4.0-sharded_cluster-auth-ssl-async
+      - free-threaded
+  - name: test-standard-v5.0-python3.11-sync-auth-ssl-sharded-cluster
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: "4.0"
-          TOPOLOGY: sharded_cluster
           AUTH: auth
           SSL: ssl
+          TOPOLOGY: sharded_cluster
+          VERSION: "5.0"
       - func: run tests
         vars:
           AUTH: auth
           SSL: ssl
-          SYNC: async
-          TEST_SUITES: default_async
+          TOPOLOGY: sharded_cluster
+          VERSION: "5.0"
+          PYTHON_VERSION: "3.11"
+          TEST_NAME: default_sync
     tags:
-      - "4.0"
-      - sharded_cluster
-      - auth
-      - ssl
-      - async
-  - name: test-4.0-sharded_cluster-auth-ssl-sync_async
+      - test-standard
+      - server-5.0
+      - python-3.11
+      - sharded_cluster-auth-ssl
+      - sync
+  - name: test-standard-v5.0-python3.14-sync-auth-ssl-sharded-cluster
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
         vars:
-          VERSION: "4.0"
-          TOPOLOGY: sharded_cluster
           AUTH: auth
           SSL: ssl
+          TOPOLOGY: sharded_cluster
+          VERSION: "5.0"
       - func: run tests
         vars:
           AUTH: auth
           SSL: ssl
-          SYNC: sync_async
-          TEST_SUITES: ""
+          TOPOLOGY: sharded_cluster
+          VERSION: "5.0"
+          PYTHON_VERSION: "3.14"
+          TEST_NAME: default_sync
     tags:
-      - "4.0"
-      - sharded_cluster
-      - auth
-      - ssl
-      - sync_async
-  - name: test-4.0-sharded_cluster-noauth-ssl-sync
+      - test-standard
+      - server-5.0
+      - python-3.14
+      - sharded_cluster-auth-ssl
+      - sync
+  - name: test-standard-v5.0-python3.13-sync-noauth-nossl-standalone
     commands:
-      - func: bootstrap mongo-orchestration
+      - func: run server
+        vars:
+          AUTH: noauth
+          SSL: nossl
+          TOPOLOGY: standalone
+          VERSION: "5.0"
+      - func: run tests
+        vars:
+          AUTH: noauth
+          SSL: nossl
+          TOPOLOGY: standalone
+          VERSION: "5.0"
+          PYTHON_VERSION: "3.13"
+          TEST_NAME: default_sync
+    tags:
+      - test-standard
+      - server-5.0
+      - python-3.13
+      - standalone-noauth-nossl
+      - sync
+  - name: test-standard-v6.0-python3.10-async-noauth-ssl-replica-set
+    commands:
+      - func: run server
         vars:
-          VERSION: "4.0"
sharded_cluster AUTH: noauth SSL: ssl + TOPOLOGY: replica_set + VERSION: "6.0" - func: run tests vars: AUTH: noauth SSL: ssl - SYNC: sync - TEST_SUITES: default + TOPOLOGY: replica_set + VERSION: "6.0" + PYTHON_VERSION: "3.10" + TEST_NAME: default_async tags: - - "4.0" - - sharded_cluster - - noauth - - ssl - - sync - - name: test-4.0-sharded_cluster-noauth-ssl-async + - test-standard + - server-6.0 + - python-3.10 + - replica_set-noauth-ssl + - async + - name: test-standard-v6.0-python3.14t-async-noauth-ssl-replica-set commands: - - func: bootstrap mongo-orchestration + - func: run server vars: - VERSION: "4.0" - TOPOLOGY: sharded_cluster AUTH: noauth SSL: ssl + TOPOLOGY: replica_set + VERSION: "6.0" - func: run tests vars: AUTH: noauth SSL: ssl - SYNC: async - TEST_SUITES: default_async + TOPOLOGY: replica_set + VERSION: "6.0" + PYTHON_VERSION: 3.14t + TEST_NAME: default_async tags: - - "4.0" - - sharded_cluster - - noauth - - ssl + - test-standard + - server-6.0 + - python-3.14t + - replica_set-noauth-ssl - async - - name: test-4.0-sharded_cluster-noauth-ssl-sync_async + - free-threaded + - name: test-standard-v6.0-python3.11-async-auth-ssl-sharded-cluster commands: - - func: bootstrap mongo-orchestration + - func: run server vars: - VERSION: "4.0" - TOPOLOGY: sharded_cluster - AUTH: noauth + AUTH: auth SSL: ssl + TOPOLOGY: sharded_cluster + VERSION: "6.0" - func: run tests vars: - AUTH: noauth + AUTH: auth SSL: ssl - SYNC: sync_async - TEST_SUITES: "" + TOPOLOGY: sharded_cluster + VERSION: "6.0" + PYTHON_VERSION: "3.11" + TEST_NAME: default_async tags: - - "4.0" - - sharded_cluster - - noauth - - ssl - - sync_async - - name: test-4.0-sharded_cluster-noauth-nossl-sync + - test-standard + - server-6.0 + - python-3.11 + - sharded_cluster-auth-ssl + - async + - name: test-standard-v6.0-python3.14-async-auth-ssl-sharded-cluster commands: - - func: bootstrap mongo-orchestration + - func: run server vars: - VERSION: "4.0" + AUTH: auth + SSL: ssl TOPOLOGY: sharded_cluster - AUTH: noauth - SSL: nossl + VERSION: "6.0" - func: run tests vars: - AUTH: noauth - SSL: nossl - SYNC: sync - TEST_SUITES: default + AUTH: auth + SSL: ssl + TOPOLOGY: sharded_cluster + VERSION: "6.0" + PYTHON_VERSION: "3.14" + TEST_NAME: default_async tags: - - "4.0" - - sharded_cluster - - noauth - - nossl - - sync - - name: test-4.0-sharded_cluster-noauth-nossl-async + - test-standard + - server-6.0 + - python-3.14 + - sharded_cluster-auth-ssl + - async + - name: test-standard-v6.0-python3.13-async-noauth-nossl-standalone commands: - - func: bootstrap mongo-orchestration + - func: run server vars: - VERSION: "4.0" - TOPOLOGY: sharded_cluster AUTH: noauth SSL: nossl + TOPOLOGY: standalone + VERSION: "6.0" - func: run tests vars: AUTH: noauth SSL: nossl - SYNC: async - TEST_SUITES: default_async + TOPOLOGY: standalone + VERSION: "6.0" + PYTHON_VERSION: "3.13" + TEST_NAME: default_async tags: - - "4.0" - - sharded_cluster - - noauth - - nossl + - test-standard + - server-6.0 + - python-3.13 + - standalone-noauth-nossl - async - - name: test-4.0-sharded_cluster-noauth-nossl-sync_async + - name: test-standard-v7.0-python3.13-sync-noauth-ssl-replica-set commands: - - func: bootstrap mongo-orchestration + - func: run server vars: - VERSION: "4.0" - TOPOLOGY: sharded_cluster AUTH: noauth - SSL: nossl + SSL: ssl + TOPOLOGY: replica_set + VERSION: "7.0" - func: run tests vars: AUTH: noauth - SSL: nossl - SYNC: sync_async - TEST_SUITES: "" + SSL: ssl + TOPOLOGY: replica_set + VERSION: "7.0" + PYTHON_VERSION: "3.13" + TEST_NAME: 
     tags:
-    - "4.0"
-    - sharded_cluster
-    - noauth
-    - nossl
-    - sync_async
-  - name: test-4.4-sharded_cluster-auth-ssl-sync
+    - test-standard
+    - server-7.0
+    - python-3.13
+    - replica_set-noauth-ssl
+    - sync
+  - name: test-standard-v7.0-python3.10-sync-auth-ssl-sharded-cluster
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: "4.4"
-        TOPOLOGY: sharded_cluster
         AUTH: auth
         SSL: ssl
+        TOPOLOGY: sharded_cluster
+        VERSION: "7.0"
     - func: run tests
       vars:
         AUTH: auth
         SSL: ssl
-        SYNC: sync
-        TEST_SUITES: default
+        TOPOLOGY: sharded_cluster
+        VERSION: "7.0"
+        PYTHON_VERSION: "3.10"
+        TEST_NAME: default_sync
     tags:
-    - "4.4"
-    - sharded_cluster
-    - auth
-    - ssl
+    - test-standard
+    - server-7.0
+    - python-3.10
+    - sharded_cluster-auth-ssl
     - sync
-  - name: test-4.4-sharded_cluster-auth-ssl-async
+  - name: test-standard-v7.0-python3.14t-sync-auth-ssl-sharded-cluster
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: "4.4"
-        TOPOLOGY: sharded_cluster
         AUTH: auth
         SSL: ssl
+        TOPOLOGY: sharded_cluster
+        VERSION: "7.0"
     - func: run tests
       vars:
         AUTH: auth
         SSL: ssl
-        SYNC: async
-        TEST_SUITES: default_async
+        TOPOLOGY: sharded_cluster
+        VERSION: "7.0"
+        PYTHON_VERSION: 3.14t
+        TEST_NAME: default_sync
     tags:
-    - "4.4"
-    - sharded_cluster
-    - auth
-    - ssl
-    - async
-  - name: test-4.4-sharded_cluster-auth-ssl-sync_async
+    - test-standard
+    - server-7.0
+    - python-3.14t
+    - sharded_cluster-auth-ssl
+    - sync
+    - free-threaded
+  - name: test-standard-v7.0-python3.12-sync-noauth-nossl-standalone
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: "4.4"
-        TOPOLOGY: sharded_cluster
-        AUTH: auth
-        SSL: ssl
+        AUTH: noauth
+        SSL: nossl
+        TOPOLOGY: standalone
+        VERSION: "7.0"
     - func: run tests
       vars:
-        AUTH: auth
-        SSL: ssl
-        SYNC: sync_async
-        TEST_SUITES: ""
+        AUTH: noauth
+        SSL: nossl
+        TOPOLOGY: standalone
+        VERSION: "7.0"
+        PYTHON_VERSION: "3.12"
+        TEST_NAME: default_sync
     tags:
-    - "4.4"
-    - sharded_cluster
-    - auth
-    - ssl
-    - sync_async
-  - name: test-4.4-sharded_cluster-noauth-ssl-sync
+    - test-standard
+    - server-7.0
+    - python-3.12
+    - standalone-noauth-nossl
+    - sync
+  - name: test-standard-v7.0-pypy3.10-sync-noauth-nossl-standalone
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: "4.4"
-        TOPOLOGY: sharded_cluster
         AUTH: noauth
-        SSL: ssl
+        SSL: nossl
+        TOPOLOGY: standalone
+        VERSION: "7.0"
     - func: run tests
       vars:
         AUTH: noauth
-        SSL: ssl
-        SYNC: sync
-        TEST_SUITES: default
+        SSL: nossl
+        TOPOLOGY: standalone
+        VERSION: "7.0"
+        PYTHON_VERSION: pypy3.10
+        TEST_NAME: default_sync
     tags:
-    - "4.4"
-    - sharded_cluster
-    - noauth
-    - ssl
+    - test-standard
+    - server-7.0
+    - python-pypy3.10
+    - standalone-noauth-nossl
     - sync
-  - name: test-4.4-sharded_cluster-noauth-ssl-async
+    - pypy
+  - name: test-standard-v8.0-python3.13-async-noauth-ssl-replica-set
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: "4.4"
-        TOPOLOGY: sharded_cluster
         AUTH: noauth
         SSL: ssl
+        TOPOLOGY: replica_set
+        VERSION: "8.0"
     - func: run tests
       vars:
         AUTH: noauth
         SSL: ssl
-        SYNC: async
-        TEST_SUITES: default_async
+        TOPOLOGY: replica_set
+        VERSION: "8.0"
+        PYTHON_VERSION: "3.13"
+        TEST_NAME: default_async
     tags:
-    - "4.4"
-    - sharded_cluster
-    - noauth
-    - ssl
+    - test-standard
+    - server-8.0
+    - python-3.13
+    - replica_set-noauth-ssl
     - async
-  - name: test-4.4-sharded_cluster-noauth-ssl-sync_async
+  - name: test-standard-v8.0-python3.10-async-auth-ssl-sharded-cluster
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: "4.4"
-        TOPOLOGY: sharded_cluster
-        AUTH: noauth
+        AUTH: auth
         SSL: ssl
+        TOPOLOGY: sharded_cluster
+        VERSION: "8.0"
     - func: run tests
       vars:
-        AUTH: noauth
+        AUTH: auth
         SSL: ssl
-        SYNC: sync_async
-        TEST_SUITES: ""
+        TOPOLOGY: sharded_cluster
+        VERSION: "8.0"
+        PYTHON_VERSION: "3.10"
+        TEST_NAME: default_async
     tags:
-    - "4.4"
-    - sharded_cluster
-    - noauth
-    - ssl
-    - sync_async
-  - name: test-4.4-sharded_cluster-noauth-nossl-sync
+    - test-standard
+    - server-8.0
+    - python-3.10
+    - sharded_cluster-auth-ssl
+    - async
+  - name: test-standard-v8.0-python3.14t-async-auth-ssl-sharded-cluster
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: "4.4"
+        AUTH: auth
+        SSL: ssl
         TOPOLOGY: sharded_cluster
-        AUTH: noauth
-        SSL: nossl
+        VERSION: "8.0"
     - func: run tests
       vars:
-        AUTH: noauth
-        SSL: nossl
-        SYNC: sync
-        TEST_SUITES: default
+        AUTH: auth
+        SSL: ssl
+        TOPOLOGY: sharded_cluster
+        VERSION: "8.0"
+        PYTHON_VERSION: 3.14t
+        TEST_NAME: default_async
     tags:
-    - "4.4"
-    - sharded_cluster
-    - noauth
-    - nossl
-    - sync
-  - name: test-4.4-sharded_cluster-noauth-nossl-async
+    - test-standard
+    - server-8.0
+    - python-3.14t
+    - sharded_cluster-auth-ssl
+    - async
+    - free-threaded
+  - name: test-standard-v8.0-python3.12-async-noauth-nossl-standalone
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: "4.4"
-        TOPOLOGY: sharded_cluster
         AUTH: noauth
         SSL: nossl
+        TOPOLOGY: standalone
+        VERSION: "8.0"
     - func: run tests
       vars:
         AUTH: noauth
         SSL: nossl
-        SYNC: async
-        TEST_SUITES: default_async
+        TOPOLOGY: standalone
+        VERSION: "8.0"
+        PYTHON_VERSION: "3.12"
+        TEST_NAME: default_async
     tags:
-    - "4.4"
-    - sharded_cluster
-    - noauth
-    - nossl
+    - test-standard
+    - server-8.0
+    - python-3.12
+    - standalone-noauth-nossl
     - async
-  - name: test-4.4-sharded_cluster-noauth-nossl-sync_async
+  - name: test-standard-v8.0-pypy3.10-async-noauth-nossl-standalone
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: "4.4"
-        TOPOLOGY: sharded_cluster
         AUTH: noauth
         SSL: nossl
+        TOPOLOGY: standalone
+        VERSION: "8.0"
     - func: run tests
       vars:
         AUTH: noauth
         SSL: nossl
-        SYNC: sync_async
-        TEST_SUITES: ""
+        TOPOLOGY: standalone
+        VERSION: "8.0"
+        PYTHON_VERSION: pypy3.10
+        TEST_NAME: default_async
     tags:
-    - "4.4"
-    - sharded_cluster
-    - noauth
-    - nossl
-    - sync_async
-  - name: test-5.0-sharded_cluster-auth-ssl-sync
+    - test-standard
+    - server-8.0
+    - python-pypy3.10
+    - standalone-noauth-nossl
+    - async
+    - pypy
+  - name: test-standard-latest-python3.12-async-noauth-ssl-replica-set
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: "5.0"
-        TOPOLOGY: sharded_cluster
-        AUTH: auth
+        AUTH: noauth
         SSL: ssl
+        TOPOLOGY: replica_set
+        VERSION: latest
     - func: run tests
       vars:
-        AUTH: auth
+        AUTH: noauth
         SSL: ssl
-        SYNC: sync
-        TEST_SUITES: default
+        TOPOLOGY: replica_set
+        VERSION: latest
+        PYTHON_VERSION: "3.12"
+        TEST_NAME: default_async
     tags:
-    - "5.0"
-    - sharded_cluster
-    - auth
-    - ssl
-    - sync
-  - name: test-5.0-sharded_cluster-auth-ssl-async
+    - test-standard
+    - server-latest
+    - python-3.12
+    - replica_set-noauth-ssl
+    - async
+    - pr
+  - name: test-standard-latest-pypy3.10-async-noauth-ssl-replica-set
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: "5.0"
-        TOPOLOGY: sharded_cluster
-        AUTH: auth
+        AUTH: noauth
         SSL: ssl
+        TOPOLOGY: replica_set
+        VERSION: latest
     - func: run tests
       vars:
-        AUTH: auth
+        AUTH: noauth
         SSL: ssl
-        SYNC: async
-        TEST_SUITES: default_async
+        TOPOLOGY: replica_set
+        VERSION: latest
+        PYTHON_VERSION: pypy3.10
+        TEST_NAME: default_async
     tags:
-    - "5.0"
-    - sharded_cluster
-    - auth
-    - ssl
+    - test-standard
+    - server-latest
+    - python-pypy3.10
+    - replica_set-noauth-ssl
     - async
-  - name: test-5.0-sharded_cluster-auth-ssl-sync_async
+    - pypy
+  - name: test-standard-latest-python3.13-async-auth-ssl-sharded-cluster
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: "5.0"
-        TOPOLOGY: sharded_cluster
         AUTH: auth
         SSL: ssl
+        TOPOLOGY: sharded_cluster
+        VERSION: latest
     - func: run tests
       vars:
         AUTH: auth
         SSL: ssl
-        SYNC: sync_async
-        TEST_SUITES: ""
+        TOPOLOGY: sharded_cluster
+        VERSION: latest
+        PYTHON_VERSION: "3.13"
+        TEST_NAME: default_async
     tags:
-    - "5.0"
-    - sharded_cluster
-    - auth
-    - ssl
-    - sync_async
-  - name: test-5.0-sharded_cluster-noauth-ssl-sync
+    - test-standard
+    - server-latest
+    - python-3.13
+    - sharded_cluster-auth-ssl
+    - async
+    - pr
+  - name: test-standard-latest-python3.11-async-noauth-nossl-standalone
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: "5.0"
-        TOPOLOGY: sharded_cluster
         AUTH: noauth
-        SSL: ssl
+        SSL: nossl
+        TOPOLOGY: standalone
+        VERSION: latest
     - func: run tests
       vars:
         AUTH: noauth
-        SSL: ssl
-        SYNC: sync
-        TEST_SUITES: default
+        SSL: nossl
+        TOPOLOGY: standalone
+        VERSION: latest
+        PYTHON_VERSION: "3.11"
+        TEST_NAME: default_async
     tags:
-    - "5.0"
-    - sharded_cluster
-    - noauth
-    - ssl
-    - sync
-  - name: test-5.0-sharded_cluster-noauth-ssl-async
+    - test-standard
+    - server-latest
+    - python-3.11
+    - standalone-noauth-nossl
+    - async
+    - pr
+  - name: test-standard-latest-python3.14-async-noauth-nossl-standalone
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: "5.0"
-        TOPOLOGY: sharded_cluster
         AUTH: noauth
-        SSL: ssl
+        SSL: nossl
+        TOPOLOGY: standalone
+        VERSION: latest
     - func: run tests
       vars:
         AUTH: noauth
-        SSL: ssl
-        SYNC: async
-        TEST_SUITES: default_async
+        SSL: nossl
+        TOPOLOGY: standalone
+        VERSION: latest
+        PYTHON_VERSION: "3.14"
+        TEST_NAME: default_async
     tags:
-    - "5.0"
-    - sharded_cluster
-    - noauth
-    - ssl
+    - test-standard
+    - server-latest
+    - python-3.14
+    - standalone-noauth-nossl
     - async
-  - name: test-5.0-sharded_cluster-noauth-ssl-sync_async
+    - pr
+  - name: test-standard-rapid-python3.12-sync-noauth-ssl-replica-set
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: "5.0"
-        TOPOLOGY: sharded_cluster
         AUTH: noauth
         SSL: ssl
+        TOPOLOGY: replica_set
+        VERSION: rapid
     - func: run tests
       vars:
         AUTH: noauth
         SSL: ssl
-        SYNC: sync_async
-        TEST_SUITES: ""
+        TOPOLOGY: replica_set
+        VERSION: rapid
+        PYTHON_VERSION: "3.12"
+        TEST_NAME: default_sync
     tags:
-    - "5.0"
-    - sharded_cluster
-    - noauth
-    - ssl
-    - sync_async
-  - name: test-5.0-sharded_cluster-noauth-nossl-sync
+    - test-standard
+    - server-rapid
+    - python-3.12
+    - replica_set-noauth-ssl
+    - sync
+  - name: test-standard-rapid-pypy3.10-sync-noauth-ssl-replica-set
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: "5.0"
-        TOPOLOGY: sharded_cluster
         AUTH: noauth
-        SSL: nossl
+        SSL: ssl
+        TOPOLOGY: replica_set
+        VERSION: rapid
     - func: run tests
       vars:
         AUTH: noauth
-        SSL: nossl
-        SYNC: sync
-        TEST_SUITES: default
+        SSL: ssl
+        TOPOLOGY: replica_set
+        VERSION: rapid
+        PYTHON_VERSION: pypy3.10
+        TEST_NAME: default_sync
     tags:
-    - "5.0"
-    - sharded_cluster
-    - noauth
-    - nossl
+    - test-standard
+    - server-rapid
+    - python-pypy3.10
+    - replica_set-noauth-ssl
     - sync
-  - name: test-5.0-sharded_cluster-noauth-nossl-async
+    - pypy
+  - name: test-standard-rapid-python3.13-sync-auth-ssl-sharded-cluster
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: "5.0"
+        AUTH: auth
+        SSL: ssl
        TOPOLOGY: sharded_cluster
-        AUTH: noauth
-        SSL: nossl
+        VERSION: rapid
     - func: run tests
       vars:
-        AUTH: noauth
-        SSL: nossl
-        SYNC: async
-        TEST_SUITES: default_async
+        AUTH: auth
+        SSL: ssl
+        TOPOLOGY: sharded_cluster
+        VERSION: rapid
+        PYTHON_VERSION: "3.13"
+        TEST_NAME: default_sync
     tags:
-    - "5.0"
-    - sharded_cluster
-    - noauth
-    - nossl
-    - async
-  - name: test-5.0-sharded_cluster-noauth-nossl-sync_async
+    - test-standard
+    - server-rapid
+    - python-3.13
+    - sharded_cluster-auth-ssl
+    - sync
+  - name: test-standard-rapid-python3.11-sync-noauth-nossl-standalone
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: "5.0"
-        TOPOLOGY: sharded_cluster
         AUTH: noauth
         SSL: nossl
+        TOPOLOGY: standalone
+        VERSION: rapid
     - func: run tests
       vars:
         AUTH: noauth
         SSL: nossl
-        SYNC: sync_async
-        TEST_SUITES: ""
+        TOPOLOGY: standalone
+        VERSION: rapid
+        PYTHON_VERSION: "3.11"
+        TEST_NAME: default_sync
     tags:
-    - "5.0"
-    - sharded_cluster
-    - noauth
-    - nossl
-    - sync_async
-  - name: test-6.0-sharded_cluster-auth-ssl-sync
+    - test-standard
+    - server-rapid
+    - python-3.11
+    - standalone-noauth-nossl
+    - sync
+  - name: test-standard-rapid-python3.14-sync-noauth-nossl-standalone
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: "6.0"
-        TOPOLOGY: sharded_cluster
-        AUTH: auth
-        SSL: ssl
+        AUTH: noauth
+        SSL: nossl
+        TOPOLOGY: standalone
+        VERSION: rapid
     - func: run tests
       vars:
-        AUTH: auth
-        SSL: ssl
-        SYNC: sync
-        TEST_SUITES: default
+        AUTH: noauth
+        SSL: nossl
+        TOPOLOGY: standalone
+        VERSION: rapid
+        PYTHON_VERSION: "3.14"
+        TEST_NAME: default_sync
     tags:
-    - "6.0"
-    - sharded_cluster
-    - auth
-    - ssl
+    - test-standard
+    - server-rapid
+    - python-3.14
+    - standalone-noauth-nossl
     - sync
-  - name: test-6.0-sharded_cluster-auth-ssl-async
+
+  # Test non standard tests
+  - name: test-non-standard-v4.2-python3.11-noauth-ssl-replica-set
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: "6.0"
-        TOPOLOGY: sharded_cluster
-        AUTH: auth
+        AUTH: noauth
         SSL: ssl
+        TOPOLOGY: replica_set
+        VERSION: "4.2"
     - func: run tests
       vars:
-        AUTH: auth
+        AUTH: noauth
         SSL: ssl
-        SYNC: async
-        TEST_SUITES: default_async
+        TOPOLOGY: replica_set
+        VERSION: "4.2"
+        PYTHON_VERSION: "3.11"
     tags:
-    - "6.0"
-    - sharded_cluster
-    - auth
-    - ssl
-    - async
-  - name: test-6.0-sharded_cluster-auth-ssl-sync_async
+    - test-non-standard
+    - server-4.2
+    - python-3.11
+    - replica_set-noauth-ssl
+    - noauth
+  - name: test-non-standard-v4.2-python3.12-auth-ssl-sharded-cluster
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: "6.0"
-        TOPOLOGY: sharded_cluster
         AUTH: auth
         SSL: ssl
+        TOPOLOGY: sharded_cluster
+        VERSION: "4.2"
     - func: run tests
       vars:
         AUTH: auth
         SSL: ssl
-        SYNC: sync_async
-        TEST_SUITES: ""
+        TOPOLOGY: sharded_cluster
+        VERSION: "4.2"
+        PYTHON_VERSION: "3.12"
     tags:
-    - "6.0"
-    - sharded_cluster
+    - test-non-standard
+    - server-4.2
+    - python-3.12
+    - sharded_cluster-auth-ssl
     - auth
-    - ssl
-    - sync_async
-  - name: test-6.0-sharded_cluster-noauth-ssl-sync
+  - name: test-non-standard-v4.2-python3.10-noauth-nossl-standalone
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: "6.0"
-        TOPOLOGY: sharded_cluster
         AUTH: noauth
-        SSL: ssl
+        SSL: nossl
+        TOPOLOGY: standalone
+        VERSION: "4.2"
     - func: run tests
       vars:
         AUTH: noauth
-        SSL: ssl
-        SYNC: sync
-        TEST_SUITES: default
+        SSL: nossl
+        TOPOLOGY: standalone
+        VERSION: "4.2"
+        PYTHON_VERSION: "3.10"
     tags:
-    - "6.0"
-    - sharded_cluster
+    - test-non-standard
+    - server-4.2
+    - python-3.10
+    - standalone-noauth-nossl
     - noauth
-    - ssl
-    - sync
-  - name: test-6.0-sharded_cluster-noauth-ssl-async
+  - name: test-non-standard-v4.2-pypy3.10-noauth-nossl-standalone
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: "6.0"
-        TOPOLOGY: sharded_cluster
         AUTH: noauth
-        SSL: ssl
+        SSL: nossl
+        TOPOLOGY: standalone
+        VERSION: "4.2"
     - func: run tests
       vars:
         AUTH: noauth
-        SSL: ssl
-        SYNC: async
-        TEST_SUITES: default_async
+        SSL: nossl
+        TOPOLOGY: standalone
+        VERSION: "4.2"
+        PYTHON_VERSION: pypy3.10
     tags:
-    - "6.0"
-    - sharded_cluster
+    - test-non-standard
+    - server-4.2
+    - python-pypy3.10
+    - standalone-noauth-nossl
     - noauth
-    - ssl
-    - async
-  - name: test-6.0-sharded_cluster-noauth-ssl-sync_async
+    - pypy
+  - name: test-non-standard-v4.4-python3.14t-noauth-ssl-replica-set
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: "6.0"
-        TOPOLOGY: sharded_cluster
         AUTH: noauth
         SSL: ssl
+        TOPOLOGY: replica_set
+        VERSION: "4.4"
     - func: run tests
       vars:
         AUTH: noauth
         SSL: ssl
-        SYNC: sync_async
-        TEST_SUITES: ""
+        TOPOLOGY: replica_set
+        VERSION: "4.4"
+        PYTHON_VERSION: 3.14t
     tags:
-    - "6.0"
-    - sharded_cluster
+    - test-non-standard
+    - server-4.4
+    - python-3.14t
+    - replica_set-noauth-ssl
     - noauth
-    - ssl
-    - sync_async
-  - name: test-6.0-sharded_cluster-noauth-nossl-sync
+    - free-threaded
+  - name: test-non-standard-v4.4-pypy3.10-noauth-ssl-replica-set
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: "6.0"
-        TOPOLOGY: sharded_cluster
         AUTH: noauth
-        SSL: nossl
+        SSL: ssl
+        TOPOLOGY: replica_set
+        VERSION: "4.4"
     - func: run tests
       vars:
         AUTH: noauth
-        SSL: nossl
-        SYNC: sync
-        TEST_SUITES: default
+        SSL: ssl
+        TOPOLOGY: replica_set
+        VERSION: "4.4"
+        PYTHON_VERSION: pypy3.10
     tags:
-    - "6.0"
-    - sharded_cluster
+    - test-non-standard
+    - server-4.4
+    - python-pypy3.10
+    - replica_set-noauth-ssl
     - noauth
-    - nossl
-    - sync
-  - name: test-6.0-sharded_cluster-noauth-nossl-async
+    - pypy
+  - name: test-non-standard-v4.4-python3.14-auth-ssl-sharded-cluster
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: "6.0"
+        AUTH: auth
+        SSL: ssl
+        TOPOLOGY: sharded_cluster
+        VERSION: "4.4"
+    - func: run tests
+      vars:
+        AUTH: auth
+        SSL: ssl
         TOPOLOGY: sharded_cluster
+        VERSION: "4.4"
+        PYTHON_VERSION: "3.14"
+    tags:
+    - test-non-standard
+    - server-4.4
+    - python-3.14
+    - sharded_cluster-auth-ssl
+    - auth
+  - name: test-non-standard-v4.4-python3.13-noauth-nossl-standalone
+    commands:
+    - func: run server
+      vars:
         AUTH: noauth
         SSL: nossl
+        TOPOLOGY: standalone
+        VERSION: "4.4"
     - func: run tests
       vars:
         AUTH: noauth
         SSL: nossl
-        SYNC: async
-        TEST_SUITES: default_async
+        TOPOLOGY: standalone
+        VERSION: "4.4"
+        PYTHON_VERSION: "3.13"
     tags:
-    - "6.0"
-    - sharded_cluster
+    - test-non-standard
+    - server-4.4
+    - python-3.13
+    - standalone-noauth-nossl
     - noauth
-    - nossl
-    - async
-  - name: test-6.0-sharded_cluster-noauth-nossl-sync_async
+  - name: test-non-standard-v5.0-python3.11-noauth-ssl-replica-set
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: "6.0"
-        TOPOLOGY: sharded_cluster
         AUTH: noauth
-        SSL: nossl
+        SSL: ssl
+        TOPOLOGY: replica_set
+        VERSION: "5.0"
     - func: run tests
       vars:
         AUTH: noauth
-        SSL: nossl
-        SYNC: sync_async
-        TEST_SUITES: ""
+        SSL: ssl
+        TOPOLOGY: replica_set
+        VERSION: "5.0"
+        PYTHON_VERSION: "3.11"
     tags:
-    - "6.0"
-    - sharded_cluster
+    - test-non-standard
+    - server-5.0
+    - python-3.11
+    - replica_set-noauth-ssl
     - noauth
-    - nossl
-    - sync_async
-  - name: test-7.0-sharded_cluster-auth-ssl-sync
+  - name: test-non-standard-v5.0-python3.12-auth-ssl-sharded-cluster
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: "7.0"
-        TOPOLOGY: sharded_cluster
         AUTH: auth
         SSL: ssl
+        TOPOLOGY: sharded_cluster
+        VERSION: "5.0"
     - func: run tests
       vars:
         AUTH: auth
         SSL: ssl
-        SYNC: sync
-        TEST_SUITES: default
+        TOPOLOGY: sharded_cluster
+        VERSION: "5.0"
+        PYTHON_VERSION: "3.12"
     tags:
-    - "7.0"
-    - sharded_cluster
+    - test-non-standard
+    - server-5.0
+    - python-3.12
+    - sharded_cluster-auth-ssl
     - auth
-    - ssl
-    - sync
-  - name: test-7.0-sharded_cluster-auth-ssl-async
+  - name: test-non-standard-v5.0-pypy3.10-auth-ssl-sharded-cluster
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: "7.0"
-        TOPOLOGY: sharded_cluster
         AUTH: auth
         SSL: ssl
+        TOPOLOGY: sharded_cluster
+        VERSION: "5.0"
     - func: run tests
       vars:
         AUTH: auth
         SSL: ssl
-        SYNC: async
-        TEST_SUITES: default_async
+        TOPOLOGY: sharded_cluster
+        VERSION: "5.0"
+        PYTHON_VERSION: pypy3.10
     tags:
-    - "7.0"
-    - sharded_cluster
+    - test-non-standard
+    - server-5.0
+    - python-pypy3.10
+    - sharded_cluster-auth-ssl
     - auth
-    - ssl
-    - async
-  - name: test-7.0-sharded_cluster-auth-ssl-sync_async
+    - pypy
+  - name: test-non-standard-v5.0-python3.10-noauth-nossl-standalone
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
+      vars:
+        AUTH: noauth
+        SSL: nossl
+        TOPOLOGY: standalone
+        VERSION: "5.0"
+    - func: run tests
+      vars:
+        AUTH: noauth
+        SSL: nossl
+        TOPOLOGY: standalone
+        VERSION: "5.0"
+        PYTHON_VERSION: "3.10"
+    tags:
+    - test-non-standard
+    - server-5.0
+    - python-3.10
+    - standalone-noauth-nossl
+    - noauth
+  - name: test-non-standard-v6.0-python3.14t-noauth-ssl-replica-set
+    commands:
+    - func: run server
+      vars:
+        AUTH: noauth
+        SSL: ssl
+        TOPOLOGY: replica_set
+        VERSION: "6.0"
+    - func: run tests
+      vars:
+        AUTH: noauth
+        SSL: ssl
+        TOPOLOGY: replica_set
+        VERSION: "6.0"
+        PYTHON_VERSION: 3.14t
+    tags:
+    - test-non-standard
+    - server-6.0
+    - python-3.14t
+    - replica_set-noauth-ssl
+    - noauth
+    - free-threaded
+  - name: test-non-standard-v6.0-python3.14-auth-ssl-sharded-cluster
+    commands:
+    - func: run server
       vars:
-        VERSION: "7.0"
-        TOPOLOGY: sharded_cluster
         AUTH: auth
         SSL: ssl
+        TOPOLOGY: sharded_cluster
+        VERSION: "6.0"
     - func: run tests
       vars:
         AUTH: auth
         SSL: ssl
-        SYNC: sync_async
-        TEST_SUITES: ""
+        TOPOLOGY: sharded_cluster
+        VERSION: "6.0"
+        PYTHON_VERSION: "3.14"
     tags:
-    - "7.0"
-    - sharded_cluster
+    - test-non-standard
+    - server-6.0
+    - python-3.14
+    - sharded_cluster-auth-ssl
     - auth
-    - ssl
-    - sync_async
-  - name: test-7.0-sharded_cluster-noauth-ssl-sync
+  - name: test-non-standard-v6.0-python3.13-noauth-nossl-standalone
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: "7.0"
-        TOPOLOGY: sharded_cluster
         AUTH: noauth
-        SSL: ssl
+        SSL: nossl
+        TOPOLOGY: standalone
+        VERSION: "6.0"
     - func: run tests
       vars:
         AUTH: noauth
-        SSL: ssl
-        SYNC: sync
-        TEST_SUITES: default
+        SSL: nossl
+        TOPOLOGY: standalone
+        VERSION: "6.0"
+        PYTHON_VERSION: "3.13"
     tags:
-    - "7.0"
-    - sharded_cluster
+    - test-non-standard
+    - server-6.0
+    - python-3.13
+    - standalone-noauth-nossl
     - noauth
-    - ssl
-    - sync
-  - name: test-7.0-sharded_cluster-noauth-ssl-async
+  - name: test-non-standard-v6.0-pypy3.10-noauth-nossl-standalone
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: "7.0"
-        TOPOLOGY: sharded_cluster
         AUTH: noauth
-        SSL: ssl
+        SSL: nossl
+        TOPOLOGY: standalone
+        VERSION: "6.0"
     - func: run tests
       vars:
         AUTH: noauth
-        SSL: ssl
-        SYNC: async
-        TEST_SUITES: default_async
+        SSL: nossl
+        TOPOLOGY: standalone
+        VERSION: "6.0"
+        PYTHON_VERSION: pypy3.10
     tags:
-    - "7.0"
-    - sharded_cluster
+    - test-non-standard
+    - server-6.0
+    - python-pypy3.10
+    - standalone-noauth-nossl
     - noauth
-    - ssl
-    - async
-  - name: test-7.0-sharded_cluster-noauth-ssl-sync_async
+    - pypy
+  - name: test-non-standard-v7.0-python3.11-noauth-ssl-replica-set
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: "7.0"
-        TOPOLOGY: sharded_cluster
         AUTH: noauth
         SSL: ssl
+        TOPOLOGY: replica_set
+        VERSION: "7.0"
     - func: run tests
       vars:
         AUTH: noauth
         SSL: ssl
-        SYNC: sync_async
-        TEST_SUITES: ""
+        TOPOLOGY: replica_set
+        VERSION: "7.0"
+        PYTHON_VERSION: "3.11"
     tags:
-    - "7.0"
-    - sharded_cluster
+    - test-non-standard
+    - server-7.0
+    - python-3.11
+    - replica_set-noauth-ssl
     - noauth
-    - ssl
-    - sync_async
-  - name: test-7.0-sharded_cluster-noauth-nossl-sync
+  - name: test-non-standard-v7.0-pypy3.10-noauth-ssl-replica-set
    commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: "7.0"
-        TOPOLOGY: sharded_cluster
         AUTH: noauth
-        SSL: nossl
+        SSL: ssl
+        TOPOLOGY: replica_set
+        VERSION: "7.0"
     - func: run tests
       vars:
         AUTH: noauth
-        SSL: nossl
-        SYNC: sync
-        TEST_SUITES: default
+        SSL: ssl
+        TOPOLOGY: replica_set
+        VERSION: "7.0"
+        PYTHON_VERSION: pypy3.10
     tags:
-    - "7.0"
-    - sharded_cluster
+    - test-non-standard
+    - server-7.0
+    - python-pypy3.10
+    - replica_set-noauth-ssl
     - noauth
-    - nossl
-    - sync
-  - name: test-7.0-sharded_cluster-noauth-nossl-async
+    - pypy
+  - name: test-non-standard-v7.0-python3.12-auth-ssl-sharded-cluster
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
+        AUTH: auth
+        SSL: ssl
+        TOPOLOGY: sharded_cluster
         VERSION: "7.0"
+    - func: run tests
+      vars:
+        AUTH: auth
+        SSL: ssl
         TOPOLOGY: sharded_cluster
+        VERSION: "7.0"
+        PYTHON_VERSION: "3.12"
+    tags:
+    - test-non-standard
+    - server-7.0
+    - python-3.12
+    - sharded_cluster-auth-ssl
+    - auth
+  - name: test-non-standard-v7.0-python3.10-noauth-nossl-standalone
+    commands:
+    - func: run server
+      vars:
         AUTH: noauth
         SSL: nossl
+        TOPOLOGY: standalone
+        VERSION: "7.0"
     - func: run tests
       vars:
         AUTH: noauth
         SSL: nossl
-        SYNC: async
-        TEST_SUITES: default_async
+        TOPOLOGY: standalone
+        VERSION: "7.0"
+        PYTHON_VERSION: "3.10"
     tags:
-    - "7.0"
-    - sharded_cluster
+    - test-non-standard
+    - server-7.0
+    - python-3.10
+    - standalone-noauth-nossl
     - noauth
-    - nossl
-    - async
-  - name: test-7.0-sharded_cluster-noauth-nossl-sync_async
+  - name: test-non-standard-v8.0-python3.14t-noauth-ssl-replica-set
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: "7.0"
-        TOPOLOGY: sharded_cluster
         AUTH: noauth
-        SSL: nossl
+        SSL: ssl
+        TOPOLOGY: replica_set
+        VERSION: "8.0"
     - func: run tests
       vars:
         AUTH: noauth
-        SSL: nossl
-        SYNC: sync_async
-        TEST_SUITES: ""
+        SSL: ssl
+        TOPOLOGY: replica_set
+        VERSION: "8.0"
+        PYTHON_VERSION: 3.14t
     tags:
-    - "7.0"
-    - sharded_cluster
+    - test-non-standard
+    - server-8.0
+    - python-3.14t
+    - replica_set-noauth-ssl
     - noauth
-    - nossl
-    - sync_async
-  - name: test-8.0-sharded_cluster-auth-ssl-sync
+    - free-threaded
+  - name: test-non-standard-v8.0-python3.14-auth-ssl-sharded-cluster
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: "8.0"
-        TOPOLOGY: sharded_cluster
         AUTH: auth
         SSL: ssl
+        TOPOLOGY: sharded_cluster
+        VERSION: "8.0"
     - func: run tests
       vars:
         AUTH: auth
         SSL: ssl
-        SYNC: sync
-        TEST_SUITES: default
+        TOPOLOGY: sharded_cluster
+        VERSION: "8.0"
+        PYTHON_VERSION: "3.14"
     tags:
-    - "8.0"
-    - sharded_cluster
+    - test-non-standard
+    - server-8.0
+    - python-3.14
+    - sharded_cluster-auth-ssl
     - auth
-    - ssl
-    - sync
-  - name: test-8.0-sharded_cluster-auth-ssl-async
+  - name: test-non-standard-v8.0-pypy3.10-auth-ssl-sharded-cluster
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: "8.0"
-        TOPOLOGY: sharded_cluster
         AUTH: auth
         SSL: ssl
+        TOPOLOGY: sharded_cluster
+        VERSION: "8.0"
     - func: run tests
       vars:
         AUTH: auth
         SSL: ssl
-        SYNC: async
-        TEST_SUITES: default_async
+        TOPOLOGY: sharded_cluster
+        VERSION: "8.0"
+        PYTHON_VERSION: pypy3.10
     tags:
-    - "8.0"
-    - sharded_cluster
+    - test-non-standard
+    - server-8.0
+    - python-pypy3.10
+    - sharded_cluster-auth-ssl
     - auth
-    - ssl
-    - async
-  - name: test-8.0-sharded_cluster-auth-ssl-sync_async
+    - pypy
+  - name: test-non-standard-v8.0-python3.13-noauth-nossl-standalone
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
+        AUTH: noauth
+        SSL: nossl
+        TOPOLOGY: standalone
         VERSION: "8.0"
-        TOPOLOGY: sharded_cluster
-        AUTH: auth
+    - func: run tests
+      vars:
+        AUTH: noauth
+        SSL: nossl
+        TOPOLOGY: standalone
+        VERSION: "8.0"
+        PYTHON_VERSION: "3.13"
+    tags:
+    - test-non-standard
+    - server-8.0
+    - python-3.13
+    - standalone-noauth-nossl
+    - noauth
+  - name: test-non-standard-latest-python3.14t-noauth-ssl-replica-set
+    commands:
+    - func: run server
+      vars:
+        AUTH: noauth
         SSL: ssl
+        TOPOLOGY: replica_set
+        VERSION: latest
     - func: run tests
       vars:
-        AUTH: auth
+        AUTH: noauth
         SSL: ssl
-        SYNC: sync_async
-        TEST_SUITES: ""
+        TOPOLOGY: replica_set
+        VERSION: latest
+        PYTHON_VERSION: 3.14t
     tags:
-    - "8.0"
-    - sharded_cluster
-    - auth
-    - ssl
-    - sync_async
-  - name: test-8.0-sharded_cluster-noauth-ssl-sync
+    - test-non-standard
+    - server-latest
+    - python-3.14t
+    - replica_set-noauth-ssl
+    - noauth
+    - free-threaded
+    - pr
+  - name: test-non-standard-latest-pypy3.10-noauth-ssl-replica-set
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: "8.0"
-        TOPOLOGY: sharded_cluster
         AUTH: noauth
         SSL: ssl
+        TOPOLOGY: replica_set
+        VERSION: latest
     - func: run tests
       vars:
         AUTH: noauth
         SSL: ssl
-        SYNC: sync
-        TEST_SUITES: default
+        TOPOLOGY: replica_set
+        VERSION: latest
+        PYTHON_VERSION: pypy3.10
     tags:
-    - "8.0"
-    - sharded_cluster
+    - test-non-standard
+    - server-latest
+    - python-pypy3.10
+    - replica_set-noauth-ssl
     - noauth
-    - ssl
-    - sync
-  - name: test-8.0-sharded_cluster-noauth-ssl-async
+    - pypy
+  - name: test-non-standard-latest-python3.14-auth-ssl-sharded-cluster
+    commands:
+    - func: run server
+      vars:
+        AUTH: auth
+        SSL: ssl
+        TOPOLOGY: sharded_cluster
+        VERSION: latest
+    - func: run tests
+      vars:
+        AUTH: auth
+        SSL: ssl
+        TOPOLOGY: sharded_cluster
+        VERSION: latest
+        PYTHON_VERSION: "3.14"
+    tags:
+    - test-non-standard
+    - server-latest
+    - python-3.14
+    - sharded_cluster-auth-ssl
+    - auth
+    - pr
+  - name: test-non-standard-latest-python3.13-noauth-nossl-standalone
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: "8.0"
-        TOPOLOGY: sharded_cluster
         AUTH: noauth
-        SSL: ssl
+        SSL: nossl
+        TOPOLOGY: standalone
+        VERSION: latest
     - func: run tests
       vars:
         AUTH: noauth
-        SSL: ssl
-        SYNC: async
-        TEST_SUITES: default_async
+        SSL: nossl
+        TOPOLOGY: standalone
+        VERSION: latest
+        PYTHON_VERSION: "3.13"
     tags:
-    - "8.0"
-    - sharded_cluster
+    - test-non-standard
+    - server-latest
+    - python-3.13
+    - standalone-noauth-nossl
     - noauth
-    - ssl
-    - async
-  - name: test-8.0-sharded_cluster-noauth-ssl-sync_async
+    - pr
+  - name: test-non-standard-rapid-python3.11-noauth-ssl-replica-set
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: "8.0"
-        TOPOLOGY: sharded_cluster
         AUTH: noauth
         SSL: ssl
+        TOPOLOGY: replica_set
+        VERSION: rapid
     - func: run tests
       vars:
         AUTH: noauth
         SSL: ssl
-        SYNC: sync_async
-        TEST_SUITES: ""
+        TOPOLOGY: replica_set
+        VERSION: rapid
+        PYTHON_VERSION: "3.11"
     tags:
-    - "8.0"
-    - sharded_cluster
+    - test-non-standard
+    - server-rapid
+    - python-3.11
+    - replica_set-noauth-ssl
     - noauth
-    - ssl
-    - sync_async
-  - name: test-8.0-sharded_cluster-noauth-nossl-sync
+  - name: test-non-standard-rapid-python3.12-auth-ssl-sharded-cluster
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: "8.0"
+        AUTH: auth
+        SSL: ssl
         TOPOLOGY: sharded_cluster
-        AUTH: noauth
-        SSL: nossl
+        VERSION: rapid
     - func: run tests
       vars:
-        AUTH: noauth
-        SSL: nossl
-        SYNC: sync
-        TEST_SUITES: default
+        AUTH: auth
+        SSL: ssl
+        TOPOLOGY: sharded_cluster
+        VERSION: rapid
+        PYTHON_VERSION: "3.12"
     tags:
-    - "8.0"
-    - sharded_cluster
-    - noauth
-    - nossl
-    - sync
-  - name: test-8.0-sharded_cluster-noauth-nossl-async
+    - test-non-standard
+    - server-rapid
+    - python-3.12
+    - sharded_cluster-auth-ssl
+    - auth
+  - name: test-non-standard-rapid-python3.10-noauth-nossl-standalone
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: "8.0"
-        TOPOLOGY: sharded_cluster
         AUTH: noauth
         SSL: nossl
+        TOPOLOGY: standalone
+        VERSION: rapid
     - func: run tests
       vars:
         AUTH: noauth
         SSL: nossl
-        SYNC: async
-        TEST_SUITES: default_async
+        TOPOLOGY: standalone
+        VERSION: rapid
+        PYTHON_VERSION: "3.10"
     tags:
-    - "8.0"
-    - sharded_cluster
+    - test-non-standard
+    - server-rapid
+    - python-3.10
+    - standalone-noauth-nossl
     - noauth
-    - nossl
-    - async
-  - name: test-8.0-sharded_cluster-noauth-nossl-sync_async
+  - name: test-non-standard-rapid-pypy3.10-noauth-nossl-standalone
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: "8.0"
-        TOPOLOGY: sharded_cluster
         AUTH: noauth
         SSL: nossl
+        TOPOLOGY: standalone
+        VERSION: rapid
     - func: run tests
       vars:
         AUTH: noauth
         SSL: nossl
-        SYNC: sync_async
-        TEST_SUITES: ""
+        TOPOLOGY: standalone
+        VERSION: rapid
+        PYTHON_VERSION: pypy3.10
     tags:
-    - "8.0"
-    - sharded_cluster
+    - test-non-standard
+    - server-rapid
+    - python-pypy3.10
+    - standalone-noauth-nossl
     - noauth
-    - nossl
-    - sync_async
-  - name: test-rapid-sharded_cluster-auth-ssl-sync
+    - pypy
+
+  # Test standard auth tests
+  - name: test-standard-auth-v4.2-python3.10-auth-ssl-sharded-cluster
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: rapid
-        TOPOLOGY: sharded_cluster
         AUTH: auth
         SSL: ssl
+        TOPOLOGY: sharded_cluster
+        VERSION: "4.2"
     - func: run tests
       vars:
         AUTH: auth
         SSL: ssl
-        SYNC: sync
-        TEST_SUITES: default
+        TOPOLOGY: sharded_cluster
+        VERSION: "4.2"
+        PYTHON_VERSION: "3.10"
     tags:
-    - rapid
-    - sharded_cluster
+    - test-standard-auth
+    - server-4.2
+    - python-3.10
+    - sharded_cluster-auth-ssl
     - auth
-    - ssl
-    - sync
-  - name: test-rapid-sharded_cluster-auth-ssl-async
+  - name: test-standard-auth-v4.2-pypy3.10-auth-ssl-sharded-cluster
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: rapid
-        TOPOLOGY: sharded_cluster
         AUTH: auth
         SSL: ssl
+        TOPOLOGY: sharded_cluster
+        VERSION: "4.2"
     - func: run tests
       vars:
         AUTH: auth
         SSL: ssl
-        SYNC: async
-        TEST_SUITES: default_async
+        TOPOLOGY: sharded_cluster
+        VERSION: "4.2"
+        PYTHON_VERSION: pypy3.10
     tags:
-    - rapid
-    - sharded_cluster
+    - test-standard-auth
+    - server-4.2
+    - python-pypy3.10
+    - sharded_cluster-auth-ssl
     - auth
-    - ssl
-    - async
-  - name: test-rapid-sharded_cluster-auth-ssl-sync_async
+    - pypy
+  - name: test-standard-auth-v4.4-python3.11-auth-ssl-sharded-cluster
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: rapid
-        TOPOLOGY: sharded_cluster
         AUTH: auth
         SSL: ssl
+        TOPOLOGY: sharded_cluster
+        VERSION: "4.4"
     - func: run tests
       vars:
         AUTH: auth
         SSL: ssl
-        SYNC: sync_async
-        TEST_SUITES: ""
+        TOPOLOGY: sharded_cluster
+        VERSION: "4.4"
+        PYTHON_VERSION: "3.11"
     tags:
-    - rapid
-    - sharded_cluster
+    - test-standard-auth
+    - server-4.4
+    - python-3.11
+    - sharded_cluster-auth-ssl
     - auth
-    - ssl
-    - sync_async
-  - name: test-rapid-sharded_cluster-noauth-ssl-sync
+  - name: test-standard-auth-v4.4-pypy3.10-auth-ssl-sharded-cluster
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: rapid
-        TOPOLOGY: sharded_cluster
-        AUTH: noauth
+        AUTH: auth
         SSL: ssl
+        TOPOLOGY: sharded_cluster
+        VERSION: "4.4"
     - func: run tests
       vars:
-        AUTH: noauth
+        AUTH: auth
         SSL: ssl
-        SYNC: sync
-        TEST_SUITES: default
+        TOPOLOGY: sharded_cluster
+        VERSION: "4.4"
+        PYTHON_VERSION: pypy3.10
     tags:
-    - rapid
-    - sharded_cluster
-    - noauth
-    - ssl
-    - sync
-  - name: test-rapid-sharded_cluster-noauth-ssl-async
+    - test-standard-auth
+    - server-4.4
+    - python-pypy3.10
+    - sharded_cluster-auth-ssl
+    - auth
+    - pypy
+  - name: test-standard-auth-v5.0-python3.12-auth-ssl-sharded-cluster
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: rapid
-        TOPOLOGY: sharded_cluster
-        AUTH: noauth
+        AUTH: auth
         SSL: ssl
+        TOPOLOGY: sharded_cluster
+        VERSION: "5.0"
     - func: run tests
       vars:
-        AUTH: noauth
+        AUTH: auth
         SSL: ssl
-        SYNC: async
-        TEST_SUITES: default_async
+        TOPOLOGY: sharded_cluster
+        VERSION: "5.0"
+        PYTHON_VERSION: "3.12"
     tags:
-    - rapid
-    - sharded_cluster
-    - noauth
-    - ssl
-    - async
-  - name: test-rapid-sharded_cluster-noauth-ssl-sync_async
+    - test-standard-auth
+    - server-5.0
+    - python-3.12
+    - sharded_cluster-auth-ssl
+    - auth
+  - name: test-standard-auth-v5.0-pypy3.10-auth-ssl-sharded-cluster
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: rapid
-        TOPOLOGY: sharded_cluster
-        AUTH: noauth
+        AUTH: auth
         SSL: ssl
+        TOPOLOGY: sharded_cluster
+        VERSION: "5.0"
     - func: run tests
       vars:
-        AUTH: noauth
+        AUTH: auth
         SSL: ssl
-        SYNC: sync_async
-        TEST_SUITES: ""
-    tags:
-    - rapid
-    - sharded_cluster
-    - noauth
-    - ssl
-    - sync_async
-  - name: test-rapid-sharded_cluster-noauth-nossl-sync
-    commands:
-    - func: bootstrap mongo-orchestration
-      vars:
-        VERSION: rapid
         TOPOLOGY: sharded_cluster
-        AUTH: noauth
-        SSL: nossl
-    - func: run tests
-      vars:
-        AUTH: noauth
-        SSL: nossl
-        SYNC: sync
-        TEST_SUITES: default
+        VERSION: "5.0"
+        PYTHON_VERSION: pypy3.10
     tags:
-    - rapid
-    - sharded_cluster
-    - noauth
-    - nossl
-    - sync
-  - name: test-rapid-sharded_cluster-noauth-nossl-async
+    - test-standard-auth
+    - server-5.0
+    - python-pypy3.10
+    - sharded_cluster-auth-ssl
+    - auth
+    - pypy
+  - name: test-standard-auth-v6.0-python3.13-auth-ssl-sharded-cluster
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: rapid
+        AUTH: auth
+        SSL: ssl
         TOPOLOGY: sharded_cluster
-        AUTH: noauth
-        SSL: nossl
+        VERSION: "6.0"
     - func: run tests
       vars:
-        AUTH: noauth
-        SSL: nossl
-        SYNC: async
-        TEST_SUITES: default_async
-    tags:
-    - rapid
-    - sharded_cluster
-    - noauth
-    - nossl
-    - async
-  - name: test-rapid-sharded_cluster-noauth-nossl-sync_async
-    commands:
-    - func: bootstrap mongo-orchestration
-      vars:
-        VERSION: rapid
+        AUTH: auth
+        SSL: ssl
         TOPOLOGY: sharded_cluster
-        AUTH: noauth
-        SSL: nossl
-    - func: run tests
-      vars:
-        AUTH: noauth
-        SSL: nossl
-        SYNC: sync_async
-        TEST_SUITES: ""
+        VERSION: "6.0"
+        PYTHON_VERSION: "3.13"
     tags:
-    - rapid
-    - sharded_cluster
-    - noauth
-    - nossl
-    - sync_async
-  - name: test-latest-sharded_cluster-auth-ssl-sync
+    - test-standard-auth
+    - server-6.0
+    - python-3.13
+    - sharded_cluster-auth-ssl
+    - auth
+  - name: test-standard-auth-v6.0-pypy3.10-auth-ssl-sharded-cluster
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: latest
-        TOPOLOGY: sharded_cluster
         AUTH: auth
         SSL: ssl
+        TOPOLOGY: sharded_cluster
+        VERSION: "6.0"
     - func: run tests
       vars:
         AUTH: auth
         SSL: ssl
-        SYNC: sync
-        TEST_SUITES: default
+        TOPOLOGY: sharded_cluster
+        VERSION: "6.0"
+        PYTHON_VERSION: pypy3.10
     tags:
-    - latest
-    - sharded_cluster
+    - test-standard-auth
+    - server-6.0
+    - python-pypy3.10
+    - sharded_cluster-auth-ssl
     - auth
-    - ssl
-    - sync
-  - name: test-latest-sharded_cluster-auth-ssl-async
+    - pypy
+  - name: test-standard-auth-v7.0-python3.14t-auth-ssl-sharded-cluster
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: latest
-        TOPOLOGY: sharded_cluster
         AUTH: auth
         SSL: ssl
+        TOPOLOGY: sharded_cluster
+        VERSION: "7.0"
     - func: run tests
       vars:
         AUTH: auth
         SSL: ssl
-        SYNC: async
-        TEST_SUITES: default_async
+        TOPOLOGY: sharded_cluster
+        VERSION: "7.0"
+        PYTHON_VERSION: 3.14t
     tags:
-    - latest
-    - sharded_cluster
+    - test-standard-auth
+    - server-7.0
+    - python-3.14t
+    - sharded_cluster-auth-ssl
     - auth
-    - ssl
-    - async
-  - name: test-latest-sharded_cluster-auth-ssl-sync_async
+    - free-threaded
+  - name: test-standard-auth-v7.0-pypy3.10-auth-ssl-sharded-cluster
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: latest
-        TOPOLOGY: sharded_cluster
         AUTH: auth
         SSL: ssl
+        TOPOLOGY: sharded_cluster
+        VERSION: "7.0"
     - func: run tests
       vars:
         AUTH: auth
         SSL: ssl
-        SYNC: sync_async
-        TEST_SUITES: ""
+        TOPOLOGY: sharded_cluster
+        VERSION: "7.0"
+        PYTHON_VERSION: pypy3.10
     tags:
-    - latest
-    - sharded_cluster
+    - test-standard-auth
+    - server-7.0
+    - python-pypy3.10
+    - sharded_cluster-auth-ssl
     - auth
-    - ssl
-    - sync_async
-  - name: test-latest-sharded_cluster-noauth-ssl-sync
+    - pypy
+  - name: test-standard-auth-v8.0-python3.14-auth-ssl-sharded-cluster
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: latest
-        TOPOLOGY: sharded_cluster
-        AUTH: noauth
+        AUTH: auth
         SSL: ssl
+        TOPOLOGY: sharded_cluster
+        VERSION: "8.0"
     - func: run tests
       vars:
-        AUTH: noauth
+        AUTH: auth
         SSL: ssl
-        SYNC: sync
-        TEST_SUITES: default
+        TOPOLOGY: sharded_cluster
+        VERSION: "8.0"
+        PYTHON_VERSION: "3.14"
     tags:
-    - latest
-    - sharded_cluster
-    - noauth
-    - ssl
-    - sync
-  - name: test-latest-sharded_cluster-noauth-ssl-async
+    - test-standard-auth
+    - server-8.0
+    - python-3.14
+    - sharded_cluster-auth-ssl
+    - auth
+  - name: test-standard-auth-v8.0-pypy3.10-auth-ssl-sharded-cluster
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: latest
-        TOPOLOGY: sharded_cluster
-        AUTH: noauth
+        AUTH: auth
         SSL: ssl
+        TOPOLOGY: sharded_cluster
+        VERSION: "8.0"
     - func: run tests
       vars:
-        AUTH: noauth
+        AUTH: auth
         SSL: ssl
-        SYNC: async
-        TEST_SUITES: default_async
+        TOPOLOGY: sharded_cluster
+        VERSION: "8.0"
+        PYTHON_VERSION: pypy3.10
     tags:
-    - latest
-    - sharded_cluster
-    - noauth
-    - ssl
-    - async
-  - name: test-latest-sharded_cluster-noauth-ssl-sync_async
+    - test-standard-auth
+    - server-8.0
+    - python-pypy3.10
+    - sharded_cluster-auth-ssl
+    - auth
+    - pypy
+  - name: test-standard-auth-latest-python3.11-auth-ssl-sharded-cluster
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: latest
-        TOPOLOGY: sharded_cluster
-        AUTH: noauth
+        AUTH: auth
         SSL: ssl
+        TOPOLOGY: sharded_cluster
+        VERSION: latest
     - func: run tests
       vars:
-        AUTH: noauth
+        AUTH: auth
         SSL: ssl
-        SYNC: sync_async
-        TEST_SUITES: ""
+        TOPOLOGY: sharded_cluster
+        VERSION: latest
+        PYTHON_VERSION: "3.11"
     tags:
-    - latest
-    - sharded_cluster
-    - noauth
-    - ssl
-    - sync_async
-  - name: test-latest-sharded_cluster-noauth-nossl-sync
+    - test-standard-auth
+    - server-latest
+    - python-3.11
+    - sharded_cluster-auth-ssl
+    - auth
+    - pr
+  - name: test-standard-auth-latest-pypy3.10-auth-ssl-sharded-cluster
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: latest
+        AUTH: auth
+        SSL: ssl
         TOPOLOGY: sharded_cluster
-        AUTH: noauth
-        SSL: nossl
+        VERSION: latest
     - func: run tests
       vars:
-        AUTH: noauth
-        SSL: nossl
-        SYNC: sync
-        TEST_SUITES: default
+        AUTH: auth
+        SSL: ssl
+        TOPOLOGY: sharded_cluster
+        VERSION: latest
+        PYTHON_VERSION: pypy3.10
     tags:
-    - latest
-    - sharded_cluster
-    - noauth
-    - nossl
-    - sync
-  - name: test-latest-sharded_cluster-noauth-nossl-async
+    - test-standard-auth
+    - server-latest
+    - python-pypy3.10
+    - sharded_cluster-auth-ssl
+    - auth
+    - pypy
+  - name: test-standard-auth-rapid-python3.10-auth-ssl-sharded-cluster
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: latest
+        AUTH: auth
+        SSL: ssl
         TOPOLOGY: sharded_cluster
-        AUTH: noauth
-        SSL: nossl
+        VERSION: rapid
     - func: run tests
       vars:
-        AUTH: noauth
-        SSL: nossl
-        SYNC: async
-        TEST_SUITES: default_async
+        AUTH: auth
+        SSL: ssl
+        TOPOLOGY: sharded_cluster
+        VERSION: rapid
+        PYTHON_VERSION: "3.10"
     tags:
-    - latest
-    - sharded_cluster
-    - noauth
-    - nossl
-    - async
-  - name: test-latest-sharded_cluster-noauth-nossl-sync_async
+    - test-standard-auth
+    - server-rapid
+    - python-3.10
+    - sharded_cluster-auth-ssl
+    - auth
+  - name: test-standard-auth-rapid-pypy3.10-auth-ssl-sharded-cluster
     commands:
-    - func: bootstrap mongo-orchestration
+    - func: run server
       vars:
-        VERSION: latest
+        AUTH: auth
+        SSL: ssl
         TOPOLOGY: sharded_cluster
-        AUTH: noauth
-        SSL: nossl
+        VERSION: rapid
     - func: run tests
       vars:
-        AUTH: noauth
-        SSL: nossl
-        SYNC: sync_async
-        TEST_SUITES: ""
+        AUTH: auth
+        SSL: ssl
+        TOPOLOGY: sharded_cluster
+        VERSION: rapid
+        PYTHON_VERSION: pypy3.10
     tags:
-    - latest
-    - sharded_cluster
-    - noauth
-    - nossl
-    - sync_async
+    - test-standard-auth
+    - server-rapid
+    - python-pypy3.10
+    - sharded_cluster-auth-ssl
+    - auth
+    - pypy
diff --git a/.evergreen/generated_configs/variants.yml b/.evergreen/generated_configs/variants.yml
index b17a500ade..9bae5f4680 100644
--- a/.evergreen/generated_configs/variants.yml
+++ b/.evergreen/generated_configs/variants.yml
@@ -1,1377 +1,616 @@
 buildvariants:
   # Alternative hosts tests
-  - name: openssl-1.0.2-rhel7-python3.9
+  - name: other-hosts-rhel9-fips-latest
     tasks:
-    - name: .5.0 .standalone !.sync_async
-    display_name: OpenSSL 1.0.2 RHEL7 Python3.9
-    run_on:
-    - rhel79-small
-    batchtime: 10080
-    expansions:
-      PYTHON_BINARY: /opt/python/3.9/bin/python3
-  - name: other-hosts-rhel9-fips
-    tasks:
-    - name: .6.0 .standalone !.sync_async
-    display_name: Other hosts RHEL9-FIPS
+    - name: .test-no-toolchain
+    display_name: Other hosts RHEL9-FIPS latest
     run_on:
     - rhel92-fips
-    batchtime: 10080
+    batchtime: 1440
     expansions:
+      VERSION: latest
       NO_EXT: "1"
-  - name: other-hosts-rhel8-zseries
+      REQUIRE_FIPS: "1"
+      PYTHON_BINARY: /usr/bin/python3.11
+    tags: []
+  - name: other-hosts-rhel8-zseries-latest
     tasks:
-    - name: .6.0 .standalone !.sync_async
-    display_name: Other hosts RHEL8-zseries
+    - name: .test-no-toolchain
+    display_name: Other hosts RHEL8-zseries latest
     run_on:
     - rhel8-zseries-small
-    batchtime: 10080
+    batchtime: 1440
     expansions:
+      VERSION: latest
       NO_EXT: "1"
-  - name: other-hosts-rhel8-power8
+    tags: []
+  - name: other-hosts-rhel8-power8-latest
     tasks:
-    - name: .6.0 .standalone !.sync_async
-    display_name: Other hosts RHEL8-POWER8
+    - name: .test-no-toolchain
+    display_name: Other hosts RHEL8-POWER8 latest
     run_on:
     - rhel8-power-small
-    batchtime: 10080
+    batchtime: 1440
     expansions:
+      VERSION: latest
       NO_EXT: "1"
-  - name: other-hosts-rhel8-arm64
+    tags: []
+  - name: other-hosts-rhel8-arm64-latest
     tasks:
-    - name: .6.0 .standalone !.sync_async
-    display_name: Other hosts RHEL8-arm64
+    - name: .test-no-toolchain
+    display_name: Other hosts RHEL8-arm64 latest
     run_on:
     - rhel82-arm64-small
-    batchtime: 10080
-    expansions:
-    - NO_EXT: "1"
-
-  # Atlas connect tests
-  - name: atlas-connect-rhel8-python3.9
-    tasks:
-    - name: atlas-connect
-    display_name: Atlas connect RHEL8 Python3.9
-    run_on:
-    - rhel87-small
-    expansions:
-      PYTHON_BINARY: /opt/python/3.9/bin/python3
-  - name: atlas-connect-rhel8-python3.13
-    tasks:
-    - name: atlas-connect
-    display_name: Atlas connect RHEL8 Python3.13
-    run_on:
-    - rhel87-small
+    batchtime: 1440
     expansions:
-      PYTHON_BINARY: /opt/python/3.13/bin/python3
-
-  # Atlas data lake tests
-  - name: atlas-data-lake-ubuntu-22-python3.9-auth-no-c
-    tasks:
-    - name: atlas-data-lake-tests
-    display_name: Atlas Data Lake Ubuntu-22 Python3.9 Auth No C
-    run_on:
-    - ubuntu2204-small
-    expansions:
-      AUTH: auth
+      VERSION: latest
       NO_EXT: "1"
-      PYTHON_BINARY: /opt/python/3.9/bin/python3
-  - name: atlas-data-lake-ubuntu-22-python3.9-auth
+    tags: []
+  - name: other-hosts-amazon2023-latest
     tasks:
-    - name: atlas-data-lake-tests
-    display_name: Atlas Data Lake Ubuntu-22 Python3.9 Auth
+    - name: .test-no-toolchain
+    display_name: Other hosts Amazon2023 latest
     run_on:
-    - ubuntu2204-small
+    - amazon2023-arm64-latest-large-m8g
+    batchtime: 1440
     expansions:
-      AUTH: auth
-      PYTHON_BINARY: /opt/python/3.9/bin/python3
-  - name: atlas-data-lake-ubuntu-22-python3.13-auth-no-c
-    tasks:
-    - name: atlas-data-lake-tests
-    display_name: Atlas Data Lake Ubuntu-22 Python3.13 Auth No C
-    run_on:
-    - ubuntu2204-small
-    expansions:
-      AUTH: auth
+      VERSION: latest
       NO_EXT: "1"
-      PYTHON_BINARY: /opt/python/3.13/bin/python3
-  - name: atlas-data-lake-ubuntu-22-python3.13-auth
+    tags: [pr]
+
+  # Atlas connect tests
+  - name: atlas-connect-rhel8
     tasks:
-    - name: atlas-data-lake-tests
-    display_name: Atlas Data Lake Ubuntu-22 Python3.13 Auth
+    - name: .test-no-orchestration
+    display_name: Atlas connect RHEL8
     run_on:
-    - ubuntu2204-small
+    - rhel87-small
     expansions:
-      AUTH: auth
-      PYTHON_BINARY: /opt/python/3.13/bin/python3
+      TEST_NAME: atlas_connect
+    tags: [pr]
 
   # Aws auth tests
-  - name: auth-aws-ubuntu-20-python3.9
+  - name: auth-aws-ubuntu-20
     tasks:
-    - name: aws-auth-test-4.4
-    - name: aws-auth-test-5.0
-    - name: aws-auth-test-6.0
-    - name: aws-auth-test-7.0
-    - name: aws-auth-test-8.0
-    - name: aws-auth-test-rapid
-    - name: aws-auth-test-latest
-    display_name: Auth AWS Ubuntu-20 Python3.9
+    - name: .auth-aws !.auth-aws-ecs
+    display_name: Auth AWS Ubuntu-20
     run_on:
     - ubuntu2004-small
-    expansions:
-      PYTHON_BINARY: /opt/python/3.9/bin/python3
-  - name: auth-aws-ubuntu-20-python3.13
-    tasks:
-    - name: aws-auth-test-4.4
-    - name: aws-auth-test-5.0
-    - name: aws-auth-test-6.0
-    - name: aws-auth-test-7.0
-    - name: aws-auth-test-8.0
-    - name: aws-auth-test-rapid
-    - name: aws-auth-test-latest
-    display_name: Auth AWS Ubuntu-20 Python3.13
-    run_on:
-    - ubuntu2004-small
-    expansions:
-      PYTHON_BINARY: /opt/python/3.13/bin/python3
-  - name: auth-aws-win64-python3.9
-    tasks:
-    - name: aws-auth-test-4.4
-    - name: aws-auth-test-5.0
-    - name: aws-auth-test-6.0
-    - name: aws-auth-test-7.0
-    - name: aws-auth-test-8.0
-    - name: aws-auth-test-rapid
-    - name: aws-auth-test-latest
-    display_name: Auth AWS Win64 Python3.9
-    run_on:
-    - windows-64-vsMulti-small
-    expansions:
-      skip_ECS_auth_test: "true"
-      PYTHON_BINARY: C:/python/Python39/python.exe
-  - name: auth-aws-win64-python3.13
+    tags: []
+  - name: auth-aws-win64
     tasks:
-    - name: aws-auth-test-4.4
-    - name: aws-auth-test-5.0
-    - name: aws-auth-test-6.0
-    - name: aws-auth-test-7.0
-    - name: aws-auth-test-8.0
-    - name: aws-auth-test-rapid
-    - name: aws-auth-test-latest
-    display_name: Auth AWS Win64 Python3.13
+    - name: .auth-aws !.auth-aws-ecs
+    display_name: Auth AWS Win64
     run_on:
     - windows-64-vsMulti-small
-    expansions:
-      skip_ECS_auth_test: "true"
-      PYTHON_BINARY: C:/python/Python313/python.exe
-  - name: auth-aws-macos-python3.9
-    tasks:
-    - name: aws-auth-test-4.4
-    - name: aws-auth-test-5.0
-    - name: aws-auth-test-6.0
-    - name: aws-auth-test-7.0
-    - name: aws-auth-test-8.0
-    - name: aws-auth-test-rapid
-    - name: aws-auth-test-latest
-    display_name: Auth AWS macOS Python3.9
-    run_on:
-    - macos-14
-    expansions:
-      skip_ECS_auth_test: "true"
-      skip_EC2_auth_test: "true"
-      skip_web_identity_auth_test: "true"
-      PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3
-  - name: auth-aws-macos-python3.13
+    tags: []
+  - name: auth-aws-macos
     tasks:
-    - name: aws-auth-test-4.4
-    - name: aws-auth-test-5.0
-    - name: aws-auth-test-6.0
-    - name: aws-auth-test-7.0
-    - name: aws-auth-test-8.0
-    - name: aws-auth-test-rapid
-    - name: aws-auth-test-latest
-    display_name: Auth AWS macOS Python3.13
+    - name: .auth-aws !.auth-aws-web-identity !.auth-aws-ecs !.auth-aws-ec2
+    display_name: Auth AWS macOS
     run_on:
     - macos-14
-    expansions:
-      skip_ECS_auth_test: "true"
-      skip_EC2_auth_test: "true"
-      skip_web_identity_auth_test: "true"
-      PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3
+    tags: [pr]
 
   # Compression tests
-  - name: compression-snappy-rhel8-python3.9-no-c
+  - name: compression-snappy-rhel8
     tasks:
-    - name: .standalone .noauth .nossl .sync_async
-    display_name: Compression snappy RHEL8 Python3.9 No C
-    run_on:
-    - rhel87-small
-    expansions:
-      COMPRESSORS: snappy
-      NO_EXT: "1"
-      PYTHON_BINARY: /opt/python/3.9/bin/python3
-  - name: compression-snappy-rhel8-python3.10
-    tasks:
-    - name: .standalone .noauth .nossl .sync_async
-    display_name: Compression snappy RHEL8 Python3.10
-    run_on:
-    - rhel87-small
-    expansions:
-      COMPRESSORS: snappy
-      PYTHON_BINARY: /opt/python/3.10/bin/python3
-  - name: compression-zlib-rhel8-python3.11-no-c
-    tasks:
-    - name: .standalone .noauth .nossl .sync_async
-    display_name: Compression zlib RHEL8 Python3.11 No C
-    run_on:
-    - rhel87-small
-    expansions:
-      COMPRESSORS: zlib
-      NO_EXT: "1"
-      PYTHON_BINARY: /opt/python/3.11/bin/python3
-  - name: compression-zlib-rhel8-python3.12
+  # Aws lambda tests
+  - name: faas-lambda
     tasks:
-    - name: .standalone .noauth .nossl .sync_async
-    display_name: Compression zlib RHEL8 Python3.12
+    - name: .aws_lambda
+    display_name: FaaS Lambda
     run_on:
     - rhel87-small
-    expansions:
-      COMPRESSORS: zlib
-      PYTHON_BINARY: /opt/python/3.12/bin/python3
-  - name: compression-zstd-rhel8-python3.13-no-c
+
+  # Backport pr tests
+  - name: backport-pr
     tasks:
-    - name: .standalone .noauth .nossl .sync_async !.4.0
-    display_name: Compression zstd RHEL8 Python3.13 No C
+    - name: backport-pr
+    display_name: Backport PR
     run_on:
     - rhel87-small
-    expansions:
-      COMPRESSORS: zstd
-      NO_EXT: "1"
-      PYTHON_BINARY: /opt/python/3.13/bin/python3
-  - name: compression-zstd-rhel8-python3.9
+
+  # Compression tests
+  - name: compression-snappy-rhel8
     tasks:
-    - name: .standalone .noauth .nossl .sync_async !.4.0
-    display_name: Compression zstd RHEL8 Python3.9
+    - name: .test-standard
+    display_name: Compression snappy RHEL8
     run_on:
     - rhel87-small
     expansions:
-      COMPRESSORS: zstd
-      PYTHON_BINARY: /opt/python/3.9/bin/python3
-  - name: compression-snappy-rhel8-pypy3.9
+      COMPRESSOR: snappy
+  - name: compression-zlib-rhel8
     tasks:
-    - name: .standalone .noauth .nossl .sync_async
-    display_name: Compression snappy RHEL8 PyPy3.9
+    - name: .test-standard
+    display_name: Compression zlib RHEL8
     run_on:
     - rhel87-small
     expansions:
-      COMPRESSORS: snappy
-      PYTHON_BINARY: /opt/python/pypy3.9/bin/python3
-  - name: compression-zlib-rhel8-pypy3.10
+      COMPRESSOR: zlib
+  - name: compression-zstd-rhel8
     tasks:
-    - name: .standalone .noauth .nossl .sync_async
-    display_name: Compression zlib RHEL8 PyPy3.10
+    - name: .test-standard !.server-4.2
+    display_name: Compression zstd RHEL8
     run_on:
     - rhel87-small
     expansions:
-      COMPRESSORS: zlib
-      PYTHON_BINARY: /opt/python/pypy3.10/bin/python3
-  - name: compression-zstd-rhel8-pypy3.9
+      COMPRESSOR: zstd
+
+  # Coverage report tests
+  - name: coverage-report
     tasks:
-    - name: .standalone .noauth .nossl .sync_async !.4.0
-    display_name: Compression zstd RHEL8 PyPy3.9
+    - name: coverage-report
+    display_name: Coverage Report
     run_on:
     - rhel87-small
-    expansions:
-      COMPRESSORS: zstd
-      PYTHON_BINARY: /opt/python/pypy3.9/bin/python3
 
   # Disable test commands tests
-  - name: disable-test-commands-rhel8-python3.9
+  - name: disable-test-commands-rhel8
     tasks:
-    - name: .latest .sync_async
-    display_name: Disable test commands RHEL8 Python3.9
+    - name: .test-standard .server-latest
+    display_name: Disable test commands RHEL8
     run_on:
     - rhel87-small
     expansions:
       AUTH: auth
       SSL: ssl
       DISABLE_TEST_COMMANDS: "1"
-      PYTHON_BINARY: /opt/python/3.9/bin/python3
 
   # Doctests tests
-  - name: doctests-rhel8-python3.9
+  - name: doctests-rhel8
    tasks:
-    - name: doctests
-    display_name: Doctests RHEL8 Python3.9
+    - name: .test-non-standard .standalone-noauth-nossl
+    display_name: Doctests RHEL8
     run_on:
     - rhel87-small
     expansions:
-      PYTHON_BINARY: /opt/python/3.9/bin/python3
+      TEST_NAME: doctest
 
   # Encryption tests
-  - name: encryption-rhel8-python3.9
+  - name: encryption-rhel8
     tasks:
-    - name: .sharded_cluster .auth .ssl .sync_async
-    - name: .replica_set .noauth .ssl .sync_async
-    - name: .standalone .noauth .nossl .sync_async
-    display_name: Encryption RHEL8 Python3.9
+    - name: .test-non-standard
+    - name: .test-min-deps
+    display_name: Encryption RHEL8
     run_on:
     - rhel87-small
-    batchtime: 10080
+    batchtime: 1440
     expansions:
-      test_encryption: "true"
-      PYTHON_BINARY: /opt/python/3.9/bin/python3
+      TEST_NAME: encryption
     tags: [encryption_tag]
-  - name: encryption-rhel8-python3.13
+  - name: encryption-macos
     tasks:
-    - name: .sharded_cluster .auth .ssl .sync_async
-    - name: .replica_set .noauth .ssl .sync_async
-    - name: .standalone .noauth .nossl .sync_async
-    display_name: Encryption RHEL8 Python3.13
-    run_on:
-    - rhel87-small
-    batchtime: 10080
-    expansions:
-      test_encryption: "true"
-      PYTHON_BINARY: /opt/python/3.13/bin/python3
-    tags: [encryption_tag]
-  - name: encryption-rhel8-pypy3.10
-    tasks:
-    - name: .sharded_cluster .auth .ssl .sync_async
-    - name: .replica_set .noauth .ssl .sync_async
-    - name: .standalone .noauth .nossl .sync_async
-    display_name: Encryption RHEL8 PyPy3.10
-    run_on:
-    - rhel87-small
-    batchtime: 10080
-    expansions:
-      test_encryption: "true"
-      PYTHON_BINARY: /opt/python/pypy3.10/bin/python3
-    tags: [encryption_tag]
-  - name: encryption-crypt_shared-rhel8-python3.9
-    tasks:
-    - name: .sharded_cluster .auth .ssl .sync_async
-    - name: .replica_set .noauth .ssl .sync_async
-    - name: .standalone .noauth .nossl .sync_async
-    display_name: Encryption crypt_shared RHEL8 Python3.9
-    run_on:
-    - rhel87-small
-    batchtime: 10080
-    expansions:
-      test_encryption: "true"
-      test_crypt_shared: "true"
-      PYTHON_BINARY: /opt/python/3.9/bin/python3
-    tags: [encryption_tag]
-  - name: encryption-crypt_shared-rhel8-python3.13
-    tasks:
-    - name: .sharded_cluster .auth .ssl .sync_async
-    - name: .replica_set .noauth .ssl .sync_async
-    - name: .standalone .noauth .nossl .sync_async
-    display_name: Encryption crypt_shared RHEL8 Python3.13
-    run_on:
-    - rhel87-small
-    batchtime: 10080
-    expansions:
-      test_encryption: "true"
-      test_crypt_shared: "true"
-      PYTHON_BINARY: /opt/python/3.13/bin/python3
-    tags: [encryption_tag]
-  - name: encryption-crypt_shared-rhel8-pypy3.10
-    tasks:
-    - name: .sharded_cluster .auth .ssl .sync_async
-    - name: .replica_set .noauth .ssl .sync_async
-    - name: .standalone .noauth .nossl .sync_async
-    display_name: Encryption crypt_shared RHEL8 PyPy3.10
-    run_on:
-    - rhel87-small
-    batchtime: 10080
-    expansions:
-      test_encryption: "true"
-      test_crypt_shared: "true"
-      PYTHON_BINARY: /opt/python/pypy3.10/bin/python3
-    tags: [encryption_tag]
-  - name: encryption-pyopenssl-rhel8-python3.9
-    tasks:
-    - name: .sharded_cluster .auth .ssl .sync_async
-    - name: .replica_set .noauth .ssl .sync_async
-    - name: .standalone .noauth .nossl .sync_async
-    display_name: Encryption PyOpenSSL RHEL8 Python3.9
-    run_on:
-    - rhel87-small
-    batchtime: 10080
-    expansions:
-      test_encryption: "true"
-      test_encryption_pyopenssl: "true"
-      PYTHON_BINARY: /opt/python/3.9/bin/python3
-    tags: [encryption_tag]
-  - name: encryption-pyopenssl-rhel8-python3.13
-    tasks:
-    - name: .sharded_cluster .auth .ssl .sync_async
-    - name: .replica_set .noauth .ssl .sync_async
-    - name: .standalone .noauth .nossl .sync_async
-    display_name: Encryption PyOpenSSL RHEL8 Python3.13
PyOpenSSL RHEL8 Python3.13 - run_on: - - rhel87-small - batchtime: 10080 - expansions: - test_encryption: "true" - test_encryption_pyopenssl: "true" - PYTHON_BINARY: /opt/python/3.13/bin/python3 - tags: [encryption_tag] - - name: encryption-pyopenssl-rhel8-pypy3.10 - tasks: - - name: .sharded_cluster .auth .ssl .sync_async - - name: .replica_set .noauth .ssl .sync_async - - name: .standalone .noauth .nossl .sync_async - display_name: Encryption PyOpenSSL RHEL8 PyPy3.10 - run_on: - - rhel87-small - batchtime: 10080 - expansions: - test_encryption: "true" - test_encryption_pyopenssl: "true" - PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 - tags: [encryption_tag] - - name: encryption-rhel8-python3.10 - tasks: - - name: .sharded_cluster .auth .ssl .sync_async - display_name: Encryption RHEL8 Python3.10 - run_on: - - rhel87-small - expansions: - test_encryption: "true" - PYTHON_BINARY: /opt/python/3.10/bin/python3 - - name: encryption-crypt_shared-rhel8-python3.11 - tasks: - - name: .replica_set .noauth .ssl .sync_async - display_name: Encryption crypt_shared RHEL8 Python3.11 - run_on: - - rhel87-small - expansions: - test_encryption: "true" - test_crypt_shared: "true" - PYTHON_BINARY: /opt/python/3.11/bin/python3 - - name: encryption-pyopenssl-rhel8-python3.12 - tasks: - - name: .standalone .noauth .nossl .sync_async - display_name: Encryption PyOpenSSL RHEL8 Python3.12 - run_on: - - rhel87-small - expansions: - test_encryption: "true" - test_encryption_pyopenssl: "true" - PYTHON_BINARY: /opt/python/3.12/bin/python3 - - name: encryption-rhel8-pypy3.9 - tasks: - - name: .sharded_cluster .auth .ssl .sync_async - display_name: Encryption RHEL8 PyPy3.9 - run_on: - - rhel87-small - expansions: - test_encryption: "true" - PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 - - name: encryption-macos-python3.9 - tasks: - - name: .latest .replica_set .sync_async - display_name: Encryption macOS Python3.9 + - name: .test-non-standard !.pypy + display_name: Encryption macOS run_on: - macos-14 - batchtime: 10080 + batchtime: 1440 expansions: - test_encryption: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 + TEST_NAME: encryption tags: [encryption_tag] - - name: encryption-macos-python3.13 + - name: encryption-win64 tasks: - - name: .latest .replica_set .sync_async - display_name: Encryption macOS Python3.13 + - name: .test-non-standard !.pypy + display_name: Encryption Win64 run_on: - - macos-14 - batchtime: 10080 + - windows-64-vsMulti-small + batchtime: 1440 expansions: - test_encryption: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 + TEST_NAME: encryption tags: [encryption_tag] - - name: encryption-crypt_shared-macos-python3.9 + - name: encryption-crypt_shared-rhel8 tasks: - - name: .latest .replica_set .sync_async - display_name: Encryption crypt_shared macOS Python3.9 + - name: .test-non-standard + - name: .test-min-deps + display_name: Encryption crypt_shared RHEL8 run_on: - - macos-14 - batchtime: 10080 + - rhel87-small + batchtime: 1440 expansions: - test_encryption: "true" - test_crypt_shared: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 + TEST_NAME: encryption + TEST_CRYPT_SHARED: "true" tags: [encryption_tag] - - name: encryption-crypt_shared-macos-python3.13 + - name: encryption-crypt_shared-macos tasks: - - name: .latest .replica_set .sync_async - display_name: Encryption crypt_shared macOS Python3.13 + - name: .test-non-standard !.pypy + display_name: Encryption 
crypt_shared macOS run_on: - macos-14 - batchtime: 10080 - expansions: - test_encryption: "true" - test_crypt_shared: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 - tags: [encryption_tag] - - name: encryption-win64-python3.9 - tasks: - - name: .latest .replica_set .sync_async - display_name: Encryption Win64 Python3.9 - run_on: - - windows-64-vsMulti-small - batchtime: 10080 - expansions: - test_encryption: "true" - PYTHON_BINARY: C:/python/Python39/python.exe - tags: [encryption_tag] - - name: encryption-win64-python3.13 - tasks: - - name: .latest .replica_set .sync_async - display_name: Encryption Win64 Python3.13 - run_on: - - windows-64-vsMulti-small - batchtime: 10080 + batchtime: 1440 expansions: - test_encryption: "true" - PYTHON_BINARY: C:/python/Python313/python.exe + TEST_NAME: encryption + TEST_CRYPT_SHARED: "true" tags: [encryption_tag] - - name: encryption-crypt_shared-win64-python3.9 + - name: encryption-crypt_shared-win64 tasks: - - name: .latest .replica_set .sync_async - display_name: Encryption crypt_shared Win64 Python3.9 + - name: .test-non-standard !.pypy + display_name: Encryption crypt_shared Win64 run_on: - windows-64-vsMulti-small - batchtime: 10080 + batchtime: 1440 expansions: - test_encryption: "true" - test_crypt_shared: "true" - PYTHON_BINARY: C:/python/Python39/python.exe + TEST_NAME: encryption + TEST_CRYPT_SHARED: "true" tags: [encryption_tag] - - name: encryption-crypt_shared-win64-python3.13 + - name: encryption-pyopenssl-rhel8 tasks: - - name: .latest .replica_set .sync_async - display_name: Encryption crypt_shared Win64 Python3.13 + - name: .test-non-standard + display_name: Encryption PyOpenSSL RHEL8 run_on: - - windows-64-vsMulti-small - batchtime: 10080 + - rhel87-small + batchtime: 1440 expansions: - test_encryption: "true" - test_crypt_shared: "true" - PYTHON_BINARY: C:/python/Python313/python.exe + TEST_NAME: encryption + SUB_TEST_NAME: pyopenssl tags: [encryption_tag] # Enterprise auth tests - - name: auth-enterprise-macos-python3.9-auth - tasks: - - name: test-enterprise-auth - display_name: Auth Enterprise macOS Python3.9 Auth - run_on: - - macos-14 - expansions: - AUTH: auth - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 - - name: auth-enterprise-rhel8-python3.10-auth - tasks: - - name: test-enterprise-auth - display_name: Auth Enterprise RHEL8 Python3.10 Auth - run_on: - - rhel87-small - expansions: - AUTH: auth - PYTHON_BINARY: /opt/python/3.10/bin/python3 - - name: auth-enterprise-rhel8-python3.11-auth + - name: auth-enterprise-rhel8 tasks: - - name: test-enterprise-auth - display_name: Auth Enterprise RHEL8 Python3.11 Auth + - name: .test-standard-auth .auth !.free-threaded + display_name: Auth Enterprise RHEL8 run_on: - rhel87-small expansions: + TEST_NAME: enterprise_auth AUTH: auth - PYTHON_BINARY: /opt/python/3.11/bin/python3 - - name: auth-enterprise-rhel8-python3.12-auth + - name: auth-enterprise-macos tasks: - - name: test-enterprise-auth - display_name: Auth Enterprise RHEL8 Python3.12 Auth + - name: .test-standard-auth !.pypy .auth !.free-threaded + display_name: Auth Enterprise macOS run_on: - - rhel87-small + - macos-14 expansions: + TEST_NAME: enterprise_auth AUTH: auth - PYTHON_BINARY: /opt/python/3.12/bin/python3 - - name: auth-enterprise-win64-python3.13-auth + - name: auth-enterprise-win64 tasks: - - name: test-enterprise-auth - display_name: Auth Enterprise Win64 Python3.13 Auth + - name: .test-standard-auth !.pypy .auth + display_name: Auth 
Enterprise Win64 run_on: - windows-64-vsMulti-small expansions: + TEST_NAME: enterprise_auth AUTH: auth - PYTHON_BINARY: C:/python/Python313/python.exe - - name: auth-enterprise-rhel8-pypy3.9-auth - tasks: - - name: test-enterprise-auth - display_name: Auth Enterprise RHEL8 PyPy3.9 Auth - run_on: - - rhel87-small - expansions: - AUTH: auth - PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 - - name: auth-enterprise-rhel8-pypy3.10-auth - tasks: - - name: test-enterprise-auth - display_name: Auth Enterprise RHEL8 PyPy3.10 Auth - run_on: - - rhel87-small - expansions: - AUTH: auth - PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 # Green framework tests - - name: green-eventlet-rhel8-python3.9 + - name: green-gevent-rhel8 tasks: - - name: .standalone .noauth .nossl .sync_async - display_name: Green Eventlet RHEL8 Python3.9 - run_on: - - rhel87-small - expansions: - GREEN_FRAMEWORK: eventlet - AUTH: auth - SSL: ssl - PYTHON_BINARY: /opt/python/3.9/bin/python3 - - name: green-gevent-rhel8-python3.9 - tasks: - - name: .standalone .noauth .nossl .sync_async - display_name: Green Gevent RHEL8 Python3.9 + - name: .test-standard .sync !.free-threaded + display_name: Green Gevent RHEL8 run_on: - rhel87-small expansions: GREEN_FRAMEWORK: gevent - AUTH: auth - SSL: ssl - PYTHON_BINARY: /opt/python/3.9/bin/python3 - - name: green-eventlet-rhel8-python3.12 - tasks: - - name: .standalone .noauth .nossl .sync_async - display_name: Green Eventlet RHEL8 Python3.12 - run_on: - - rhel87-small - expansions: - GREEN_FRAMEWORK: eventlet - AUTH: auth - SSL: ssl - PYTHON_BINARY: /opt/python/3.12/bin/python3 - - name: green-gevent-rhel8-python3.12 + + # Import time tests + - name: import-time tasks: - - name: .standalone .noauth .nossl .sync_async - display_name: Green Gevent RHEL8 Python3.12 + - name: check-import-time + display_name: Import Time run_on: - rhel87-small - expansions: - GREEN_FRAMEWORK: gevent - AUTH: auth - SSL: ssl - PYTHON_BINARY: /opt/python/3.12/bin/python3 + + # Kms tests + - name: kms + tasks: + - name: test-gcpkms + batchtime: 1440 + - name: test-gcpkms-fail + - name: test-azurekms + batchtime: 1440 + - name: test-azurekms-fail + display_name: KMS + run_on: + - debian11-small # Load balancer tests - - name: load-balancer-rhel8-v6.0-python3.9 + - name: load-balancer tasks: - - name: .load-balancer - display_name: Load Balancer RHEL8 v6.0 Python3.9 + - name: .test-non-standard .server-6.0 .sharded_cluster-auth-ssl + - name: .test-non-standard .server-7.0 .sharded_cluster-auth-ssl + - name: .test-non-standard .server-8.0 .sharded_cluster-auth-ssl + - name: .test-non-standard .server-rapid .sharded_cluster-auth-ssl + - name: .test-non-standard .server-latest .sharded_cluster-auth-ssl + display_name: Load Balancer run_on: - rhel87-small - batchtime: 10080 - expansions: - VERSION: "6.0" - PYTHON_BINARY: /opt/python/3.9/bin/python3 - - name: load-balancer-rhel8-v7.0-python3.9 - tasks: - - name: .load-balancer - display_name: Load Balancer RHEL8 v7.0 Python3.9 - run_on: - - rhel87-small - batchtime: 10080 + batchtime: 1440 expansions: - VERSION: "7.0" - PYTHON_BINARY: /opt/python/3.9/bin/python3 - - name: load-balancer-rhel8-v8.0-python3.9 - tasks: - - name: .load-balancer - display_name: Load Balancer RHEL8 v8.0 Python3.9 - run_on: - - rhel87-small - batchtime: 10080 - expansions: - VERSION: "8.0" - PYTHON_BINARY: /opt/python/3.9/bin/python3 - - name: load-balancer-rhel8-rapid-python3.9 - tasks: - - name: .load-balancer - display_name: Load Balancer RHEL8 rapid Python3.9 - run_on: - - rhel87-small - 
batchtime: 10080 - expansions: - VERSION: rapid - PYTHON_BINARY: /opt/python/3.9/bin/python3 - - name: load-balancer-rhel8-latest-python3.9 - tasks: - - name: .load-balancer - display_name: Load Balancer RHEL8 latest Python3.9 - run_on: - - rhel87-small - batchtime: 10080 - expansions: - VERSION: latest - PYTHON_BINARY: /opt/python/3.9/bin/python3 + TEST_NAME: load_balancer # Mockupdb tests - - name: mockupdb-rhel8-python3.9 + - name: mockupdb-rhel8 tasks: - - name: mockupdb - display_name: MockupDB RHEL8 Python3.9 + - name: .test-no-orchestration + display_name: MockupDB RHEL8 run_on: - rhel87-small expansions: - PYTHON_BINARY: /opt/python/3.9/bin/python3 + TEST_NAME: mockupdb + tags: [pr] # Mod wsgi tests - - name: mod_wsgi-ubuntu-22-python3.9 - tasks: - - name: mod-wsgi-standalone - - name: mod-wsgi-replica-set - - name: mod-wsgi-embedded-mode-standalone - - name: mod-wsgi-embedded-mode-replica-set - display_name: mod_wsgi Ubuntu-22 Python3.9 - run_on: - - ubuntu2204-small - expansions: - MOD_WSGI_VERSION: "4" - PYTHON_BINARY: /opt/python/3.9/bin/python3 - - name: mod_wsgi-ubuntu-22-python3.13 + - name: mod_wsgi-ubuntu-22 tasks: - - name: mod-wsgi-standalone - - name: mod-wsgi-replica-set - - name: mod-wsgi-embedded-mode-standalone - - name: mod-wsgi-embedded-mode-replica-set - display_name: mod_wsgi Ubuntu-22 Python3.13 + - name: .mod_wsgi + display_name: Mod_WSGI Ubuntu-22 run_on: - ubuntu2204-small expansions: MOD_WSGI_VERSION: "4" - PYTHON_BINARY: /opt/python/3.13/bin/python3 # No c ext tests - - name: no-c-ext-rhel8-python3.9 + - name: no-c-ext-rhel8 tasks: - - name: .standalone .noauth .nossl .sync_async - display_name: No C Ext RHEL8 Python3.9 + - name: .test-standard + display_name: No C Ext RHEL8 run_on: - rhel87-small - expansions: - NO_EXT: "1" - PYTHON_BINARY: /opt/python/3.9/bin/python3 - - name: no-c-ext-rhel8-python3.10 - tasks: - - name: .replica_set .noauth .nossl .sync_async - display_name: No C Ext RHEL8 Python3.10 - run_on: - - rhel87-small - expansions: - NO_EXT: "1" - PYTHON_BINARY: /opt/python/3.10/bin/python3 - - name: no-c-ext-rhel8-python3.11 - tasks: - - name: .sharded_cluster .noauth .nossl .sync_async - display_name: No C Ext RHEL8 Python3.11 - run_on: - - rhel87-small - expansions: - NO_EXT: "1" - PYTHON_BINARY: /opt/python/3.11/bin/python3 - - name: no-c-ext-rhel8-python3.12 - tasks: - - name: .standalone .noauth .nossl .sync_async - display_name: No C Ext RHEL8 Python3.12 - run_on: - - rhel87-small - expansions: - NO_EXT: "1" - PYTHON_BINARY: /opt/python/3.12/bin/python3 - - name: no-c-ext-rhel8-python3.13 + + # No server tests + - name: no-server-rhel8 tasks: - - name: .replica_set .noauth .nossl .sync_async - display_name: No C Ext RHEL8 Python3.13 + - name: .test-no-orchestration + display_name: No server RHEL8 run_on: - rhel87-small - expansions: - NO_EXT: "1" - PYTHON_BINARY: /opt/python/3.13/bin/python3 + tags: [pr] # Ocsp tests - - name: ocsp-rhel8-v4.4-python3.9 - tasks: - - name: .ocsp - display_name: OCSP RHEL8 v4.4 Python3.9 - run_on: - - rhel87-small - batchtime: 20160 - expansions: - AUTH: noauth - SSL: ssl - TOPOLOGY: server - VERSION: "4.4" - PYTHON_BINARY: /opt/python/3.9/bin/python3 - - name: ocsp-rhel8-v5.0-python3.10 - tasks: - - name: .ocsp - display_name: OCSP RHEL8 v5.0 Python3.10 - run_on: - - rhel87-small - batchtime: 20160 - expansions: - AUTH: noauth - SSL: ssl - TOPOLOGY: server - VERSION: "5.0" - PYTHON_BINARY: /opt/python/3.10/bin/python3 - - name: ocsp-rhel8-v6.0-python3.11 - tasks: - - name: .ocsp - display_name: OCSP RHEL8 
v6.0 Python3.11 - run_on: - - rhel87-small - batchtime: 20160 - expansions: - AUTH: noauth - SSL: ssl - TOPOLOGY: server - VERSION: "6.0" - PYTHON_BINARY: /opt/python/3.11/bin/python3 - - name: ocsp-rhel8-v7.0-python3.12 - tasks: - - name: .ocsp - display_name: OCSP RHEL8 v7.0 Python3.12 - run_on: - - rhel87-small - batchtime: 20160 - expansions: - AUTH: noauth - SSL: ssl - TOPOLOGY: server - VERSION: "7.0" - PYTHON_BINARY: /opt/python/3.12/bin/python3 - - name: ocsp-rhel8-v8.0-python3.13 - tasks: - - name: .ocsp - display_name: OCSP RHEL8 v8.0 Python3.13 - run_on: - - rhel87-small - batchtime: 20160 - expansions: - AUTH: noauth - SSL: ssl - TOPOLOGY: server - VERSION: "8.0" - PYTHON_BINARY: /opt/python/3.13/bin/python3 - - name: ocsp-rhel8-rapid-pypy3.9 + - name: ocsp-rhel8 tasks: - name: .ocsp - display_name: OCSP RHEL8 rapid PyPy3.9 + display_name: OCSP RHEL8 run_on: - rhel87-small - batchtime: 20160 - expansions: - AUTH: noauth - SSL: ssl - TOPOLOGY: server - VERSION: rapid - PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 - - name: ocsp-rhel8-latest-pypy3.10 - tasks: - - name: .ocsp - display_name: OCSP RHEL8 latest PyPy3.10 - run_on: - - rhel87-small - batchtime: 20160 - expansions: - AUTH: noauth - SSL: ssl - TOPOLOGY: server - VERSION: latest - PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 - - name: ocsp-win64-v4.4-python3.9 - tasks: - - name: .ocsp-rsa !.ocsp-staple - display_name: OCSP Win64 v4.4 Python3.9 - run_on: - - windows-64-vsMulti-small - batchtime: 20160 - expansions: - AUTH: noauth - SSL: ssl - TOPOLOGY: server - VERSION: "4.4" - PYTHON_BINARY: C:/python/Python39/python.exe - - name: ocsp-win64-v8.0-python3.13 + batchtime: 10080 + - name: ocsp-win64 tasks: - - name: .ocsp-rsa !.ocsp-staple - display_name: OCSP Win64 v8.0 Python3.13 + - name: .ocsp-rsa !.ocsp-staple .latest + - name: .ocsp-rsa !.ocsp-staple .4.4 + display_name: OCSP Win64 run_on: - windows-64-vsMulti-small - batchtime: 20160 - expansions: - AUTH: noauth - SSL: ssl - TOPOLOGY: server - VERSION: "8.0" - PYTHON_BINARY: C:/python/Python313/python.exe - - name: ocsp-macos-v4.4-python3.9 - tasks: - - name: .ocsp-rsa !.ocsp-staple - display_name: OCSP macOS v4.4 Python3.9 - run_on: - - macos-14 - batchtime: 20160 - expansions: - AUTH: noauth - SSL: ssl - TOPOLOGY: server - VERSION: "4.4" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 - - name: ocsp-macos-v8.0-python3.13 + batchtime: 10080 + - name: ocsp-macos tasks: - - name: .ocsp-rsa !.ocsp-staple - display_name: OCSP macOS v8.0 Python3.13 + - name: .ocsp-rsa !.ocsp-staple .latest + - name: .ocsp-rsa !.ocsp-staple .4.4 + display_name: OCSP macOS run_on: - macos-14 - batchtime: 20160 - expansions: - AUTH: noauth - SSL: ssl - TOPOLOGY: server - VERSION: "8.0" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 + batchtime: 10080 # Oidc auth tests - name: auth-oidc-ubuntu-22 tasks: - - name: testoidc_task_group - - name: testazureoidc_task_group - - name: testgcpoidc_task_group - - name: testk8soidc_task_group + - name: .auth_oidc_remote display_name: Auth OIDC Ubuntu-22 run_on: - ubuntu2204-small - batchtime: 20160 + batchtime: 1440 + - name: auth-oidc-local-ubuntu-22 + tasks: + - name: "!.auth_oidc_remote .auth_oidc" + display_name: Auth OIDC Local Ubuntu-22 + run_on: + - ubuntu2204-small + batchtime: 1440 + tags: [pr] - name: auth-oidc-macos tasks: - - name: testoidc_task_group + - name: "!.auth_oidc_remote .auth_oidc" display_name: Auth OIDC macOS run_on: - macos-14 - batchtime: 20160 + batchtime: 
1440 - name: auth-oidc-win64 tasks: - - name: testoidc_task_group + - name: "!.auth_oidc_remote .auth_oidc" display_name: Auth OIDC Win64 run_on: - windows-64-vsMulti-small - batchtime: 20160 + batchtime: 1440 - # Pyopenssl tests - - name: pyopenssl-macos-python3.9 - tasks: - - name: .replica_set .noauth .nossl .sync_async - - name: .7.0 .noauth .nossl .sync_async - display_name: PyOpenSSL macOS Python3.9 - run_on: - - macos-14 - batchtime: 10080 - expansions: - test_pyopenssl: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 - - name: pyopenssl-rhel8-python3.10 + # Perf tests + - name: performance-benchmarks tasks: - - name: .replica_set .auth .ssl .sync_async - - name: .7.0 .auth .ssl .sync_async - display_name: PyOpenSSL RHEL8 Python3.10 + - name: .perf + display_name: Performance Benchmarks run_on: - - rhel87-small - batchtime: 10080 - expansions: - test_pyopenssl: "true" - PYTHON_BINARY: /opt/python/3.10/bin/python3 - - name: pyopenssl-rhel8-python3.11 + - rhel90-dbx-perf-large + batchtime: 1440 + + # Pyopenssl tests + - name: pyopenssl-rhel8 tasks: - - name: .replica_set .auth .ssl .sync_async - - name: .7.0 .auth .ssl .sync_async - display_name: PyOpenSSL RHEL8 Python3.11 + - name: .test-standard .sync + - name: .test-standard .async .replica_set-noauth-ssl + display_name: PyOpenSSL RHEL8 run_on: - rhel87-small - batchtime: 10080 + batchtime: 1440 expansions: - test_pyopenssl: "true" - PYTHON_BINARY: /opt/python/3.11/bin/python3 - - name: pyopenssl-rhel8-python3.12 + SUB_TEST_NAME: pyopenssl + - name: pyopenssl-macos tasks: - - name: .replica_set .auth .ssl .sync_async - - name: .7.0 .auth .ssl .sync_async - display_name: PyOpenSSL RHEL8 Python3.12 + - name: .test-standard !.pypy .sync + - name: .test-standard !.pypy .async .replica_set-noauth-ssl + display_name: PyOpenSSL macOS run_on: - rhel87-small - batchtime: 10080 - expansions: - test_pyopenssl: "true" - PYTHON_BINARY: /opt/python/3.12/bin/python3 - - name: pyopenssl-win64-python3.13 - tasks: - - name: .replica_set .auth .ssl .sync_async - - name: .7.0 .auth .ssl .sync_async - display_name: PyOpenSSL Win64 Python3.13 - run_on: - - windows-64-vsMulti-small - batchtime: 10080 + batchtime: 1440 expansions: - test_pyopenssl: "true" - PYTHON_BINARY: C:/python/Python313/python.exe - - name: pyopenssl-rhel8-pypy3.9 + SUB_TEST_NAME: pyopenssl + - name: pyopenssl-win64 tasks: - - name: .replica_set .auth .ssl .sync_async - - name: .7.0 .auth .ssl .sync_async - display_name: PyOpenSSL RHEL8 PyPy3.9 + - name: .test-standard !.pypy .sync + - name: .test-standard !.pypy .async .replica_set-noauth-ssl + display_name: PyOpenSSL Win64 run_on: - rhel87-small - batchtime: 10080 + batchtime: 1440 expansions: - test_pyopenssl: "true" - PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 - - name: pyopenssl-rhel8-pypy3.10 - tasks: - - name: .replica_set .auth .ssl .sync_async - - name: .7.0 .auth .ssl .sync_async - display_name: PyOpenSSL RHEL8 PyPy3.10 - run_on: - - rhel87-small - batchtime: 10080 - expansions: - test_pyopenssl: "true" - PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 + SUB_TEST_NAME: pyopenssl # Search index tests - - name: search-index-helpers-rhel8-python3.9 + - name: search-index-helpers-rhel8-python3.10 tasks: - - name: test_atlas_task_group_search_indexes - display_name: Search Index Helpers RHEL8 Python3.9 + - name: .search_index + display_name: Search Index Helpers RHEL8 Python3.10 run_on: - rhel87-small expansions: - PYTHON_BINARY: /opt/python/3.9/bin/python3 + PYTHON_BINARY: 
/opt/python/3.10/bin/python3 - # Server tests - - name: test-rhel8-python3.9-cov + # Server version tests + - name: mongodb-v4.2 tasks: - - name: .standalone .sync_async - - name: .replica_set .sync_async - - name: .sharded_cluster .sync_async - display_name: "* Test RHEL8 Python3.9 cov" + - name: .server-version + display_name: "* MongoDB v4.2" run_on: - rhel87-small expansions: - COVERAGE: coverage - PYTHON_BINARY: /opt/python/3.9/bin/python3 + VERSION: "4.2" tags: [coverage_tag] - - name: test-rhel8-python3.13-cov + - name: mongodb-v4.4 tasks: - - name: .standalone .sync_async - - name: .replica_set .sync_async - - name: .sharded_cluster .sync_async - display_name: "* Test RHEL8 Python3.13 cov" + - name: .server-version + display_name: "* MongoDB v4.4" run_on: - rhel87-small expansions: - COVERAGE: coverage - PYTHON_BINARY: /opt/python/3.13/bin/python3 + VERSION: "4.4" tags: [coverage_tag] - - name: test-rhel8-pypy3.10-cov + - name: mongodb-v5.0 tasks: - - name: .standalone .sync_async - - name: .replica_set .sync_async - - name: .sharded_cluster .sync_async - display_name: "* Test RHEL8 PyPy3.10 cov" + - name: .server-version + display_name: "* MongoDB v5.0" run_on: - rhel87-small expansions: - COVERAGE: coverage - PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 + VERSION: "5.0" tags: [coverage_tag] - - name: test-rhel8-python3.10 - tasks: - - name: .sharded_cluster .auth .ssl .sync_async - - name: .replica_set .noauth .ssl .sync_async - - name: .standalone .noauth .nossl .sync_async - display_name: "* Test RHEL8 Python3.10" - run_on: - - rhel87-small - expansions: - COVERAGE: coverage - PYTHON_BINARY: /opt/python/3.10/bin/python3 - - name: test-rhel8-python3.11 + - name: mongodb-v6.0 tasks: - - name: .sharded_cluster .auth .ssl .sync_async - - name: .replica_set .noauth .ssl .sync_async - - name: .standalone .noauth .nossl .sync_async - display_name: "* Test RHEL8 Python3.11" + - name: .server-version + display_name: "* MongoDB v6.0" run_on: - rhel87-small expansions: - COVERAGE: coverage - PYTHON_BINARY: /opt/python/3.11/bin/python3 - - name: test-rhel8-python3.12 + VERSION: "6.0" + tags: [coverage_tag] + - name: mongodb-v7.0 tasks: - - name: .sharded_cluster .auth .ssl .sync_async - - name: .replica_set .noauth .ssl .sync_async - - name: .standalone .noauth .nossl .sync_async - display_name: "* Test RHEL8 Python3.12" + - name: .server-version + display_name: "* MongoDB v7.0" run_on: - rhel87-small expansions: - COVERAGE: coverage - PYTHON_BINARY: /opt/python/3.12/bin/python3 - - name: test-rhel8-pypy3.9 + VERSION: "7.0" + tags: [coverage_tag] + - name: mongodb-v8.0 tasks: - - name: .sharded_cluster .auth .ssl .sync_async - - name: .replica_set .noauth .ssl .sync_async - - name: .standalone .noauth .nossl .sync_async - display_name: "* Test RHEL8 PyPy3.9" + - name: .server-version + display_name: "* MongoDB v8.0" run_on: - rhel87-small expansions: - COVERAGE: coverage - PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 - - name: test-macos-python3.9 - tasks: - - name: .sharded_cluster .auth .ssl !.sync_async - - name: .replica_set .noauth .ssl !.sync_async - - name: .standalone .noauth .nossl !.sync_async - display_name: "* Test macOS Python3.9" - run_on: - - macos-14 - expansions: - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 - - name: test-macos-python3.13 - tasks: - - name: .sharded_cluster .auth .ssl !.sync_async - - name: .replica_set .noauth .ssl !.sync_async - - name: .standalone .noauth .nossl !.sync_async - display_name: "* Test macOS 
Python3.13" - run_on: - - macos-14 - expansions: - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 - - name: test-macos-arm64-python3.9 - tasks: - - name: .sharded_cluster .auth .ssl .6.0 !.sync_async - - name: .replica_set .noauth .ssl .6.0 !.sync_async - - name: .standalone .noauth .nossl .6.0 !.sync_async - - name: .sharded_cluster .auth .ssl .7.0 !.sync_async - - name: .replica_set .noauth .ssl .7.0 !.sync_async - - name: .standalone .noauth .nossl .7.0 !.sync_async - - name: .sharded_cluster .auth .ssl .8.0 !.sync_async - - name: .replica_set .noauth .ssl .8.0 !.sync_async - - name: .standalone .noauth .nossl .8.0 !.sync_async - - name: .sharded_cluster .auth .ssl .rapid !.sync_async - - name: .replica_set .noauth .ssl .rapid !.sync_async - - name: .standalone .noauth .nossl .rapid !.sync_async - - name: .sharded_cluster .auth .ssl .latest !.sync_async - - name: .replica_set .noauth .ssl .latest !.sync_async - - name: .standalone .noauth .nossl .latest !.sync_async - display_name: "* Test macOS Arm64 Python3.9" - run_on: - - macos-14-arm64 - expansions: - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 - - name: test-macos-arm64-python3.13 - tasks: - - name: .sharded_cluster .auth .ssl .6.0 !.sync_async - - name: .replica_set .noauth .ssl .6.0 !.sync_async - - name: .standalone .noauth .nossl .6.0 !.sync_async - - name: .sharded_cluster .auth .ssl .7.0 !.sync_async - - name: .replica_set .noauth .ssl .7.0 !.sync_async - - name: .standalone .noauth .nossl .7.0 !.sync_async - - name: .sharded_cluster .auth .ssl .8.0 !.sync_async - - name: .replica_set .noauth .ssl .8.0 !.sync_async - - name: .standalone .noauth .nossl .8.0 !.sync_async - - name: .sharded_cluster .auth .ssl .rapid !.sync_async - - name: .replica_set .noauth .ssl .rapid !.sync_async - - name: .standalone .noauth .nossl .rapid !.sync_async - - name: .sharded_cluster .auth .ssl .latest !.sync_async - - name: .replica_set .noauth .ssl .latest !.sync_async - - name: .standalone .noauth .nossl .latest !.sync_async - display_name: "* Test macOS Arm64 Python3.13" - run_on: - - macos-14-arm64 - expansions: - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 - - name: test-win64-python3.9 - tasks: - - name: .sharded_cluster .auth .ssl !.sync_async - - name: .replica_set .noauth .ssl !.sync_async - - name: .standalone .noauth .nossl !.sync_async - display_name: "* Test Win64 Python3.9" - run_on: - - windows-64-vsMulti-small - expansions: - PYTHON_BINARY: C:/python/Python39/python.exe - - name: test-win64-python3.13 - tasks: - - name: .sharded_cluster .auth .ssl !.sync_async - - name: .replica_set .noauth .ssl !.sync_async - - name: .standalone .noauth .nossl !.sync_async - display_name: "* Test Win64 Python3.13" - run_on: - - windows-64-vsMulti-small - expansions: - PYTHON_BINARY: C:/python/Python313/python.exe - - name: test-win32-python3.9 - tasks: - - name: .sharded_cluster .auth .ssl !.sync_async - - name: .replica_set .noauth .ssl !.sync_async - - name: .standalone .noauth .nossl !.sync_async - display_name: "* Test Win32 Python3.9" - run_on: - - windows-64-vsMulti-small - expansions: - PYTHON_BINARY: C:/python/32/Python39/python.exe - - name: test-win32-python3.13 - tasks: - - name: .sharded_cluster .auth .ssl !.sync_async - - name: .replica_set .noauth .ssl !.sync_async - - name: .standalone .noauth .nossl !.sync_async - display_name: "* Test Win32 Python3.13" - run_on: - - windows-64-vsMulti-small - expansions: - PYTHON_BINARY: 
C:/python/32/Python313/python.exe - - # Serverless tests - - name: serverless-rhel8-python3.9 + VERSION: "8.0" + tags: [coverage_tag] + - name: mongodb-rapid tasks: - - name: serverless_task_group - display_name: Serverless RHEL8 Python3.9 + - name: .server-version + display_name: "* MongoDB rapid" run_on: - rhel87-small - batchtime: 10080 expansions: - test_serverless: "true" - AUTH: auth - SSL: ssl - PYTHON_BINARY: /opt/python/3.9/bin/python3 - - name: serverless-rhel8-python3.13 + VERSION: rapid + tags: [coverage_tag] + - name: mongodb-latest tasks: - - name: serverless_task_group - display_name: Serverless RHEL8 Python3.13 + - name: .server-version + display_name: "* MongoDB latest" run_on: - rhel87-small - batchtime: 10080 expansions: - test_serverless: "true" - AUTH: auth - SSL: ssl - PYTHON_BINARY: /opt/python/3.13/bin/python3 + VERSION: latest + tags: [coverage_tag] # Stable api tests - - name: stable-api-require-v1-rhel8-python3.9-auth + - name: stable-api-require-v1-rhel8-auth tasks: - - name: .standalone .5.0 .noauth .nossl .sync_async - - name: .standalone .6.0 .noauth .nossl .sync_async - - name: .standalone .7.0 .noauth .nossl .sync_async - - name: .standalone .8.0 .noauth .nossl .sync_async - - name: .standalone .rapid .noauth .nossl .sync_async - - name: .standalone .latest .noauth .nossl .sync_async - display_name: Stable API require v1 RHEL8 Python3.9 Auth + - name: .test-standard !.replica_set-noauth-ssl .server-5.0 + - name: .test-standard !.replica_set-noauth-ssl .server-6.0 + - name: .test-standard !.replica_set-noauth-ssl .server-7.0 + - name: .test-standard !.replica_set-noauth-ssl .server-8.0 + - name: .test-standard !.replica_set-noauth-ssl .server-rapid + - name: .test-standard !.replica_set-noauth-ssl .server-latest + display_name: Stable API require v1 RHEL8 Auth run_on: - rhel87-small expansions: AUTH: auth REQUIRE_API_VERSION: "1" MONGODB_API_VERSION: "1" - PYTHON_BINARY: /opt/python/3.9/bin/python3 tags: [versionedApi_tag] - - name: stable-api-accept-v2-rhel8-python3.9-auth + - name: stable-api-accept-v2-rhel8-auth tasks: - - name: .standalone .5.0 .noauth .nossl .sync_async - - name: .standalone .6.0 .noauth .nossl .sync_async - - name: .standalone .7.0 .noauth .nossl .sync_async - - name: .standalone .8.0 .noauth .nossl .sync_async - - name: .standalone .rapid .noauth .nossl .sync_async - - name: .standalone .latest .noauth .nossl .sync_async - display_name: Stable API accept v2 RHEL8 Python3.9 Auth + - name: .test-standard .server-5.0 .standalone-noauth-nossl + - name: .test-standard .server-6.0 .standalone-noauth-nossl + - name: .test-standard .server-7.0 .standalone-noauth-nossl + - name: .test-standard .server-8.0 .standalone-noauth-nossl + - name: .test-standard .server-rapid .standalone-noauth-nossl + - name: .test-standard .server-latest .standalone-noauth-nossl + display_name: Stable API accept v2 RHEL8 Auth run_on: - rhel87-small expansions: AUTH: auth ORCHESTRATION_FILE: versioned-api-testing.json - PYTHON_BINARY: /opt/python/3.9/bin/python3 tags: [versionedApi_tag] - - name: stable-api-require-v1-rhel8-python3.13-auth + + # Standard nonlinux tests + - name: test-macos tasks: - - name: .standalone .5.0 .noauth .nossl .sync_async - - name: .standalone .6.0 .noauth .nossl .sync_async - - name: .standalone .7.0 .noauth .nossl .sync_async - - name: .standalone .8.0 .noauth .nossl .sync_async - - name: .standalone .rapid .noauth .nossl .sync_async - - name: .standalone .latest .noauth .nossl .sync_async - display_name: Stable API require v1 RHEL8 
Python3.13 Auth + - name: .test-standard !.pypy + display_name: "* Test macOS" run_on: - - rhel87-small - expansions: - AUTH: auth - REQUIRE_API_VERSION: "1" - MONGODB_API_VERSION: "1" - PYTHON_BINARY: /opt/python/3.13/bin/python3 - tags: [versionedApi_tag] - - name: stable-api-accept-v2-rhel8-python3.13-auth + - macos-14 + tags: [standard-non-linux] + - name: test-macos-arm64 tasks: - - name: .standalone .5.0 .noauth .nossl .sync_async - - name: .standalone .6.0 .noauth .nossl .sync_async - - name: .standalone .7.0 .noauth .nossl .sync_async - - name: .standalone .8.0 .noauth .nossl .sync_async - - name: .standalone .rapid .noauth .nossl .sync_async - - name: .standalone .latest .noauth .nossl .sync_async - display_name: Stable API accept v2 RHEL8 Python3.13 Auth + - name: .test-standard !.pypy .server-6.0 + - name: .test-standard !.pypy .server-7.0 + - name: .test-standard !.pypy .server-8.0 + - name: .test-standard !.pypy .server-rapid + - name: .test-standard !.pypy .server-latest + display_name: "* Test macOS Arm64" run_on: - - rhel87-small + - macos-14-arm64 + tags: [standard-non-linux] + - name: test-win64 + tasks: + - name: .test-standard !.pypy + display_name: "* Test Win64" + run_on: + - windows-64-vsMulti-small + tags: [standard-non-linux] + - name: test-win32 + tasks: + - name: .test-standard !.pypy + display_name: "* Test Win32" + run_on: + - windows-64-vsMulti-small expansions: - AUTH: auth - ORCHESTRATION_FILE: versioned-api-testing.json - PYTHON_BINARY: /opt/python/3.13/bin/python3 - tags: [versionedApi_tag] + IS_WIN32: "1" + tags: [standard-non-linux] # Storage engine tests - - name: storage-inmemory-rhel8-python3.9 + - name: storage-inmemory-rhel8 tasks: - - name: .standalone .noauth .nossl .4.0 .sync_async - - name: .standalone .noauth .nossl .4.4 .sync_async - - name: .standalone .noauth .nossl .5.0 .sync_async - - name: .standalone .noauth .nossl .6.0 .sync_async - - name: .standalone .noauth .nossl .7.0 .sync_async - - name: .standalone .noauth .nossl .8.0 .sync_async - - name: .standalone .noauth .nossl .rapid .sync_async - - name: .standalone .noauth .nossl .latest .sync_async - display_name: Storage InMemory RHEL8 Python3.9 + - name: .test-standard .standalone-noauth-nossl + display_name: Storage InMemory RHEL8 run_on: - rhel87-small expansions: STORAGE_ENGINE: inmemory - PYTHON_BINARY: /opt/python/3.9/bin/python3 - - name: storage-mmapv1-rhel8-python3.9 - tasks: - - name: .standalone .4.0 .noauth .nossl .sync_async - - name: .replica_set .4.0 .noauth .nossl .sync_async - display_name: Storage MMAPv1 RHEL8 Python3.9 - run_on: - - rhel87-small - expansions: - STORAGE_ENGINE: mmapv1 - PYTHON_BINARY: /opt/python/3.9/bin/python3 diff --git a/.evergreen/hatch.sh b/.evergreen/hatch.sh deleted file mode 100755 index c01dfcd19e..0000000000 --- a/.evergreen/hatch.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/bash -set -eu - -. .evergreen/scripts/ensure-hatch.sh -hatch run "$@" diff --git a/.evergreen/install-dependencies.sh b/.evergreen/install-dependencies.sh deleted file mode 100755 index d90ff4ab45..0000000000 --- a/.evergreen/install-dependencies.sh +++ /dev/null @@ -1,21 +0,0 @@ -#!/bin/bash -set -eu - -# Copy PyMongo's test certificates over driver-evergreen-tools' -cp ${PROJECT_DIRECTORY}/test/certificates/* ${DRIVERS_TOOLS}/.evergreen/x509gen/ - -# Replace MongoOrchestration's client certificate. -cp ${PROJECT_DIRECTORY}/test/certificates/client.pem ${MONGO_ORCHESTRATION_HOME}/lib/client.pem - -# Ensure hatch is installed. 
-bash ${PROJECT_DIRECTORY}/.evergreen/scripts/ensure-hatch.sh
-
-if [ -w /etc/hosts ]; then
-  SUDO=""
-else
-  SUDO="sudo"
-fi
-
-# Add 'server' and 'hostname_not_in_cert' as a hostnames
-echo "127.0.0.1 server" | $SUDO tee -a /etc/hosts
-echo "127.0.0.1 hostname_not_in_cert" | $SUDO tee -a /etc/hosts
diff --git a/.evergreen/just.sh b/.evergreen/just.sh
new file mode 100755
index 0000000000..bebbca8282
--- /dev/null
+++ b/.evergreen/just.sh
@@ -0,0 +1,5 @@
+#!/bin/bash
+set -eu
+
+. .evergreen/scripts/setup-dev-env.sh
+just "$@"
diff --git a/.evergreen/remove-unimplemented-tests.sh b/.evergreen/remove-unimplemented-tests.sh
new file mode 100755
index 0000000000..88ef137f86
--- /dev/null
+++ b/.evergreen/remove-unimplemented-tests.sh
@@ -0,0 +1,53 @@
+#!/bin/bash
+PYMONGO=$(dirname "$(cd "$(dirname "$0")" || exit; pwd)")
+
+rm $PYMONGO/test/transactions/legacy/errors-client.json # PYTHON-1894
+rm $PYMONGO/test/connection_monitoring/wait-queue-fairness.json # PYTHON-1873
+rm $PYMONGO/test/discovery_and_monitoring/unified/pool-clear-application-error.json # PYTHON-4918
+rm $PYMONGO/test/discovery_and_monitoring/unified/pool-clear-checkout-error.json # PYTHON-4918
+rm $PYMONGO/test/discovery_and_monitoring/unified/pool-clear-min-pool-size-error.json # PYTHON-4918
+rm $PYMONGO/test/client-side-encryption/spec/unified/client-bulkWrite-qe.json # PYTHON-4929
+
+# Python doesn't implement DRIVERS-3064
+rm $PYMONGO/test/collection_management/listCollections-rawdata.json
+rm $PYMONGO/test/crud/unified/aggregate-rawdata.json
+rm $PYMONGO/test/crud/unified/bulkWrite-deleteMany-rawdata.json
+rm $PYMONGO/test/crud/unified/bulkWrite-deleteOne-rawdata.json
+rm $PYMONGO/test/crud/unified/bulkWrite-replaceOne-rawdata.json
+rm $PYMONGO/test/crud/unified/bulkWrite-updateMany-rawdata.json
+rm $PYMONGO/test/crud/unified/bulkWrite-updateOne-rawdata.json
+rm $PYMONGO/test/crud/unified/client-bulkWrite-delete-rawdata.json
+rm $PYMONGO/test/crud/unified/client-bulkWrite-replaceOne-rawdata.json
+rm $PYMONGO/test/crud/unified/client-bulkWrite-update-rawdata.json
+rm $PYMONGO/test/crud/unified/count-rawdata.json
+rm $PYMONGO/test/crud/unified/countDocuments-rawdata.json
+rm $PYMONGO/test/crud/unified/db-aggregate-rawdata.json
+rm $PYMONGO/test/crud/unified/deleteMany-rawdata.json
+rm $PYMONGO/test/crud/unified/deleteOne-rawdata.json
+rm $PYMONGO/test/crud/unified/distinct-rawdata.json
+rm $PYMONGO/test/crud/unified/estimatedDocumentCount-rawdata.json
+rm $PYMONGO/test/crud/unified/find-rawdata.json
+rm $PYMONGO/test/crud/unified/findOneAndDelete-rawdata.json
+rm $PYMONGO/test/crud/unified/findOneAndReplace-rawdata.json
+rm $PYMONGO/test/crud/unified/findOneAndUpdate-rawdata.json
+rm $PYMONGO/test/crud/unified/insertMany-rawdata.json
+rm $PYMONGO/test/crud/unified/insertOne-rawdata.json
+rm $PYMONGO/test/crud/unified/replaceOne-rawdata.json
+rm $PYMONGO/test/crud/unified/updateMany-rawdata.json
+rm $PYMONGO/test/crud/unified/updateOne-rawdata.json
+rm $PYMONGO/test/index_management/index-rawdata.json
+
+# PyMongo does not support modifyCollection
+rm $PYMONGO/test/collection_management/modifyCollection-*.json
+
+# PYTHON-5248 - Remove support for MongoDB 4.0
+find $PYMONGO/test -type f -name 'pre-42-*.json' -delete
+
+# PYTHON-3359 - Remove Database and Collection level timeout override
+rm $PYMONGO/test/csot/override-collection-timeoutMS.json
+rm $PYMONGO/test/csot/override-database-timeoutMS.json
+
+# PYTHON-2943 - Socks5 Proxy Support
+rm $PYMONGO/test/uri_options/proxy-options.json
+
+echo "Done removing 
unimplemented tests" diff --git a/.evergreen/resync-specs.sh b/.evergreen/resync-specs.sh index dca116c2d3..d2bd89c781 100755 --- a/.evergreen/resync-specs.sh +++ b/.evergreen/resync-specs.sh @@ -1,6 +1,6 @@ #!/bin/bash -# exit when any command fails -set -e +# Resync test files from the specifications repo. +set -eu PYMONGO=$(dirname "$(cd "$(dirname "$0")"; pwd)") SPECS=${MDB_SPECS:-~/Work/specifications} @@ -45,9 +45,12 @@ then fi # Ensure the JSON files are up to date. -cd $SPECS/source -make -cd - +if ! [ -n "${CI:-}" ] +then + cd $SPECS/source + make + cd - +fi # cpjson unified-test-format/tests/invalid unified-test-format/invalid # * param1: Path to spec tests dir in specifications repo # * param2: Path to where the corresponding tests live in Python. @@ -73,9 +76,6 @@ do auth) cpjson auth/tests/ auth ;; - atlas-data-lake-testing|data_lake) - cpjson atlas-data-lake-testing/tests/ data_lake - ;; bson-binary-vector|bson_binary_vector) cpjson bson-binary-vector/tests/ bson_binary_vector ;; @@ -110,7 +110,6 @@ do cmap|CMAP|connection-monitoring-and-pooling) cpjson connection-monitoring-and-pooling/tests/logging connection_logging cpjson connection-monitoring-and-pooling/tests/cmap-format connection_monitoring - rm $PYMONGO/test/connection_monitoring/wait-queue-fairness.json # PYTHON-1873 ;; apm|APM|command-monitoring|command_monitoring) cpjson command-logging-and-monitoring/tests/monitoring command_monitoring @@ -131,6 +130,9 @@ do gridfs) cpjson gridfs/tests gridfs ;; + handshake) + cpjson mongodb-handshake/tests handshake + ;; index|index-management) cpjson index-management/tests index_management ;; @@ -171,7 +173,7 @@ do ;; server-selection|server_selection) cpjson server-selection/tests/ server_selection - rm -rf $PYMONGO/test/server_selection/logging + rm -rf $PYMONGO/test/server_selection/logging # these tests live in server_selection_logging cpjson server-selection/tests/logging server_selection_logging ;; server-selection-logging|server_selection_logging) @@ -183,7 +185,6 @@ do transactions|transactions-convenient-api) cpjson transactions/tests/ transactions cpjson transactions-convenient-api/tests/ transactions-convenient-api - rm $PYMONGO/test/transactions/legacy/errors-client.json # PYTHON-1894 ;; unified|unified-test-format) cpjson unified-test-format/tests/ unified-test-format/ diff --git a/.evergreen/run-azurekms-fail-test.sh b/.evergreen/run-azurekms-fail-test.sh deleted file mode 100755 index d99c178fb9..0000000000 --- a/.evergreen/run-azurekms-fail-test.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/bash -set -o errexit # Exit the script with error if any of the commands fail -HERE=$(dirname ${BASH_SOURCE:-$0}) -. 
$DRIVERS_TOOLS/.evergreen/csfle/azurekms/setup-secrets.sh -export LIBMONGOCRYPT_URL=https://s3.amazonaws.com/mciuploads/libmongocrypt/debian11/master/latest/libmongocrypt.tar.gz -SKIP_SERVERS=1 bash $HERE/setup-encryption.sh -PYTHON_BINARY=/opt/mongodbtoolchain/v4/bin/python3 \ - KEY_NAME="${AZUREKMS_KEYNAME}" \ - KEY_VAULT_ENDPOINT="${AZUREKMS_KEYVAULTENDPOINT}" \ - SUCCESS=false TEST_FLE_AZURE_AUTO=1 \ - $HERE/hatch.sh test:test-eg -bash $HERE/teardown-encryption.sh diff --git a/.evergreen/run-azurekms-test.sh b/.evergreen/run-azurekms-test.sh deleted file mode 100755 index bb515a9386..0000000000 --- a/.evergreen/run-azurekms-test.sh +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/bash -set -o errexit # Exit the script with error if any of the commands fail -HERE=$(dirname ${BASH_SOURCE:-$0}) -source ${DRIVERS_TOOLS}/.evergreen/csfle/azurekms/secrets-export.sh -echo "Copying files ... begin" -export AZUREKMS_RESOURCEGROUP=${AZUREKMS_RESOURCEGROUP} -export AZUREKMS_VMNAME=${AZUREKMS_VMNAME} -export AZUREKMS_PRIVATEKEYPATH=/tmp/testazurekms_privatekey -export LIBMONGOCRYPT_URL=https://s3.amazonaws.com/mciuploads/libmongocrypt/debian11/master/latest/libmongocrypt.tar.gz -SKIP_SERVERS=1 bash $HERE/setup-encryption.sh -tar czf /tmp/mongo-python-driver.tgz . -# shellcheck disable=SC2088 -AZUREKMS_SRC="/tmp/mongo-python-driver.tgz" AZUREKMS_DST="~/" \ - $DRIVERS_TOOLS/.evergreen/csfle/azurekms/copy-file.sh -echo "Copying files ... end" -echo "Untarring file ... begin" -AZUREKMS_CMD="tar xf mongo-python-driver.tgz" \ - $DRIVERS_TOOLS/.evergreen/csfle/azurekms/run-command.sh -echo "Untarring file ... end" -echo "Running test ... begin" -AZUREKMS_CMD="KEY_NAME=\"$AZUREKMS_KEYNAME\" KEY_VAULT_ENDPOINT=\"$AZUREKMS_KEYVAULTENDPOINT\" SUCCESS=true TEST_FLE_AZURE_AUTO=1 ./.evergreen/hatch.sh test:test-eg" \ - $DRIVERS_TOOLS/.evergreen/csfle/azurekms/run-command.sh -echo "Running test ... end" -bash $HERE/teardown-encryption.sh diff --git a/.evergreen/run-deployed-lambda-aws-tests.sh b/.evergreen/run-deployed-lambda-aws-tests.sh deleted file mode 100755 index aa16d62650..0000000000 --- a/.evergreen/run-deployed-lambda-aws-tests.sh +++ /dev/null @@ -1,10 +0,0 @@ -#!/bin/bash -set -o errexit # Exit the script with error if any of the commands fail - -export PATH="/opt/python/3.9/bin:${PATH}" -python --version -pushd ./test/lambda - -. build.sh -popd -. ${DRIVERS_TOOLS}/.evergreen/aws_lambda/run-deployed-lambda-aws-tests.sh diff --git a/.evergreen/run-gcpkms-test.sh b/.evergreen/run-gcpkms-test.sh deleted file mode 100755 index 7ccc74b453..0000000000 --- a/.evergreen/run-gcpkms-test.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/bin/bash -set -o errexit # Exit the script with error if any of the commands fail -HERE=$(dirname ${BASH_SOURCE:-$0}) - -source ${DRIVERS_TOOLS}/.evergreen/csfle/gcpkms/secrets-export.sh -echo "Copying files ... begin" -export GCPKMS_GCLOUD=${GCPKMS_GCLOUD} -export GCPKMS_PROJECT=${GCPKMS_PROJECT} -export GCPKMS_ZONE=${GCPKMS_ZONE} -export GCPKMS_INSTANCENAME=${GCPKMS_INSTANCENAME} -export LIBMONGOCRYPT_URL=https://s3.amazonaws.com/mciuploads/libmongocrypt/debian11/master/latest/libmongocrypt.tar.gz -SKIP_SERVERS=1 bash $HERE/setup-encryption.sh -tar czf /tmp/mongo-python-driver.tgz . -GCPKMS_SRC=/tmp/mongo-python-driver.tgz GCPKMS_DST=$GCPKMS_INSTANCENAME: $DRIVERS_TOOLS/.evergreen/csfle/gcpkms/copy-file.sh -echo "Copying files ... end" -echo "Untarring file ... begin" -GCPKMS_CMD="tar xf mongo-python-driver.tgz" $DRIVERS_TOOLS/.evergreen/csfle/gcpkms/run-command.sh -echo "Untarring file ... 
end" -echo "Running test ... begin" -GCPKMS_CMD="SUCCESS=true TEST_FLE_GCP_AUTO=1 ./.evergreen/hatch.sh test:test-eg" $DRIVERS_TOOLS/.evergreen/csfle/gcpkms/run-command.sh -echo "Running test ... end" -bash $HERE/teardown-encryption.sh diff --git a/.evergreen/run-import-time-test.sh b/.evergreen/run-import-time-test.sh deleted file mode 100755 index e9f6161bcc..0000000000 --- a/.evergreen/run-import-time-test.sh +++ /dev/null @@ -1,33 +0,0 @@ -#!/bin/bash -ex - -set -o errexit # Exit the script with error if any of the commands fail -set -x - -. .evergreen/utils.sh - -if [ -z "$PYTHON_BINARY" ]; then - PYTHON_BINARY=$(find_python3) -fi - -# Use the previous commit if this was not a PR run. -if [ "$BASE_SHA" == "$HEAD_SHA" ]; then - BASE_SHA=$(git rev-parse HEAD~1) -fi - -function get_import_time() { - local log_file - createvirtualenv "$PYTHON_BINARY" import-venv - python -m pip install -q ".[aws,encryption,gssapi,ocsp,snappy,zstd]" - # Import once to cache modules - python -c "import pymongo" - log_file="pymongo-$1.log" - python -X importtime -c "import pymongo" 2> $log_file -} - -get_import_time $HEAD_SHA -git stash || true -git checkout $BASE_SHA -get_import_time $BASE_SHA -git checkout $HEAD_SHA -git stash apply || true -python tools/compare_import_time.py $HEAD_SHA $BASE_SHA diff --git a/.evergreen/run-mongodb-aws-ecs-test.sh b/.evergreen/run-mongodb-aws-ecs-test.sh index 3189a6cc6c..b8330de511 100755 --- a/.evergreen/run-mongodb-aws-ecs-test.sh +++ b/.evergreen/run-mongodb-aws-ecs-test.sh @@ -1,7 +1,6 @@ #!/bin/bash - -# Don't trace since the URI contains a password that shouldn't show up in the logs -set -o errexit # Exit the script with error if any of the commands fail +# Script run on an ECS host to test MONGODB-AWS. +set -eu ############################################ # Main Program # @@ -21,14 +20,19 @@ fi set -o xtrace # Install python with pip. -PYTHON_VER="python3.9" +PYTHON_VER="python3.10" apt-get -qq update < /dev/null > /dev/null +apt-get -q install -y software-properties-common +# Use openpgp to avoid gpg key timeout. +mkdir -p $HOME/.gnupg +echo "keyserver keys.openpgp.org" >> $HOME/.gnupg/gpg.conf +add-apt-repository -y 'ppa:deadsnakes/ppa' apt-get -qq install $PYTHON_VER $PYTHON_VER-venv build-essential $PYTHON_VER-dev -y < /dev/null > /dev/null export PYTHON_BINARY=$PYTHON_VER -export TEST_AUTH_AWS=1 -export AUTH="auth" export SET_XTRACE_ON=1 cd src rm -rf .venv -bash .evergreen/hatch.sh test:test-eg +rm -f .evergreen/scripts/test-env.sh || true +bash ./.evergreen/just.sh setup-tests auth_aws ecs-remote +bash .evergreen/just.sh run-tests diff --git a/.evergreen/run-mongodb-oidc-remote-test.sh b/.evergreen/run-mongodb-oidc-remote-test.sh deleted file mode 100755 index bb90bddf07..0000000000 --- a/.evergreen/run-mongodb-oidc-remote-test.sh +++ /dev/null @@ -1,60 +0,0 @@ -#!/bin/bash - -set +x # Disable debug trace -set -eu - -echo "Running MONGODB-OIDC remote tests" - -OIDC_ENV=${OIDC_ENV:-"test"} - -# Make sure DRIVERS_TOOLS is set. -if [ -z "$DRIVERS_TOOLS" ]; then - echo "Must specify DRIVERS_TOOLS" - exit 1 -fi - -# Set up the remote files to test. -git add . -git commit -m "add files" || true -export TEST_TAR_FILE=/tmp/mongo-python-driver.tgz -git archive -o $TEST_TAR_FILE HEAD - -pushd $DRIVERS_TOOLS - -if [ $OIDC_ENV == "test" ]; then - echo "Test OIDC environment does not support remote test!" 
- exit 1 - -elif [ $OIDC_ENV == "azure" ]; then - export AZUREOIDC_DRIVERS_TAR_FILE=$TEST_TAR_FILE - export AZUREOIDC_TEST_CMD="OIDC_ENV=azure ./.evergreen/run-mongodb-oidc-test.sh" - bash ./.evergreen/auth_oidc/azure/run-driver-test.sh - -elif [ $OIDC_ENV == "gcp" ]; then - export GCPOIDC_DRIVERS_TAR_FILE=$TEST_TAR_FILE - export GCPOIDC_TEST_CMD="OIDC_ENV=gcp ./.evergreen/run-mongodb-oidc-test.sh" - bash ./.evergreen/auth_oidc/gcp/run-driver-test.sh - -elif [ $OIDC_ENV == "k8s" ]; then - # Make sure K8S_VARIANT is set. - if [ -z "$K8S_VARIANT" ]; then - echo "Must specify K8S_VARIANT" - popd - exit 1 - fi - - bash ./.evergreen/auth_oidc/k8s/setup-pod.sh - bash ./.evergreen/auth_oidc/k8s/run-self-test.sh - export K8S_DRIVERS_TAR_FILE=$TEST_TAR_FILE - export K8S_TEST_CMD="OIDC_ENV=k8s ./.evergreen/run-mongodb-oidc-test.sh" - source ./.evergreen/auth_oidc/k8s/secrets-export.sh # for MONGODB_URI - bash ./.evergreen/auth_oidc/k8s/run-driver-test.sh - bash ./.evergreen/auth_oidc/k8s/teardown-pod.sh - -else - echo "Unrecognized OIDC_ENV $OIDC_ENV" - pod - exit 1 -fi - -popd diff --git a/.evergreen/run-mongodb-oidc-test.sh b/.evergreen/run-mongodb-oidc-test.sh index 22864528c0..b34013a6ac 100755 --- a/.evergreen/run-mongodb-oidc-test.sh +++ b/.evergreen/run-mongodb-oidc-test.sh @@ -1,35 +1,17 @@ #!/bin/bash - -set +x # Disable debug trace +# Script run on a remote host to test MONGODB-OIDC. set -eu -echo "Running MONGODB-OIDC authentication tests" - -OIDC_ENV=${OIDC_ENV:-"test"} - -if [ $OIDC_ENV == "test" ]; then - # Make sure DRIVERS_TOOLS is set. - if [ -z "$DRIVERS_TOOLS" ]; then - echo "Must specify DRIVERS_TOOLS" - exit 1 - fi - source ${DRIVERS_TOOLS}/.evergreen/auth_oidc/secrets-export.sh - -elif [ $OIDC_ENV == "azure" ]; then - source ./env.sh - -elif [ $OIDC_ENV == "gcp" ]; then - source ./secrets-export.sh - -elif [ $OIDC_ENV == "k8s" ]; then - echo "Running oidc on k8s" +echo "Running MONGODB-OIDC authentication tests on ${OIDC_ENV}..." +if [ ${OIDC_ENV} == "k8s" ]; then + SUB_TEST_NAME=$K8S_VARIANT-remote else - echo "Unrecognized OIDC_ENV $OIDC_ENV" - exit 1 + SUB_TEST_NAME=$OIDC_ENV-remote + sudo apt-get install -y python3-dev build-essential fi -export TEST_AUTH_OIDC=1 -export COVERAGE=1 -export AUTH="auth" -bash ./.evergreen/hatch.sh test:test-eg -- "${@:1}" +bash ./.evergreen/just.sh setup-tests auth_oidc $SUB_TEST_NAME +bash ./.evergreen/just.sh run-tests "${@:1}" + +echo "Running MONGODB-OIDC authentication tests on ${OIDC_ENV}... done." 
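The two rewritten scripts above show the workflow this patch standardizes on: a one-time setup step records the suite's inputs in .evergreen/scripts/test-env.sh, and a separate run step consumes them (run-tests.sh below refuses to run with "Missing test inputs" if setup was skipped). A minimal local sketch, assuming the just recipes wired up by the new .evergreen/just.sh; the suite and sub-test names here are illustrative values taken from the diffs above, not an exhaustive list:

# Record the suite configuration (writes .evergreen/scripts/test-env.sh).
bash ./.evergreen/just.sh setup-tests auth_oidc azure-remote
# Run whichever suite was configured; sources test-env.sh and fails if none exists.
bash ./.evergreen/just.sh run-tests

Splitting setup from execution lets remote hosts (ECS, Azure, GCP, k8s) reuse the same run step after shipping only the recorded test inputs.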
diff --git a/.evergreen/run-perf-tests.sh b/.evergreen/run-perf-tests.sh deleted file mode 100755 index ff8d81a837..0000000000 --- a/.evergreen/run-perf-tests.sh +++ /dev/null @@ -1,19 +0,0 @@ -#!/bin/bash - -set -o xtrace -set -o errexit - -git clone --depth 1 https://github.com/mongodb/specifications.git -pushd specifications/source/benchmarking/data -tar xf extended_bson.tgz -tar xf parallel.tgz -tar xf single_and_multi_document.tgz -popd - -export TEST_PATH="${PROJECT_DIRECTORY}/specifications/source/benchmarking/data" -export OUTPUT_FILE="${PROJECT_DIRECTORY}/results.json" - -export PYTHON_BINARY=/opt/mongodbtoolchain/v4/bin/python3 -export PERF_TEST=1 - -bash ./.evergreen/hatch.sh test:test-eg diff --git a/.evergreen/run-tests.sh b/.evergreen/run-tests.sh index 95fe10a6c3..c14215244e 100755 --- a/.evergreen/run-tests.sh +++ b/.evergreen/run-tests.sh @@ -1,290 +1,42 @@ #!/bin/bash -set -o errexit # Exit the script with error if any of the commands fail -set -o xtrace +# Run a test suite that was configured with setup-tests.sh. +set -eu -# Note: It is assumed that you have already set up a virtual environment before running this file. +SCRIPT_DIR=$(dirname ${BASH_SOURCE:-$0}) +SCRIPT_DIR="$( cd -- "$SCRIPT_DIR" > /dev/null 2>&1 && pwd )" +ROOT_DIR="$(dirname $SCRIPT_DIR)" -# Supported/used environment variables: -# AUTH Set to enable authentication. Defaults to "noauth" -# SSL Set to enable SSL. Defaults to "nossl" -# GREEN_FRAMEWORK The green framework to test with, if any. -# COVERAGE If non-empty, run the test suite with coverage. -# COMPRESSORS If non-empty, install appropriate compressor. -# LIBMONGOCRYPT_URL The URL to download libmongocrypt. -# TEST_DATA_LAKE If non-empty, run data lake tests. -# TEST_ENCRYPTION If non-empty, run encryption tests. -# TEST_CRYPT_SHARED If non-empty, install crypt_shared lib. -# TEST_SERVERLESS If non-empy, test on serverless. -# TEST_LOADBALANCER If non-empy, test load balancing. -# TEST_FLE_AZURE_AUTO If non-empy, test auto FLE on Azure -# TEST_FLE_GCP_AUTO If non-empy, test auto FLE on GCP -# TEST_PYOPENSSL If non-empy, test with PyOpenSSL -# TEST_ENTERPRISE_AUTH If non-empty, test with Enterprise Auth -# TEST_AUTH_AWS If non-empty, test AWS Auth Mechanism -# TEST_AUTH_OIDC If non-empty, test OIDC Auth Mechanism -# TEST_PERF If non-empty, run performance tests -# TEST_OCSP If non-empty, run OCSP tests -# TEST_ATLAS If non-empty, test Atlas connections -# TEST_INDEX_MANAGEMENT If non-empty, run index management tests -# TEST_ENCRYPTION_PYOPENSSL If non-empy, test encryption with PyOpenSSL +PREV_DIR=$(pwd) +cd $ROOT_DIR -AUTH=${AUTH:-noauth} -SSL=${SSL:-nossl} -TEST_SUITES=${TEST_SUITES:-} -TEST_ARGS="${*:1}" - -export PIP_QUIET=1 # Quiet by default -export PIP_PREFER_BINARY=1 # Prefer binary dists by default - -set +x -python -c "import sys; sys.exit(sys.prefix == sys.base_prefix)" || (echo "Not inside a virtual env!"; exit 1) -PYTHON_IMPL=$(python -c "import platform; print(platform.python_implementation())") - -# Try to source local Drivers Secrets -if [ -f ./secrets-export.sh ]; then - echo "Sourcing secrets" - source ./secrets-export.sh +# Try to source the env file. +if [ -f $SCRIPT_DIR/scripts/env.sh ]; then + echo "Sourcing env inputs" + . $SCRIPT_DIR/scripts/env.sh else - echo "Not sourcing secrets" -fi - -# Ensure C extensions have compiled. 
-if [ -z "${NO_EXT:-}" ] && [ "$PYTHON_IMPL" = "CPython" ]; then - python tools/fail_if_no_c.py -fi - -if [ "$AUTH" != "noauth" ]; then - if [ -n "$TEST_DATA_LAKE" ]; then - export DB_USER="mhuser" - export DB_PASSWORD="pencil" - elif [ -n "$TEST_SERVERLESS" ]; then - source "${DRIVERS_TOOLS}"/.evergreen/serverless/secrets-export.sh - export DB_USER=$SERVERLESS_ATLAS_USER - export DB_PASSWORD=$SERVERLESS_ATLAS_PASSWORD - export MONGODB_URI="$SERVERLESS_URI" - echo "MONGODB_URI=$MONGODB_URI" - export SINGLE_MONGOS_LB_URI=$MONGODB_URI - export MULTI_MONGOS_LB_URI=$MONGODB_URI - elif [ -n "$TEST_AUTH_OIDC" ]; then - export DB_USER=$OIDC_ADMIN_USER - export DB_PASSWORD=$OIDC_ADMIN_PWD - export DB_IP="$MONGODB_URI" - else - export DB_USER="bob" - export DB_PASSWORD="pwd123" - fi - echo "Added auth, DB_USER: $DB_USER" -fi - -if [ -n "$TEST_ENTERPRISE_AUTH" ]; then - python -m pip install '.[gssapi]' - if [ "Windows_NT" = "$OS" ]; then - echo "Setting GSSAPI_PASS" - export GSSAPI_PASS=${SASL_PASS} - export GSSAPI_CANONICALIZE="true" - else - # BUILD-3830 - touch krb5.conf.empty - export KRB5_CONFIG=${PROJECT_DIRECTORY}/.evergreen/krb5.conf.empty - - echo "Writing keytab" - echo ${KEYTAB_BASE64} | base64 -d > ${PROJECT_DIRECTORY}/.evergreen/drivers.keytab - echo "Running kinit" - kinit -k -t ${PROJECT_DIRECTORY}/.evergreen/drivers.keytab -p ${PRINCIPAL} - fi - echo "Setting GSSAPI variables" - export GSSAPI_HOST=${SASL_HOST} - export GSSAPI_PORT=${SASL_PORT} - export GSSAPI_PRINCIPAL=${PRINCIPAL} - - export TEST_SUITES="auth" -fi - -if [ -n "$TEST_LOADBALANCER" ]; then - export LOAD_BALANCER=1 - export SINGLE_MONGOS_LB_URI="${SINGLE_MONGOS_LB_URI:-mongodb://127.0.0.1:8000/?loadBalanced=true}" - export MULTI_MONGOS_LB_URI="${MULTI_MONGOS_LB_URI:-mongodb://127.0.0.1:8001/?loadBalanced=true}" - export TEST_SUITES="load_balancer" -fi - -if [ "$SSL" != "nossl" ]; then - export CLIENT_PEM="$DRIVERS_TOOLS/.evergreen/x509gen/client.pem" - export CA_PEM="$DRIVERS_TOOLS/.evergreen/x509gen/ca.pem" - - if [ -n "$TEST_LOADBALANCER" ]; then - export SINGLE_MONGOS_LB_URI="${SINGLE_MONGOS_LB_URI}&tls=true" - export MULTI_MONGOS_LB_URI="${MULTI_MONGOS_LB_URI}&tls=true" - fi -fi - -if [ "$COMPRESSORS" = "snappy" ]; then - python -m pip install '.[snappy]' -elif [ "$COMPRESSORS" = "zstd" ]; then - python -m pip install zstandard -fi - -# PyOpenSSL test setup. -if [ -n "$TEST_PYOPENSSL" ]; then - python -m pip install '.[ocsp]' -fi - -if [ -n "$TEST_ENCRYPTION" ] || [ -n "$TEST_FLE_AZURE_AUTO" ] || [ -n "$TEST_FLE_GCP_AUTO" ]; then - # Check for libmongocrypt checkout. - if [ ! -d "libmongocrypt" ]; then - echo "Run encryption setup first!" - exit 1 - fi - - python -m pip install '.[encryption]' - - # Use the nocrypto build to avoid dependency issues with older windows/python versions. - BASE=$(pwd)/libmongocrypt/nocrypto - if [ -f "${BASE}/lib/libmongocrypt.so" ]; then - PYMONGOCRYPT_LIB=${BASE}/lib/libmongocrypt.so - elif [ -f "${BASE}/lib/libmongocrypt.dylib" ]; then - PYMONGOCRYPT_LIB=${BASE}/lib/libmongocrypt.dylib - elif [ -f "${BASE}/bin/mongocrypt.dll" ]; then - PYMONGOCRYPT_LIB=${BASE}/bin/mongocrypt.dll - # libmongocrypt's windows dll is not marked executable. - chmod +x $PYMONGOCRYPT_LIB - PYMONGOCRYPT_LIB=$(cygpath -m $PYMONGOCRYPT_LIB) - elif [ -f "${BASE}/lib64/libmongocrypt.so" ]; then - PYMONGOCRYPT_LIB=${BASE}/lib64/libmongocrypt.so - else - echo "Cannot find libmongocrypt shared object file" - exit 1 - fi - export PYMONGOCRYPT_LIB - - # TODO: Test with 'pip install pymongocrypt' - if [ ! 
-d "libmongocrypt_git" ]; then - git clone https://github.com/mongodb/libmongocrypt.git libmongocrypt_git - fi - python -m pip install -U setuptools - python -m pip install ./libmongocrypt_git/bindings/python - python -c "import pymongocrypt; print('pymongocrypt version: '+pymongocrypt.__version__)" - python -c "import pymongocrypt; print('libmongocrypt version: '+pymongocrypt.libmongocrypt_version())" - # PATH is updated by PREPARE_SHELL for access to mongocryptd. -fi - -if [ -n "$TEST_ENCRYPTION" ]; then - if [ -n "$TEST_ENCRYPTION_PYOPENSSL" ]; then - python -m pip install '.[ocsp]' - fi - - if [ -n "$TEST_CRYPT_SHARED" ]; then - CRYPT_SHARED_DIR=`dirname $CRYPT_SHARED_LIB_PATH` - echo "using crypt_shared_dir $CRYPT_SHARED_DIR" - export DYLD_FALLBACK_LIBRARY_PATH=$CRYPT_SHARED_DIR:$DYLD_FALLBACK_LIBRARY_PATH - export LD_LIBRARY_PATH=$CRYPT_SHARED_DIR:$LD_LIBRARY_PATH - export PATH=$CRYPT_SHARED_DIR:$PATH - fi - # Only run the encryption tests. - TEST_SUITES="encryption" + echo "Not sourcing env inputs" fi -if [ -n "$TEST_FLE_AZURE_AUTO" ] || [ -n "$TEST_FLE_GCP_AUTO" ]; then - if [[ -z "$SUCCESS" ]]; then - echo "Must define SUCCESS" - exit 1 - fi - - if echo "$MONGODB_URI" | grep -q "@"; then - echo "MONGODB_URI unexpectedly contains user credentials in FLE test!"; - exit 1 - fi - TEST_SUITES="csfle" -fi - -if [ -n "$TEST_INDEX_MANAGEMENT" ]; then - source $DRIVERS_TOOLS/.evergreen/atlas/secrets-export.sh - export DB_USER="${DRIVERS_ATLAS_LAMBDA_USER}" - set +x - export DB_PASSWORD="${DRIVERS_ATLAS_LAMBDA_PASSWORD}" - set -x - TEST_SUITES="index_management" -fi - -if [ -n "$TEST_DATA_LAKE" ] && [ -z "$TEST_ARGS" ]; then - TEST_SUITES="data_lake" -fi - -if [ -n "$TEST_ATLAS" ]; then - TEST_SUITES="atlas" -fi - -if [ -n "$TEST_OCSP" ]; then - python -m pip install ".[ocsp]" - TEST_SUITES="ocsp" -fi - -if [ -n "$TEST_AUTH_AWS" ]; then - python -m pip install ".[aws]" - TEST_SUITES="auth_aws" -fi - -if [ -n "$TEST_AUTH_OIDC" ]; then - python -m pip install ".[aws]" - TEST_SUITES="auth_oidc" -fi - -if [ -n "$PERF_TEST" ]; then - python -m pip install simplejson - start_time=$(date +%s) - TEST_SUITES="perf" - # PYTHON-4769 Run perf_test.py directly otherwise pytest's test collection negatively - # affects the benchmark results. - TEST_ARGS="test/performance/perf_test.py $TEST_ARGS" -fi - -echo "Running $AUTH tests over $SSL with python $(which python)" -python -c 'import sys; print(sys.version)' - - -# Run the tests, and store the results in Evergreen compatible XUnit XML -# files in the xunit-results/ directory. - -# Run the tests with coverage if requested and coverage is installed. -# Only cover CPython. PyPy reports suspiciously low coverage. -if [ -n "$COVERAGE" ] && [ "$PYTHON_IMPL" = "CPython" ]; then - # Keep in sync with combine-coverage.sh. - # coverage >=5 is needed for relative_files=true. - python -m pip install pytest-cov "coverage>=5,<=7.5" - TEST_ARGS="$TEST_ARGS --cov" -fi - -if [ -n "$GREEN_FRAMEWORK" ]; then - python -m pip install $GREEN_FRAMEWORK -fi - -# Show the installed packages -PIP_QUIET=0 python -m pip list - -if [ -z "$GREEN_FRAMEWORK" ]; then - # Use --capture=tee-sys so pytest prints test output inline: - # https://docs.pytest.org/en/stable/how-to/capture-stdout-stderr.html - if [ -z "$TEST_SUITES" ]; then - python -m pytest -v --capture=tee-sys --durations=5 $TEST_ARGS - else - python -m pytest -v --capture=tee-sys --durations=5 -m $TEST_SUITES $TEST_ARGS - fi +# Handle test inputs. 
+if [ -f $SCRIPT_DIR/scripts/test-env.sh ]; then + echo "Sourcing test inputs" + . $SCRIPT_DIR/scripts/test-env.sh else - python green_framework_test.py $GREEN_FRAMEWORK -v $TEST_ARGS + echo "Missing test inputs, please run 'just setup-tests'" + exit 1 fi -# Handle perf test post actions. -if [ -n "$PERF_TEST" ]; then - end_time=$(date +%s) - elapsed_secs=$((end_time-start_time)) +cleanup_tests() { + # Avoid leaving the lock file in a changed state when we change the resolution type. + if [ -n "${TEST_MIN_DEPS:-}" ]; then + git checkout uv.lock || true + fi + cd $PREV_DIR +} - cat results.json +trap "cleanup_tests" SIGINT ERR - echo "{\"failures\": 0, \"results\": [{\"status\": \"pass\", \"exit_code\": 0, \"test_file\": \"BenchMarkTests\", \"start\": $start_time, \"end\": $end_time, \"elapsed\": $elapsed_secs}]}" > report.json +# Start the test runner. +uv run ${UV_ARGS} --reinstall-package pymongo .evergreen/scripts/run_tests.py "$@" - cat report.json -fi - -# Handle coverage post actions. -if [ -n "$COVERAGE" ]; then - rm -rf .pytest_cache -fi +cleanup_tests diff --git a/.evergreen/scripts/__init__.py b/.evergreen/scripts/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/.evergreen/scripts/archive-mongodb-logs.sh b/.evergreen/scripts/archive-mongodb-logs.sh deleted file mode 100755 index 70a337cd11..0000000000 --- a/.evergreen/scripts/archive-mongodb-logs.sh +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/bash - -set -o xtrace -mkdir out_dir -# shellcheck disable=SC2156 -find "$MONGO_ORCHESTRATION_HOME" -name \*.log -exec sh -c 'x="{}"; mv $x $PWD/out_dir/$(basename $(dirname $x))_$(basename $x)' \; -tar zcvf mongodb-logs.tar.gz -C out_dir/ . -rm -rf out_dir diff --git a/.evergreen/scripts/bootstrap-mongo-orchestration.sh b/.evergreen/scripts/bootstrap-mongo-orchestration.sh deleted file mode 100755 index 1d2b145de8..0000000000 --- a/.evergreen/scripts/bootstrap-mongo-orchestration.sh +++ /dev/null @@ -1,46 +0,0 @@ -#!/bin/bash - -set -o xtrace - -# Enable core dumps if enabled on the machine -# Copied from https://github.com/mongodb/mongo/blob/master/etc/evergreen.yml -if [ -f /proc/self/coredump_filter ]; then - # Set the shell process (and its children processes) to dump ELF headers (bit 4), - # anonymous shared mappings (bit 1), and anonymous private mappings (bit 0). - echo 0x13 >/proc/self/coredump_filter - - if [ -f /sbin/sysctl ]; then - # Check that the core pattern is set explicitly on our distro image instead - # of being the OS's default value. This ensures that coredump names are consistent - # across distros and can be picked up by Evergreen. 
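
Stepping back to the perf-test post-processing removed above: it wrote an Evergreen-compatible report.json by hand. The same payload in Python, mirroring the removed echo (only the JSON fields shown there are assumed):

    import json
    import time

    start_time = int(time.time())
    # ... benchmarks run here ...
    end_time = int(time.time())

    report = {
        "failures": 0,
        "results": [{
            "status": "pass", "exit_code": 0, "test_file": "BenchMarkTests",
            "start": start_time, "end": end_time,
            "elapsed": end_time - start_time,
        }],
    }
    with open("report.json", "w") as fid:
        json.dump(report, fid)
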
- core_pattern=$(/sbin/sysctl -n "kernel.core_pattern") - if [ "$core_pattern" = "dump_%e.%p.core" ]; then - echo "Enabling coredumps" - ulimit -c unlimited - fi - fi -fi - -if [ "$(uname -s)" = "Darwin" ]; then - core_pattern_mac=$(/usr/sbin/sysctl -n "kern.corefile") - if [ "$core_pattern_mac" = "dump_%N.%P.core" ]; then - echo "Enabling coredumps" - ulimit -c unlimited - fi -fi - -if [ -n "${skip_crypt_shared}" ]; then - export SKIP_CRYPT_SHARED=1 -fi - -MONGODB_VERSION=${VERSION} \ - TOPOLOGY=${TOPOLOGY} \ - AUTH=${AUTH:-noauth} \ - SSL=${SSL:-nossl} \ - STORAGE_ENGINE=${STORAGE_ENGINE:-} \ - DISABLE_TEST_COMMANDS=${DISABLE_TEST_COMMANDS:-} \ - ORCHESTRATION_FILE=${ORCHESTRATION_FILE:-} \ - REQUIRE_API_VERSION=${REQUIRE_API_VERSION:-} \ - LOAD_BALANCER=${LOAD_BALANCER:-} \ - bash ${DRIVERS_TOOLS}/.evergreen/run-orchestration.sh -# run-orchestration generates expansion file with the MONGODB_URI for the cluster diff --git a/.evergreen/scripts/check-import-time.sh b/.evergreen/scripts/check-import-time.sh index cdd2025d59..f7a1117b97 100755 --- a/.evergreen/scripts/check-import-time.sh +++ b/.evergreen/scripts/check-import-time.sh @@ -1,7 +1,43 @@ #!/bin/bash +# Check for regressions in the import time of pymongo. +set -eu -. .evergreen/scripts/env.sh -set -x -export BASE_SHA="$1" -export HEAD_SHA="$2" -bash .evergreen/run-import-time-test.sh +HERE=$(dirname ${BASH_SOURCE:-$0}) + +source $HERE/env.sh + +pushd $HERE/../.. >/dev/null + +BASE_SHA="$1" +HEAD_SHA="$2" + +. .evergreen/utils.sh + +if [ -z "${PYTHON_BINARY:-}" ]; then + PYTHON_BINARY=$(find_python3) +fi + +# Use the previous commit if this was not a PR run. +if [ "$BASE_SHA" == "$HEAD_SHA" ]; then + BASE_SHA=$(git rev-parse HEAD~1) +fi + +function get_import_time() { + local log_file + createvirtualenv "$PYTHON_BINARY" import-venv + python -m pip install -q ".[aws,encryption,gssapi,ocsp,snappy,zstd]" + # Import once to cache modules + python -c "import pymongo" + log_file="pymongo-$1.log" + python -X importtime -c "import pymongo" 2> $log_file +} + +get_import_time $HEAD_SHA +git stash || true +git checkout $BASE_SHA +get_import_time $BASE_SHA +git checkout $HEAD_SHA +git stash apply || true +python tools/compare_import_time.py $HEAD_SHA $BASE_SHA + +popd >/dev/null diff --git a/.evergreen/scripts/cleanup.sh b/.evergreen/scripts/cleanup.sh index 9e583e4f1e..f04a936fd2 100755 --- a/.evergreen/scripts/cleanup.sh +++ b/.evergreen/scripts/cleanup.sh @@ -1,7 +1,14 @@ #!/bin/bash +# Clean up resources at the end of an evergreen run. +set -eu -if [ -f "$DRIVERS_TOOLS"/.evergreen/csfle/secrets-export.sh ]; then - . .evergreen/hatch.sh encryption:teardown +HERE=$(dirname ${BASH_SOURCE:-$0}) + +# Try to source the env file. +if [ -f $HERE/env.sh ]; then + echo "Sourcing env file" + source $HERE/env.sh fi + rm -rf "${DRIVERS_TOOLS}" || true -rm -f ./secrets-export.sh || true +rm -f $HERE/../../secrets-export.sh || true diff --git a/.evergreen/scripts/configure-env.sh b/.evergreen/scripts/configure-env.sh index e0c845a333..8dc328aab3 100755 --- a/.evergreen/scripts/configure-env.sh +++ b/.evergreen/scripts/configure-env.sh @@ -1,5 +1,5 @@ #!/bin/bash - +# Configure an evergreen test environment. 
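
On the new check-import-time.sh above: `python -X importtime` writes one line per import to stderr, in the form `import time: self [us] | cumulative | imported package`. tools/compare_import_time.py presumably consumes these logs; a hypothetical parser for the total cost might look like this (the helper name and exact column handling are assumptions):

    def total_import_us(log_file: str) -> int:
        # Entries look like: "import time:       283 |      41234 | pymongo".
        # The cumulative column of the top-level "pymongo" entry is the total.
        with open(log_file) as fid:
            for line in fid:
                if not line.startswith("import time:") or "[us]" in line:
                    continue  # skip unrelated lines and the header row
                _, timings = line.split(":", 1)
                _self_us, cumulative, module = timings.split("|")
                if module.strip() == "pymongo":
                    return int(cumulative.strip())
        raise ValueError(f"no pymongo entry found in {log_file}")
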
set -eu # Get the current unique version of this checkout @@ -14,12 +14,31 @@ fi PROJECT_DIRECTORY="$(pwd)" DRIVERS_TOOLS="$(dirname $PROJECT_DIRECTORY)/drivers-tools" CARGO_HOME=${CARGO_HOME:-${DRIVERS_TOOLS}/.cargo} +UV_TOOL_DIR=$PROJECT_DIRECTORY/.local/uv/tools +UV_CACHE_DIR=$PROJECT_DIRECTORY/.local/uv/cache +DRIVERS_TOOLS_BINARIES="$DRIVERS_TOOLS/.bin" +MONGODB_BINARIES="$DRIVERS_TOOLS/mongodb/bin" + +# On Evergreen jobs, "CI" will be set, and we don't want to write to $HOME. +if [ "${CI:-}" == "true" ]; then + PYMONGO_BIN_DIR=${DRIVERS_TOOLS_BINARIES:-} +# We want to use a path that's already on PATH on spawn hosts. +else + PYMONGO_BIN_DIR=$HOME/cli_bin +fi + +PATH_EXT="$MONGODB_BINARIES:$DRIVERS_TOOLS_BINARIES:$PYMONGO_BIN_DIR:\$PATH" # Python has cygwin path problems on Windows. Detect prospective mongo-orchestration home directory if [ "Windows_NT" = "${OS:-}" ]; then # Magic variable in cygwin DRIVERS_TOOLS=$(cygpath -m $DRIVERS_TOOLS) PROJECT_DIRECTORY=$(cygpath -m $PROJECT_DIRECTORY) CARGO_HOME=$(cygpath -m $CARGO_HOME) + UV_TOOL_DIR=$(cygpath -m "$UV_TOOL_DIR") + UV_CACHE_DIR=$(cygpath -m "$UV_CACHE_DIR") + DRIVERS_TOOLS_BINARIES=$(cygpath -m "$DRIVERS_TOOLS_BINARIES") + MONGODB_BINARIES=$(cygpath -m "$MONGODB_BINARIES") + PYMONGO_BIN_DIR=$(cygpath -m "$PYMONGO_BIN_DIR") fi SCRIPT_DIR="$PROJECT_DIRECTORY/.evergreen/scripts" @@ -34,44 +53,62 @@ export MONGO_ORCHESTRATION_HOME="$DRIVERS_TOOLS/.evergreen/orchestration" export MONGODB_BINARIES="$DRIVERS_TOOLS/mongodb/bin" cat <<EOT > "$SCRIPT_DIR"/env.sh -set -o errexit export PROJECT_DIRECTORY="$PROJECT_DIRECTORY" export CURRENT_VERSION="$CURRENT_VERSION" -export SKIP_LEGACY_SHELL=1 export DRIVERS_TOOLS="$DRIVERS_TOOLS" export MONGO_ORCHESTRATION_HOME="$MONGO_ORCHESTRATION_HOME" export MONGODB_BINARIES="$MONGODB_BINARIES" +export DRIVERS_TOOLS_BINARIES="$DRIVERS_TOOLS_BINARIES" export PROJECT_DIRECTORY="$PROJECT_DIRECTORY" -export SETDEFAULTENCODING="${SETDEFAULTENCODING:-}" -export SKIP_CSOT_TESTS="${SKIP_CSOT_TESTS:-}" -export MONGODB_STARTED="${MONGODB_STARTED:-}" -export DISABLE_TEST_COMMANDS="${DISABLE_TEST_COMMANDS:-}" -export GREEN_FRAMEWORK="${GREEN_FRAMEWORK:-}" -export NO_EXT="${NO_EXT:-}" -export COVERAGE="${COVERAGE:-}" -export COMPRESSORS="${COMPRESSORS:-}" -export MONGODB_API_VERSION="${MONGODB_API_VERSION:-}" -export skip_crypt_shared="${skip_crypt_shared:-}" -export STORAGE_ENGINE="${STORAGE_ENGINE:-}" -export REQUIRE_API_VERSION="${REQUIRE_API_VERSION:-}" -export skip_web_identity_auth_test="${skip_web_identity_auth_test:-}" -export skip_ECS_auth_test="${skip_ECS_auth_test:-}" export CARGO_HOME="$CARGO_HOME" -export TMPDIR="$MONGO_ORCHESTRATION_HOME/db" -export PATH="$MONGODB_BINARIES:$PATH" +export UV_TOOL_DIR="$UV_TOOL_DIR" +export UV_CACHE_DIR="$UV_CACHE_DIR" +export UV_TOOL_BIN_DIR="$DRIVERS_TOOLS_BINARIES" +export PYMONGO_BIN_DIR="$PYMONGO_BIN_DIR" +export PATH="$PATH_EXT" # shellcheck disable=SC2154 export PROJECT="${project:-mongo-python-driver}" export PIP_QUIET=1 EOT -# Skip CSOT tests on non-linux platforms. -if [ "$(uname -s)" != "Linux" ]; then - echo "export SKIP_CSOT_TESTS=1" >> $SCRIPT_DIR/env.sh -fi +# Write the .env file for drivers-tools. 
+rm -rf $DRIVERS_TOOLS +BRANCH=master +ORG=mongodb-labs +git clone --branch $BRANCH https://github.com/$ORG/drivers-evergreen-tools.git $DRIVERS_TOOLS + +cat <<EOT > ${DRIVERS_TOOLS}/.env +SKIP_LEGACY_SHELL=1 +DRIVERS_TOOLS="$DRIVERS_TOOLS" +MONGO_ORCHESTRATION_HOME="$MONGO_ORCHESTRATION_HOME" +MONGODB_BINARIES="$MONGODB_BINARIES" +EOT # Add these expansions to make it easier to call out tests scripts from the EVG yaml cat <<EOT > expansion.yml DRIVERS_TOOLS: "$DRIVERS_TOOLS" PROJECT_DIRECTORY: "$PROJECT_DIRECTORY" EOT + +# If the toolchain is available, symlink binaries to the bin dir. This has to be done +# after drivers-tools is cloned, since we might be using its binary dir. +_bin_path="" +if [ "Windows_NT" == "${OS:-}" ]; then + _bin_path="/cygdrive/c/Python/Current/Scripts" +elif [ "$(uname -s)" == "Darwin" ]; then + _bin_path="/Library/Frameworks/Python.Framework/Versions/Current/bin" +else + _bin_path="/opt/python/Current/bin" +fi +if [ -d "${_bin_path}" ]; then + _suffix="" + if [ "Windows_NT" == "${OS:-}" ]; then + _suffix=".exe" + fi + echo "Symlinking binaries from toolchain" + mkdir -p $PYMONGO_BIN_DIR + ln -s ${_bin_path}/just${_suffix} $PYMONGO_BIN_DIR/just${_suffix} + ln -s ${_bin_path}/uv${_suffix} $PYMONGO_BIN_DIR/uv${_suffix} + ln -s ${_bin_path}/uvx${_suffix} $PYMONGO_BIN_DIR/uvx${_suffix} +fi diff --git a/.evergreen/scripts/create-spec-pr.sh b/.evergreen/scripts/create-spec-pr.sh new file mode 100755 index 0000000000..a5e49bb211 --- /dev/null +++ b/.evergreen/scripts/create-spec-pr.sh @@ -0,0 +1,50 @@ +#!/usr/bin/env bash + +tools="$(realpath -s "../drivers-tools")" +pushd $tools/.evergreen/github_app || exit + +owner="mongodb" +repo="mongo-python-driver" + +# Bootstrap the app. +echo "bootstrapping" +source utils.sh +bootstrap drivers/comment-bot + +# Run the app. +source ./secrets-export.sh + +# Get a github access token for the git checkout. +echo "Getting github token..." + +token=$(bash ./get-access-token.sh $repo $owner) +if [ -z "${token}" ]; then + echo "Failed to get github access token!" + popd || exit + exit 1 +fi +echo "Getting github token... done." +popd || exit + +# Make the git checkout and create a new branch. +echo "Creating the git checkout..." +branch="spec-resync-"$(date '+%m-%d-%Y') + +git remote set-url origin https://x-access-token:${token}@github.com/$owner/$repo.git +git checkout -b $branch "origin/master" +git add ./test +git commit -am "resyncing specs $(date '+%m-%d-%Y')" +echo "Creating the git checkout... done." + +git push origin $branch +resp=$(curl -L \ -X POST \ -H "Accept: application/vnd.github+json" \ -H "Authorization: Bearer $token" \ -H "X-GitHub-Api-Version: 2022-11-28" \ -d "{\"title\":\"[Spec Resync] $(date '+%m-%d-%Y')\",\"body\":\"$(cat "$1")\",\"head\":\"${branch}\",\"base\":\"master\"}" \ --url https://api.github.com/repos/$owner/$repo/pulls) +echo $resp | jq '.html_url' +echo "Creating the PR... done." + +rm -rf $tools diff --git a/.evergreen/scripts/download-and-merge-coverage.sh b/.evergreen/scripts/download-and-merge-coverage.sh index 808bb957ef..c006813ba9 100755 --- a/.evergreen/scripts/download-and-merge-coverage.sh +++ b/.evergreen/scripts/download-and-merge-coverage.sh @@ -1,4 +1,4 @@ #!/bin/bash - # Download all the task coverage files. 
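
For reference, the curl call in the new create-spec-pr.sh above maps onto a small Python equivalent, shown here with only the endpoint, headers, and payload fields that appear in the script (the function name is illustrative):

    import json
    import urllib.request

    def create_pull_request(token, owner, repo, branch, title, body):
        payload = {"title": title, "body": body, "head": branch, "base": "master"}
        req = urllib.request.Request(
            f"https://api.github.com/repos/{owner}/{repo}/pulls",
            data=json.dumps(payload).encode(),
            headers={
                "Accept": "application/vnd.github+json",
                "Authorization": f"Bearer {token}",
                "X-GitHub-Api-Version": "2022-11-28",
            },
            method="POST",
        )
        with urllib.request.urlopen(req) as resp:
            return json.load(resp)["html_url"]
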
+set -eu aws s3 cp --recursive s3://"$1"/coverage/"$2"/"$3"/coverage/ coverage/ diff --git a/.evergreen/scripts/ensure-hatch.sh b/.evergreen/scripts/ensure-hatch.sh deleted file mode 100755 index a57b705127..0000000000 --- a/.evergreen/scripts/ensure-hatch.sh +++ /dev/null @@ -1,55 +0,0 @@ -#!/bin/bash - -set -eu - -# Ensure hatch is available. -if [ ! -x "$(command -v hatch)" ]; then - # Install a virtual env with "hatch" - # Ensure there is a python venv. - . .evergreen/utils.sh - - if [ -z "${PYTHON_BINARY:-}" ]; then - PYTHON_BINARY=$(find_python3) - fi - VENV_DIR=.venv - if [ ! -d $VENV_DIR ]; then - echo "Creating virtual environment..." - createvirtualenv "$PYTHON_BINARY" .venv - echo "Creating virtual environment... done." - fi - if [ -f $VENV_DIR/Scripts/activate ]; then - . $VENV_DIR/Scripts/activate - else - . $VENV_DIR/bin/activate - fi - - python --version - - echo "Installing hatch..." - python -m pip install -U pip - python -m pip install hatch || { - # Install rust and try again. - CARGO_HOME=${CARGO_HOME:-${DRIVERS_TOOLS}/.cargo} - # Handle paths on Windows. - if [ "Windows_NT" = "${OS:-}" ]; then # Magic variable in cygwin - CARGO_HOME=$(cygpath -m $CARGO_HOME) - fi - export RUSTUP_HOME="${CARGO_HOME}/.rustup" - ${DRIVERS_TOOLS}/.evergreen/install-rust.sh - source "${CARGO_HOME}/env" - python -m pip install hatch - } - # Ensure hatch does not write to user or global locations. - touch hatch_config.toml - HATCH_CONFIG=$(pwd)/hatch_config.toml - if [ "Windows_NT" = "${OS:-}" ]; then # Magic variable in cygwin - HATCH_CONFIG=$(cygpath -m "$HATCH_CONFIG") - fi - export HATCH_CONFIG - hatch config restore - hatch config set dirs.data "$(pwd)/.hatch/data" - hatch config set dirs.cache "$(pwd)/.hatch/cache" - - echo "Installing hatch... done." -fi -hatch --version diff --git a/.evergreen/scripts/fix-absolute-paths.sh b/.evergreen/scripts/fix-absolute-paths.sh deleted file mode 100755 index eb9433c673..0000000000 --- a/.evergreen/scripts/fix-absolute-paths.sh +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/bash - -set +x -. src/.evergreen/scripts/env.sh -# shellcheck disable=SC2044 -for filename in $(find $DRIVERS_TOOLS -name \*.json); do - perl -p -i -e "s|ABSOLUTE_PATH_REPLACEMENT_TOKEN|$DRIVERS_TOOLS|g" $filename -done diff --git a/.evergreen/scripts/generate-config.sh b/.evergreen/scripts/generate-config.sh new file mode 100755 index 0000000000..70b4578cf9 --- /dev/null +++ b/.evergreen/scripts/generate-config.sh @@ -0,0 +1,6 @@ +#!/bin/bash +# Entry point for the generate-config pre-commit hook. + +set -eu + +python .evergreen/scripts/generate_config.py diff --git a/.evergreen/scripts/generate_config.py b/.evergreen/scripts/generate_config.py index 1637ae9711..daec0841d5 100644 --- a/.evergreen/scripts/generate_config.py +++ b/.evergreen/scripts/generate_config.py @@ -1,227 +1,48 @@ -# /// script -# requires-python = ">=3.9" -# dependencies = [ -# "shrub.py>=3.2.0", -# "pyyaml>=6.0.2" -# ] -# /// - -# Note: Run this file with `hatch run`, `pipx run`, or `uv run`. +# Note: See CONTRIBUTING.md for how to update/run this file. 
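
Aside: the deleted fix-absolute-paths.sh above used an in-place perl substitution over every JSON file under $DRIVERS_TOOLS. The equivalent operation in Python, for reference (a sketch; no error handling):

    import os
    from pathlib import Path

    drivers_tools = os.environ["DRIVERS_TOOLS"]
    for path in Path(drivers_tools).rglob("*.json"):
        text = path.read_text()
        if "ABSOLUTE_PATH_REPLACEMENT_TOKEN" in text:
            # Same substitution the perl one-liner performed.
            path.write_text(text.replace("ABSOLUTE_PATH_REPLACEMENT_TOKEN", drivers_tools))
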
from __future__ import annotations import sys -from dataclasses import dataclass -from inspect import getmembers, isfunction -from itertools import cycle, product, zip_longest -from pathlib import Path -from typing import Any - +from itertools import product + +from generate_config_utils import ( + ALL_PYTHONS, + ALL_VERSIONS, + BATCHTIME_DAY, + BATCHTIME_WEEK, + C_EXTS, + CPYTHONS, + DEFAULT_HOST, + HOSTS, + MIN_MAX_PYTHON, + OTHER_HOSTS, + PYPYS, + SYNCS, + TOPOLOGIES, + create_variant, + get_assume_role, + get_s3_put, + get_standard_auth_ssl, + get_subprocess_exec, + get_task_name, + get_variant_name, + get_versions_from, + handle_c_ext, + write_functions_to_file, + write_tasks_to_file, + write_variants_to_file, + zip_cycle, +) from shrub.v3.evg_build_variant import BuildVariant -from shrub.v3.evg_command import FunctionCall -from shrub.v3.evg_project import EvgProject -from shrub.v3.evg_task import EvgTask, EvgTaskRef -from shrub.v3.shrub_service import ShrubService - -############## -# Globals -############## - -ALL_VERSIONS = ["4.0", "4.4", "5.0", "6.0", "7.0", "8.0", "rapid", "latest"] -CPYTHONS = ["3.9", "3.10", "3.11", "3.12", "3.13"] -PYPYS = ["pypy3.9", "pypy3.10"] -ALL_PYTHONS = CPYTHONS + PYPYS -MIN_MAX_PYTHON = [CPYTHONS[0], CPYTHONS[-1]] -BATCHTIME_WEEK = 10080 -AUTH_SSLS = [("auth", "ssl"), ("noauth", "ssl"), ("noauth", "nossl")] -TOPOLOGIES = ["standalone", "replica_set", "sharded_cluster"] -C_EXTS = ["with_ext", "without_ext"] -# By default test each of the topologies with a subset of auth/ssl. -SUB_TASKS = [ - ".sharded_cluster .auth .ssl", - ".replica_set .noauth .ssl", - ".standalone .noauth .nossl", -] -SYNCS = ["sync", "async", "sync_async"] -DISPLAY_LOOKUP = dict( - ssl=dict(ssl="SSL", nossl="NoSSL"), - auth=dict(auth="Auth", noauth="NoAuth"), - test_suites=dict(default="Sync", default_async="Async"), - coverage=dict(coverage="cov"), - no_ext={"1": "No C"}, +from shrub.v3.evg_command import ( + FunctionCall, + archive_targz_pack, + attach_results, + attach_xunit_results, + ec2_assume_role, + expansions_update, + git_get_project, ) -HOSTS = dict() - - -@dataclass -class Host: - name: str - run_on: str - display_name: str - variables: dict[str, str] | None - - -# Hosts with toolchains. 
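
On the deleted Host dataclass above: each entry is keyed on a short name, `run_on` is the Evergreen distro to schedule on, and `display_name` feeds variant names. An illustrative construction matching the first entry in the host table that follows:

    # Equivalent to HOSTS["rhel8"] below.
    default = Host("rhel8", "rhel87-small", "RHEL8", dict())
    # The deleted get_python_binary helper then maps a python version onto
    # this host's toolchain layout, e.g. "/opt/python/3.9/bin/python3".
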
-HOSTS["rhel8"] = Host("rhel8", "rhel87-small", "RHEL8", dict()) -HOSTS["win64"] = Host("win64", "windows-64-vsMulti-small", "Win64", dict()) -HOSTS["win32"] = Host("win32", "windows-64-vsMulti-small", "Win32", dict()) -HOSTS["macos"] = Host("macos", "macos-14", "macOS", dict()) -HOSTS["macos-arm64"] = Host("macos-arm64", "macos-14-arm64", "macOS Arm64", dict()) -HOSTS["ubuntu20"] = Host("ubuntu20", "ubuntu2004-small", "Ubuntu-20", dict()) -HOSTS["ubuntu22"] = Host("ubuntu22", "ubuntu2204-small", "Ubuntu-22", dict()) -HOSTS["rhel7"] = Host("rhel7", "rhel79-small", "RHEL7", dict()) -DEFAULT_HOST = HOSTS["rhel8"] - -# Other hosts -OTHER_HOSTS = ["RHEL9-FIPS", "RHEL8-zseries", "RHEL8-POWER8", "RHEL8-arm64"] -for name, run_on in zip( - OTHER_HOSTS, ["rhel92-fips", "rhel8-zseries-small", "rhel8-power-small", "rhel82-arm64-small"] -): - HOSTS[name] = Host(name, run_on, name, dict()) - - -############## -# Helpers -############## - - -def create_variant_generic( - task_names: list[str], - display_name: str, - *, - host: Host | None = None, - default_run_on="rhel87-small", - expansions: dict | None = None, - **kwargs: Any, -) -> BuildVariant: - """Create a build variant for the given inputs.""" - task_refs = [EvgTaskRef(name=n) for n in task_names] - expansions = expansions and expansions.copy() or dict() - if "run_on" in kwargs: - run_on = kwargs.pop("run_on") - elif host: - run_on = [host.run_on] - if host.variables: - expansions.update(host.variables) - else: - run_on = [default_run_on] - if isinstance(run_on, str): - run_on = [run_on] - name = display_name.replace(" ", "-").replace("*-", "").lower() - return BuildVariant( - name=name, - display_name=display_name, - tasks=task_refs, - expansions=expansions or None, - run_on=run_on, - **kwargs, - ) - - -def create_variant( - task_names: list[str], - display_name: str, - *, - version: str | None = None, - host: Host | None = None, - python: str | None = None, - expansions: dict | None = None, - **kwargs: Any, -) -> BuildVariant: - expansions = expansions and expansions.copy() or dict() - if version: - expansions["VERSION"] = version - if python: - expansions["PYTHON_BINARY"] = get_python_binary(python, host) - return create_variant_generic( - task_names, display_name, version=version, host=host, expansions=expansions, **kwargs - ) - - -def get_python_binary(python: str, host: Host) -> str: - """Get the appropriate python binary given a python version and host.""" - name = host.name - if name in ["win64", "win32"]: - if name == "win32": - base = "C:/python/32" - else: - base = "C:/python" - python = python.replace(".", "") - return f"{base}/Python{python}/python.exe" - - if name in ["rhel8", "ubuntu22", "ubuntu20", "rhel7"]: - return f"/opt/python/{python}/bin/python3" - - if name in ["macos", "macos-arm64"]: - return f"/Library/Frameworks/Python.Framework/Versions/{python}/bin/python3" - - raise ValueError(f"no match found for python {python} on {name}") - - -def get_versions_from(min_version: str) -> list[str]: - """Get all server versions starting from a minimum version.""" - min_version_float = float(min_version) - rapid_latest = ["rapid", "latest"] - versions = [v for v in ALL_VERSIONS if v not in rapid_latest] - return [v for v in versions if float(v) >= min_version_float] + rapid_latest - - -def get_versions_until(max_version: str) -> list[str]: - """Get all server version up to a max version.""" - max_version_float = float(max_version) - versions = [v for v in ALL_VERSIONS if v not in ["rapid", "latest"]] - versions = [v for v in versions 
if float(v) <= max_version_float] - if not len(versions): - raise ValueError(f"No server versions found less <= {max_version}") - return versions - - -def get_display_name(base: str, host: Host | None = None, **kwargs) -> str: - """Get the display name of a variant.""" - display_name = base - if host is not None: - display_name += f" {host.display_name}" - version = kwargs.pop("VERSION", None) - version = version or kwargs.pop("version", None) - if version: - if version not in ["rapid", "latest"]: - version = f"v{version}" - display_name = f"{display_name} {version}" - for key, value in kwargs.items(): - name = value - if key.lower() == "python": - if not value.startswith("pypy"): - name = f"Python{value}" - else: - name = f"PyPy{value.replace('pypy', '')}" - elif key.lower() in DISPLAY_LOOKUP: - name = DISPLAY_LOOKUP[key.lower()][value] - else: - continue - display_name = f"{display_name} {name}" - return display_name - - -def zip_cycle(*iterables, empty_default=None): - """Get all combinations of the inputs, cycling over the shorter list(s).""" - cycles = [cycle(i) for i in iterables] - for _ in zip_longest(*iterables): - yield tuple(next(i, empty_default) for i in cycles) - - -def handle_c_ext(c_ext, expansions): - """Handle c extension option.""" - if c_ext == C_EXTS[0]: - expansions["NO_EXT"] = "1" - - -def generate_yaml(tasks=None, variants=None): - """Generate the yaml for a given set of tasks and variants.""" - project = EvgProject(tasks=tasks, buildvariants=variants) - out = ShrubService.generate_yaml(project) - # Dedent by two spaces to match what we use in config.yml - lines = [line[2:] for line in out.splitlines()] - print("\n".join(lines)) # noqa: T201 - +from shrub.v3.evg_task import EvgTask, EvgTaskDependency, EvgTaskRef ############## # Variants @@ -230,90 +51,60 @@ def generate_yaml(tasks=None, variants=None): def create_ocsp_variants() -> list[BuildVariant]: variants = [] - batchtime = BATCHTIME_WEEK * 2 - expansions = dict(AUTH="noauth", SSL="ssl", TOPOLOGY="server") - base_display = "OCSP" - - # OCSP tests on default host with all servers v4.4+ and all python versions. - versions = [v for v in ALL_VERSIONS if v != "4.0"] - for version, python in zip_cycle(versions, ALL_PYTHONS): - host = DEFAULT_HOST - variant = create_variant( - [".ocsp"], - get_display_name(base_display, host, version=version, python=python), - python=python, - version=version, - host=host, - expansions=expansions, - batchtime=batchtime, - ) - variants.append(variant) - - # OCSP tests on Windows and MacOS. - # MongoDB servers on these hosts do not staple OCSP responses and only support RSA. - for host_name, version in product(["win64", "macos"], ["4.4", "8.0"]): + # OCSP tests on default host with all servers v4.4+. + # MongoDB servers on Windows and MacOS do not staple OCSP responses and only support RSA. + # Only test with MongoDB 4.4 and latest. 
+ for host_name in ["rhel8", "win64", "macos"]: host = HOSTS[host_name] - python = CPYTHONS[0] if version == "4.4" else CPYTHONS[-1] + if host == DEFAULT_HOST: + tasks = [".ocsp"] + else: + tasks = [".ocsp-rsa !.ocsp-staple .latest", ".ocsp-rsa !.ocsp-staple .4.4"] variant = create_variant( - [".ocsp-rsa !.ocsp-staple"], - get_display_name(base_display, host, version=version, python=python), - python=python, - version=version, + tasks, + get_variant_name("OCSP", host), host=host, - expansions=expansions, - batchtime=batchtime, + batchtime=BATCHTIME_WEEK, ) variants.append(variant) - return variants -def create_server_variants() -> list[BuildVariant]: +def create_server_version_variants() -> list[BuildVariant]: variants = [] - - # Run the full matrix on linux with min and max CPython, and latest pypy. - host = DEFAULT_HOST - # Prefix the display name with an asterisk so it is sorted first. - base_display_name = "* Test" - for python in [*MIN_MAX_PYTHON, PYPYS[-1]]: - expansions = dict(COVERAGE="coverage") - display_name = get_display_name(base_display_name, host, python=python, **expansions) + for version in ALL_VERSIONS: + display_name = get_variant_name("* MongoDB", version=version) variant = create_variant( - [f".{t} .sync_async" for t in TOPOLOGIES], + [".server-version"], display_name, - python=python, - host=host, + version=version, + host=DEFAULT_HOST, tags=["coverage_tag"], - expansions=expansions, ) variants.append(variant) + return variants - # Test the rest of the pythons. - for python in CPYTHONS[1:-1] + PYPYS[:-1]: - display_name = f"Test {host}" - display_name = get_display_name(base_display_name, host, python=python) - variant = create_variant( - [f"{t} .sync_async" for t in SUB_TASKS], - display_name, - python=python, - host=host, - expansions=expansions, - ) - variants.append(variant) + +def create_standard_nonlinux_variants() -> list[BuildVariant]: + variants = [] + base_display_name = "* Test" # Test a subset on each of the other platforms. 
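
Task lists like `.test-standard !.pypy` below are Evergreen tag selectors: `.tag` pulls in every task carrying that tag, and `!.tag` subtracts matches. Rendered through shrub, a variant built from them looks roughly like this (a sketch reusing the helpers imported above):

    from shrub.v3.evg_project import EvgProject
    from shrub.v3.shrub_service import ShrubService

    variant = create_variant(
        [".test-standard !.pypy"],  # all test-standard tasks except PyPy ones
        get_variant_name("* Test", HOSTS["macos"]),
        host=HOSTS["macos"],
    )
    print(ShrubService.generate_yaml(EvgProject(tasks=None, buildvariants=[variant])))
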
for host_name in ("macos", "macos-arm64", "win64", "win32"): - for python in MIN_MAX_PYTHON: - tasks = [f"{t} !.sync_async" for t in SUB_TASKS] - # MacOS arm64 only works on server versions 6.0+ - if host_name == "macos-arm64": - tasks = [] - for version in get_versions_from("6.0"): - tasks.extend(f"{t} .{version} !.sync_async" for t in SUB_TASKS) - host = HOSTS[host_name] - display_name = get_display_name(base_display_name, host, python=python) - variant = create_variant(tasks, display_name, python=python, host=host) - variants.append(variant) + tasks = [".test-standard !.pypy"] + # MacOS arm64 only works on server versions 6.0+ + if host_name == "macos-arm64": + tasks = [ + f".test-standard !.pypy .server-{version}" for version in get_versions_from("6.0") + ] + host = HOSTS[host_name] + tags = ["standard-non-linux"] + expansions = dict() + if host_name == "win32": + expansions["IS_WIN32"] = "1" + display_name = get_variant_name(base_display_name, host) + variant = create_variant(tasks, display_name, host=host, tags=tags, expansions=expansions) + variants.append(variant) return variants @@ -321,27 +112,28 @@ def create_server_variants() -> list[BuildVariant]: def create_encryption_variants() -> list[BuildVariant]: variants = [] tags = ["encryption_tag"] - batchtime = BATCHTIME_WEEK + batchtime = BATCHTIME_DAY def get_encryption_expansions(encryption): - expansions = dict(test_encryption="true") + expansions = dict(TEST_NAME="encryption") if "crypt_shared" in encryption: - expansions["test_crypt_shared"] = "true" + expansions["TEST_CRYPT_SHARED"] = "true" if "PyOpenSSL" in encryption: - expansions["test_encryption_pyopenssl"] = "true" + expansions["SUB_TEST_NAME"] = "pyopenssl" return expansions - host = DEFAULT_HOST - - # Test against all server versions for the three main python versions. - encryptions = ["Encryption", "Encryption crypt_shared", "Encryption PyOpenSSL"] - for encryption, python in product(encryptions, [*MIN_MAX_PYTHON, PYPYS[-1]]): + # Test encryption on all hosts. + for encryption, host in product( + ["Encryption", "Encryption crypt_shared"], ["rhel8", "macos", "win64"] + ): expansions = get_encryption_expansions(encryption) - display_name = get_display_name(encryption, host, python=python, **expansions) + display_name = get_variant_name(encryption, host, **expansions) + tasks = [".test-non-standard", ".test-min-deps"] + if host != "rhel8": + tasks = [".test-non-standard !.pypy"] variant = create_variant( - [f"{t} .sync_async" for t in SUB_TASKS], + tasks, display_name, - python=python, host=host, expansions=expansions, batchtime=batchtime, @@ -349,171 +141,110 @@ def get_encryption_expansions(encryption): ) variants.append(variant) - # Test the rest of the pythons on linux for all server versions. - for encryption, python, task in zip_cycle(encryptions, CPYTHONS[1:-1] + PYPYS[:-1], SUB_TASKS): - expansions = get_encryption_expansions(encryption) - display_name = get_display_name(encryption, host, python=python, **expansions) - variant = create_variant( - [f"{task} .sync_async"], - display_name, - python=python, - host=host, - expansions=expansions, - ) - variants.append(variant) - - # Test on macos and linux on one server version and topology for min and max python. 
- encryptions = ["Encryption", "Encryption crypt_shared"] - task_names = [".latest .replica_set .sync_async"] - for host_name, encryption, python in product(["macos", "win64"], encryptions, MIN_MAX_PYTHON): - host = HOSTS[host_name] - expansions = get_encryption_expansions(encryption) - display_name = get_display_name(encryption, host, python=python, **expansions) - variant = create_variant( - task_names, - display_name, - python=python, - host=host, - expansions=expansions, - batchtime=batchtime, - tags=tags, - ) - variants.append(variant) + # Test PyOpenSSL on linux. + host = DEFAULT_HOST + encryption = "Encryption PyOpenSSL" + expansions = get_encryption_expansions(encryption) + display_name = get_variant_name(encryption, host, **expansions) + variant = create_variant( + [".test-non-standard"], + display_name, + host=host, + expansions=expansions, + batchtime=batchtime, + tags=tags, + ) + variants.append(variant) return variants def create_load_balancer_variants(): - # Load balancer tests - run all supported server versions using the lowest supported python. - host = DEFAULT_HOST - batchtime = BATCHTIME_WEEK - versions = get_versions_from("6.0") - variants = [] - for version in versions: - python = CPYTHONS[0] - display_name = get_display_name("Load Balancer", host, python=python, version=version) - variant = create_variant( - [".load-balancer"], - display_name, - python=python, - host=host, - version=version, - batchtime=batchtime, + tasks = [ + f".test-non-standard .server-{v} .sharded_cluster-auth-ssl" + for v in get_versions_from("6.0") + ] + expansions = dict(TEST_NAME="load_balancer") + return [ + create_variant( + tasks, + "Load Balancer", + host=DEFAULT_HOST, + batchtime=BATCHTIME_DAY, + expansions=expansions, ) - variants.append(variant) - return variants + ] def create_compression_variants(): - # Compression tests - standalone versions of each server, across python versions, with and without c extensions. - # PyPy interpreters are always tested without extensions. + # Compression tests - use the standard linux tests. 
host = DEFAULT_HOST - base_task = ".standalone .noauth .nossl .sync_async" - task_names = dict(snappy=[base_task], zlib=[base_task], zstd=[f"{base_task} !.4.0"]) variants = [] - for ind, (compressor, c_ext) in enumerate(product(["snappy", "zlib", "zstd"], C_EXTS)): - expansions = dict(COMPRESSORS=compressor) - handle_c_ext(c_ext, expansions) - base_name = f"Compression {compressor}" - python = CPYTHONS[ind % len(CPYTHONS)] - display_name = get_display_name(base_name, host, python=python, **expansions) - variant = create_variant( - task_names[compressor], - display_name, - python=python, - host=host, - expansions=expansions, - ) - variants.append(variant) - - other_pythons = PYPYS + CPYTHONS[ind:] - for compressor, python in zip_cycle(["snappy", "zlib", "zstd"], other_pythons): - expansions = dict(COMPRESSORS=compressor) - handle_c_ext(c_ext, expansions) - base_name = f"Compression {compressor}" - display_name = get_display_name(base_name, host, python=python, **expansions) - variant = create_variant( - task_names[compressor], - display_name, - python=python, - host=host, - expansions=expansions, + for compressor in "snappy", "zlib", "zstd": + expansions = dict(COMPRESSOR=compressor) + if compressor == "zstd": + tasks = [".test-standard !.server-4.2"] + else: + tasks = [".test-standard"] + display_name = get_variant_name(f"Compression {compressor}", host) + variants.append( + create_variant( + tasks, + display_name, + host=host, + expansions=expansions, + ) ) - variants.append(variant) - return variants def create_enterprise_auth_variants(): - expansions = dict(AUTH="auth") variants = [] - - # All python versions across platforms. - for python in ALL_PYTHONS: - if python == CPYTHONS[0]: - host = HOSTS["macos"] - elif python == CPYTHONS[-1]: - host = HOSTS["win64"] - else: - host = DEFAULT_HOST - display_name = get_display_name("Auth Enterprise", host, python=python, **expansions) - variant = create_variant( - ["test-enterprise-auth"], display_name, host=host, python=python, expansions=expansions - ) + for host in ["rhel8", "macos", "win64"]: + expansions = dict(TEST_NAME="enterprise_auth", AUTH="auth") + display_name = get_variant_name("Auth Enterprise", host) + tasks = [".test-standard-auth .auth !.free-threaded"] + # https://jira.mongodb.org/browse/PYTHON-5586 + if host == "macos": + tasks = [".test-standard-auth !.pypy .auth !.free-threaded"] + if host == "win64": + tasks = [".test-standard-auth !.pypy .auth"] + variant = create_variant(tasks, display_name, host=host, expansions=expansions) variants.append(variant) - return variants def create_pyopenssl_variants(): base_name = "PyOpenSSL" - batchtime = BATCHTIME_WEEK - expansions = dict(test_pyopenssl="true") + batchtime = BATCHTIME_DAY + expansions = dict(SUB_TEST_NAME="pyopenssl") variants = [] - for python in ALL_PYTHONS: - # Only test "noauth" with min python. 
- auth = "noauth" if python == CPYTHONS[0] else "auth" - ssl = "nossl" if auth == "noauth" else "ssl" - if python == CPYTHONS[0]: - host = HOSTS["macos"] - elif python == CPYTHONS[-1]: - host = HOSTS["win64"] - else: - host = DEFAULT_HOST - - display_name = get_display_name(base_name, host, python=python) - variant = create_variant( - [f".replica_set .{auth} .{ssl} .sync_async", f".7.0 .{auth} .{ssl} .sync_async"], - display_name, - python=python, - host=host, - expansions=expansions, - batchtime=batchtime, + for host in ["rhel8", "macos", "win64"]: + display_name = get_variant_name(base_name, host) + base_task = ".test-standard" if host == "rhel8" else ".test-standard !.pypy" + # We only need to run a subset on async. + tasks = [f"{base_task} .sync", f"{base_task} .async .replica_set-noauth-ssl"] + variants.append( + create_variant( + tasks, + display_name, + expansions=expansions, + batchtime=batchtime, + ) ) - variants.append(variant) return variants def create_storage_engine_variants(): host = DEFAULT_HOST - engines = ["InMemory", "MMAPv1"] + engines = ["InMemory"] variants = [] for engine in engines: - python = CPYTHONS[0] expansions = dict(STORAGE_ENGINE=engine.lower()) - if engine == engines[0]: - tasks = [f".standalone .noauth .nossl .{v} .sync_async" for v in ALL_VERSIONS] - else: - # MongoDB 4.2 drops support for MMAPv1 - versions = get_versions_until("4.0") - tasks = [f".standalone .{v} .noauth .nossl .sync_async" for v in versions] + [ - f".replica_set .{v} .noauth .nossl .sync_async" for v in versions - ] - display_name = get_display_name(f"Storage {engine}", host, python=python) - variant = create_variant( - tasks, display_name, host=host, python=python, expansions=expansions - ) + tasks = [".test-standard .standalone-noauth-nossl"] + display_name = get_variant_name(f"Storage {engine}", host) + variant = create_variant(tasks, display_name, host=host, expansions=expansions) variants.append(variant) return variants @@ -521,12 +252,11 @@ def create_storage_engine_variants(): def create_stable_api_variants(): host = DEFAULT_HOST tags = ["versionedApi_tag"] - tasks = [f".standalone .{v} .noauth .nossl .sync_async" for v in get_versions_from("5.0")] variants = [] types = ["require v1", "accept v2"] # All python versions across platforms. - for python, test_type in product(MIN_MAX_PYTHON, types): + for test_type in types: expansions = dict(AUTH="auth") # Test against a cluster with requireApiVersion=1. if test_type == types[0]: @@ -535,16 +265,22 @@ def create_stable_api_variants(): expansions["REQUIRE_API_VERSION"] = "1" # MONGODB_API_VERSION is the apiVersion to use in the test suite. expansions["MONGODB_API_VERSION"] = "1" + tasks = [ + f".test-standard !.replica_set-noauth-ssl .server-{v}" + for v in get_versions_from("5.0") + ] else: # Test against a cluster with acceptApiVersion2 but without # requireApiVersion, and don't automatically add apiVersion to # clients created in the test suite. 
expansions["ORCHESTRATION_FILE"] = "versioned-api-testing.json" + tasks = [ + f".test-standard .server-{v} .standalone-noauth-nossl" + for v in get_versions_from("5.0") + ] base_display_name = f"Stable API {test_type}" - display_name = get_display_name(base_display_name, host, python=python, **expansions) - variant = create_variant( - tasks, display_name, host=host, python=python, tags=tags, expansions=expansions - ) + display_name = get_variant_name(base_display_name, host, **expansions) + variant = create_variant(tasks, display_name, host=host, tags=tags, expansions=expansions) variants.append(variant) return variants @@ -552,111 +288,69 @@ def create_stable_api_variants(): def create_green_framework_variants(): variants = [] - tasks = [".standalone .noauth .nossl .sync_async"] host = DEFAULT_HOST - for python, framework in product([CPYTHONS[0], CPYTHONS[-2]], ["eventlet", "gevent"]): - expansions = dict(GREEN_FRAMEWORK=framework, AUTH="auth", SSL="ssl") - display_name = get_display_name(f"Green {framework.capitalize()}", host, python=python) - variant = create_variant( - tasks, display_name, host=host, python=python, expansions=expansions - ) + for framework in ["gevent"]: + tasks = [".test-standard .sync !.free-threaded"] + expansions = dict(GREEN_FRAMEWORK=framework) + display_name = get_variant_name(f"Green {framework.capitalize()}", host) + variant = create_variant(tasks, display_name, host=host, expansions=expansions) variants.append(variant) return variants def create_no_c_ext_variants(): - variants = [] host = DEFAULT_HOST - for python, topology in zip_cycle(CPYTHONS, TOPOLOGIES): - tasks = [f".{topology} .noauth .nossl .sync_async"] - expansions = dict() - handle_c_ext(C_EXTS[0], expansions) - display_name = get_display_name("No C Ext", host, python=python) - variant = create_variant( - tasks, display_name, host=host, python=python, expansions=expansions - ) - variants.append(variant) - return variants - - -def create_atlas_data_lake_variants(): - variants = [] - host = HOSTS["ubuntu22"] - for python, c_ext in product(MIN_MAX_PYTHON, C_EXTS): - tasks = ["atlas-data-lake-tests"] - expansions = dict(AUTH="auth") - handle_c_ext(c_ext, expansions) - display_name = get_display_name("Atlas Data Lake", host, python=python, **expansions) - variant = create_variant( - tasks, display_name, host=host, python=python, expansions=expansions - ) - variants.append(variant) - return variants + tasks = [".test-standard"] + expansions = dict() + handle_c_ext(C_EXTS[0], expansions) + display_name = get_variant_name("No C Ext", host) + return [create_variant(tasks, display_name, host=host)] def create_mod_wsgi_variants(): - variants = [] host = HOSTS["ubuntu22"] - tasks = [ - "mod-wsgi-standalone", - "mod-wsgi-replica-set", - "mod-wsgi-embedded-mode-standalone", - "mod-wsgi-embedded-mode-replica-set", - ] + tasks = [".mod_wsgi"] expansions = dict(MOD_WSGI_VERSION="4") - for python in MIN_MAX_PYTHON: - display_name = get_display_name("mod_wsgi", host, python=python) - variant = create_variant( - tasks, display_name, host=host, python=python, expansions=expansions - ) - variants.append(variant) - return variants + display_name = get_variant_name("Mod_WSGI", host) + return [create_variant(tasks, display_name, host=host, expansions=expansions)] def create_disable_test_commands_variants(): host = DEFAULT_HOST expansions = dict(AUTH="auth", SSL="ssl", DISABLE_TEST_COMMANDS="1") - python = CPYTHONS[0] - display_name = get_display_name("Disable test commands", host, python=python) - tasks = [".latest 
.sync_async"] - return [create_variant(tasks, display_name, host=host, python=python, expansions=expansions)] - - -def create_serverless_variants(): - host = DEFAULT_HOST - batchtime = BATCHTIME_WEEK - expansions = dict(test_serverless="true", AUTH="auth", SSL="ssl") - tasks = ["serverless_task_group"] - base_name = "Serverless" - return [ - create_variant( - tasks, - get_display_name(base_name, host, python=python), - host=host, - python=python, - expansions=expansions, - batchtime=batchtime, - ) - for python in MIN_MAX_PYTHON - ] + display_name = get_variant_name("Disable test commands", host) + tasks = [".test-standard .server-latest"] + return [create_variant(tasks, display_name, host=host, expansions=expansions)] def create_oidc_auth_variants(): variants = [] - other_tasks = ["testazureoidc_task_group", "testgcpoidc_task_group", "testk8soidc_task_group"] for host_name in ["ubuntu22", "macos", "win64"]: - tasks = ["testoidc_task_group"] if host_name == "ubuntu22": - tasks += other_tasks + tasks = [".auth_oidc_remote"] + else: + tasks = ["!.auth_oidc_remote .auth_oidc"] host = HOSTS[host_name] variants.append( create_variant( tasks, - get_display_name("Auth OIDC", host), + get_variant_name("Auth OIDC", host), host=host, - batchtime=BATCHTIME_WEEK * 2, + batchtime=BATCHTIME_DAY, ) ) + # Add a specific local test to run on PRs. + if host_name == "ubuntu22": + tasks = ["!.auth_oidc_remote .auth_oidc"] + variants.append( + create_variant( + tasks, + get_variant_name("Auth OIDC Local", host), + tags=["pr"], + host=host, + batchtime=BATCHTIME_DAY, + ) + ) return variants @@ -665,8 +359,8 @@ def create_search_index_variants(): python = CPYTHONS[0] return [ create_variant( - ["test_atlas_task_group_search_indexes"], - get_display_name("Search Index Helpers", host, python=python), + [".search_index"], + get_variant_name("Search Index Helpers", host, python=python), python=python, host=host, ) @@ -675,26 +369,27 @@ def create_search_index_variants(): def create_mockupdb_variants(): host = DEFAULT_HOST - python = CPYTHONS[0] + expansions = dict(TEST_NAME="mockupdb") return [ create_variant( - ["mockupdb"], - get_display_name("MockupDB", host, python=python), - python=python, + [".test-no-orchestration"], + get_variant_name("MockupDB", host), host=host, + tags=["pr"], + expansions=expansions, ) ] def create_doctests_variants(): host = DEFAULT_HOST - python = CPYTHONS[0] + expansions = dict(TEST_NAME="doctest") return [ create_variant( - ["doctests"], - get_display_name("Doctests", host, python=python), - python=python, + [".test-non-standard .standalone-noauth-nossl"], + get_variant_name("Doctests", host), host=host, + expansions=expansions, ) ] @@ -703,190 +398,780 @@ def create_atlas_connect_variants(): host = DEFAULT_HOST return [ create_variant( - ["atlas-connect"], - get_display_name("Atlas connect", host, python=python), - python=python, - host=host, + [".test-no-orchestration"], + get_variant_name("Atlas connect", host), + tags=["pr"], + host=DEFAULT_HOST, + expansions=dict(TEST_NAME="atlas_connect"), ) - for python in MIN_MAX_PYTHON ] +def create_coverage_report_variants(): + return [create_variant(["coverage-report"], "Coverage Report", host=DEFAULT_HOST)] + + +def create_kms_variants(): + tasks = [] + tasks.append(EvgTaskRef(name="test-gcpkms", batchtime=BATCHTIME_DAY)) + tasks.append("test-gcpkms-fail") + tasks.append(EvgTaskRef(name="test-azurekms", batchtime=BATCHTIME_DAY)) + tasks.append("test-azurekms-fail") + return [create_variant(tasks, "KMS", host=HOSTS["debian11"])] + + +def 
create_import_time_variants(): + return [create_variant(["check-import-time"], "Import Time", host=DEFAULT_HOST)] + + +def create_backport_pr_variants(): + return [create_variant(["backport-pr"], "Backport PR", host=DEFAULT_HOST)] + + +def create_perf_variants(): + host = HOSTS["perf"] + return [create_variant([".perf"], "Performance Benchmarks", host=host, batchtime=BATCHTIME_DAY)] + + def create_aws_auth_variants(): variants = [] - tasks = [ - "aws-auth-test-4.4", - "aws-auth-test-5.0", - "aws-auth-test-6.0", - "aws-auth-test-7.0", - "aws-auth-test-8.0", - "aws-auth-test-rapid", - "aws-auth-test-latest", - ] - for host_name, python in product(["ubuntu20", "win64", "macos"], MIN_MAX_PYTHON): + for host_name in ["ubuntu20", "win64", "macos"]: expansions = dict() - if host_name != "ubuntu20": - expansions["skip_ECS_auth_test"] = "true" + # PYTHON-5604 - we need to skip ECS tests for now. + tasks = [".auth-aws !.auth-aws-ecs"] + tags = [] if host_name == "macos": - expansions["skip_EC2_auth_test"] = "true" - expansions["skip_web_identity_auth_test"] = "true" + tasks = [".auth-aws !.auth-aws-web-identity !.auth-aws-ecs !.auth-aws-ec2"] + tags = ["pr"] + elif host_name == "win64": + tasks = [".auth-aws !.auth-aws-ecs"] host = HOSTS[host_name] variant = create_variant( tasks, - get_display_name("Auth AWS", host, python=python), + get_variant_name("Auth AWS", host), host=host, - python=python, + tags=tags, expansions=expansions, ) variants.append(variant) return variants +def create_no_server_variants(): + host = HOSTS["rhel8"] + name = get_variant_name("No server", host=host) + return [create_variant([".test-no-orchestration"], name, host=host, tags=["pr"])] + + def create_alternative_hosts_variants(): - batchtime = BATCHTIME_WEEK + batchtime = BATCHTIME_DAY variants = [] - host = HOSTS["rhel7"] - variants.append( - create_variant( - [".5.0 .standalone !.sync_async"], - get_display_name("OpenSSL 1.0.2", host, python=CPYTHONS[0]), - host=host, - python=CPYTHONS[0], - batchtime=batchtime, - ) - ) - - expansions = dict() - handle_c_ext(C_EXTS[0], expansions) + version = "latest" for host_name in OTHER_HOSTS: + expansions = dict(VERSION="latest") + handle_c_ext(C_EXTS[0], expansions) host = HOSTS[host_name] + tags = [] + if "fips" in host_name.lower(): + expansions["REQUIRE_FIPS"] = "1" + # Use explicit Python 3.11 binary on the host since the default python3 is 3.9. 
+ expansions["PYTHON_BINARY"] = "/usr/bin/python3.11" + if "amazon" in host_name.lower(): + tags.append("pr") variants.append( create_variant( - [".6.0 .standalone !.sync_async"], - display_name=get_display_name("Other hosts", host), + [".test-no-toolchain"], + display_name=get_variant_name("Other hosts", host, version=version), batchtime=batchtime, host=host, + tags=tags, expansions=expansions, ) ) return variants +def create_aws_lambda_variants(): + host = HOSTS["rhel8"] + return [create_variant([".aws_lambda"], display_name="FaaS Lambda", host=host)] + + ############## # Tasks ############## -def create_server_tasks(): +def create_server_version_tasks(): tasks = [] - for topo, version, (auth, ssl), sync in product(TOPOLOGIES, ALL_VERSIONS, AUTH_SSLS, SYNCS): - name = f"test-{version}-{topo}-{auth}-{ssl}-{sync}".lower() - tags = [version, topo, auth, ssl, sync] - bootstrap_vars = dict( - VERSION=version, - TOPOLOGY=topo if topo != "standalone" else "server", - AUTH=auth, - SSL=ssl, - ) - bootstrap_func = FunctionCall(func="bootstrap mongo-orchestration", vars=bootstrap_vars) - test_suites = "" - if sync == "sync": - test_suites = "default" - elif sync == "async": - test_suites = "default_async" - test_vars = dict( - AUTH=auth, - SSL=ssl, - SYNC=sync, - TEST_SUITES=test_suites, + task_combos = set() + # All combinations of topology, auth, ssl, and sync should be tested. + for (topology, auth, ssl, sync), python in zip_cycle( + list(product(TOPOLOGIES, ["auth", "noauth"], ["ssl", "nossl"], SYNCS)), ALL_PYTHONS + ): + task_combos.add((topology, auth, ssl, sync, python)) + + # Every python should be tested with sharded cluster, auth, ssl, with sync and async. + for python, sync in product(ALL_PYTHONS, SYNCS): + task_combos.add(("sharded_cluster", "auth", "ssl", sync, python)) + + # Assemble the tasks. 
+ seen = set() + for topology, auth, ssl, sync, python in sorted(task_combos): + combo = f"{topology}-{auth}-{ssl}" + tags = ["server-version", f"python-{python}", combo, sync] + if combo in [ + "standalone-noauth-nossl", + "replica_set-noauth-nossl", + "sharded_cluster-auth-ssl", + ]: + combo = f"{combo}-{sync}" + if combo not in seen: + seen.add(combo) + tags.append("pr") + expansions = dict(AUTH=auth, SSL=ssl, TOPOLOGY=topology) + if "t" in python: + tags.append("free-threaded") + if python not in PYPYS and "t" not in python: + expansions["COVERAGE"] = "1" + name = get_task_name( + "test-server-version", + python=python, + sync=sync, + **expansions, ) + server_func = FunctionCall(func="run server", vars=expansions) + test_vars = expansions.copy() + test_vars["PYTHON_VERSION"] = python + test_vars["TEST_NAME"] = f"default_{sync}" test_func = FunctionCall(func="run tests", vars=test_vars) - tasks.append(EvgTask(name=name, tags=tags, commands=[bootstrap_func, test_func])) + tasks.append(EvgTask(name=name, tags=tags, commands=[server_func, test_func])) return tasks -def create_load_balancer_tasks(): +def create_no_toolchain_tasks(): tasks = [] - for auth, ssl in AUTH_SSLS: - name = f"test-load-balancer-{auth}-{ssl}".lower() - tags = ["load-balancer", auth, ssl] - bootstrap_vars = dict(TOPOLOGY="sharded_cluster", AUTH=auth, SSL=ssl, LOAD_BALANCER="true") - bootstrap_func = FunctionCall(func="bootstrap mongo-orchestration", vars=bootstrap_vars) - balancer_func = FunctionCall(func="run load-balancer") - test_vars = dict(AUTH=auth, SSL=ssl, test_loadbalancer="true") + + for topology, sync in zip_cycle(TOPOLOGIES, SYNCS): + auth, ssl = get_standard_auth_ssl(topology) + tags = [ + "test-no-toolchain", + f"{topology}-{auth}-{ssl}", + ] + expansions = dict(AUTH=auth, SSL=ssl, TOPOLOGY=topology) + name = get_task_name("test-no-toolchain", sync=sync, **expansions) + server_func = FunctionCall(func="run server", vars=expansions) + test_vars = expansions.copy() + test_vars["TEST_NAME"] = f"default_{sync}" test_func = FunctionCall(func="run tests", vars=test_vars) - tasks.append( - EvgTask(name=name, tags=tags, commands=[bootstrap_func, balancer_func, test_func]) - ) + tasks.append(EvgTask(name=name, tags=tags, commands=[server_func, test_func])) + return tasks + + +def create_test_non_standard_tasks(): + """For variants that set a TEST_NAME.""" + tasks = [] + task_combos = set() + # For each version and topology, rotate through the CPythons. + for (version, topology), python in zip_cycle(list(product(ALL_VERSIONS, TOPOLOGIES)), CPYTHONS): + pr = version == "latest" + task_combos.add((version, topology, python, pr)) + # For each PyPy and topology, rotate through the MongoDB versions. 
+ for (python, topology), version in zip_cycle(list(product(PYPYS, TOPOLOGIES)), ALL_VERSIONS): + task_combos.add((version, topology, python, False)) + for version, topology, python, pr in sorted(task_combos): + auth, ssl = get_standard_auth_ssl(topology) + tags = [ + "test-non-standard", + f"server-{version}", + f"python-{python}", + f"{topology}-{auth}-{ssl}", + auth, + ] + if "t" in python: + tags.append("free-threaded") + if python in PYPYS: + tags.append("pypy") + if pr: + tags.append("pr") + expansions = dict(AUTH=auth, SSL=ssl, TOPOLOGY=topology, VERSION=version) + name = get_task_name("test-non-standard", python=python, **expansions) + server_func = FunctionCall(func="run server", vars=expansions) + test_vars = expansions.copy() + test_vars["PYTHON_VERSION"] = python + test_func = FunctionCall(func="run tests", vars=test_vars) + tasks.append(EvgTask(name=name, tags=tags, commands=[server_func, test_func])) + return tasks + + +def create_test_standard_auth_tasks(): + """We only use auth on sharded clusters.""" + tasks = [] + task_combos = set() + # Rotate through the CPython and MongoDB versions. + for (version, topology), python in zip_cycle( + list(product(ALL_VERSIONS, ["sharded_cluster"])), CPYTHONS + ): + pr = version == "latest" + task_combos.add((version, topology, python, pr)) + # Rotate through each PyPy and the MongoDB versions. + for (python, topology), version in zip_cycle( + list(product(PYPYS, ["sharded_cluster"])), ALL_VERSIONS + ): + task_combos.add((version, topology, python, False)) + for version, topology, python, pr in sorted(task_combos): + auth, ssl = get_standard_auth_ssl(topology) + tags = [ + "test-standard-auth", + f"server-{version}", + f"python-{python}", + f"{topology}-{auth}-{ssl}", + auth, + ] + if "t" in python: + tags.append("free-threaded") + if python in PYPYS: + tags.append("pypy") + if pr: + tags.append("pr") + expansions = dict(AUTH=auth, SSL=ssl, TOPOLOGY=topology, VERSION=version) + name = get_task_name("test-standard-auth", python=python, **expansions) + server_func = FunctionCall(func="run server", vars=expansions) + test_vars = expansions.copy() + test_vars["PYTHON_VERSION"] = python + test_func = FunctionCall(func="run tests", vars=test_vars) + tasks.append(EvgTask(name=name, tags=tags, commands=[server_func, test_func])) + return tasks -################## -# Generate Config -################## +def create_min_deps_tasks(): + """For variants that support testing with minimum dependencies.""" + tasks = [] + for topology in TOPOLOGIES: + auth, ssl = get_standard_auth_ssl(topology) + tags = ["test-min-deps", f"{topology}-{auth}-{ssl}"] + expansions = dict(AUTH=auth, SSL=ssl, TOPOLOGY=topology) + server_func = FunctionCall(func="run server", vars=expansions) + test_vars = expansions.copy() + test_vars["TEST_MIN_DEPS"] = "1" + name = get_task_name("test-min-deps", python=CPYTHONS[0], sync="sync", **test_vars) + test_func = FunctionCall(func="run tests", vars=test_vars) + tasks.append(EvgTask(name=name, tags=tags, commands=[server_func, test_func])) + return tasks +def create_standard_tasks(): + """For variants that do not set a TEST_NAME.""" + tasks = [] + task_combos = set() + # For each python and topology and sync/async, rotate through the versions. 
+ for (python, topology, sync), version in zip_cycle( + list(product(CPYTHONS + PYPYS, TOPOLOGIES, SYNCS)), ALL_VERSIONS + ): + pr = version == "latest" and python not in PYPYS + task_combos.add((version, topology, python, sync, pr)) + + for version, topology, python, sync, pr in sorted(task_combos): + auth, ssl = get_standard_auth_ssl(topology) + tags = [ + "test-standard", + f"server-{version}", + f"python-{python}", + f"{topology}-{auth}-{ssl}", + sync, + ] + if "t" in python: + tags.append("free-threaded") + if python in PYPYS: + tags.append("pypy") + if pr: + tags.append("pr") + expansions = dict(AUTH=auth, SSL=ssl, TOPOLOGY=topology, VERSION=version) + name = get_task_name("test-standard", python=python, sync=sync, **expansions) + server_func = FunctionCall(func="run server", vars=expansions) + test_vars = expansions.copy() + test_vars["PYTHON_VERSION"] = python + test_vars["TEST_NAME"] = f"default_{sync}" + test_func = FunctionCall(func="run tests", vars=test_vars) + tasks.append(EvgTask(name=name, tags=tags, commands=[server_func, test_func])) + return tasks -def write_variants_to_file(): - mod = sys.modules[__name__] - here = Path(__file__).absolute().parent - target = here.parent / "generated_configs" / "variants.yml" - if target.exists(): - target.unlink() - with target.open("w") as fid: - fid.write("buildvariants:\n") - for name, func in getmembers(mod, isfunction): - if not name.endswith("_variants"): - continue - if not name.startswith("create_"): - raise ValueError("Variant creators must start with create_") - title = name.replace("create_", "").replace("_variants", "").replace("_", " ").capitalize() - project = EvgProject(tasks=None, buildvariants=func()) - out = ShrubService.generate_yaml(project).splitlines() - with target.open("a") as fid: - fid.write(f" # {title} tests\n") - for line in out[1:]: - fid.write(f"{line}\n") - fid.write("\n") - - # Remove extra trailing newline: - data = target.read_text().splitlines() - with target.open("w") as fid: - for line in data[:-1]: - fid.write(f"{line}\n") - - -def write_tasks_to_file(): - mod = sys.modules[__name__] - here = Path(__file__).absolute().parent - target = here.parent / "generated_configs" / "tasks.yml" - if target.exists(): - target.unlink() - with target.open("w") as fid: - fid.write("tasks:\n") - - for name, func in getmembers(mod, isfunction): - if not name.endswith("_tasks"): +def create_no_orchestration_tasks(): + tasks = [] + for python in [*MIN_MAX_PYTHON, PYPYS[-1]]: + tags = [ + "test-no-orchestration", + f"python-{python}", + ] + name = get_task_name("test-no-orchestration", python=python) + assume_func = FunctionCall(func="assume ec2 role") + test_vars = dict(PYTHON_VERSION=python) + test_func = FunctionCall(func="run tests", vars=test_vars) + commands = [assume_func, test_func] + tasks.append(EvgTask(name=name, tags=tags, commands=commands)) + return tasks + + +def create_kms_tasks(): + tasks = [] + for kms_type in ["gcp", "azure"]: + for success in [True, False]: + name = f"test-{kms_type}kms" + sub_test_name = kms_type + tags = [] + if not success: + name += "-fail" + sub_test_name += "-fail" + tags.append("pr") + commands = [] + if not success: + commands.append(FunctionCall(func="run server")) + test_vars = dict(TEST_NAME="kms", SUB_TEST_NAME=sub_test_name) + test_func = FunctionCall(func="run tests", vars=test_vars) + commands.append(test_func) + tasks.append(EvgTask(name=name, tags=tags, commands=commands)) + return tasks + + +def create_aws_tasks(): + tasks = [] + aws_test_types = [ + "regular", + 
"assume-role", + "ec2", + "env-creds", + "session-creds", + "web-identity", + "ecs", + ] + for version, test_type, python in zip_cycle(get_versions_from("4.4"), aws_test_types, CPYTHONS): + base_name = f"test-auth-aws-{version}" + base_tags = ["auth-aws"] + server_vars = dict(AUTH_AWS="1", VERSION=version) + server_func = FunctionCall(func="run server", vars=server_vars) + assume_func = FunctionCall(func="assume ec2 role") + tags = [*base_tags, f"auth-aws-{test_type}"] + if "t" in python: + tags.append("free-threaded") + name = get_task_name(f"{base_name}-{test_type}", python=python) + test_vars = dict(TEST_NAME="auth_aws", SUB_TEST_NAME=test_type, PYTHON_VERSION=python) + test_func = FunctionCall(func="run tests", vars=test_vars) + funcs = [server_func, assume_func, test_func] + tasks.append(EvgTask(name=name, tags=tags, commands=funcs)) + + if test_type == "web-identity": + tags = [*base_tags, "auth-aws-web-identity"] + name = get_task_name(f"{base_name}-web-identity-session-name", python=python) + test_vars = dict( + TEST_NAME="auth_aws", + SUB_TEST_NAME="web-identity", + AWS_ROLE_SESSION_NAME="test", + PYTHON_VERSION=python, + ) + if "t" in python: + tags.append("free-threaded") + test_func = FunctionCall(func="run tests", vars=test_vars) + funcs = [server_func, assume_func, test_func] + tasks.append(EvgTask(name=name, tags=tags, commands=funcs)) + + return tasks + + +def create_oidc_tasks(): + tasks = [] + for sub_test in ["default", "azure", "gcp", "eks", "aks", "gke"]: + vars = dict(TEST_NAME="auth_oidc", SUB_TEST_NAME=sub_test) + test_func = FunctionCall(func="run tests", vars=vars) + task_name = f"test-auth-oidc-{sub_test}" + tags = ["auth_oidc"] + if sub_test != "default": + tags.append("auth_oidc_remote") + tasks.append(EvgTask(name=task_name, tags=tags, commands=[test_func])) + + return tasks + + +def create_mod_wsgi_tasks(): + tasks = [] + for (test, topology), python in zip_cycle( + product(["standalone", "embedded-mode"], ["standalone", "replica_set"]), CPYTHONS + ): + if "t" in python: continue - if not name.startswith("create_"): - raise ValueError("Task creators must start with create_") - title = name.replace("create_", "").replace("_tasks", "").replace("_", " ").capitalize() - project = EvgProject(tasks=func(), buildvariants=None) - out = ShrubService.generate_yaml(project).splitlines() - with target.open("a") as fid: - fid.write(f" # {title} tests\n") - for line in out[1:]: - fid.write(f"{line}\n") - fid.write("\n") - - # Remove extra trailing newline: - data = target.read_text().splitlines() - with target.open("w") as fid: - for line in data[:-1]: - fid.write(f"{line}\n") - - -write_variants_to_file() -write_tasks_to_file() + if test == "standalone": + task_name = "mod-wsgi-" + else: + task_name = "mod-wsgi-embedded-mode-" + task_name += topology.replace("_", "-") + task_name = get_task_name(task_name, python=python) + server_vars = dict(TOPOLOGY=topology, PYTHON_VERSION=python) + server_func = FunctionCall(func="run server", vars=server_vars) + vars = dict(TEST_NAME="mod_wsgi", SUB_TEST_NAME=test.split("-")[0], PYTHON_VERSION=python) + test_func = FunctionCall(func="run tests", vars=vars) + tags = ["mod_wsgi", "pr"] + commands = [server_func, test_func] + tasks.append(EvgTask(name=task_name, tags=tags, commands=commands)) + return tasks + + +def _create_ocsp_tasks(algo, variant, server_type, base_task_name): + tasks = [] + file_name = f"{algo}-basic-tls-ocsp-{variant}.json" + + for version in get_versions_from("4.4"): + if version == "latest": + python = 
MIN_MAX_PYTHON[-1] + else: + python = MIN_MAX_PYTHON[0] + + vars = dict( + ORCHESTRATION_FILE=file_name, + OCSP_SERVER_TYPE=server_type, + TEST_NAME="ocsp", + PYTHON_VERSION=python, + VERSION=version, + ) + test_func = FunctionCall(func="run tests", vars=vars) + + tags = ["ocsp", f"ocsp-{algo}", version] + if "disableStapling" not in variant: + tags.append("ocsp-staple") + if algo == "valid-cert-server-staples" and version == "latest": + tags.append("pr") + + task_name = get_task_name( + f"test-ocsp-{algo}-{base_task_name}", + python=python, + version=version, + ) + tasks.append(EvgTask(name=task_name, tags=tags, commands=[test_func])) + + return tasks + + +def create_aws_lambda_tasks(): + assume_func = FunctionCall(func="assume ec2 role") + vars = dict(TEST_NAME="aws_lambda") + test_func = FunctionCall(func="run tests", vars=vars) + task_name = "test-aws-lambda-deployed" + tags = ["aws_lambda"] + commands = [assume_func, test_func] + return [EvgTask(name=task_name, tags=tags, commands=commands)] + + +def create_search_index_tasks(): + assume_func = FunctionCall(func="assume ec2 role") + server_func = FunctionCall(func="run server", vars=dict(TEST_NAME="search_index")) + vars = dict(TEST_NAME="search_index") + test_func = FunctionCall(func="run tests", vars=vars) + task_name = "test-search-index-helpers" + tags = ["search_index"] + commands = [assume_func, server_func, test_func] + return [EvgTask(name=task_name, tags=tags, commands=commands)] + + +def create_perf_tasks(): + tasks = [] + for version, ssl, sync in product(["8.0"], ["ssl", "nossl"], ["sync", "async"]): + vars = dict(VERSION=f"v{version}-perf", SSL=ssl) + server_func = FunctionCall(func="run server", vars=vars) + vars = dict(TEST_NAME="perf", SUB_TEST_NAME=sync) + test_func = FunctionCall(func="run tests", vars=vars) + attach_func = FunctionCall(func="attach benchmark test results") + send_func = FunctionCall(func="send dashboard data") + task_name = f"perf-{version}-standalone" + if ssl == "ssl": + task_name += "-ssl" + if sync == "async": + task_name += "-async" + tags = ["perf"] + commands = [server_func, test_func, attach_func, send_func] + tasks.append(EvgTask(name=task_name, tags=tags, commands=commands)) + return tasks + + +def create_getdata_tasks(): + # Wildcard task. Do you need to find out what tools are available and where? + # Throw it here, and execute this task on all buildvariants + cmd = get_subprocess_exec(args=[".evergreen/scripts/run-getdata.sh"]) + return [EvgTask(name="getdata", commands=[cmd])] + + +def create_coverage_report_tasks(): + tags = ["coverage", "pr"] + task_name = "coverage-report" + # BUILD-3165: We can't use "*" (all tasks) and specify "variant". + # Instead list out all coverage tasks using tags. + # Run the coverage task even if some tasks fail. + # Run the coverage task even if some tasks are not scheduled in a patch build. 
+    task_deps = [ +        EvgTaskDependency( +            name=".server-version", variant=".coverage_tag", status="*", patch_optional=True +        ) +    ] +    cmd = FunctionCall(func="download and merge coverage") +    return [EvgTask(name=task_name, tags=tags, depends_on=task_deps, commands=[cmd])] + + +def create_import_time_tasks(): +    name = "check-import-time" +    tags = ["pr"] +    args = [".evergreen/scripts/check-import-time.sh", "${revision}", "${github.amrom.workers.devmit}"] +    cmd = get_subprocess_exec(args=args) +    return [EvgTask(name=name, tags=tags, commands=[cmd])] + + +def create_backport_pr_tasks(): +    name = "backport-pr" +    args = [ +        "${DRIVERS_TOOLS}/.evergreen/github_app/backport-pr.sh", +        "mongodb", +        "mongo-python-driver", +        "${github.amrom.workers.devmit}", +    ] +    include_expansions = ["AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN"] +    cmd = get_subprocess_exec(args=args, include_expansions_in_env=include_expansions) +    assume_func = FunctionCall(func="assume ec2 role") +    return [EvgTask(name=name, commands=[assume_func, cmd], allowed_requesters=["commit"])] + + +def create_ocsp_tasks(): +    tasks = [] +    tests = [ +        ("disableStapling", "valid", "valid-cert-server-does-not-staple"), +        ("disableStapling", "revoked", "invalid-cert-server-does-not-staple"), +        ("disableStapling", "valid-delegate", "delegate-valid-cert-server-does-not-staple"), +        ("disableStapling", "revoked-delegate", "delegate-invalid-cert-server-does-not-staple"), +        ("disableStapling", "no-responder", "soft-fail"), +        ("mustStaple", "valid", "valid-cert-server-staples"), +        ("mustStaple", "revoked", "invalid-cert-server-staples"), +        ("mustStaple", "valid-delegate", "delegate-valid-cert-server-staples"), +        ("mustStaple", "revoked-delegate", "delegate-invalid-cert-server-staples"), +        ( +            "mustStaple-disableStapling", +            "revoked", +            "malicious-invalid-cert-mustStaple-server-does-not-staple", +        ), +        ( +            "mustStaple-disableStapling", +            "revoked-delegate", +            "delegate-malicious-invalid-cert-mustStaple-server-does-not-staple", +        ), +        ( +            "mustStaple-disableStapling", +            "no-responder", +            "malicious-no-responder-mustStaple-server-does-not-staple", +        ), +    ] +    for algo in ["ecdsa", "rsa"]: +        for variant, server_type, base_task_name in tests: +            new_tasks = _create_ocsp_tasks(algo, variant, server_type, base_task_name) +            tasks.extend(new_tasks) + +    return tasks + + +############## +# Functions +############## + + +def create_upload_coverage_func(): +    # Upload the coverage report for all tasks in a single build to the same directory. +    remote_file = ( +        "coverage/${revision}/${version_id}/coverage/coverage.${build_variant}.${task_name}" +    ) +    display_name = "Raw Coverage Report" +    cmd = get_s3_put( +        local_file="src/.coverage", +        remote_file=remote_file, +        display_name=display_name, +        content_type="text/html", +    ) +    return "upload coverage", [get_assume_role(), cmd] + + +def create_download_and_merge_coverage_func(): +    include_expansions = ["AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN"] +    args = [ +        ".evergreen/scripts/download-and-merge-coverage.sh", +        "${bucket_name}", +        "${revision}", +        "${version_id}", +    ] +    merge_cmd = get_subprocess_exec( +        silent=True, include_expansions_in_env=include_expansions, args=args +    ) +    combine_cmd = get_subprocess_exec(args=[".evergreen/combine-coverage.sh"]) +    # Upload the resulting html coverage report. 
+ args = [ + ".evergreen/scripts/upload-coverage-report.sh", + "${bucket_name}", + "${revision}", + "${version_id}", + ] + upload_cmd = get_subprocess_exec( + silent=True, include_expansions_in_env=include_expansions, args=args + ) + display_name = "Coverage Report HTML" + remote_file = "coverage/${revision}/${version_id}/htmlcov/index.html" + put_cmd = get_s3_put( + local_file="src/htmlcov/index.html", + remote_file=remote_file, + display_name=display_name, + content_type="text/html", + ) + cmds = [get_assume_role(), merge_cmd, combine_cmd, upload_cmd, put_cmd] + return "download and merge coverage", cmds + + +def create_upload_mo_artifacts_func(): + include = ["./**.core", "./**.mdmp"] # Windows: minidumps + archive_cmd = archive_targz_pack(target="mongo-coredumps.tgz", source_dir="./", include=include) + display_name = "Core Dumps - Execution" + remote_file = "${build_variant}/${revision}/${version_id}/${build_id}/coredumps/${task_id}-${execution}-mongodb-coredumps.tar.gz" + s3_dumps = get_s3_put( + local_file="mongo-coredumps.tgz", remote_file=remote_file, display_name=display_name + ) + display_name = "drivers-tools-logs.tar.gz" + remote_file = "${build_variant}/${revision}/${version_id}/${build_id}/logs/${task_id}-${execution}-drivers-tools-logs.tar.gz" + s3_logs = get_s3_put( + local_file="${DRIVERS_TOOLS}/.evergreen/test_logs.tar.gz", + remote_file=remote_file, + display_name=display_name, + ) + cmds = [get_assume_role(), archive_cmd, s3_dumps, s3_logs] + return "upload mo artifacts", cmds + + +def create_fetch_source_func(): + # Executes clone and applies the submitted patch, if any. + cmd = git_get_project(directory="src") + return "fetch source", [cmd] + + +def create_setup_system_func(): + # Make an evergreen expansion file with dynamic values. + includes = ["is_patch", "project", "version_id"] + args = [".evergreen/scripts/setup-system.sh"] + setup_cmd = get_subprocess_exec(include_expansions_in_env=includes, args=args) + # Load the expansion file to make an evergreen variable with the current unique version. 
+ expansion_cmd = expansions_update(file="src/expansion.yml") + return "setup system", [setup_cmd, expansion_cmd] + + +def create_upload_test_results_func(): + results_cmd = attach_results(file_location="${DRIVERS_TOOLS}/results.json") + xresults_cmd = attach_xunit_results(file="src/xunit-results/TEST-*.xml") + return "upload test results", [results_cmd, xresults_cmd] + + +def create_run_server_func(): + includes = [ + "VERSION", + "TOPOLOGY", + "AUTH", + "SSL", + "ORCHESTRATION_FILE", + "PYTHON_BINARY", + "PYTHON_VERSION", + "STORAGE_ENGINE", + "REQUIRE_API_VERSION", + "DRIVERS_TOOLS", + "TEST_CRYPT_SHARED", + "AUTH_AWS", + "LOAD_BALANCER", + "LOCAL_ATLAS", + "NO_EXT", + ] + args = [".evergreen/just.sh", "run-server", "${TEST_NAME}"] + sub_cmd = get_subprocess_exec(include_expansions_in_env=includes, args=args) + expansion_cmd = expansions_update(file="${DRIVERS_TOOLS}/mo-expansion.yml") + return "run server", [sub_cmd, expansion_cmd] + + +def create_run_tests_func(): + includes = [ + "AUTH", + "SSL", + "AWS_ACCESS_KEY_ID", + "AWS_SECRET_ACCESS_KEY", + "AWS_SESSION_TOKEN", + "COVERAGE", + "PYTHON_BINARY", + "LIBMONGOCRYPT_URL", + "MONGODB_URI", + "PYTHON_VERSION", + "DISABLE_TEST_COMMANDS", + "GREEN_FRAMEWORK", + "NO_EXT", + "COMPRESSORS", + "MONGODB_API_VERSION", + "REQUIRE_API_VERSION", + "DEBUG_LOG", + "DISABLE_FLAKY", + "ORCHESTRATION_FILE", + "OCSP_SERVER_TYPE", + "VERSION", + "IS_WIN32", + "REQUIRE_FIPS", + "TEST_MIN_DEPS", + ] + args = [".evergreen/just.sh", "setup-tests", "${TEST_NAME}", "${SUB_TEST_NAME}"] + setup_cmd = get_subprocess_exec(include_expansions_in_env=includes, args=args) + test_cmd = get_subprocess_exec(args=[".evergreen/just.sh", "run-tests"]) + return "run tests", [setup_cmd, test_cmd] + + +def create_cleanup_func(): + cmd = get_subprocess_exec(args=[".evergreen/scripts/cleanup.sh"]) + return "cleanup", [cmd] + + +def create_teardown_system_func(): + tests_cmd = get_subprocess_exec(args=[".evergreen/just.sh", "teardown-tests"]) + drivers_cmd = get_subprocess_exec(args=["${DRIVERS_TOOLS}/.evergreen/teardown.sh"]) + return "teardown system", [tests_cmd, drivers_cmd] + + +def create_assume_ec2_role_func(): + cmd = ec2_assume_role(role_arn="${aws_test_secrets_role}", duration_seconds=3600) + return "assume ec2 role", [cmd] + + +def create_attach_benchmark_test_results_func(): + cmd = attach_results(file_location="src/report.json") + return "attach benchmark test results", [cmd] + + +def create_send_dashboard_data_func(): + includes = [ + "requester", + "revision_order_id", + "project_id", + "version_id", + "build_variant", + "parsed_order_id", + "task_name", + "task_id", + "execution", + "is_mainline", + ] + cmds = [ + get_subprocess_exec( + include_expansions_in_env=includes, args=[".evergreen/scripts/perf-submission-setup.sh"] + ), + expansions_update(file="src/expansion.yml"), + get_subprocess_exec( + include_expansions_in_env=includes, args=[".evergreen/scripts/perf-submission.sh"] + ), + ] + return "send dashboard data", cmds + + +mod = sys.modules[__name__] +write_variants_to_file(mod) +write_tasks_to_file(mod) +write_functions_to_file(mod) diff --git a/.evergreen/scripts/generate_config_utils.py b/.evergreen/scripts/generate_config_utils.py new file mode 100644 index 0000000000..4eb6bcb0dc --- /dev/null +++ b/.evergreen/scripts/generate_config_utils.py @@ -0,0 +1,374 @@ +from __future__ import annotations + +from dataclasses import dataclass +from inspect import getmembers, isfunction +from itertools import cycle, zip_longest +from pathlib import Path +from 
typing import Any + +from shrub.v3.evg_build_variant import BuildVariant +from shrub.v3.evg_command import ( + EvgCommandType, + ec2_assume_role, + s3_put, + subprocess_exec, +) +from shrub.v3.evg_project import EvgProject +from shrub.v3.evg_task import EvgTaskRef +from shrub.v3.shrub_service import ShrubService + +############## +# Globals +############## + +ALL_VERSIONS = ["4.2", "4.4", "5.0", "6.0", "7.0", "8.0", "rapid", "latest"] +CPYTHONS = ["3.10", "3.11", "3.12", "3.13", "3.14t", "3.14"] +PYPYS = ["pypy3.10"] +ALL_PYTHONS = CPYTHONS + PYPYS +MIN_MAX_PYTHON = [CPYTHONS[0], CPYTHONS[-1]] +BATCHTIME_WEEK = 10080 +BATCHTIME_DAY = 1440 +AUTH_SSLS = [("auth", "ssl"), ("noauth", "ssl"), ("noauth", "nossl")] +TOPOLOGIES = ["standalone", "replica_set", "sharded_cluster"] +C_EXTS = ["without_ext", "with_ext"] +SYNCS = ["sync", "async"] +DISPLAY_LOOKUP = dict( + ssl=dict(ssl="SSL", nossl="NoSSL"), + auth=dict(auth="Auth", noauth="NoAuth"), + topology=dict( + standalone="Standalone", replica_set="Replica Set", sharded_cluster="Sharded Cluster" + ), + test_suites=dict(default="Sync", default_async="Async"), + sync={"sync": "Sync", "async": "Async"}, + coverage={"1": "cov"}, + no_ext={"1": "No C"}, + test_min_deps={True: "Min Deps"}, +) +HOSTS = dict() + + +@dataclass +class Host: + name: str + run_on: str + display_name: str + variables: dict[str, str] | None + + +# Hosts with toolchains. +HOSTS["rhel8"] = Host("rhel8", "rhel87-small", "RHEL8", dict()) +HOSTS["win64"] = Host("win64", "windows-64-vsMulti-small", "Win64", dict()) +HOSTS["win32"] = Host("win32", "windows-64-vsMulti-small", "Win32", dict()) +HOSTS["macos"] = Host("macos", "macos-14", "macOS", dict()) +HOSTS["macos-arm64"] = Host("macos-arm64", "macos-14-arm64", "macOS Arm64", dict()) +HOSTS["ubuntu20"] = Host("ubuntu20", "ubuntu2004-small", "Ubuntu-20", dict()) +HOSTS["ubuntu22"] = Host("ubuntu22", "ubuntu2204-small", "Ubuntu-22", dict()) +HOSTS["rhel7"] = Host("rhel7", "rhel79-small", "RHEL7", dict()) +HOSTS["perf"] = Host("perf", "rhel90-dbx-perf-large", "", dict()) +HOSTS["debian11"] = Host("debian11", "debian11-small", "Debian11", dict()) +DEFAULT_HOST = HOSTS["rhel8"] + +# Other hosts +OTHER_HOSTS = ["RHEL9-FIPS", "RHEL8-zseries", "RHEL8-POWER8", "RHEL8-arm64", "Amazon2023"] +for name, run_on in zip( + OTHER_HOSTS, + [ + "rhel92-fips", + "rhel8-zseries-small", + "rhel8-power-small", + "rhel82-arm64-small", + "amazon2023-arm64-latest-large-m8g", + ], +): + HOSTS[name] = Host(name, run_on, name, dict()) + +############## +# Helpers +############## + + +def create_variant_generic( + tasks: list[str | EvgTaskRef], + display_name: str, + *, + host: Host | str | None = None, + default_run_on="rhel87-small", + expansions: dict | None = None, + **kwargs: Any, +) -> BuildVariant: + """Create a build variant for the given inputs.""" + task_refs = [] + if isinstance(host, str): + host = HOSTS[host] + for t in tasks: + if isinstance(t, EvgTaskRef): + task_refs.append(t) + else: + task_refs.append(EvgTaskRef(name=t)) + expansions = expansions and expansions.copy() or dict() + if "run_on" in kwargs: + run_on = kwargs.pop("run_on") + elif host: + run_on = [host.run_on] + if host.variables: + expansions.update(host.variables) + else: + run_on = [default_run_on] + if isinstance(run_on, str): + run_on = [run_on] + name = display_name.replace(" ", "-").replace("*-", "").lower() + return BuildVariant( + name=name, + display_name=display_name, + tasks=task_refs, + expansions=expansions or None, + run_on=run_on, + **kwargs, + ) + + +def 
create_variant( +    tasks: list[str | EvgTaskRef], +    display_name: str, +    *, +    version: str | None = None, +    host: Host | str | None = None, +    python: str | None = None, +    expansions: dict | None = None, +    **kwargs: Any, +) -> BuildVariant: +    expansions = expansions and expansions.copy() or dict() +    if version: +        expansions["VERSION"] = version +    if python: +        expansions["PYTHON_BINARY"] = get_python_binary(python, host) +    return create_variant_generic( +        tasks, display_name, version=version, host=host, expansions=expansions, **kwargs +    ) + + +def get_python_binary(python: str, host: Host) -> str: +    """Get the appropriate python binary given a python version and host.""" +    name = host.name +    if name in ["win64", "win32"]: +        if name == "win32": +            base = "C:/python/32" +        else: +            base = "C:/python" +        python_dir = python.replace(".", "").replace("t", "") +        return f"{base}/Python{python_dir}/python{python}.exe" + +    if name in ["rhel8", "ubuntu22", "ubuntu20", "rhel7"]: +        return f"/opt/python/{python}/bin/python3" + +    if name in ["macos", "macos-arm64"]: +        bin_name = "python3t" if "t" in python else "python3" +        python_dir = python.replace("t", "") +        framework_dir = "PythonT" if "t" in python else "Python" +        return f"/Library/Frameworks/{framework_dir}.Framework/Versions/{python_dir}/bin/{bin_name}" + +    raise ValueError(f"no match found for python {python} on {name}") + + +def get_versions_from(min_version: str) -> list[str]: +    """Get all server versions starting from a minimum version.""" +    min_version_float = float(min_version) +    rapid_latest = ["rapid", "latest"] +    versions = [v for v in ALL_VERSIONS if v not in rapid_latest] +    return [v for v in versions if float(v) >= min_version_float] + rapid_latest + + +def get_versions_until(max_version: str) -> list[str]: +    """Get all server versions up to a max version.""" +    max_version_float = float(max_version) +    versions = [v for v in ALL_VERSIONS if v not in ["rapid", "latest"]] +    versions = [v for v in versions if float(v) <= max_version_float] +    if not len(versions): +        raise ValueError(f"No server versions found <= {max_version}") +    return versions + + +def get_common_name(base: str, sep: str, **kwargs) -> str: +    display_name = base +    version = kwargs.pop("VERSION", None) +    version = version or kwargs.pop("version", None) +    if version: +        if version not in ["rapid", "latest"]: +            version = f"v{version}" +        display_name = f"{display_name}{sep}{version}" +    for key, value in kwargs.items(): +        name = value +        if key.lower() == "python": +            if not value.startswith("pypy"): +                name = f"Python{value}" +            else: +                name = f"PyPy{value.replace('pypy', '')}" +        elif key.lower() in DISPLAY_LOOKUP and value in DISPLAY_LOOKUP[key.lower()]: +            name = DISPLAY_LOOKUP[key.lower()][value] +        else: +            continue +        display_name = f"{display_name}{sep}{name}" +    return display_name + + +def get_variant_name(base: str, host: str | Host | None = None, **kwargs) -> str: +    """Get the display name of a variant.""" +    display_name = base +    if isinstance(host, str): +        host = HOSTS[host] +    if host is not None: +        display_name += f" {host.display_name}" +    return get_common_name(display_name, " ", **kwargs) + + +def get_task_name(base: str, **kwargs): +    return get_common_name(base, "-", **kwargs).replace(" ", "-").lower() + + +def zip_cycle(*iterables, empty_default=None): +    """Get all combinations of the inputs, cycling over the shorter list(s).""" +    cycles = [cycle(i) for i in iterables] +    for _ in zip_longest(*iterables): +        yield tuple(next(i, empty_default) for i in cycles) + + +def 
handle_c_ext(c_ext, expansions) -> None: + """Handle c extension option.""" + if c_ext == C_EXTS[0]: + expansions["NO_EXT"] = "1" + + +def get_standard_auth_ssl(topology): + auth = "auth" if topology == "sharded_cluster" else "noauth" + ssl = "nossl" if topology == "standalone" else "ssl" + return auth, ssl + + +def get_assume_role(**kwargs): + kwargs.setdefault("command_type", EvgCommandType.SETUP) + kwargs.setdefault("role_arn", "${assume_role_arn}") + return ec2_assume_role(**kwargs) + + +def get_subprocess_exec(**kwargs): + kwargs.setdefault("binary", "bash") + kwargs.setdefault("working_dir", "src") + kwargs.setdefault("command_type", EvgCommandType.TEST) + return subprocess_exec(**kwargs) + + +def get_s3_put(**kwargs): + kwargs["aws_key"] = "${AWS_ACCESS_KEY_ID}" + kwargs["aws_secret"] = "${AWS_SECRET_ACCESS_KEY}" # noqa:S105 + kwargs["aws_session_token"] = "${AWS_SESSION_TOKEN}" # noqa:S105 + kwargs["bucket"] = "${bucket_name}" + kwargs.setdefault("optional", "true") + kwargs.setdefault("permissions", "public-read") + kwargs.setdefault("content_type", "${content_type|application/x-gzip}") + kwargs.setdefault("command_type", EvgCommandType.SETUP) + return s3_put(**kwargs) + + +def generate_yaml(tasks=None, variants=None): + """Generate the yaml for a given set of tasks and variants.""" + project = EvgProject(tasks=tasks, buildvariants=variants) + out = ShrubService.generate_yaml(project) + # Dedent by two spaces to match what we use in config.yml + lines = [line[2:] for line in out.splitlines()] + print("\n".join(lines)) + + +################## +# Generate Config +################## + + +def write_variants_to_file(mod): + here = Path(__file__).absolute().parent + target = here.parent / "generated_configs" / "variants.yml" + if target.exists(): + target.unlink() + with target.open("w") as fid: + fid.write("buildvariants:\n") + + for name, func in sorted(getmembers(mod, isfunction)): + if not name.endswith("_variants"): + continue + if not name.startswith("create_"): + raise ValueError("Variant creators must start with create_") + title = name.replace("create_", "").replace("_variants", "").replace("_", " ").capitalize() + project = EvgProject(tasks=None, buildvariants=func()) + out = ShrubService.generate_yaml(project).splitlines() + with target.open("a") as fid: + fid.write(f" # {title} tests\n") + for line in out[1:]: + fid.write(f"{line}\n") + fid.write("\n") + + # Remove extra trailing newline: + data = target.read_text().splitlines() + with target.open("w") as fid: + for line in data[:-1]: + fid.write(f"{line}\n") + + +def write_tasks_to_file(mod): + here = Path(__file__).absolute().parent + target = here.parent / "generated_configs" / "tasks.yml" + if target.exists(): + target.unlink() + with target.open("w") as fid: + fid.write("tasks:\n") + + for name, func in sorted(getmembers(mod, isfunction)): + if name.startswith("_") or not name.endswith("_tasks"): + continue + if not name.startswith("create_"): + raise ValueError("Task creators must start with create_") + title = name.replace("create_", "").replace("_tasks", "").replace("_", " ").capitalize() + project = EvgProject(tasks=func(), buildvariants=None) + out = ShrubService.generate_yaml(project).splitlines() + with target.open("a") as fid: + fid.write(f" # {title} tests\n") + for line in out[1:]: + fid.write(f"{line}\n") + fid.write("\n") + + # Remove extra trailing newline: + data = target.read_text().splitlines() + with target.open("w") as fid: + for line in data[:-1]: + fid.write(f"{line}\n") + + +def 
write_functions_to_file(mod): + here = Path(__file__).absolute().parent + target = here.parent / "generated_configs" / "functions.yml" + if target.exists(): + target.unlink() + with target.open("w") as fid: + fid.write("functions:\n") + + functions = dict() + for name, func in sorted(getmembers(mod, isfunction)): + if name.startswith("_") or not name.endswith("_func"): + continue + if not name.startswith("create_"): + raise ValueError("Function creators must start with create_") + title = name.replace("create_", "").replace("_func", "").replace("_", " ").capitalize() + func_name, cmds = func() + functions = dict() + functions[func_name] = cmds + project = EvgProject(functions=functions, tasks=None, buildvariants=None) + out = ShrubService.generate_yaml(project).splitlines() + with target.open("a") as fid: + fid.write(f" # {title}\n") + for line in out[1:]: + fid.write(f"{line}\n") + fid.write("\n") + + # Remove extra trailing newline: + data = target.read_text().splitlines() + with target.open("w") as fid: + for line in data[:-1]: + fid.write(f"{line}\n") diff --git a/.evergreen/scripts/init-test-results.sh b/.evergreen/scripts/init-test-results.sh deleted file mode 100755 index 666ac60620..0000000000 --- a/.evergreen/scripts/init-test-results.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/bash - -set +x -. src/.evergreen/scripts/env.sh -echo '{"results": [{ "status": "FAIL", "test_file": "Build", "log_raw": "No test-results.json found was created" } ]}' >$PROJECT_DIRECTORY/test-results.json diff --git a/.evergreen/scripts/install-dependencies.sh b/.evergreen/scripts/install-dependencies.sh index ebcc8f3069..23d865d0d8 100755 --- a/.evergreen/scripts/install-dependencies.sh +++ b/.evergreen/scripts/install-dependencies.sh @@ -1,6 +1,77 @@ #!/bin/bash +# Install the dependencies needed for an evergreen run. +set -eu -set -o xtrace -file="$PROJECT_DIRECTORY/.evergreen/install-dependencies.sh" -# Don't use ${file} syntax here because evergreen treats it as an empty expansion. -[ -f "$file" ] && bash "$file" || echo "$file not available, skipping" +HERE=$(dirname ${BASH_SOURCE:-$0}) +pushd "$(dirname "$(dirname $HERE)")" > /dev/null + +# Source the env files to pick up common variables. +if [ -f $HERE/env.sh ]; then + . $HERE/env.sh +fi + +# Set up the default bin directory. +if [ -z "${PYMONGO_BIN_DIR:-}" ]; then + PYMONGO_BIN_DIR="$HOME/.local/bin" + export PATH="$PYMONGO_BIN_DIR:$PATH" +fi + +# Helper function to pip install a dependency using a temporary python env. +function _pip_install() { + _HERE=$(dirname ${BASH_SOURCE:-$0}) + . $_HERE/../utils.sh + _VENV_PATH=$(mktemp -d) + if [ "Windows_NT" = "${OS:-}" ]; then + _VENV_PATH=$(cygpath -m $_VENV_PATH) + fi + echo "Installing $2 using pip..." + createvirtualenv "$(find_python3)" $_VENV_PATH + python -m pip install $1 + _suffix="" + if [ "Windows_NT" = "${OS:-}" ]; then + _suffix=".exe" + fi + ln -s "$(which $2)" $PYMONGO_BIN_DIR/${2}${_suffix} + # uv also comes with a uvx binary. + if [ $2 == "uv" ]; then + ln -s "$(which uvx)" $PYMONGO_BIN_DIR/uvx${_suffix} + fi + echo "Installed to ${PYMONGO_BIN_DIR}" + echo "Installing $2 using pip... done." +} + +# Ensure just is installed. +if ! command -v just &>/dev/null; then + # On most systems we can install directly. + _TARGET="" + if [ "Windows_NT" = "${OS:-}" ]; then + _TARGET="--target x86_64-pc-windows-msvc" + fi + _BIN_DIR=$PYMONGO_BIN_DIR + mkdir -p ${_BIN_DIR} + echo "Installing just..." 
+ mkdir -p "$_BIN_DIR" 2>/dev/null || true + curl --proto '=https' --tlsv1.2 -sSf https://just.systems/install.sh | bash -s -- $_TARGET --to "$_BIN_DIR" || { + # Remove just file if it exists (can be created if there was an install error). + rm -f ${_BIN_DIR}/just + _pip_install rust-just just + } + echo "Installing just... done." +fi + +# Ensure uv is installed. +if ! command -v uv &>/dev/null; then + _BIN_DIR=$PYMONGO_BIN_DIR + mkdir -p ${_BIN_DIR} + echo "Installing uv..." + # On most systems we can install directly. + curl -LsSf https://astral.sh/uv/install.sh | env UV_INSTALL_DIR="$_BIN_DIR" INSTALLER_NO_MODIFY_PATH=1 sh || { + _pip_install uv uv + } + if [ "Windows_NT" = "${OS:-}" ]; then + chmod +x "$(cygpath -u $_BIN_DIR)/uv.exe" + fi + echo "Installing uv... done." +fi + +popd > /dev/null diff --git a/.evergreen/scripts/kms_tester.py b/.evergreen/scripts/kms_tester.py new file mode 100644 index 0000000000..e3833ae63a --- /dev/null +++ b/.evergreen/scripts/kms_tester.py @@ -0,0 +1,144 @@ +from __future__ import annotations + +import os + +from utils import ( + DRIVERS_TOOLS, + LOGGER, + TMP_DRIVER_FILE, + create_archive, + read_env, + run_command, + write_env, +) + +DIRS = dict( + gcp=f"{DRIVERS_TOOLS}/.evergreen/csfle/gcpkms", + azure=f"{DRIVERS_TOOLS}/.evergreen/csfle/azurekms", +) + + +def _setup_azure_vm(base_env: dict[str, str]) -> None: + LOGGER.info("Setting up Azure VM...") + azure_dir = DIRS["azure"] + env = base_env.copy() + env["AZUREKMS_SRC"] = TMP_DRIVER_FILE + env["AZUREKMS_DST"] = "~/" + run_command(f"{azure_dir}/copy-file.sh", env=env) + + env = base_env.copy() + env["AZUREKMS_CMD"] = "tar xf mongo-python-driver.tgz" + run_command(f"{azure_dir}/run-command.sh", env=env) + + env["AZUREKMS_CMD"] = "sudo apt-get install -y python3-dev build-essential" + run_command(f"{azure_dir}/run-command.sh", env=env) + + env["AZUREKMS_CMD"] = "NO_EXT=1 bash .evergreen/just.sh setup-tests kms azure-remote" + run_command(f"{azure_dir}/run-command.sh", env=env) + LOGGER.info("Setting up Azure VM... 
done.") + + +def _setup_gcp_vm(base_env: dict[str, str]) -> None: + LOGGER.info("Setting up GCP VM...") + gcp_dir = DIRS["gcp"] + env = base_env.copy() + env["GCPKMS_SRC"] = TMP_DRIVER_FILE + env["GCPKMS_DST"] = f"{env['GCPKMS_INSTANCENAME']}:" + run_command(f"{gcp_dir}/copy-file.sh", env=env) + + env = base_env.copy() + env["GCPKMS_CMD"] = "tar xf mongo-python-driver.tgz" + run_command(f"{gcp_dir}/run-command.sh", env=env) + + env["GCPKMS_CMD"] = "sudo apt-get install -y python3-dev build-essential" + run_command(f"{gcp_dir}/run-command.sh", env=env) + + env["GCPKMS_CMD"] = "NO_EXT=1 bash ./.evergreen/just.sh setup-tests kms gcp-remote" + run_command(f"{gcp_dir}/run-command.sh", env=env) + LOGGER.info("Setting up GCP VM...") + + +def _load_kms_config(sub_test_target: str) -> dict[str, str]: + target_dir = DIRS[sub_test_target] + config = read_env(f"{target_dir}/secrets-export.sh") + base_env = os.environ.copy() + for key, value in config.items(): + base_env[key] = str(value) + return base_env + + +def setup_kms(sub_test_name: str) -> None: + if "-" in sub_test_name: + sub_test_target, sub_test_type = sub_test_name.split("-") + else: + sub_test_target = sub_test_name + sub_test_type = "" + + assert sub_test_target in ["azure", "gcp"], sub_test_target + assert sub_test_type in ["", "remote", "fail"], sub_test_type + success = sub_test_type != "fail" + kms_dir = DIRS[sub_test_target] + + if sub_test_target == "azure": + write_env("TEST_FLE_AZURE_AUTO") + else: + write_env("TEST_FLE_GCP_AUTO") + + write_env("SUCCESS", success) + + # For remote tests, there is no further work required. + if sub_test_type == "remote": + return + + if sub_test_target == "azure": + run_command("./setup-secrets.sh", cwd=kms_dir) + + if success: + create_archive() + if sub_test_target == "azure": + os.environ["AZUREKMS_VMNAME_PREFIX"] = "PYTHON_DRIVER" + + # Found using "az vm image list --output table" + os.environ[ + "AZUREKMS_IMAGE" + ] = "Canonical:0001-com-ubuntu-server-jammy:22_04-lts-gen2:latest" + else: + os.environ["GCPKMS_IMAGEFAMILY"] = "debian-12" + + run_command("./setup.sh", cwd=kms_dir) + base_env = _load_kms_config(sub_test_target) + + if sub_test_target == "azure": + _setup_azure_vm(base_env) + else: + _setup_gcp_vm(base_env) + + if sub_test_target == "azure": + config = read_env(f"{kms_dir}/secrets-export.sh") + if success: + write_env("AZUREKMS_VMNAME", config["AZUREKMS_VMNAME"]) + + write_env("KEY_NAME", config["AZUREKMS_KEYNAME"]) + write_env("KEY_VAULT_ENDPOINT", config["AZUREKMS_KEYVAULTENDPOINT"]) + + +def test_kms_send_to_remote(sub_test_name: str) -> None: + env = _load_kms_config(sub_test_name) + if sub_test_name == "azure": + key_name = os.environ["KEY_NAME"] + key_vault_endpoint = os.environ["KEY_VAULT_ENDPOINT"] + env[ + "AZUREKMS_CMD" + ] = f'KEY_NAME="{key_name}" KEY_VAULT_ENDPOINT="{key_vault_endpoint}" bash ./.evergreen/just.sh run-tests' + else: + env["GCPKMS_CMD"] = "./.evergreen/just.sh run-tests" + cmd = f"{DIRS[sub_test_name]}/run-command.sh" + run_command(cmd, env=env) + + +def teardown_kms(sub_test_name: str) -> None: + run_command(f"{DIRS[sub_test_name]}/teardown.sh") + + +if __name__ == "__main__": + setup_kms() diff --git a/.evergreen/scripts/make-files-executable.sh b/.evergreen/scripts/make-files-executable.sh deleted file mode 100755 index 806be7c599..0000000000 --- a/.evergreen/scripts/make-files-executable.sh +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/bash - -set +x -. 
src/.evergreen/scripts/env.sh -# shellcheck disable=SC2044 -for i in $(find "$DRIVERS_TOOLS"/.evergreen "$PROJECT_DIRECTORY"/.evergreen -name \*.sh); do - chmod +x "$i" -done diff --git a/.evergreen/scripts/mod_wsgi_tester.py b/.evergreen/scripts/mod_wsgi_tester.py new file mode 100644 index 0000000000..5968849068 --- /dev/null +++ b/.evergreen/scripts/mod_wsgi_tester.py @@ -0,0 +1,93 @@ +from __future__ import annotations + +import os +import sys +import time +import urllib.error +import urllib.request +from pathlib import Path +from shutil import which + +from utils import LOGGER, ROOT, run_command, write_env + + +def make_request(url, timeout=10): + for _ in range(int(timeout)): + try: + urllib.request.urlopen(url) # noqa: S310 + return + except urllib.error.HTTPError: + pass + time.sleep(1) + raise TimeoutError(f"Failed to access {url}") + + +def setup_mod_wsgi(sub_test_name: str) -> None: + env = os.environ.copy() + if sub_test_name == "embedded": + env["MOD_WSGI_CONF"] = "mod_wsgi_test_embedded.conf" + elif sub_test_name == "standalone": + env["MOD_WSGI_CONF"] = "mod_wsgi_test.conf" + else: + raise ValueError("mod_wsgi sub test must be either 'standalone' or 'embedded'") + write_env("MOD_WSGI_CONF", env["MOD_WSGI_CONF"]) + apache = which("apache2") + if not apache and Path("/usr/lib/apache2/mpm-prefork/apache2").exists(): + apache = "/usr/lib/apache2/mpm-prefork/apache2" + if apache: + apache_config = "apache24ubuntu161404.conf" + else: + apache = which("httpd") + if not apache: + raise ValueError("Could not find apache2 or httpd") + apache_config = "apache22amazon.conf" + python_version = ".".join(str(val) for val in sys.version_info[:2]) + mod_wsgi_version = 4 + so_file = f"/opt/python/mod_wsgi/python_version/{python_version}/mod_wsgi_version/{mod_wsgi_version}/mod_wsgi.so" + write_env("MOD_WSGI_SO", so_file) + env["MOD_WSGI_SO"] = so_file + env["PYTHONHOME"] = f"/opt/python/{python_version}" + env["PROJECT_DIRECTORY"] = project_directory = str(ROOT) + write_env("APACHE_BINARY", apache) + write_env("APACHE_CONFIG", apache_config) + uri1 = f"http://localhost:8080/interpreter1{project_directory}" + write_env("TEST_URI1", uri1) + uri2 = f"http://localhost:8080/interpreter2{project_directory}" + write_env("TEST_URI2", uri2) + run_command(f"{apache} -k start -f {ROOT}/test/mod_wsgi_test/{apache_config}", env=env) + + # Wait for the endpoints to be available. 
+ try: + make_request(uri1, 10) + make_request(uri2, 10) + except Exception as e: + LOGGER.error(Path("error_log").read_text()) + raise e + + +def test_mod_wsgi() -> None: + sys.path.insert(0, ROOT) + from test.mod_wsgi_test.test_client import main, parse_args + + uri1 = os.environ["TEST_URI1"] + uri2 = os.environ["TEST_URI2"] + args = f"-n 25000 -t 100 parallel {uri1} {uri2}" + try: + main(*parse_args(args.split())) + + args = f"-n 25000 serial {uri1} {uri2}" + main(*parse_args(args.split())) + except Exception as e: + LOGGER.error(Path("error_log").read_text()) + raise e + + +def teardown_mod_wsgi() -> None: + apache = os.environ["APACHE_BINARY"] + apache_config = os.environ["APACHE_CONFIG"] + + run_command(f"{apache} -k stop -f {ROOT}/test/mod_wsgi_test/{apache_config}") + + +if __name__ == "__main__": + setup_mod_wsgi() diff --git a/.evergreen/scripts/oidc_tester.py b/.evergreen/scripts/oidc_tester.py new file mode 100644 index 0000000000..ac2960371e --- /dev/null +++ b/.evergreen/scripts/oidc_tester.py @@ -0,0 +1,111 @@ +from __future__ import annotations + +import os + +from utils import ( + DRIVERS_TOOLS, + TMP_DRIVER_FILE, + create_archive, + read_env, + run_command, + write_env, +) + +K8S_NAMES = ["aks", "gke", "eks"] +K8S_REMOTE_NAMES = [f"{n}-remote" for n in K8S_NAMES] + + +def _get_target_dir(sub_test_name: str) -> str: + if sub_test_name == "default": + target_dir = "auth_oidc" + elif sub_test_name.startswith("azure"): + target_dir = "auth_oidc/azure" + elif sub_test_name.startswith("gcp"): + target_dir = "auth_oidc/gcp" + elif sub_test_name in K8S_NAMES + K8S_REMOTE_NAMES: + target_dir = "auth_oidc/k8s" + else: + raise ValueError(f"Invalid sub test name '{sub_test_name}'") + return f"{DRIVERS_TOOLS}/.evergreen/{target_dir}" + + +def setup_oidc(sub_test_name: str) -> dict[str, str] | None: + target_dir = _get_target_dir(sub_test_name) + env = os.environ.copy() + + if sub_test_name == "eks" and "AWS_ACCESS_KEY_ID" in os.environ: + # Store AWS creds for kubectl access. 
+ for key in ["AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN"]: + if key in os.environ: + write_env(key, os.environ[key]) + + if sub_test_name == "azure": + env["AZUREOIDC_VMNAME_PREFIX"] = "PYTHON_DRIVER" + if "-remote" not in sub_test_name: + if sub_test_name == "azure": + # Found using "az vm image list --output table" + env["AZUREOIDC_IMAGE"] = "Canonical:0001-com-ubuntu-server-jammy:22_04-lts-gen2:latest" + else: + env["GCPKMS_IMAGEFAMILY"] = "debian-12" + run_command(f"bash {target_dir}/setup.sh", env=env) + if sub_test_name in K8S_NAMES: + run_command(f"bash {target_dir}/setup-pod.sh {sub_test_name}") + run_command(f"bash {target_dir}/run-self-test.sh") + return None + + source_file = None + if sub_test_name == "default": + source_file = f"{target_dir}/secrets-export.sh" + elif sub_test_name in ["azure-remote", "gcp-remote"]: + source_file = "./secrets-export.sh" + if sub_test_name in K8S_REMOTE_NAMES: + return os.environ.copy() + if source_file is None: + return None + + config = read_env(source_file) + write_env("MONGODB_URI_SINGLE", config["MONGODB_URI_SINGLE"]) + write_env("MONGODB_URI", config["MONGODB_URI"]) + write_env("DB_IP", config["MONGODB_URI"]) + + if sub_test_name == "default": + write_env("OIDC_TOKEN_FILE", config["OIDC_TOKEN_FILE"]) + write_env("OIDC_TOKEN_DIR", config["OIDC_TOKEN_DIR"]) + if "OIDC_DOMAIN" in config: + write_env("OIDC_DOMAIN", config["OIDC_DOMAIN"]) + elif sub_test_name == "azure-remote": + write_env("AZUREOIDC_RESOURCE", config["AZUREOIDC_RESOURCE"]) + elif sub_test_name == "gcp-remote": + write_env("GCPOIDC_AUDIENCE", config["GCPOIDC_AUDIENCE"]) + return config + + +def test_oidc_send_to_remote(sub_test_name: str) -> None: + env = os.environ.copy() + target_dir = _get_target_dir(sub_test_name) + create_archive() + if sub_test_name in ["azure", "gcp"]: + upper_name = sub_test_name.upper() + env[f"{upper_name}OIDC_DRIVERS_TAR_FILE"] = TMP_DRIVER_FILE + env[ + f"{upper_name}OIDC_TEST_CMD" + ] = f"NO_EXT=1 OIDC_ENV={sub_test_name} ./.evergreen/run-mongodb-oidc-test.sh" + elif sub_test_name in K8S_NAMES: + env["K8S_DRIVERS_TAR_FILE"] = TMP_DRIVER_FILE + env["K8S_TEST_CMD"] = "OIDC_ENV=k8s ./.evergreen/run-mongodb-oidc-test.sh" + run_command(f"bash {target_dir}/run-driver-test.sh", env=env) + + +def teardown_oidc(sub_test_name: str) -> None: + target_dir = _get_target_dir(sub_test_name) + # For k8s, make sure an error while tearing down the pod doesn't prevent + # the Altas server teardown. 
+ error = None + if sub_test_name in K8S_NAMES: + try: + run_command(f"bash {target_dir}/teardown-pod.sh") + except Exception as e: + error = e + run_command(f"bash {target_dir}/teardown.sh") + if error: + raise error diff --git a/.evergreen/scripts/perf-submission-setup.sh b/.evergreen/scripts/perf-submission-setup.sh new file mode 100755 index 0000000000..ecb38751a5 --- /dev/null +++ b/.evergreen/scripts/perf-submission-setup.sh @@ -0,0 +1,15 @@ +#!/bin/bash +# We use the requester expansion to determine whether the data is from a mainline evergreen run or not + +set -eu + +# shellcheck disable=SC2154 +if [ "${requester}" == "commit" ]; then + echo "is_mainline: true" >> expansion.yml +else + echo "is_mainline: false" >> expansion.yml +fi + +# We parse the username out of the order_id as patches append that in and SPS does not need that information +# shellcheck disable=SC2154 +echo "parsed_order_id: $(echo "${revision_order_id}" | awk -F'_' '{print $NF}')" >> expansion.yml diff --git a/.evergreen/scripts/perf-submission.sh b/.evergreen/scripts/perf-submission.sh new file mode 100755 index 0000000000..f7c3ea6664 --- /dev/null +++ b/.evergreen/scripts/perf-submission.sh @@ -0,0 +1,25 @@ +#!/bin/bash +# We use the requester expansion to determine whether the data is from a mainline evergreen run or not + +set -eu + +# Submit the performance data to the SPS endpoint +# shellcheck disable=SC2154 +response=$(curl -s -w "\nHTTP_STATUS:%{http_code}" -X 'POST' \ + "https://performance-monitoring-api.corp.mongodb.com/raw_perf_results/cedar_report?project=${project_id}&version=${version_id}&variant=${build_variant}&order=${parsed_order_id}&task_name=${task_name}&task_id=${task_id}&execution=${execution}&mainline=${is_mainline}" \ + -H 'accept: application/json' \ + -H 'Content-Type: application/json' \ + -d @results.json) + +http_status=$(echo "$response" | grep "HTTP_STATUS" | awk -F':' '{print $2}') +response_body=$(echo "$response" | sed '/HTTP_STATUS/d') + +# We want to throw an error if the data was not successfully submitted +if [ "$http_status" -ne 200 ]; then + echo "Error: Received HTTP status $http_status" + echo "Response Body: $response_body" + exit 1 +fi + +echo "Response Body: $response_body" +echo "HTTP Status: $http_status" diff --git a/.evergreen/scripts/prepare-resources.sh b/.evergreen/scripts/prepare-resources.sh deleted file mode 100755 index 3cfa2c4efd..0000000000 --- a/.evergreen/scripts/prepare-resources.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash -set -eu - -HERE=$(dirname ${BASH_SOURCE:-$0}) -pushd $HERE -. 
env.sh - -rm -rf $DRIVERS_TOOLS -if [ "$PROJECT" = "drivers-tools" ]; then - # If this was a patch build, doing a fresh clone would not actually test the patch - cp -R $PROJECT_DIRECTORY/ $DRIVERS_TOOLS -else - git clone https://github.com/mongodb-labs/drivers-evergreen-tools.git $DRIVERS_TOOLS -fi -echo "{ \"releases\": { \"default\": \"$MONGODB_BINARIES\" }}" >$MONGO_ORCHESTRATION_HOME/orchestration.config - -popd diff --git a/.evergreen/scripts/resync-all-specs.py b/.evergreen/scripts/resync-all-specs.py new file mode 100644 index 0000000000..8e58e56da2 --- /dev/null +++ b/.evergreen/scripts/resync-all-specs.py @@ -0,0 +1,126 @@ +from __future__ import annotations + +import argparse +import os +import pathlib +import subprocess +from argparse import Namespace +from subprocess import CalledProcessError +from typing import Optional + + +def resync_specs(directory: pathlib.Path, errored: dict[str, str]) -> None: + """Actually sync the specs""" + print("Beginning to sync specs") + for spec in os.scandir(directory): + if not spec.is_dir(): + continue + + if spec.name in ["asynchronous"]: + continue + try: + subprocess.run( + ["bash", "./.evergreen/resync-specs.sh", spec.name], # noqa: S603, S607 + capture_output=True, + text=True, + check=True, + ) + except CalledProcessError as exc: + errored[spec.name] = exc.stderr + print("Done syncing specs") + + +def apply_patches(errored): + print("Beginning to apply patches") + subprocess.run(["bash", "./.evergreen/remove-unimplemented-tests.sh"], check=True) # noqa: S603, S607 + try: + subprocess.run( + ["git apply -R --allow-empty --whitespace=fix ./.evergreen/spec-patch/*"], # noqa: S607 + shell=True, # noqa: S602 + check=True, + stderr=subprocess.PIPE, + ) + except CalledProcessError as exc: + errored["applying patches"] = exc.stderr + + +def check_new_spec_directories(directory: pathlib.Path) -> list[str]: + """Check to see if there are any directories in the spec repo that don't exist in pymongo/test""" + spec_dir = pathlib.Path(os.environ["MDB_SPECS"]) / "source" + spec_set = { + entry.name.replace("-", "_") + for entry in os.scandir(spec_dir) + if entry.is_dir() + and (pathlib.Path(entry.path) / "tests").is_dir() + and len(list(os.scandir(pathlib.Path(entry.path) / "tests"))) > 1 + } + test_set = {entry.name.replace("-", "_") for entry in os.scandir(directory) if entry.is_dir()} + known_mappings = { + "ocsp_support": "ocsp", + "client_side_operations_timeout": "csot", + "mongodb_handshake": "handshake", + "load_balancers": "load_balancer", + "connection_monitoring_and_pooling": "connection_monitoring", + "command_logging_and_monitoring": "command_logging", + "initial_dns_seedlist_discovery": "srv_seedlist", + "server_discovery_and_monitoring": "sdam_monitoring", + } + + for k, v in known_mappings.items(): + if k in spec_set: + spec_set.remove(k) + spec_set.add(v) + return list(spec_set - test_set) + + +def write_summary(errored: dict[str, str], new: list[str], filename: Optional[str]) -> None: + """Generate the PR description""" + pr_body = "" + process = subprocess.run( + ["git diff --name-only | awk -F'/' '{print $2}' | sort | uniq"], # noqa: S607 + shell=True, # noqa: S602 + capture_output=True, + text=True, + check=True, + ) + succeeded = process.stdout.strip().split() + if len(succeeded) > 0: + pr_body += "The following specs were changed:\n -" + pr_body += "\n -".join(succeeded) + pr_body += "\n" + if len(errored) > 0: + pr_body += "\n\nThe following spec syncs encountered errors:" + for k, v in errored.items(): + pr_body 
+= f"\n -{k}\n```{v}\n```" + pr_body += "\n" + if len(new) > 0: + pr_body += "\n\nThe following directories are in the specification repository and not in our test directory:\n -" + pr_body += "\n -".join(new) + pr_body += "\n" + if pr_body != "": + if filename is None: + print(f"\n{pr_body}") + else: + with open(filename, "w") as f: + # replacements made for proper json + f.write(pr_body.replace("\n", "\\n").replace("\t", "\\t")) + + +def main(args: Namespace): + directory = pathlib.Path("./test") + errored: dict[str, str] = {} + resync_specs(directory, errored) + apply_patches(errored) + new = check_new_spec_directories(directory) + write_summary(errored, new, args.filename) + + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description="Python Script to resync all specs and generate summary for PR." + ) + parser.add_argument( + "--filename", help="Name of file for the summary to be written into.", default=None + ) + args = parser.parse_args() + main(args) diff --git a/.evergreen/scripts/resync-all-specs.sh b/.evergreen/scripts/resync-all-specs.sh new file mode 100755 index 0000000000..41e4a2bc73 --- /dev/null +++ b/.evergreen/scripts/resync-all-specs.sh @@ -0,0 +1,43 @@ +#!/usr/bin/env bash +# Run spec syncing script and create PR +set -eu + +# SETUP +SRC_URL="https://github.com/mongodb/specifications.git" +# needs to be set for resync-specs.sh +SPEC_SRC="$(realpath "../specifications")" +SCRIPT="$(realpath "./.evergreen/resync-specs.sh")" + +# Clone the spec repo if the directory does not exist +if [[ ! -d $SPEC_SRC ]]; then + git clone $SRC_URL $SPEC_SRC + if [[ $? -ne 0 ]]; then + echo "Error: Failed to clone repository." + exit 1 + fi +fi + +# Set environment variable to the cloned spec repo for resync-specs.sh +export MDB_SPECS="$SPEC_SRC" + +# Check that resync-specs.sh exists and is executable +if [[ ! -x $SCRIPT ]]; then + echo "Error: $SCRIPT not found or is not executable." + exit 1 +fi + +PR_DESC="spec_sync.txt" + +# run python script that actually does all the resyncing +if ! [ -n "${CI:-}" ] +then + # we're running locally + python3 ./.evergreen/scripts/resync-all-specs.py +else + /opt/devtools/bin/python3.11 ./.evergreen/scripts/resync-all-specs.py --filename "$PR_DESC" + if [[ -f $PR_DESC ]]; then + # changes were made -> call scrypt to create PR for us + .evergreen/scripts/create-spec-pr.sh "$PR_DESC" + rm "$PR_DESC" + fi +fi diff --git a/.evergreen/scripts/run-atlas-tests.sh b/.evergreen/scripts/run-atlas-tests.sh deleted file mode 100755 index 98a19f047f..0000000000 --- a/.evergreen/scripts/run-atlas-tests.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/bash - -# Disable xtrace for security reasons (just in case it was accidentally set). -set +x -set -o errexit -bash "${DRIVERS_TOOLS}"/.evergreen/auth_aws/setup_secrets.sh drivers/atlas_connect -TEST_ATLAS=1 bash "${PROJECT_DIRECTORY}"/.evergreen/hatch.sh test:test-eg diff --git a/.evergreen/scripts/run-aws-ecs-auth-test.sh b/.evergreen/scripts/run-aws-ecs-auth-test.sh deleted file mode 100755 index 787e0a710b..0000000000 --- a/.evergreen/scripts/run-aws-ecs-auth-test.sh +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/bash - -# shellcheck disable=SC2154 -if [ "${skip_ECS_auth_test}" = "true" ]; then - echo "This platform does not support the ECS auth test, skipping..." - exit 0 -fi -set -ex -cd "$DRIVERS_TOOLS"/.evergreen/auth_aws -. ./activate-authawsvenv.sh -. 
aws_setup.sh ecs -export MONGODB_BINARIES="$MONGODB_BINARIES" -export PROJECT_DIRECTORY="$PROJECT_DIRECTORY" -python aws_tester.py ecs -cd - diff --git a/.evergreen/scripts/run-doctests.sh b/.evergreen/scripts/run-doctests.sh deleted file mode 100755 index f7215ad347..0000000000 --- a/.evergreen/scripts/run-doctests.sh +++ /dev/null @@ -1,4 +0,0 @@ -#!/bin/bash - -set -o xtrace -PYTHON_BINARY=${PYTHON_BINARY} bash "${PROJECT_DIRECTORY}"/.evergreen/hatch.sh doctest:test diff --git a/.evergreen/scripts/run-enterprise-auth-tests.sh b/.evergreen/scripts/run-enterprise-auth-tests.sh deleted file mode 100755 index 7f936b1955..0000000000 --- a/.evergreen/scripts/run-enterprise-auth-tests.sh +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/bash -set -eu - -# Disable xtrace for security reasons (just in case it was accidentally set). -set +x -# Use the default python to bootstrap secrets. -bash "${DRIVERS_TOOLS}"/.evergreen/secrets_handling/setup-secrets.sh drivers/enterprise_auth -TEST_ENTERPRISE_AUTH=1 AUTH=auth bash "${PROJECT_DIRECTORY}"/.evergreen/hatch.sh test:test-eg diff --git a/.evergreen/scripts/run-gcpkms-fail-test.sh b/.evergreen/scripts/run-gcpkms-fail-test.sh deleted file mode 100755 index dd9d522c8a..0000000000 --- a/.evergreen/scripts/run-gcpkms-fail-test.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/bash - -. .evergreen/scripts/env.sh -export PYTHON_BINARY=/opt/mongodbtoolchain/v4/bin/python3 -export LIBMONGOCRYPT_URL=https://s3.amazonaws.com/mciuploads/libmongocrypt/debian11/master/latest/libmongocrypt.tar.gz -SKIP_SERVERS=1 bash ./.evergreen/setup-encryption.sh -SUCCESS=false TEST_FLE_GCP_AUTO=1 ./.evergreen/hatch.sh test:test-eg diff --git a/.evergreen/scripts/run-getdata.sh b/.evergreen/scripts/run-getdata.sh index b2d6ecb476..9435a5fcc3 100755 --- a/.evergreen/scripts/run-getdata.sh +++ b/.evergreen/scripts/run-getdata.sh @@ -1,11 +1,14 @@ #!/bin/bash +# Get the debug data for an evergreen task. +set -eu -set -o xtrace -. ${DRIVERS_TOOLS}/.evergreen/download-mongodb.sh || true +. 
${DRIVERS_TOOLS}/.evergreen/get-distro.sh || true get_distro || true echo $DISTRO echo $MARCH echo $OS + +set -x uname -a || true ls /etc/*release* || true cc --version || true @@ -20,3 +23,4 @@ ls -la /usr/local/Cellar/ || true scan-build --version || true genhtml --version || true valgrind --version || true +set +x diff --git a/.evergreen/scripts/run-load-balancer.sh b/.evergreen/scripts/run-load-balancer.sh deleted file mode 100755 index 7d431777e5..0000000000 --- a/.evergreen/scripts/run-load-balancer.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash - -MONGODB_URI=${MONGODB_URI} bash "${DRIVERS_TOOLS}"/.evergreen/run-load-balancer.sh start diff --git a/.evergreen/scripts/run-mockupdb-tests.sh b/.evergreen/scripts/run-mockupdb-tests.sh deleted file mode 100755 index 8825a0237d..0000000000 --- a/.evergreen/scripts/run-mockupdb-tests.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/bash - -set -o xtrace -export PYTHON_BINARY=${PYTHON_BINARY} -bash "${PROJECT_DIRECTORY}"/.evergreen/hatch.sh test:test-mockupdb diff --git a/.evergreen/scripts/run-mod-wsgi-tests.sh b/.evergreen/scripts/run-mod-wsgi-tests.sh deleted file mode 100755 index 607458b8c6..0000000000 --- a/.evergreen/scripts/run-mod-wsgi-tests.sh +++ /dev/null @@ -1,52 +0,0 @@ -#!/bin/bash -set -o xtrace -set -o errexit - -APACHE=$(command -v apache2 || command -v /usr/lib/apache2/mpm-prefork/apache2) || true -if [ -n "$APACHE" ]; then - APACHE_CONFIG=apache24ubuntu161404.conf -else - APACHE=$(command -v httpd) || true - if [ -z "$APACHE" ]; then - echo "Could not find apache2 binary" - exit 1 - else - APACHE_CONFIG=apache22amazon.conf - fi -fi - - -PYTHON_VERSION=$(${PYTHON_BINARY} -c "import sys; sys.stdout.write('.'.join(str(val) for val in sys.version_info[:2]))") - -# Ensure the C extensions are installed. -${PYTHON_BINARY} -m venv --system-site-packages .venv -source .venv/bin/activate -pip install -U pip -python -m pip install -e . - -export MOD_WSGI_SO=/opt/python/mod_wsgi/python_version/$PYTHON_VERSION/mod_wsgi_version/$MOD_WSGI_VERSION/mod_wsgi.so -export PYTHONHOME=/opt/python/$PYTHON_VERSION -# If MOD_WSGI_EMBEDDED is set use the default embedded mode behavior instead -# of daemon mode (WSGIDaemonProcess). -if [ -n "${MOD_WSGI_EMBEDDED:-}" ]; then - export MOD_WSGI_CONF=mod_wsgi_test_embedded.conf -else - export MOD_WSGI_CONF=mod_wsgi_test.conf -fi - -cd .. 
-$APACHE -k start -f ${PROJECT_DIRECTORY}/test/mod_wsgi_test/${APACHE_CONFIG} -trap '$APACHE -k stop -f ${PROJECT_DIRECTORY}/test/mod_wsgi_test/${APACHE_CONFIG}' EXIT HUP - -wget -t 1 -T 10 -O - "http://localhost:8080/interpreter1${PROJECT_DIRECTORY}" || (cat error_log && exit 1) -wget -t 1 -T 10 -O - "http://localhost:8080/interpreter2${PROJECT_DIRECTORY}" || (cat error_log && exit 1) - -python ${PROJECT_DIRECTORY}/test/mod_wsgi_test/test_client.py -n 25000 -t 100 parallel \ - http://localhost:8080/interpreter1${PROJECT_DIRECTORY} http://localhost:8080/interpreter2${PROJECT_DIRECTORY} || \ - (tail -n 100 error_log && exit 1) - -python ${PROJECT_DIRECTORY}/test/mod_wsgi_test/test_client.py -n 25000 serial \ - http://localhost:8080/interpreter1${PROJECT_DIRECTORY} http://localhost:8080/interpreter2${PROJECT_DIRECTORY} || \ - (tail -n 100 error_log && exit 1) - -rm -rf .venv diff --git a/.evergreen/scripts/run-mongodb-aws-test.sh b/.evergreen/scripts/run-mongodb-aws-test.sh deleted file mode 100755 index ec20bfd06b..0000000000 --- a/.evergreen/scripts/run-mongodb-aws-test.sh +++ /dev/null @@ -1,33 +0,0 @@ -#!/bin/bash - -set -o xtrace -set -o errexit # Exit the script with error if any of the commands fail - -############################################ -# Main Program # -############################################ - -# Supported/used environment variables: -# MONGODB_URI Set the URI, including an optional username/password to use -# to connect to the server via MONGODB-AWS authentication -# mechanism. -# PYTHON_BINARY The Python version to use. - -# shellcheck disable=SC2154 -if [ "${skip_EC2_auth_test:-}" = "true" ] && { [ "$1" = "ec2" ] || [ "$1" = "web-identity" ]; }; then - echo "This platform does not support the EC2 auth test, skipping..." - exit 0 -fi - -echo "Running MONGODB-AWS authentication tests for $1" - -# Handle credentials and environment setup. -. "$DRIVERS_TOOLS"/.evergreen/auth_aws/aws_setup.sh "$1" - -# show test output -set -x - -export TEST_AUTH_AWS=1 -export AUTH="auth" -export SET_XTRACE_ON=1 -bash ./.evergreen/hatch.sh test:test-eg diff --git a/.evergreen/scripts/run-ocsp-test.sh b/.evergreen/scripts/run-ocsp-test.sh deleted file mode 100755 index 3c6d3b2b3b..0000000000 --- a/.evergreen/scripts/run-ocsp-test.sh +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/bash - -TEST_OCSP=1 \ -PYTHON_BINARY="${PYTHON_BINARY}" \ -CA_FILE="${DRIVERS_TOOLS}/.evergreen/ocsp/${OCSP_ALGORITHM}/ca.pem" \ -OCSP_TLS_SHOULD_SUCCEED="${OCSP_TLS_SHOULD_SUCCEED}" \ -bash "${PROJECT_DIRECTORY}"/.evergreen/hatch.sh test:test-eg -bash "${DRIVERS_TOOLS}"/.evergreen/ocsp/teardown.sh diff --git a/.evergreen/scripts/run-perf-tests.sh b/.evergreen/scripts/run-perf-tests.sh deleted file mode 100755 index 69a369fee1..0000000000 --- a/.evergreen/scripts/run-perf-tests.sh +++ /dev/null @@ -1,4 +0,0 @@ -#!/bin/bash - -PROJECT_DIRECTORY=${PROJECT_DIRECTORY} -bash "${PROJECT_DIRECTORY}"/.evergreen/run-perf-tests.sh diff --git a/.evergreen/scripts/run-server.sh b/.evergreen/scripts/run-server.sh new file mode 100755 index 0000000000..298eedcd3e --- /dev/null +++ b/.evergreen/scripts/run-server.sh @@ -0,0 +1,13 @@ +#!/bin/bash + +set -eu + +HERE=$(dirname ${BASH_SOURCE:-$0}) + +# Try to source the env file. 
+if [ -f $HERE/env.sh ]; then + echo "Sourcing env file" + source $HERE/env.sh +fi + +uv run $HERE/run_server.py "$@" diff --git a/.evergreen/scripts/run-tests.sh b/.evergreen/scripts/run-tests.sh deleted file mode 100755 index 6986a0bbee..0000000000 --- a/.evergreen/scripts/run-tests.sh +++ /dev/null @@ -1,54 +0,0 @@ -#!/bin/bash - -# Disable xtrace -set +x -if [ -n "${MONGODB_STARTED}" ]; then - export PYMONGO_MUST_CONNECT=true -fi -if [ -n "${DISABLE_TEST_COMMANDS}" ]; then - export PYMONGO_DISABLE_TEST_COMMANDS=1 -fi -if [ -n "${test_encryption}" ]; then - # Disable xtrace (just in case it was accidentally set). - set +x - bash "${DRIVERS_TOOLS}"/.evergreen/csfle/await-servers.sh - export TEST_ENCRYPTION=1 - if [ -n "${test_encryption_pyopenssl}" ]; then - export TEST_ENCRYPTION_PYOPENSSL=1 - fi -fi -if [ -n "${test_crypt_shared}" ]; then - export TEST_CRYPT_SHARED=1 - export CRYPT_SHARED_LIB_PATH=${CRYPT_SHARED_LIB_PATH} -fi -if [ -n "${test_pyopenssl}" ]; then - export TEST_PYOPENSSL=1 -fi -if [ -n "${SETDEFAULTENCODING}" ]; then - export SETDEFAULTENCODING="${SETDEFAULTENCODING}" -fi -if [ -n "${test_loadbalancer}" ]; then - export TEST_LOADBALANCER=1 - export SINGLE_MONGOS_LB_URI="${SINGLE_MONGOS_LB_URI}" - export MULTI_MONGOS_LB_URI="${MULTI_MONGOS_LB_URI}" -fi -if [ -n "${test_serverless}" ]; then - export TEST_SERVERLESS=1 -fi -if [ -n "${TEST_INDEX_MANAGEMENT:-}" ]; then - export TEST_INDEX_MANAGEMENT=1 -fi -if [ -n "${SKIP_CSOT_TESTS}" ]; then - export SKIP_CSOT_TESTS=1 -fi -GREEN_FRAMEWORK=${GREEN_FRAMEWORK} \ - PYTHON_BINARY=${PYTHON_BINARY} \ - NO_EXT=${NO_EXT} \ - COVERAGE=${COVERAGE} \ - COMPRESSORS=${COMPRESSORS} \ - AUTH=${AUTH} \ - SSL=${SSL} \ - TEST_DATA_LAKE=${TEST_DATA_LAKE:-} \ - TEST_SUITES=${TEST_SUITES:-} \ - MONGODB_API_VERSION=${MONGODB_API_VERSION} \ - bash "${PROJECT_DIRECTORY}"/.evergreen/hatch.sh test:test-eg diff --git a/.evergreen/scripts/run-with-env.sh b/.evergreen/scripts/run-with-env.sh deleted file mode 100755 index 2fd073605d..0000000000 --- a/.evergreen/scripts/run-with-env.sh +++ /dev/null @@ -1,21 +0,0 @@ -#!/bin/bash -eu - -# Example use: bash run-with-env.sh run-tests.sh {args...} - -# Parameter expansion to get just the current directory's name -if [ "${PWD##*/}" == "src" ]; then - . .evergreen/scripts/env.sh - if [ -f ".evergreen/scripts/test-env.sh" ]; then - . .evergreen/scripts/test-env.sh - fi -else - . src/.evergreen/scripts/env.sh - if [ -f "src/.evergreen/scripts/test-env.sh" ]; then - . src/.evergreen/scripts/test-env.sh - fi -fi - -set -eu - -# shellcheck source=/dev/null -. "$@" diff --git a/.evergreen/scripts/run_server.py b/.evergreen/scripts/run_server.py new file mode 100644 index 0000000000..a35fbb57a8 --- /dev/null +++ b/.evergreen/scripts/run_server.py @@ -0,0 +1,59 @@ +from __future__ import annotations + +import os +from typing import Any + +from utils import DRIVERS_TOOLS, ROOT, get_test_options, run_command + + +def set_env(name: str, value: Any = "1") -> None: + os.environ[name] = str(value) + + +def start_server(): + opts, extra_opts = get_test_options( + "Run a MongoDB server. All given flags will be passed to run-orchestration.sh in DRIVERS_TOOLS.", + require_sub_test_name=False, + allow_extra_opts=True, + ) + test_name = opts.test_name + + # drivers-evergreen-tools expects the version variable to be named MONGODB_VERSION. 
+ if "VERSION" in os.environ: + os.environ["MONGODB_VERSION"] = os.environ["VERSION"] + + if test_name == "auth_aws": + set_env("AUTH_AWS") + + elif test_name == "load_balancer": + set_env("LOAD_BALANCER") + + elif test_name == "search_index": + os.environ["TOPOLOGY"] = "replica_set" + os.environ["MONGODB_VERSION"] = "7.0" + + if not os.environ.get("TEST_CRYPT_SHARED"): + set_env("SKIP_CRYPT_SHARED") + + if opts.ssl: + extra_opts.append("--ssl") + if test_name != "ocsp": + certs = ROOT / "test/certificates" + set_env("TLS_CERT_KEY_FILE", certs / "client.pem") + set_env("TLS_PEM_KEY_FILE", certs / "server.pem") + set_env("TLS_CA_FILE", certs / "ca.pem") + + if opts.auth: + extra_opts.append("--auth") + + if opts.verbose: + extra_opts.append("-v") + elif opts.quiet: + extra_opts.append("-q") + + cmd = ["bash", f"{DRIVERS_TOOLS}/.evergreen/run-orchestration.sh", *extra_opts] + run_command(cmd, cwd=DRIVERS_TOOLS) + + +if __name__ == "__main__": + start_server() diff --git a/.evergreen/scripts/run_tests.py b/.evergreen/scripts/run_tests.py new file mode 100644 index 0000000000..c1c29c58bc --- /dev/null +++ b/.evergreen/scripts/run_tests.py @@ -0,0 +1,219 @@ +from __future__ import annotations + +import json +import logging +import os +import platform +import shutil +import sys +from datetime import datetime +from pathlib import Path +from shutil import which + +try: + import importlib_metadata +except ImportError: + from importlib import metadata as importlib_metadata + + +import pytest +from utils import DRIVERS_TOOLS, LOGGER, ROOT, run_command + +AUTH = os.environ.get("AUTH", "noauth") +SSL = os.environ.get("SSL", "nossl") +UV_ARGS = os.environ.get("UV_ARGS", "") +TEST_PERF = os.environ.get("TEST_PERF") +GREEN_FRAMEWORK = os.environ.get("GREEN_FRAMEWORK") +TEST_ARGS = os.environ.get("TEST_ARGS", "").split() +TEST_NAME = os.environ.get("TEST_NAME") +SUB_TEST_NAME = os.environ.get("SUB_TEST_NAME") + + +def list_packages(): + packages = set() + for distribution in importlib_metadata.distributions(): + if distribution.name: + packages.add(distribution.name) + print("Package Version URL") + print("------------------- ----------- ----------------------------------------------------") + for name in sorted(packages): + distribution = importlib_metadata.distribution(name) + url = "" + if distribution.origin is not None: + url = distribution.origin.url + print(f"{name:20s}{distribution.version:12s}{url}") + print("------------------- ----------- ----------------------------------------------------\n") + + +def handle_perf(start_time: datetime): + end_time = datetime.now() + elapsed_secs = (end_time - start_time).total_seconds() + with open("results.json") as fid: + results = json.load(fid) + LOGGER.info("results.json:\n%s", json.dumps(results, indent=2)) + + results = dict( + status="PASS", + exit_code=0, + test_file="BenchMarkTests", + start=int(start_time.timestamp()), + end=int(end_time.timestamp()), + elapsed=elapsed_secs, + ) + report = dict(failures=0, results=[results]) + LOGGER.info("report.json\n%s", json.dumps(report, indent=2)) + + with open("report.json", "w", newline="\n") as fid: + json.dump(report, fid) + + +def handle_green_framework() -> None: + if GREEN_FRAMEWORK == "gevent": + from gevent import monkey + + monkey.patch_all() + + # Never run async tests with a framework. 
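The comment above introduces the marker rewriting in the next lines: when a green framework is active, the runner forces a pytest `-m` expression so that async tests are deselected. A minimal sketch of how pytest consumes such an expression (the marker names are illustrative; the real suite defines its own markers in its pytest configuration):

```python
# Minimal sketch: pytest deselects any test whose markers fail the -m expression.
import pytest

# Equivalent in spirit to what handle_green_framework() appends to TEST_ARGS.
args = ["-m", "not default_async and default", "-q"]
exit_code = pytest.main(args)  # 0 == tests passed, 5 == nothing was collected
print(f"pytest exited with {exit_code}")
```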
+ if len(TEST_ARGS) <= 1: + TEST_ARGS.extend(["-m", "not default_async and default"]) + else: + for i in range(len(TEST_ARGS) - 1): + if "-m" in TEST_ARGS[i]: + TEST_ARGS[i + 1] = f"not default_async and {TEST_ARGS[i + 1]}" + + LOGGER.info(f"Running tests with {GREEN_FRAMEWORK}...") + + +def handle_c_ext() -> None: + if platform.python_implementation() != "CPython": + return + sys.path.insert(0, str(ROOT / "tools")) + from fail_if_no_c import main as fail_if_no_c + + fail_if_no_c() + + +def handle_pymongocrypt() -> None: + import pymongocrypt + + LOGGER.info(f"pymongocrypt version: {pymongocrypt.__version__}") + LOGGER.info(f"libmongocrypt version: {pymongocrypt.libmongocrypt_version()}") + + +def handle_aws_lambda() -> None: + env = os.environ.copy() + target_dir = ROOT / "test/lambda" + env["TEST_LAMBDA_DIRECTORY"] = str(target_dir) + env.setdefault("AWS_REGION", "us-east-1") + dirs = ["pymongo", "gridfs", "bson"] + # Store the original .so files. + before_sos = [] + for dname in dirs: + before_sos.extend(f"{f.parent.name}/{f.name}" for f in (ROOT / dname).glob("*.so")) + # Build the C extensions. + docker = which("docker") or which("podman") + if not docker: + raise ValueError("Could not find docker!") + image = "quay.io/pypa/manylinux2014_x86_64:latest" + run_command( + f'{docker} run --rm -v "{ROOT}:/src" --platform linux/amd64 {image} /src/test/lambda/build_internal.sh' + ) + for dname in dirs: + target = ROOT / "test/lambda/mongodb" / dname + shutil.rmtree(target, ignore_errors=True) + shutil.copytree(ROOT / dname, target) + # Remove the original .so files from the lambda directory. + for so_path in before_sos: + (ROOT / "test/lambda/mongodb" / so_path).unlink() + # Remove the new .so files from the ROOT directory. + for dname in dirs: + so_paths = [f"{f.parent.name}/{f.name}" for f in (ROOT / dname).glob("*.so")] + for so_path in list(so_paths): + if so_path not in before_sos: + Path(so_path).unlink() + + script_name = "run-deployed-lambda-aws-tests.sh" + run_command(f"bash {DRIVERS_TOOLS}/.evergreen/aws_lambda/{script_name}", env=env) + + +def run() -> None: + # Add a diagnostic for the python version. + print("Running with python", sys.version) + + # List the installed packages. + list_packages() + + # Handle the green framework first so it can patch modules. + if GREEN_FRAMEWORK: + handle_green_framework() + + # Ensure C extensions if applicable. + if not os.environ.get("NO_EXT"): + handle_c_ext() + + if os.environ.get("PYMONGOCRYPT_LIB"): + handle_pymongocrypt() + + LOGGER.info(f"Test setup:\n{AUTH=}\n{SSL=}\n{UV_ARGS=}\n{TEST_ARGS=}") + + # Record the start time for a perf test. + if TEST_PERF: + start_time = datetime.now() + + # Run mod_wsgi tests using the helper. + if TEST_NAME == "mod_wsgi": + from mod_wsgi_tester import test_mod_wsgi + + test_mod_wsgi() + return + + # Send kms tests to run remotely. + if TEST_NAME == "kms" and SUB_TEST_NAME in ["azure", "gcp"]: + from kms_tester import test_kms_send_to_remote + + test_kms_send_to_remote(SUB_TEST_NAME) + return + + # Handle doctests. + if TEST_NAME == "doctest": + from sphinx.cmd.build import main + + result = main("-E -b doctest doc ./doc/_build/doctest".split()) + sys.exit(result) + + # Send ecs tests to run remotely. + if TEST_NAME == "auth_aws" and SUB_TEST_NAME == "ecs": + run_command(f"{DRIVERS_TOOLS}/.evergreen/auth_aws/aws_setup.sh ecs") + return + + # Send OIDC tests to run remotely. 
+ if ( + TEST_NAME == "auth_oidc" + and SUB_TEST_NAME != "default" + and not SUB_TEST_NAME.endswith("-remote") + ): + from oidc_tester import test_oidc_send_to_remote + + test_oidc_send_to_remote(SUB_TEST_NAME) + return + + # Run deployed aws lambda tests. + if TEST_NAME == "aws_lambda": + handle_aws_lambda() + return + + if os.environ.get("DEBUG_LOG"): + TEST_ARGS.extend(f"-o log_cli_level={logging.DEBUG}".split()) + + # Run local tests. + ret = pytest.main(TEST_ARGS + sys.argv[1:]) + if ret != 0: + sys.exit(ret) + + # Handle perf test post actions. + if TEST_PERF: + handle_perf(start_time) + + +if __name__ == "__main__": + run() diff --git a/.evergreen/scripts/setup-dev-env.sh b/.evergreen/scripts/setup-dev-env.sh new file mode 100755 index 0000000000..209857d542 --- /dev/null +++ b/.evergreen/scripts/setup-dev-env.sh @@ -0,0 +1,59 @@ +#!/bin/bash +# Set up a development environment on an evergreen host. +set -eu + +HERE=$(dirname ${BASH_SOURCE:-$0}) +HERE="$( cd -- "$HERE" > /dev/null 2>&1 && pwd )" +ROOT=$(dirname "$(dirname $HERE)") +pushd $ROOT > /dev/null + +# Bail early if running on GitHub Actions. +if [ -n "${GITHUB_ACTION:-}" ]; then + exit 0 +fi + +# Source the env files to pick up common variables. +if [ -f $HERE/env.sh ]; then + . $HERE/env.sh +fi +# PYTHON_BINARY or PYTHON_VERSION may be defined in test-env.sh. +if [ -f $HERE/test-env.sh ]; then + . $HERE/test-env.sh +fi + +# Ensure dependencies are installed. +bash $HERE/install-dependencies.sh + +# Get the appropriate UV_PYTHON. +. $ROOT/.evergreen/utils.sh + +if [ -z "${PYTHON_BINARY:-}" ]; then + if [ -n "${PYTHON_VERSION:-}" ]; then + PYTHON_BINARY=$(get_python_binary $PYTHON_VERSION) + else + PYTHON_BINARY=$(find_python3) + fi +fi +export UV_PYTHON=${PYTHON_BINARY} +echo "Using python $UV_PYTHON" + +# Add the default install path to the path if needed. +if [ -z "${PYMONGO_BIN_DIR:-}" ]; then + export PATH="$PATH:$HOME/.local/bin" +fi + +# Set up venv, making sure c extensions build unless disabled. +if [ -z "${NO_EXT:-}" ]; then + export PYMONGO_C_EXT_MUST_BUILD=1 +fi +# Set up visual studio env on Windows spawn hosts. +if [ -f $HOME/.visualStudioEnv.sh ]; then + set +u + SSH_TTY=1 source $HOME/.visualStudioEnv.sh + set -u +fi +uv sync + +echo "Setting up python environment... done." + +popd > /dev/null diff --git a/.evergreen/scripts/setup-encryption.sh b/.evergreen/scripts/setup-encryption.sh deleted file mode 100755 index 2f167cd20b..0000000000 --- a/.evergreen/scripts/setup-encryption.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/bash - -if [ -n "${test_encryption}" ]; then - ./.evergreen/hatch.sh encryption:setup -fi diff --git a/.evergreen/scripts/setup-system.sh b/.evergreen/scripts/setup-system.sh index d78d924f6b..9158414cce 100755 --- a/.evergreen/scripts/setup-system.sh +++ b/.evergreen/scripts/setup-system.sh @@ -1,5 +1,5 @@ #!/bin/bash - +# Set up the system on an evergreen host. set -eu HERE=$(dirname ${BASH_SOURCE:-$0}) @@ -7,8 +7,45 @@ pushd "$(dirname "$(dirname $HERE)")" echo "Setting up system..." 
bash .evergreen/scripts/configure-env.sh source .evergreen/scripts/env.sh -bash .evergreen/scripts/prepare-resources.sh bash $DRIVERS_TOOLS/.evergreen/setup.sh bash .evergreen/scripts/install-dependencies.sh popd + +# Enable core dumps if supported on the machine. +# Copied from https://github.com/mongodb/mongo/blob/master/etc/evergreen.yml +if [ -f /proc/self/coredump_filter ]; then + # Set the shell process (and its children processes) to dump ELF headers (bit 4), + # anonymous shared mappings (bit 1), and anonymous private mappings (bit 0). + echo 0x13 >/proc/self/coredump_filter + + if [ -f /sbin/sysctl ]; then + # Check that the core pattern is set explicitly on our distro image instead + # of being the OS's default value. This ensures that coredump names are consistent + # across distros and can be picked up by Evergreen. + core_pattern=$(/sbin/sysctl -n "kernel.core_pattern") + if [ "$core_pattern" = "dump_%e.%p.core" ]; then + echo "Enabling coredumps" + ulimit -c unlimited + fi + fi +fi + +if [ "$(uname -s)" = "Darwin" ]; then + core_pattern_mac=$(/usr/sbin/sysctl -n "kern.corefile") + if [ "$core_pattern_mac" = "dump_%N.%P.core" ]; then + echo "Enabling coredumps" + ulimit -c unlimited + fi +fi + +if [ -w /etc/hosts ]; then + SUDO="" +else + SUDO="sudo" +fi + +# Add 'server' and 'hostname_not_in_cert' as hostnames +echo "127.0.0.1 server" | $SUDO tee -a /etc/hosts +echo "127.0.0.1 hostname_not_in_cert" | $SUDO tee -a /etc/hosts + echo "Setting up system... done." diff --git a/.evergreen/scripts/setup-tests.sh b/.evergreen/scripts/setup-tests.sh index 65462b2a68..1074c7eaaf 100755 --- a/.evergreen/scripts/setup-tests.sh +++ b/.evergreen/scripts/setup-tests.sh @@ -1,27 +1,25 @@ -#!/bin/bash -eux +#!/bin/bash +# Set up the test environment, including secrets and services. +set -eu -PROJECT_DIRECTORY="$(pwd)" -SCRIPT_DIR="$PROJECT_DIRECTORY/.evergreen/scripts" +# Supported/used environment variables: +# AUTH Set to enable authentication. Defaults to "noauth" +# SSL Set to enable SSL. Defaults to "nossl" +# GREEN_FRAMEWORK The green framework to test with, if any. +# COVERAGE If non-empty, run the test suite with coverage. +# COMPRESSORS If non-empty, install the appropriate compressor. +# LIBMONGOCRYPT_URL The URL to download libmongocrypt. +# TEST_CRYPT_SHARED If non-empty, install the crypt_shared lib. +# MONGODB_API_VERSION The MongoDB API version to use in tests. +# MONGODB_URI If non-empty, use as the MONGODB_URI in tests. -if [ -f "$SCRIPT_DIR/test-env.sh" ]; then - echo "Reading $SCRIPT_DIR/test-env.sh file" - . "$SCRIPT_DIR/test-env.sh" - exit 0 -fi +SCRIPT_DIR=$(dirname ${BASH_SOURCE:-$0}) -cat <<EOT > "$SCRIPT_DIR"/test-env.sh -export test_encryption="${test_encryption:-}" -export test_encryption_pyopenssl="${test_encryption_pyopenssl:-}" -export test_crypt_shared="${test_crypt_shared:-}" -export test_pyopenssl="${test_pyopenssl:-}" -export test_loadbalancer="${test_loadbalancer:-}" -export test_serverless="${test_serverless:-}" -export TEST_INDEX_MANAGEMENT="${TEST_INDEX_MANAGEMENT:-}" -export TEST_DATA_LAKE="${TEST_DATA_LAKE:-}" -export ORCHESTRATION_FILE="${ORCHESTRATION_FILE:-}" -export AUTH="${AUTH:-noauth}" -export SSL="${SSL:-nossl}" -export PYTHON_BINARY="${PYTHON_BINARY:-}" -EOT +# Try to source the env file. +if [ -f $SCRIPT_DIR/env.sh ]; then + source $SCRIPT_DIR/env.sh +fi -chmod +x "$SCRIPT_DIR"/test-env.sh +echo "Setting up tests with args \"$*\"..." +uv run $SCRIPT_DIR/setup_tests.py "$@" +echo "Setting up tests with args \"$*\"... done." 
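The rewritten setup-tests.sh above is now a thin wrapper: it sources env.sh for common variables and delegates to setup_tests.py, which records the computed configuration as shell exports in test-env.sh for later steps to re-read. A minimal sketch of that round trip, simplified from the `write_env`/`read_env` helpers added in utils.py further below:

```python
# Sketch of the env-file handshake between the setup and run steps.
from pathlib import Path

env_file = Path("test-env.sh")  # stand-in for the real file next to the scripts

def write_env(name: str, value: str = "1") -> None:
    with env_file.open("a", newline="\n") as fid:
        fid.write(f'export {name}="{value}"\n')

def read_env(path: Path) -> dict[str, str]:
    config = {}
    for line in path.read_text().splitlines():
        if "=" not in line:
            continue
        name, _, value = line.partition("=")
        config[name.removeprefix("export ")] = value.strip('"')
    return config

write_env("AUTH", "auth")
assert read_env(env_file)["AUTH"] == "auth"
```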
diff --git a/.evergreen/scripts/setup_tests.py b/.evergreen/scripts/setup_tests.py new file mode 100644 index 0000000000..3f0a8cc7f9 --- /dev/null +++ b/.evergreen/scripts/setup_tests.py @@ -0,0 +1,483 @@ +from __future__ import annotations + +import base64 +import io +import os +import platform +import shutil +import stat +import tarfile +from pathlib import Path +from urllib import request + +from utils import ( + DRIVERS_TOOLS, + ENV_FILE, + HERE, + LOGGER, + PLATFORM, + ROOT, + TEST_SUITE_MAP, + Distro, + get_test_options, + read_env, + run_command, + write_env, +) + +# Passthrough environment variables. +PASS_THROUGH_ENV = [ + "GREEN_FRAMEWORK", + "NO_EXT", + "MONGODB_API_VERSION", + "DEBUG_LOG", + "PYTHON_BINARY", + "PYTHON_VERSION", + "REQUIRE_FIPS", + "IS_WIN32", +] + +# Map the test name to test extra. +EXTRAS_MAP = { + "auth_aws": "aws", + "auth_oidc": "aws", + "encryption": "encryption", + "enterprise_auth": "gssapi", + "kms": "encryption", + "ocsp": "ocsp", + "pyopenssl": "ocsp", +} + + +# Map the test name to test group. +GROUP_MAP = dict(mockupdb="mockupdb", perf="perf") + +# The python version used for perf tests. +PERF_PYTHON_VERSION = "3.10.11" + + +def is_set(var: str) -> bool: + value = os.environ.get(var, "") + return len(value.strip()) > 0 + + +def get_distro() -> Distro: + name = "" + version_id = "" + arch = platform.machine() + with open("/etc/os-release") as fid: + for line in fid.readlines(): + line = line.replace('"', "") # noqa: PLW2901 + if line.startswith("NAME="): + _, _, name = line.strip().partition("=") + if line.startswith("VERSION_ID="): + _, _, version_id = line.strip().partition("=") + return Distro(name=name, version_id=version_id, arch=arch) + + +def setup_libmongocrypt(): + target = "" + if PLATFORM == "windows": + # PYTHON-2808 Ensure this machine has the CA cert for google KMS. + if is_set("TEST_FLE_GCP_AUTO"): + run_command('powershell.exe "Invoke-WebRequest -URI https://oauth2.googleapis.com/"') + target = "windows-test" + + elif PLATFORM == "darwin": + target = "macos" + + else: + distro = get_distro() + if distro.name.startswith("Debian"): + target = f"debian{distro.version_id}" + elif distro.name.startswith("Ubuntu"): + if distro.version_id == "20.04": + target = "debian11" + elif distro.version_id == "22.04": + target = "debian12" + elif distro.version_id == "24.04": + target = "debian13" + elif distro.name.startswith("Red Hat"): + if distro.version_id.startswith("7"): + target = "rhel-70-64-bit" + elif distro.version_id.startswith("8"): + if distro.arch == "aarch64": + target = "rhel-82-arm64" + else: + target = "rhel-80-64-bit" + + if not is_set("LIBMONGOCRYPT_URL"): + if not target: + raise ValueError("Cannot find libmongocrypt target for current platform!") + url = f"https://s3.amazonaws.com/mciuploads/libmongocrypt/{target}/master/latest/libmongocrypt.tar.gz" + else: + url = os.environ["LIBMONGOCRYPT_URL"] + + shutil.rmtree(HERE / "libmongocrypt", ignore_errors=True) + + LOGGER.info(f"Fetching {url}...") + with request.urlopen(request.Request(url), timeout=15.0) as response: # noqa: S310 + if response.status == 200: + fileobj = io.BytesIO(response.read()) + with tarfile.open("libmongocrypt.tar.gz", fileobj=fileobj) as fid: + fid.extractall(Path.cwd() / "libmongocrypt") + LOGGER.info(f"Fetching {url}... done.") + + run_command("ls -la libmongocrypt") + run_command("ls -la libmongocrypt/nocrypto") + + if PLATFORM == "windows": + # libmongocrypt's windows dll is not marked executable. 
+ run_command("chmod +x libmongocrypt/nocrypto/bin/mongocrypt.dll") + + +def load_config_from_file(path: str | Path) -> dict[str, str]: + config = read_env(path) + for key, value in config.items(): + write_env(key, value) + return config + + +def get_secrets(name: str) -> dict[str, str]: + secrets_dir = Path(f"{DRIVERS_TOOLS}/.evergreen/secrets_handling") + run_command(f"bash {secrets_dir.as_posix()}/setup-secrets.sh {name}", cwd=secrets_dir) + return load_config_from_file(secrets_dir / "secrets-export.sh") + + +def handle_test_env() -> None: + opts, _ = get_test_options("Set up the test environment and services.") + test_name = opts.test_name + sub_test_name = opts.sub_test_name + AUTH = "auth" if opts.auth else "noauth" + SSL = "ssl" if opts.ssl else "nossl" + TEST_ARGS = "" + + # Start compiling the args we'll pass to uv. + UV_ARGS = ["--extra test --no-group dev"] + + test_title = test_name + if sub_test_name: + test_title += f" {sub_test_name}" + + # Create the test env file with the initial set of values. + with ENV_FILE.open("w", newline="\n") as fid: + fid.write("#!/usr/bin/env bash\n") + fid.write("set +x\n") + ENV_FILE.chmod(ENV_FILE.stat().st_mode | stat.S_IEXEC) + + write_env("PIP_QUIET") # Quiet by default. + write_env("PIP_PREFER_BINARY") # Prefer binary dists by default. + + # Set an environment variable for the test name and sub test name. + write_env(f"TEST_{test_name.upper()}") + write_env("TEST_NAME", test_name) + write_env("SUB_TEST_NAME", sub_test_name) + + # Handle pass through env vars. + for var in PASS_THROUGH_ENV: + if is_set(var) or getattr(opts, var.lower(), ""): + write_env(var, os.environ.get(var, getattr(opts, var.lower(), ""))) + + if extra := EXTRAS_MAP.get(test_name, ""): + UV_ARGS.append(f"--extra {extra}") + + if group := GROUP_MAP.get(test_name, ""): + UV_ARGS.append(f"--group {group}") + + if opts.test_min_deps: + UV_ARGS.append("--resolution=lowest-direct") + + if test_name == "auth_oidc": + from oidc_tester import setup_oidc + + config = setup_oidc(sub_test_name) + if not config: + AUTH = "noauth" + + if test_name in ["aws_lambda", "search_index"]: + env = os.environ.copy() + env["MONGODB_VERSION"] = "7.0" + env["LAMBDA_STACK_NAME"] = "dbx-python-lambda" + write_env("LAMBDA_STACK_NAME", env["LAMBDA_STACK_NAME"]) + run_command( + f"bash {DRIVERS_TOOLS}/.evergreen/atlas/setup-atlas-cluster.sh", + env=env, + cwd=DRIVERS_TOOLS, + ) + + if test_name == "search_index": + AUTH = "auth" + + if test_name == "ocsp": + SSL = "ssl" + + write_env("AUTH", AUTH) + write_env("SSL", SSL) + LOGGER.info(f"Setting up '{test_title}' with {AUTH=} and {SSL=}...") + + if test_name == "aws_lambda": + UV_ARGS.append("--group pip") + # Store AWS creds if they were given. 
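As the comment above says, the next lines persist any AWS credentials that were provided to the task. A stripped-down sketch of that pass-through pattern (the file name is a stand-in; the real code goes through the `write_env` helper):

```python
# Sketch: forward an allowlist of credential variables into the test env file.
import os

AWS_VARS = ["AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN"]
with open("test-env.sh", "a", newline="\n") as fid:
    for name in AWS_VARS:
        if name in os.environ:  # only persist what was actually provided
            fid.write(f'export {name}="{os.environ[name]}"\n')
```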
+ if "AWS_ACCESS_KEY_ID" in os.environ: + for key in ["AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN"]: + if key in os.environ: + write_env(key, os.environ[key]) + + if AUTH != "noauth": + if test_name == "auth_oidc": + DB_USER = config["OIDC_ADMIN_USER"] + DB_PASSWORD = config["OIDC_ADMIN_PWD"] + elif test_name == "search_index": + config = read_env(f"{DRIVERS_TOOLS}/.evergreen/atlas/secrets-export.sh") + DB_USER = config["DRIVERS_ATLAS_LAMBDA_USER"] + DB_PASSWORD = config["DRIVERS_ATLAS_LAMBDA_PASSWORD"] + write_env("MONGODB_URI", config["MONGODB_URI"]) + else: + DB_USER = "bob" + DB_PASSWORD = "pwd123" # noqa: S105 + write_env("DB_USER", DB_USER) + write_env("DB_PASSWORD", DB_PASSWORD) + LOGGER.info("Added auth, DB_USER: %s", DB_USER) + + if is_set("MONGODB_URI"): + write_env("PYMONGO_MUST_CONNECT", "true") + + if opts.disable_test_commands: + write_env("PYMONGO_DISABLE_TEST_COMMANDS", "1") + + if test_name == "enterprise_auth": + config = get_secrets("drivers/enterprise_auth") + if PLATFORM == "windows": + LOGGER.info("Setting GSSAPI_PASS") + write_env("GSSAPI_PASS", config["SASL_PASS"]) + write_env("GSSAPI_CANONICALIZE", "true") + else: + # BUILD-3830 + krb_conf = ROOT / ".evergreen/krb5.conf.empty" + krb_conf.touch() + write_env("KRB5_CONFIG", krb_conf) + LOGGER.info("Writing keytab") + keytab = base64.b64decode(config["KEYTAB_BASE64"]) + keytab_file = ROOT / ".evergreen/drivers.keytab" + with keytab_file.open("wb") as fid: + fid.write(keytab) + principal = config["PRINCIPAL"] + LOGGER.info("Running kinit") + os.environ["KRB5_CONFIG"] = str(krb_conf) + cmd = f"kinit -k -t {keytab_file} -p {principal}" + run_command(cmd) + + LOGGER.info("Setting GSSAPI variables") + write_env("GSSAPI_HOST", config["SASL_HOST"]) + write_env("GSSAPI_PORT", config["SASL_PORT"]) + write_env("GSSAPI_PRINCIPAL", config["PRINCIPAL"]) + + if test_name == "doctest": + UV_ARGS.append("--extra docs") + + if test_name == "load_balancer": + SINGLE_MONGOS_LB_URI = os.environ.get( + "SINGLE_MONGOS_LB_URI", "mongodb://127.0.0.1:8000/?loadBalanced=true" + ) + MULTI_MONGOS_LB_URI = os.environ.get( + "MULTI_MONGOS_LB_URI", "mongodb://127.0.0.1:8001/?loadBalanced=true" + ) + if SSL != "nossl": + SINGLE_MONGOS_LB_URI += "&tls=true" + MULTI_MONGOS_LB_URI += "&tls=true" + write_env("SINGLE_MONGOS_LB_URI", SINGLE_MONGOS_LB_URI) + write_env("MULTI_MONGOS_LB_URI", MULTI_MONGOS_LB_URI) + if not DRIVERS_TOOLS: + raise RuntimeError("Missing DRIVERS_TOOLS") + cmd = f'bash "{DRIVERS_TOOLS}/.evergreen/run-load-balancer.sh" start' + run_command(cmd) + + if test_name == "mod_wsgi": + from mod_wsgi_tester import setup_mod_wsgi + + setup_mod_wsgi(sub_test_name) + + if test_name == "ocsp": + if sub_test_name: + os.environ["OCSP_SERVER_TYPE"] = sub_test_name + for name in ["OCSP_SERVER_TYPE", "ORCHESTRATION_FILE"]: + if name not in os.environ: + raise ValueError(f"Please set {name}") + + server_type = os.environ["OCSP_SERVER_TYPE"] + orch_file = os.environ["ORCHESTRATION_FILE"] + ocsp_algo = orch_file.split("-")[0] + if server_type == "no-responder": + tls_should_succeed = "false" if "mustStaple-disableStapling" in orch_file else "true" + else: + tls_should_succeed = "true" if "valid" in server_type else "false" + + write_env("OCSP_TLS_SHOULD_SUCCEED", tls_should_succeed) + write_env("CA_FILE", f"{DRIVERS_TOOLS}/.evergreen/ocsp/{ocsp_algo}/ca.pem") + + if server_type != "no-responder": + env = os.environ.copy() + env["SERVER_TYPE"] = server_type + env["OCSP_ALGORITHM"] = ocsp_algo + run_command(f"bash 
{DRIVERS_TOOLS}/.evergreen/ocsp/setup.sh", env=env) + + # The mock OCSP responder MUST BE started before the mongod as the mongod expects that + # a responder will be available upon startup. + version = os.environ.get("VERSION", "latest") + cmd = [ + "bash", + f"{DRIVERS_TOOLS}/.evergreen/run-orchestration.sh", + "--ssl", + "--version", + version, + ] + if opts.verbose: + cmd.append("-v") + elif opts.quiet: + cmd.append("-q") + run_command(cmd, cwd=DRIVERS_TOOLS) + + if SSL != "nossl": + if not DRIVERS_TOOLS: + raise RuntimeError("Missing DRIVERS_TOOLS") + write_env("CLIENT_PEM", f"{DRIVERS_TOOLS}/.evergreen/x509gen/client.pem") + write_env("CA_PEM", f"{DRIVERS_TOOLS}/.evergreen/x509gen/ca.pem") + + compressors = os.environ.get("COMPRESSORS") or opts.compressor + if compressors == "snappy": + UV_ARGS.append("--extra snappy") + elif compressors == "zstd": + UV_ARGS.append("--extra zstd") + + if test_name in ["encryption", "kms"]: + # Check for libmongocrypt download. + if not (ROOT / "libmongocrypt").exists(): + setup_libmongocrypt() + + if not opts.test_min_deps: + UV_ARGS.append( + "--with pymongocrypt@git+https://github.com/mongodb/libmongocrypt@master#subdirectory=bindings/python" + ) + + # Use the nocrypto build to avoid dependency issues with older windows/python versions. + BASE = ROOT / "libmongocrypt/nocrypto" + if PLATFORM == "linux": + if (BASE / "lib/libmongocrypt.so").exists(): + PYMONGOCRYPT_LIB = BASE / "lib/libmongocrypt.so" + else: + PYMONGOCRYPT_LIB = BASE / "lib64/libmongocrypt.so" + elif PLATFORM == "darwin": + PYMONGOCRYPT_LIB = BASE / "lib/libmongocrypt.dylib" + else: + PYMONGOCRYPT_LIB = BASE / "bin/mongocrypt.dll" + if not PYMONGOCRYPT_LIB.exists(): + raise RuntimeError("Cannot find libmongocrypt shared object file") + write_env("PYMONGOCRYPT_LIB", PYMONGOCRYPT_LIB.as_posix()) + # PATH is updated by configure-env.sh for access to mongocryptd. 
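The block above picks the platform-specific nocrypto shared object and exports its path as PYMONGOCRYPT_LIB. One way to sanity-check that the selected library is actually loadable, sketched with ctypes (not part of the PR; the linux path and the mongocrypt_version symbol are assumptions based on libmongocrypt's public C API):

```python
# Sketch: verify the chosen libmongocrypt shared object loads, and print its version.
import ctypes
from pathlib import Path

lib = Path("libmongocrypt/nocrypto/lib/libmongocrypt.so")  # example linux path
if lib.exists():
    handle = ctypes.CDLL(str(lib))
    # mongocrypt_version(uint32_t *len) returns a C string in libmongocrypt's API.
    handle.mongocrypt_version.restype = ctypes.c_char_p
    print(handle.mongocrypt_version(None).decode())
```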
+ + if test_name == "encryption": + if not DRIVERS_TOOLS: + raise RuntimeError("Missing DRIVERS_TOOLS") + csfle_dir = Path(f"{DRIVERS_TOOLS}/.evergreen/csfle") + run_command(f"bash {csfle_dir.as_posix()}/setup-secrets.sh", cwd=csfle_dir) + load_config_from_file(csfle_dir / "secrets-export.sh") + run_command(f"bash {csfle_dir.as_posix()}/start-servers.sh") + + if sub_test_name == "pyopenssl": + UV_ARGS.append("--extra ocsp") + + if opts.crypt_shared: + config = read_env(f"{DRIVERS_TOOLS}/mo-expansion.sh") + CRYPT_SHARED_DIR = Path(config["CRYPT_SHARED_LIB_PATH"]).parent.as_posix() + LOGGER.info("Using crypt_shared_dir %s", CRYPT_SHARED_DIR) + if PLATFORM == "windows": + write_env("PATH", f"{CRYPT_SHARED_DIR}:$PATH") + else: + write_env( + "DYLD_FALLBACK_LIBRARY_PATH", + f"{CRYPT_SHARED_DIR}:${{DYLD_FALLBACK_LIBRARY_PATH:-}}", + ) + write_env("LD_LIBRARY_PATH", f"{CRYPT_SHARED_DIR}:${{LD_LIBRARY_PATH:-}}") + + if test_name == "kms": + from kms_tester import setup_kms + + setup_kms(sub_test_name) + + if test_name == "auth_aws" and sub_test_name != "ecs-remote": + auth_aws_dir = f"{DRIVERS_TOOLS}/.evergreen/auth_aws" + if "AWS_ROLE_SESSION_NAME" in os.environ: + write_env("AWS_ROLE_SESSION_NAME") + if sub_test_name != "ecs": + aws_setup = f"{auth_aws_dir}/aws_setup.sh" + run_command(f"bash {aws_setup} {sub_test_name}") + creds = read_env(f"{auth_aws_dir}/test-env.sh") + for name, value in creds.items(): + write_env(name, value) + else: + run_command(f"bash {auth_aws_dir}/setup-secrets.sh") + + if test_name == "atlas_connect": + secrets = get_secrets("drivers/atlas_connect") + + # Write file with Atlas X509 client certificate: + decoded = base64.b64decode(secrets["ATLAS_X509_DEV_CERT_BASE64"]).decode("utf8") + cert_file = ROOT / ".evergreen/atlas_x509_dev_client_certificate.pem" + with cert_file.open("w") as file: + file.write(decoded) + write_env( + "ATLAS_X509_DEV_WITH_CERT", + secrets["ATLAS_X509_DEV"] + "&tlsCertificateKeyFile=" + str(cert_file), + ) + + # We do not want the default client_context to be initialized. + write_env("DISABLE_CONTEXT") + + if test_name == "perf": + data_dir = ROOT / "specifications/source/benchmarking/data" + if not data_dir.exists(): + run_command("git clone --depth 1 https://github.com/mongodb/specifications.git") + run_command("tar xf extended_bson.tgz", cwd=data_dir) + run_command("tar xf parallel.tgz", cwd=data_dir) + run_command("tar xf single_and_multi_document.tgz", cwd=data_dir) + write_env("TEST_PATH", str(data_dir)) + write_env("OUTPUT_FILE", str(ROOT / "results.json")) + # Overwrite the UV_PYTHON from the env.sh file. + write_env("UV_PYTHON", "") + + UV_ARGS.append(f"--python={PERF_PYTHON_VERSION}") + + # PYTHON-4769 Run perf_test.py directly otherwise pytest's test collection negatively + # affects the benchmark results. + if sub_test_name == "sync": + TEST_ARGS = f"test/performance/perf_test.py {TEST_ARGS}" + else: + TEST_ARGS = f"test/performance/async_perf_test.py {TEST_ARGS}" + + # Add coverage if requested. + # Only cover CPython. PyPy reports suspiciously low coverage. + if opts.cov and platform.python_implementation() == "CPython": + # Keep in sync with combine-coverage.sh. + # coverage >=5 is needed for relative_files=true. 
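The coverage comments above tie back to combine-coverage.sh: each task writes its own data file, and a later step merges them all, which only lines up across hosts when `relative_files = true` is set in the coverage configuration. A sketch of that merge step using the coverage API (assumes coverage>=5 is installed and per-task `.coverage.*` files are present):

```python
# Sketch: merge per-task .coverage.* data files and emit an HTML report.
import coverage

cov = coverage.Coverage()
cov.combine()  # merges .coverage.* files found in the current directory
cov.save()     # writes the combined .coverage data file
cov.load()
cov.html_report(directory="htmlcov")
```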
+ UV_ARGS.append("--group coverage") + TEST_ARGS = f"{TEST_ARGS} --cov" + write_env("COVERAGE") + + if opts.green_framework: + framework = opts.green_framework + UV_ARGS.append(f"--group {framework}") + + else: + TEST_ARGS = f"-v --durations=5 {TEST_ARGS}" + TEST_SUITE = TEST_SUITE_MAP.get(test_name) + if TEST_SUITE: + TEST_ARGS = f"-m {TEST_SUITE} {TEST_ARGS}" + + write_env("TEST_ARGS", TEST_ARGS) + write_env("UV_ARGS", " ".join(UV_ARGS)) + + LOGGER.info(f"Setting up test '{test_title}' with {AUTH=} and {SSL=}... done.") + + +if __name__ == "__main__": + handle_test_env() diff --git a/.evergreen/scripts/stop-load-balancer.sh b/.evergreen/scripts/stop-load-balancer.sh deleted file mode 100755 index 2d3c5366ec..0000000000 --- a/.evergreen/scripts/stop-load-balancer.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/bash - -cd "${DRIVERS_TOOLS}"/.evergreen || exit -DRIVERS_TOOLS=${DRIVERS_TOOLS} -bash "${DRIVERS_TOOLS}"/.evergreen/run-load-balancer.sh stop diff --git a/.evergreen/scripts/stop-server.sh b/.evergreen/scripts/stop-server.sh new file mode 100755 index 0000000000..7599387f5f --- /dev/null +++ b/.evergreen/scripts/stop-server.sh @@ -0,0 +1,14 @@ +#!/bin/bash +# Stop a server that was started using run-orchestration.sh in DRIVERS_TOOLS. +set -eu + +HERE=$(dirname ${BASH_SOURCE:-$0}) +HERE="$( cd -- "$HERE" > /dev/null 2>&1 && pwd )" + +# Try to source the env file. +if [ -f $HERE/env.sh ]; then + echo "Sourcing env file" + source $HERE/env.sh +fi + +bash ${DRIVERS_TOOLS}/.evergreen/stop-orchestration.sh diff --git a/.evergreen/scripts/teardown-tests.sh b/.evergreen/scripts/teardown-tests.sh new file mode 100755 index 0000000000..898425b6cf --- /dev/null +++ b/.evergreen/scripts/teardown-tests.sh @@ -0,0 +1,24 @@ +#!/bin/bash +# Tear down any services that were used by tests. +set -eu + +SCRIPT_DIR=$(dirname ${BASH_SOURCE:-$0}) + +# Try to source the env file. +if [ -f $SCRIPT_DIR/env.sh ]; then + echo "Sourcing env inputs" + . $SCRIPT_DIR/env.sh +else + echo "Not sourcing env inputs" +fi + +# Handle test inputs. +if [ -f $SCRIPT_DIR/test-env.sh ]; then + echo "Sourcing test inputs" + . $SCRIPT_DIR/test-env.sh +else + echo "Missing test inputs, please run 'just setup-tests'" +fi + +# Tear down the test runner. +uv run $SCRIPT_DIR/teardown_tests.py diff --git a/.evergreen/scripts/teardown_tests.py b/.evergreen/scripts/teardown_tests.py new file mode 100644 index 0000000000..7da0b60815 --- /dev/null +++ b/.evergreen/scripts/teardown_tests.py @@ -0,0 +1,64 @@ +from __future__ import annotations + +import os +import shutil +import sys +from pathlib import Path + +from utils import DRIVERS_TOOLS, LOGGER, ROOT, run_command + +TEST_NAME = os.environ.get("TEST_NAME", "unconfigured") +SUB_TEST_NAME = os.environ.get("SUB_TEST_NAME") + +LOGGER.info(f"Tearing down tests of type '{TEST_NAME}'...") + +# Shut down csfle servers if applicable. +if TEST_NAME == "encryption": + run_command(f"bash {DRIVERS_TOOLS}/.evergreen/csfle/stop-servers.sh") + +# Shut down the load balancer if applicable. +elif TEST_NAME == "load_balancer": + run_command(f"bash {DRIVERS_TOOLS}/.evergreen/run-load-balancer.sh stop") + +# Tear down the kms VM if applicable. +elif TEST_NAME == "kms" and SUB_TEST_NAME in ["azure", "gcp"]: + from kms_tester import teardown_kms + + teardown_kms(SUB_TEST_NAME) + +# Tear down OIDC if applicable. +elif TEST_NAME == "auth_oidc": + from oidc_tester import teardown_oidc + + teardown_oidc(SUB_TEST_NAME) + +# Tear down ocsp if applicable. 
+elif TEST_NAME == "ocsp": + run_command(f"bash {DRIVERS_TOOLS}/.evergreen/ocsp/teardown.sh") + +# Tear down the atlas cluster if applicable. +if TEST_NAME in ["aws_lambda", "search_index"]: + run_command(f"bash {DRIVERS_TOOLS}/.evergreen/atlas/teardown-atlas-cluster.sh") + +# Tear down auth_aws if applicable. +# We do not run web-identity hosts on macOS, because the hosts lack permissions, +# so there is no reason to run the teardown, which would error with a 401. +elif TEST_NAME == "auth_aws" and sys.platform != "darwin": + run_command(f"bash {DRIVERS_TOOLS}/.evergreen/auth_aws/teardown.sh") + +# Tear down perf if applicable. +elif TEST_NAME == "perf": + shutil.rmtree(ROOT / "specifications", ignore_errors=True) + Path(os.environ["OUTPUT_FILE"]).unlink(missing_ok=True) + +# Tear down mod_wsgi if applicable. +elif TEST_NAME == "mod_wsgi": + from mod_wsgi_tester import teardown_mod_wsgi + + teardown_mod_wsgi() + +# Tear down coverage if applicable. +if os.environ.get("COVERAGE"): + shutil.rmtree(".pytest_cache", ignore_errors=True) + +LOGGER.info(f"Tearing down tests of type '{TEST_NAME}'... done.") diff --git a/.evergreen/scripts/upload-coverage-report.sh b/.evergreen/scripts/upload-coverage-report.sh index 71a2a80bb8..895664cbf2 100755 --- a/.evergreen/scripts/upload-coverage-report.sh +++ b/.evergreen/scripts/upload-coverage-report.sh @@ -1,3 +1,4 @@ #!/bin/bash - +# Upload a coverage report to S3. +set -eu aws s3 cp htmlcov/ s3://"$1"/coverage/"$2"/"$3"/htmlcov/ --recursive --acl public-read --region us-east-1 diff --git a/.evergreen/scripts/utils.py b/.evergreen/scripts/utils.py new file mode 100644 index 0000000000..2bc9c720d2 --- /dev/null +++ b/.evergreen/scripts/utils.py @@ -0,0 +1,227 @@ +from __future__ import annotations + +import argparse +import dataclasses +import logging +import os +import shlex +import subprocess +import sys +from pathlib import Path +from typing import Any + +HERE = Path(__file__).absolute().parent +ROOT = HERE.parent.parent +DRIVERS_TOOLS = os.environ.get("DRIVERS_TOOLS", "").replace(os.sep, "/") +TMP_DRIVER_FILE = "/tmp/mongo-python-driver.tgz" # noqa: S108 + +LOGGER = logging.getLogger("test") +logging.basicConfig(level=logging.INFO, format="%(levelname)-8s %(message)s") +ENV_FILE = HERE / "test-env.sh" +PLATFORM = "windows" if os.name == "nt" else sys.platform.lower() + + +@dataclasses.dataclass +class Distro: + name: str + version_id: str + arch: str + + +# Map the test name to a test suite. +TEST_SUITE_MAP = { + "atlas_connect": "atlas_connect", + "auth_aws": "auth_aws", + "auth_oidc": "auth_oidc", + "default": "", + "default_async": "default_async", + "default_sync": "default", + "encryption": "encryption", + "enterprise_auth": "auth", + "search_index": "search_index", + "kms": "kms", + "load_balancer": "load_balancer", + "mockupdb": "mockupdb", + "ocsp": "ocsp", + "perf": "perf", +} + +# Tests that require a sub test suite. +SUB_TEST_REQUIRED = ["auth_aws", "auth_oidc", "kms", "mod_wsgi", "perf"] + +EXTRA_TESTS = ["mod_wsgi", "aws_lambda", "doctest"] + +# Tests that do not use run-orchestration directly. 
+NO_RUN_ORCHESTRATION = [ + "auth_oidc", + "atlas_connect", + "aws_lambda", + "mockupdb", + "ocsp", +] + +# Mapping of env variables to options +OPTION_TO_ENV_VAR = {"cov": "COVERAGE", "crypt_shared": "TEST_CRYPT_SHARED"} + + +def get_test_options( + description, require_sub_test_name=True, allow_extra_opts=False +) -> tuple[argparse.Namespace, list[str]]: + parser = argparse.ArgumentParser( + description=description, formatter_class=argparse.RawDescriptionHelpFormatter + ) + if require_sub_test_name: + parser.add_argument( + "test_name", + choices=sorted(list(TEST_SUITE_MAP) + EXTRA_TESTS), + nargs="?", + default="default", + help="The optional name of the test suite to set up, typically the same name as a pytest marker.", + ) + parser.add_argument( + "sub_test_name", nargs="?", help="The optional sub test name, for example 'azure'." + ) + else: + parser.add_argument( + "test_name", + choices=set(list(TEST_SUITE_MAP) + EXTRA_TESTS) - set(NO_RUN_ORCHESTRATION), + nargs="?", + default="default", + help="The optional name of the test suite to be run, which informs the server configuration.", + ) + parser.add_argument( + "--verbose", "-v", action="store_true", help="Whether to log at the DEBUG level." + ) + parser.add_argument( + "--quiet", "-q", action="store_true", help="Whether to log at the WARNING level." + ) + parser.add_argument("--auth", action="store_true", help="Whether to add authentication.") + parser.add_argument("--ssl", action="store_true", help="Whether to add TLS configuration.") + parser.add_argument( + "--test-min-deps", action="store_true", help="Test against minimum dependency versions" + ) + + # Add the test modifiers. + if require_sub_test_name: + parser.add_argument( + "--debug-log", action="store_true", help="Enable pymongo standard logging." + ) + parser.add_argument("--cov", action="store_true", help="Add test coverage.") + parser.add_argument( + "--green-framework", + nargs=1, + choices=["gevent"], + help="Optional green framework to test against.", + ) + parser.add_argument( + "--compressor", + nargs=1, + choices=["zlib", "zstd", "snappy"], + help="Optional compression algorithm.", + ) + parser.add_argument("--crypt-shared", action="store_true", help="Test with crypt_shared.") + parser.add_argument("--no-ext", action="store_true", help="Run without c extensions.") + parser.add_argument( + "--mongodb-api-version", choices=["1"], help="MongoDB stable API version to use." + ) + parser.add_argument( + "--disable-test-commands", action="store_true", help="Disable test commands." + ) + + # Get the options. + if not allow_extra_opts: + opts, extra_opts = parser.parse_args(), [] + else: + opts, extra_opts = parser.parse_known_args() + + # Convert list inputs to strings. + for name in vars(opts): + value = getattr(opts, name) + if isinstance(value, list): + setattr(opts, name, value[0]) + + # Handle validation and environment variable overrides. + test_name = opts.test_name + sub_test_name = opts.sub_test_name if require_sub_test_name else "" + if require_sub_test_name and test_name in SUB_TEST_REQUIRED and not sub_test_name: + raise ValueError(f"Test '{test_name}' requires a sub_test_name") + handle_env_overrides(parser, opts) + if "auth" in test_name: + opts.auth = True + # 'auth_aws ecs' shouldn't have extra auth set. 
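The ECS special case flagged by the comment above is applied after `handle_env_overrides` (defined in the next hunk), whose precedence rule is: an explicitly passed CLI flag wins over its environment variable, otherwise the variable (COVERAGE, TEST_CRYPT_SHARED, and so on) fills in the default. A reduced sketch of that rule:

```python
# Sketch: an env var fills in a CLI option only when the flag was left at default.
import argparse
import os

parser = argparse.ArgumentParser()
parser.add_argument("--cov", action="store_true")
opts = parser.parse_args([])  # flag not passed on the command line

if os.environ.get("COVERAGE") and opts.cov == parser.get_default("cov"):
    opts.cov = True  # the env var, not the CLI, requested coverage
print(opts.cov)
```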
+ if test_name == "auth_aws" and sub_test_name == "ecs": + opts.auth = False + if opts.verbose: + LOGGER.setLevel(logging.DEBUG) + elif opts.quiet: + LOGGER.setLevel(logging.WARNING) + return opts, extra_opts + + +def handle_env_overrides(parser: argparse.ArgumentParser, opts: argparse.Namespace) -> None: + # Get the options, and then allow environment variable overrides. + for key in vars(opts): + if key in OPTION_TO_ENV_VAR: + env_var = OPTION_TO_ENV_VAR[key] + else: + env_var = key.upper() + if env_var in os.environ: + if parser.get_default(key) != getattr(opts, key): + LOGGER.info("Overriding env var '%s' with cli option", env_var) + elif env_var == "AUTH": + opts.auth = os.environ.get("AUTH") == "auth" + elif env_var == "SSL": + ssl_opt = os.environ.get("SSL", "") + opts.ssl = ssl_opt and ssl_opt.lower() != "nossl" + elif isinstance(getattr(opts, key), bool): + if os.environ[env_var]: + setattr(opts, key, True) + else: + setattr(opts, key, os.environ[env_var]) + + +def read_env(path: Path | str) -> dict[str, str]: + config = dict() + with Path(path).open() as fid: + for line in fid.readlines(): + if "=" not in line: + continue + name, _, value = line.strip().partition("=") + if value.startswith(('"', "'")): + value = value[1:-1] + name = name.replace("export ", "") + config[name] = value + return config + + +def write_env(name: str, value: Any = "1") -> None: + with ENV_FILE.open("a", newline="\n") as fid: + # Remove any existing quote chars. + value = str(value).replace('"', "") + fid.write(f'export {name}="{value}"\n') + + +def run_command(cmd: str | list[str], **kwargs: Any) -> None: + if isinstance(cmd, list): + cmd = " ".join(cmd) + LOGGER.info("Running command '%s'...", cmd) + kwargs.setdefault("check", True) + # Prevent overriding the python used by other tools. + env = kwargs.pop("env", os.environ).copy() + if "UV_PYTHON" in env: + del env["UV_PYTHON"] + kwargs["env"] = env + try: + subprocess.run(shlex.split(cmd), **kwargs) # noqa: PLW1510, S603 + except subprocess.CalledProcessError as e: + LOGGER.error(e.output) + LOGGER.error(str(e)) + sys.exit(e.returncode) + LOGGER.info("Running command '%s'... done.", cmd) + + +def create_archive() -> str: + run_command("git add .", cwd=ROOT) + run_command('git commit --no-verify -m "add files"', check=False, cwd=ROOT) + run_command(f"git archive -o {TMP_DRIVER_FILE} HEAD", cwd=ROOT) + return TMP_DRIVER_FILE diff --git a/.evergreen/scripts/windows-fix.sh b/.evergreen/scripts/windows-fix.sh deleted file mode 100755 index cb4fa44130..0000000000 --- a/.evergreen/scripts/windows-fix.sh +++ /dev/null @@ -1,11 +0,0 @@ -#!/bin/bash - -set +x -. src/.evergreen/scripts/env.sh -# shellcheck disable=SC2044 -for i in $(find "$DRIVERS_TOOLS"/.evergreen "$PROJECT_DIRECTORY"/.evergreen -name \*.sh); do - < "$i" tr -d '\r' >"$i".new - mv "$i".new "$i" -done -# Copy client certificate because symlinks do not work on Windows. 
-cp "$DRIVERS_TOOLS"/.evergreen/x509gen/client.pem "$MONGO_ORCHESTRATION_HOME"/lib/client.pem diff --git a/.evergreen/setup-encryption.sh b/.evergreen/setup-encryption.sh deleted file mode 100755 index b403ef9ca8..0000000000 --- a/.evergreen/setup-encryption.sh +++ /dev/null @@ -1,60 +0,0 @@ -#!/bin/bash -set -o errexit # Exit the script with error if any of the commands fail -set -o xtrace - -if [ -z "${DRIVERS_TOOLS}" ]; then - echo "Missing environment variable DRIVERS_TOOLS" - exit 1 -fi - -TARGET="" - -if [ "Windows_NT" = "${OS:-''}" ]; then # Magic variable in cygwin - # PYTHON-2808 Ensure this machine has the CA cert for google KMS. - powershell.exe "Invoke-WebRequest -URI https://oauth2.googleapis.com/" > /dev/null || true - TARGET="windows-test" -fi - -if [ "$(uname -s)" = "Darwin" ]; then - TARGET="macos" -fi - -if [ "$(uname -s)" = "Linux" ]; then - rhel_ver=$(awk -F'=' '/VERSION_ID/{ gsub(/"/,""); print $2}' /etc/os-release) - arch=$(uname -m) - echo "RHEL $rhel_ver $arch" - if [[ $rhel_ver =~ 7 ]]; then - TARGET="rhel-70-64-bit" - elif [[ $rhel_ver =~ 8 ]]; then - if [ "$arch" = "x86_64" ]; then - TARGET="rhel-80-64-bit" - elif [ "$arch" = "arm" ]; then - TARGET="rhel-82-arm64" - fi - fi -fi - -if [ -z "$LIBMONGOCRYPT_URL" ] && [ -n "$TARGET" ]; then - LIBMONGOCRYPT_URL="https://s3.amazonaws.com/mciuploads/libmongocrypt/$TARGET/master/latest/libmongocrypt.tar.gz" -fi - -if [ -z "$LIBMONGOCRYPT_URL" ]; then - echo "Cannot test client side encryption without LIBMONGOCRYPT_URL!" - exit 1 -fi -rm -rf libmongocrypt libmongocrypt.tar.gz -echo "Fetching $LIBMONGOCRYPT_URL..." -curl -O "$LIBMONGOCRYPT_URL" -echo "Fetching $LIBMONGOCRYPT_URL...done" -mkdir libmongocrypt -tar xzf libmongocrypt.tar.gz -C ./libmongocrypt -ls -la libmongocrypt -ls -la libmongocrypt/nocrypto - -if [ -z "${SKIP_SERVERS:-}" ]; then - PYTHON_BINARY_OLD=${PYTHON_BINARY} - export PYTHON_BINARY="" - bash "${DRIVERS_TOOLS}"/.evergreen/csfle/setup-secrets.sh - export PYTHON_BINARY=$PYTHON_BINARY_OLD - bash "${DRIVERS_TOOLS}"/.evergreen/csfle/start-servers.sh -fi diff --git a/.evergreen/setup-spawn-host.sh b/.evergreen/setup-spawn-host.sh index 4de2153d51..bada61e568 100755 --- a/.evergreen/setup-spawn-host.sh +++ b/.evergreen/setup-spawn-host.sh @@ -1,5 +1,5 @@ #!/bin/bash - +# Set up a remote evergreen spawn host. set -eu if [ -z "$1" ] @@ -8,9 +8,12 @@ if [ -z "$1" ] fi target=$1 +user=${target%@*} +remote_dir=/home/$user/mongo-python-driver echo "Copying files to $target..." -rsync -az -e ssh --exclude '.git' --filter=':- .gitignore' -r . $target:/home/ec2-user/mongo-python-driver +rsync -az -e ssh --exclude '.git' --filter=':- .gitignore' -r . $target:$remote_dir echo "Copying files to $target... 
done" -ssh $target /home/ec2-user/mongo-python-driver/.evergreen/scripts/setup-system.sh +ssh $target $remote_dir/.evergreen/scripts/setup-system.sh +ssh $target "cd $remote_dir && PYTHON_BINARY=${PYTHON_BINARY:-} .evergreen/scripts/setup-dev-env.sh" diff --git a/.evergreen/spec-patch/PYTHON-2673.patch b/.evergreen/spec-patch/PYTHON-2673.patch new file mode 100644 index 0000000000..868538f7b7 --- /dev/null +++ b/.evergreen/spec-patch/PYTHON-2673.patch @@ -0,0 +1,64 @@ +diff --git a/test/load_balancer/cursors.json b/test/load_balancer/cursors.json +index 43e4fbb4f..4e2a55fd4 100644 +--- a/test/load_balancer/cursors.json ++++ b/test/load_balancer/cursors.json +@@ -376,7 +376,7 @@ + ] + }, + { ++ "description": "pinned connections are not returned after an network error during getMore", +- "description": "pinned connections are returned after an network error during getMore", + "operations": [ + { + "name": "failPoint", +@@ -440,7 +440,7 @@ + "object": "testRunner", + "arguments": { + "client": "client0", ++ "connections": 1 +- "connections": 0 + } + }, + { +@@ -659,7 +659,7 @@ + ] + }, + { ++ "description": "pinned connections are not returned to the pool after a non-network error on getMore", +- "description": "pinned connections are returned to the pool after a non-network error on getMore", + "operations": [ + { + "name": "failPoint", +@@ -715,7 +715,7 @@ + "object": "testRunner", + "arguments": { + "client": "client0", ++ "connections": 1 +- "connections": 0 + } + }, + { +diff --git a/test/load_balancer/sdam-error-handling.json b/test/load_balancer/sdam-error-handling.json +index 63aabc04d..462fa0aac 100644 +--- a/test/load_balancer/sdam-error-handling.json ++++ b/test/load_balancer/sdam-error-handling.json +@@ -366,6 +366,9 @@ + { + "connectionCreatedEvent": {} + }, ++ { ++ "poolClearedEvent": {} ++ }, + { + "connectionClosedEvent": { + "reason": "error" +@@ -378,9 +375,6 @@ + "connectionCheckOutFailedEvent": { + "reason": "connectionError" + } +- }, +- { +- "poolClearedEvent": {} + } + ] + } diff --git a/.evergreen/spec-patch/PYTHON-3712.patch b/.evergreen/spec-patch/PYTHON-3712.patch new file mode 100644 index 0000000000..b48c05124c --- /dev/null +++ b/.evergreen/spec-patch/PYTHON-3712.patch @@ -0,0 +1,14 @@ +diff --git a/test/discovery_and_monitoring/unified/serverMonitoringMode.json b/test/discovery_and_monitoring/unified/serverMonitoringMode.json +index e44fad1b..4b492f7d 100644 +--- a/test/discovery_and_monitoring/unified/serverMonitoringMode.json ++++ b/test/discovery_and_monitoring/unified/serverMonitoringMode.json +@@ -5,7 +5,8 @@ + { + "topologies": [ + "single", +- "sharded" ++ "sharded", ++ "sharded-replicaset" + ], + "serverless": "forbid" + } diff --git a/.evergreen/spec-patch/PYTHON-4261.patch b/.evergreen/spec-patch/PYTHON-4261.patch new file mode 100644 index 0000000000..e4ffc5ce9f --- /dev/null +++ b/.evergreen/spec-patch/PYTHON-4261.patch @@ -0,0 +1,61 @@ +diff --git a/test/server_selection_logging/replica-set.json b/test/server_selection_logging/replica-set.json +index 830b1ea51..5eba784bf 100644 +--- a/test/server_selection_logging/replica-set.json ++++ b/test/server_selection_logging/replica-set.json +@@ -184,7 +184,7 @@ + } + }, + { +- "level": "debug", ++ "level": "info", + "component": "serverSelection", + "data": { + "message": "Waiting for suitable server to become available", +diff --git a/test/server_selection_logging/standalone.json b/test/server_selection_logging/standalone.json +index 830b1ea51..5eba784bf 100644 +--- 
a/test/server_selection_logging/standalone.json
++++ b/test/server_selection_logging/standalone.json
+@@ -191,7 +191,7 @@
+       }
+     },
+     {
+-      "level": "debug",
++      "level": "info",
+       "component": "serverSelection",
+       "data": {
+         "message": "Waiting for suitable server to become available",
+diff --git a/test/server_selection_logging/sharded.json b/test/server_selection_logging/sharded.json
+index 830b1ea51..5eba784bf 100644
+--- a/test/server_selection_logging/sharded.json
++++ b/test/server_selection_logging/sharded.json
+@@ -193,7 +193,7 @@
+       }
+     },
+     {
+-      "level": "debug",
++      "level": "info",
+       "component": "serverSelection",
+       "data": {
+         "message": "Waiting for suitable server to become available",
+diff --git a/test/server_selection_logging/operation-id.json b/test/server_selection_logging/operation-id.json
+index 830b1ea51..5eba784bf 100644
+--- a/test/server_selection_logging/operation-id.json
++++ b/test/server_selection_logging/operation-id.json
+@@ -197,7 +197,7 @@
+       }
+     },
+     {
+-      "level": "debug",
++      "level": "info",
+       "component": "serverSelection",
+       "data": {
+         "message": "Waiting for suitable server to become available",
+@@ -383,7 +383,7 @@
+       }
+     },
+     {
+-      "level": "debug",
++      "level": "info",
+       "component": "serverSelection",
+       "data": {
+         "message": "Waiting for suitable server to become available",
diff --git a/.evergreen/spec-patch/PYTHON-4918.patch b/.evergreen/spec-patch/PYTHON-4918.patch
new file mode 100644
index 0000000000..5f409c5870
--- /dev/null
+++ b/.evergreen/spec-patch/PYTHON-4918.patch
@@ -0,0 +1,24 @@
+diff --git a/test/connection_monitoring/pool-create-min-size-error.json b/test/connection_monitoring/pool-create-min-size-error.json
+index 1c744b85..509b2a23 100644
+--- a/test/connection_monitoring/pool-create-min-size-error.json
++++ b/test/connection_monitoring/pool-create-min-size-error.json
+@@ -49,15 +49,15 @@
+       "type": "ConnectionCreated",
+       "address": 42
+     },
++    {
++      "type": "ConnectionPoolCleared",
++      "address": 42
++    },
+     {
+       "type": "ConnectionClosed",
+       "address": 42,
+       "connectionId": 42,
+       "reason": "error"
+-    },
+-    {
+-      "type": "ConnectionPoolCleared",
+-      "address": 42
+     }
+   ],
+   "ignore": [
diff --git a/.evergreen/spec-patch/PYTHON-5052.patch b/.evergreen/spec-patch/PYTHON-5052.patch
new file mode 100644
index 0000000000..01cbc00116
--- /dev/null
+++ b/.evergreen/spec-patch/PYTHON-5052.patch
@@ -0,0 +1,440 @@
+diff --git a/test/unified-test-format/invalid/entity-client-observeTracingMessages-additionalProperties.json b/test/unified-test-format/invalid/entity-client-observeTracingMessages-additionalProperties.json
+new file mode 100644
+index 00000000..aa8046d2
+--- /dev/null
++++ b/test/unified-test-format/invalid/entity-client-observeTracingMessages-additionalProperties.json
+@@ -0,0 +1,20 @@
++{
++  "description": "entity-client-observeTracingMessages-additionalProperties",
++  "schemaVersion": "1.26",
++  "createEntities": [
++    {
++      "client": {
++        "id": "client0",
++        "observeTracingMessages": {
++          "foo": "bar"
++        }
++      }
++    }
++  ],
++  "tests": [
++    {
++      "description": "observeTracingMessages must not have additional properties",
++      "operations": []
++    }
++  ]
++}
+diff --git a/test/unified-test-format/invalid/entity-client-observeTracingMessages-additionalPropertyType.json b/test/unified-test-format/invalid/entity-client-observeTracingMessages-additionalPropertyType.json
+new file mode 100644
+index 00000000..0b3a65f5
+--- /dev/null
++++ b/test/unified-test-format/invalid/entity-client-observeTracingMessages-additionalPropertyType.json
+@@ -0,0 +1,20 @@
++{
++  "description": "entity-client-observeTracingMessages-additionalPropertyType",
++  "schemaVersion": "1.26",
++  "createEntities": [
++    {
++      "client": {
++        "id": "client0",
++        "observeTracingMessages": {
++          "enableCommandPayload": 0
++        }
++      }
++    }
++  ],
++  "tests": [
++    {
++      "description": "observeTracingMessages enableCommandPayload must be boolean",
++      "operations": []
++    }
++  ]
++}
+diff --git a/test/unified-test-format/invalid/entity-client-observeTracingMessages-type.json b/test/unified-test-format/invalid/entity-client-observeTracingMessages-type.json
+new file mode 100644
+index 00000000..de3ef39a
+--- /dev/null
++++ b/test/unified-test-format/invalid/entity-client-observeTracingMessages-type.json
+@@ -0,0 +1,18 @@
++{
++  "description": "entity-client-observeTracingMessages-type",
++  "schemaVersion": "1.26",
++  "createEntities": [
++    {
++      "client": {
++        "id": "client0",
++        "observeTracingMessages": "foo"
++      }
++    }
++  ],
++  "tests": [
++    {
++      "description": "observeTracingMessages must be an object",
++      "operations": []
++    }
++  ]
++}
+diff --git a/test/unified-test-format/invalid/expectedTracingSpans-additionalProperties.json b/test/unified-test-format/invalid/expectedTracingSpans-additionalProperties.json
+new file mode 100644
+index 00000000..5947a286
+--- /dev/null
++++ b/test/unified-test-format/invalid/expectedTracingSpans-additionalProperties.json
+@@ -0,0 +1,30 @@
++{
++  "description": "expectedTracingSpans-additionalProperties",
++  "schemaVersion": "1.26",
++  "createEntities": [
++    {
++      "client": {
++        "id": "client0"
++      }
++    }
++  ],
++  "tests": [
++    {
++      "description": "additional property foo not allowed in expectTracingMessages",
++      "operations": [],
++      "expectTracingMessages": {
++        "client": "client0",
++        "ignoreExtraSpans": false,
++        "spans": [
++          {
++            "name": "command",
++            "tags": {
++              "db.system": "mongodb"
++            }
++          }
++        ],
++        "foo": 0
++      }
++    }
++  ]
++}
+diff --git a/test/unified-test-format/invalid/expectedTracingSpans-clientType.json b/test/unified-test-format/invalid/expectedTracingSpans-clientType.json
+new file mode 100644
+index 00000000..2fe7faea
+--- /dev/null
++++ b/test/unified-test-format/invalid/expectedTracingSpans-clientType.json
+@@ -0,0 +1,28 @@
++{
++  "description": "expectedTracingSpans-clientType",
++  "schemaVersion": "1.26",
++  "createEntities": [
++    {
++      "client": {
++        "id": "client0"
++      }
++    }
++  ],
++  "tests": [
++    {
++      "description": "client type must be string",
++      "operations": [],
++      "expectTracingMessages": {
++        "client": 0,
++        "spans": [
++          {
++            "name": "command",
++            "tags": {
++              "db.system": "mongodb"
++            }
++          }
++        ]
++      }
++    }
++  ]
++}
+diff --git a/test/unified-test-format/invalid/expectedTracingSpans-emptyNestedSpan.json b/test/unified-test-format/invalid/expectedTracingSpans-emptyNestedSpan.json
+new file mode 100644
+index 00000000..8a98d5ba
+--- /dev/null
++++ b/test/unified-test-format/invalid/expectedTracingSpans-emptyNestedSpan.json
+@@ -0,0 +1,29 @@
++{
++  "description": "expectedTracingSpans-emptyNestedSpan",
++  "schemaVersion": "1.26",
++  "createEntities": [
++    {
++      "client": {
++        "id": "client0"
++      }
++    }
++  ],
++  "tests": [
++    {
++      "description": "nested spans must not have fewer than 1 items",
++      "operations": [],
++      "expectTracingMessages": {
++        "client": "client0",
++        "spans": [
++          {
++            "name": "command",
++            "tags": {
++              "db.system": "mongodb"
++            },
++            "nested": []
++          }
++        ]
++      }
++    }
++  ]
++}
+diff --git a/test/unified-test-format/invalid/expectedTracingSpans-invalidNestedSpan.json 
b/test/unified-test-format/invalid/expectedTracingSpans-invalidNestedSpan.json
+new file mode 100644
+index 00000000..79a86744
+--- /dev/null
++++ b/test/unified-test-format/invalid/expectedTracingSpans-invalidNestedSpan.json
+@@ -0,0 +1,31 @@
++{
++  "description": "expectedTracingSpans-invalidNestedSpan",
++  "schemaVersion": "1.26",
++  "createEntities": [
++    {
++      "client": {
++        "id": "client0"
++      }
++    }
++  ],
++  "tests": [
++    {
++      "description": "nested span must have required property name",
++      "operations": [],
++      "expectTracingMessages": {
++        "client": "client0",
++        "spans": [
++          {
++            "name": "command",
++            "tags": {
++              "db.system": "mongodb"
++            },
++            "nested": [
++              {}
++            ]
++          }
++        ]
++      }
++    }
++  ]
++}
+diff --git a/test/unified-test-format/invalid/expectedTracingSpans-missingPropertyClient.json b/test/unified-test-format/invalid/expectedTracingSpans-missingPropertyClient.json
+new file mode 100644
+index 00000000..2fb1cd5b
+--- /dev/null
++++ b/test/unified-test-format/invalid/expectedTracingSpans-missingPropertyClient.json
+@@ -0,0 +1,27 @@
++{
++  "description": "expectedTracingSpans-missingPropertyClient",
++  "schemaVersion": "1.26",
++  "createEntities": [
++    {
++      "client": {
++        "id": "client0"
++      }
++    }
++  ],
++  "tests": [
++    {
++      "description": "missing required property client",
++      "operations": [],
++      "expectTracingMessages": {
++        "spans": [
++          {
++            "name": "command",
++            "tags": {
++              "db.system": "mongodb"
++            }
++          }
++        ]
++      }
++    }
++  ]
++}
+diff --git a/test/unified-test-format/invalid/expectedTracingSpans-missingPropertySpans.json b/test/unified-test-format/invalid/expectedTracingSpans-missingPropertySpans.json
+new file mode 100644
+index 00000000..acd10307
+--- /dev/null
++++ b/test/unified-test-format/invalid/expectedTracingSpans-missingPropertySpans.json
+@@ -0,0 +1,20 @@
++{
++  "description": "expectedTracingSpans-missingPropertySpans",
++  "schemaVersion": "1.26",
++  "createEntities": [
++    {
++      "client": {
++        "id": "client0"
++      }
++    }
++  ],
++  "tests": [
++    {
++      "description": "missing required property spans",
++      "operations": [],
++      "expectTracingMessages": {
++        "client": "client0"
++      }
++    }
++  ]
++}
+diff --git a/test/unified-test-format/invalid/expectedTracingSpans-spanMalformedAdditionalProperties.json b/test/unified-test-format/invalid/expectedTracingSpans-spanMalformedAdditionalProperties.json
+new file mode 100644
+index 00000000..17299f86
+--- /dev/null
++++ b/test/unified-test-format/invalid/expectedTracingSpans-spanMalformedAdditionalProperties.json
+@@ -0,0 +1,28 @@
++{
++  "description": "expectedTracingSpans-spanMalformedAdditionalProperties",
++  "schemaVersion": "1.26",
++  "createEntities": [
++    {
++      "client": {
++        "id": "client0"
++      }
++    }
++  ],
++  "tests": [
++    {
++      "description": "Span must not have additional properties",
++      "operations": [],
++      "expectTracingMessages": {
++        "client": "client0",
++        "spans": [
++          {
++            "name": "foo",
++            "tags": {},
++            "nested": [],
++            "foo": "bar"
++          }
++        ]
++      }
++    }
++  ]
++}
+diff --git a/test/unified-test-format/invalid/expectedTracingSpans-spanMalformedMissingName.json b/test/unified-test-format/invalid/expectedTracingSpans-spanMalformedMissingName.json
+new file mode 100644
+index 00000000..0257cd9b
+--- /dev/null
++++ b/test/unified-test-format/invalid/expectedTracingSpans-spanMalformedMissingName.json
+@@ -0,0 +1,27 @@
++{
++  "description": "expectedTracingSpans-spanMalformedMissingName",
++  "schemaVersion": "1.26",
++  "createEntities": [
++    {
++      "client": {
++        "id": "client0"
++      }
++    }
++  ],
++  "tests": [
++    {
++      "description": "missing required span name",
++      "operations": [],
++      "expectTracingMessages": {
++        "client": "client0",
++        "spans": [
++          {
++            "tags": {
++              "db.system": "mongodb"
++            }
++          }
++        ]
++      }
++    }
++  ]
++}
+diff --git a/test/unified-test-format/invalid/expectedTracingSpans-spanMalformedMissingTags.json b/test/unified-test-format/invalid/expectedTracingSpans-spanMalformedMissingTags.json
+new file mode 100644
+index 00000000..a09ca31c
+--- /dev/null
++++ b/test/unified-test-format/invalid/expectedTracingSpans-spanMalformedMissingTags.json
+@@ -0,0 +1,25 @@
++{
++  "description": "expectedTracingSpans-spanMalformedMissingTags",
++  "schemaVersion": "1.26",
++  "createEntities": [
++    {
++      "client": {
++        "id": "client0"
++      }
++    }
++  ],
++  "tests": [
++    {
++      "description": "missing required span tags",
++      "operations": [],
++      "expectTracingMessages": {
++        "client": "client0",
++        "spans": [
++          {
++            "name": "foo"
++          }
++        ]
++      }
++    }
++  ]
++}
+diff --git a/test/unified-test-format/invalid/expectedTracingSpans-spanMalformedNestedMustBeArray.json b/test/unified-test-format/invalid/expectedTracingSpans-spanMalformedNestedMustBeArray.json
+new file mode 100644
+index 00000000..ccff0410
+--- /dev/null
++++ b/test/unified-test-format/invalid/expectedTracingSpans-spanMalformedNestedMustBeArray.json
+@@ -0,0 +1,27 @@
++{
++  "description": "expectedTracingSpans-spanMalformedNestedMustBeArray",
++  "schemaVersion": "1.26",
++  "createEntities": [
++    {
++      "client": {
++        "id": "client0"
++      }
++    }
++  ],
++  "tests": [
++    {
++      "description": "nested spans must be an array",
++      "operations": [],
++      "expectTracingMessages": {
++        "client": "client0",
++        "spans": [
++          {
++            "name": "foo",
++            "tags": {},
++            "nested": {}
++          }
++        ]
++      }
++    }
++  ]
++}
+diff --git a/test/unified-test-format/invalid/expectedTracingSpans-spanMalformedTagsMustBeObject.json b/test/unified-test-format/invalid/expectedTracingSpans-spanMalformedTagsMustBeObject.json
+new file mode 100644
+index 00000000..72af1c29
+--- /dev/null
++++ b/test/unified-test-format/invalid/expectedTracingSpans-spanMalformedTagsMustBeObject.json
+@@ -0,0 +1,26 @@
++{
++  "description": "expectedTracingSpans-spanMalformedTagsMustBeObject",
++  "schemaVersion": "1.26",
++  "createEntities": [
++    {
++      "client": {
++        "id": "client0"
++      }
++    }
++  ],
++  "tests": [
++    {
++      "description": "span tags must be an object",
++      "operations": [],
++      "expectTracingMessages": {
++        "client": "client0",
++        "spans": [
++          {
++            "name": "foo",
++            "tags": []
++          }
++        ]
++      }
++    }
++  ]
++}
diff --git a/.evergreen/spec-patch/PYTHON-5493.patch b/.evergreen/spec-patch/PYTHON-5493.patch
new file mode 100644
index 0000000000..99c105dcef
--- /dev/null
+++ b/.evergreen/spec-patch/PYTHON-5493.patch
@@ -0,0 +1,50 @@
+diff --git a/test/connection_logging/connection-logging.json b/test/connection_logging/connection-logging.json
+index 5799e834..72103b3c 100644
+--- a/test/connection_logging/connection-logging.json
++++ b/test/connection_logging/connection-logging.json
+@@ -446,6 +446,22 @@
+             }
+           }
+         },
++        {
++          "level": "debug",
++          "component": "connection",
++          "data": {
++            "message": "Connection pool cleared",
++            "serverHost": {
++              "$$type": "string"
++            },
++            "serverPort": {
++              "$$type": [
++                "int",
++                "long"
++              ]
++            }
++          }
++        },
+         {
+           "level": "debug",
+           "component": "connection",
+@@ -498,22 +514,6 @@
+               ]
+             }
+           }
+-        },
+-        {
+-          "level": "debug",
+-          "component": "connection",
+-          "data": {
+-            "message": "Connection pool cleared",
+-            "serverHost": {
+-              "$$type": "string"
+-            },
+-            "serverPort": {
+-              "$$type":
[ +- "int", +- "long" +- ] +- } +- } + } + ] + } diff --git a/.evergreen/spec-patch/PYTHON-5529.patch b/.evergreen/spec-patch/PYTHON-5529.patch new file mode 100644 index 0000000000..a97602e055 --- /dev/null +++ b/.evergreen/spec-patch/PYTHON-5529.patch @@ -0,0 +1,587 @@ +diff --git a/test/csot/command-execution.json b/test/csot/command-execution.json +index aa9c3eb2..212cd410 100644 +--- a/test/csot/command-execution.json ++++ b/test/csot/command-execution.json +@@ -1,6 +1,6 @@ + { + "description": "timeoutMS behaves correctly during command execution", +- "schemaVersion": "1.9", ++ "schemaVersion": "1.26", + "runOnRequirements": [ + { + "minServerVersion": "4.4.7", +@@ -69,8 +69,10 @@ + "appName": "reduceMaxTimeMSTest", + "w": 1, + "timeoutMS": 500, +- "heartbeatFrequencyMS": 500 ++ "heartbeatFrequencyMS": 500, ++ "minPoolSize": 1 + }, ++ "awaitMinPoolSizeMS": 10000, + "observeEvents": [ + "commandStartedEvent" + ] +@@ -185,8 +187,10 @@ + "appName": "rttTooHighTest", + "w": 1, + "timeoutMS": 10, +- "heartbeatFrequencyMS": 500 ++ "heartbeatFrequencyMS": 500, ++ "minPoolSize": 1 + }, ++ "awaitMinPoolSizeMS": 10000, + "observeEvents": [ + "commandStartedEvent" + ] +@@ -316,8 +320,10 @@ + "appName": "reduceMaxTimeMSTest", + "w": 1, + "timeoutMS": 90, +- "heartbeatFrequencyMS": 100000 ++ "heartbeatFrequencyMS": 100000, ++ "minPoolSize": 1 + }, ++ "awaitMinPoolSizeMS": 10000, + "observeEvents": [ + "commandStartedEvent" + ] +diff --git a/test/csot/convenient-transactions.json b/test/csot/convenient-transactions.json +index 3868b302..f9d03429 100644 +--- a/test/csot/convenient-transactions.json ++++ b/test/csot/convenient-transactions.json +@@ -1,6 +1,6 @@ + { + "description": "timeoutMS behaves correctly for the withTransaction API", +- "schemaVersion": "1.9", ++ "schemaVersion": "1.26", + "runOnRequirements": [ + { + "minServerVersion": "4.4", +@@ -21,8 +21,10 @@ + "client": { + "id": "client", + "uriOptions": { +- "timeoutMS": 500 ++ "timeoutMS": 500, ++ "minPoolSize": 1 + }, ++ "awaitMinPoolSizeMS": 10000, + "useMultipleMongoses": false, + "observeEvents": [ + "commandStartedEvent" +diff --git a/test/csot/error-transformations.json b/test/csot/error-transformations.json +index 4889e395..89be49f0 100644 +--- a/test/csot/error-transformations.json ++++ b/test/csot/error-transformations.json +@@ -1,6 +1,6 @@ + { + "description": "MaxTimeMSExpired server errors are transformed into a custom timeout error", +- "schemaVersion": "1.9", ++ "schemaVersion": "1.26", + "runOnRequirements": [ + { + "minServerVersion": "4.0", +@@ -26,8 +26,10 @@ + "client": { + "id": "client", + "uriOptions": { +- "timeoutMS": 250 ++ "timeoutMS": 250, ++ "minPoolSize": 1 + }, ++ "awaitMinPoolSizeMS": 10000, + "useMultipleMongoses": false, + "observeEvents": [ + "commandStartedEvent" +diff --git a/test/csot/global-timeoutMS.json b/test/csot/global-timeoutMS.json +index f1edbe68..9d8046d1 100644 +--- a/test/csot/global-timeoutMS.json ++++ b/test/csot/global-timeoutMS.json +@@ -1,6 +1,6 @@ + { + "description": "timeoutMS can be configured on a MongoClient", +- "schemaVersion": "1.9", ++ "schemaVersion": "1.26", + "runOnRequirements": [ + { + "minServerVersion": "4.4", +@@ -38,8 +38,10 @@ + "client": { + "id": "client", + "uriOptions": { +- "timeoutMS": 250 ++ "timeoutMS": 250, ++ "minPoolSize": 1 + }, ++ "awaitMinPoolSizeMS": 10000, + "useMultipleMongoses": false, + "observeEvents": [ + "commandStartedEvent" +@@ -217,8 +219,10 @@ + "client": { + "id": "client", + "uriOptions": { +- "timeoutMS": 250 ++ "timeoutMS": 250, ++ 
"minPoolSize": 1 + }, ++ "awaitMinPoolSizeMS": 10000, + "useMultipleMongoses": false, + "observeEvents": [ + "commandStartedEvent" +@@ -390,8 +394,10 @@ + "client": { + "id": "client", + "uriOptions": { +- "timeoutMS": 250 ++ "timeoutMS": 250, ++ "minPoolSize": 1 + }, ++ "awaitMinPoolSizeMS": 10000, + "useMultipleMongoses": false, + "observeEvents": [ + "commandStartedEvent" +@@ -569,8 +575,10 @@ + "client": { + "id": "client", + "uriOptions": { +- "timeoutMS": 250 ++ "timeoutMS": 250, ++ "minPoolSize": 1 + }, ++ "awaitMinPoolSizeMS": 10000, + "useMultipleMongoses": false, + "observeEvents": [ + "commandStartedEvent" +@@ -762,8 +770,10 @@ + "client": { + "id": "client", + "uriOptions": { +- "timeoutMS": 250 ++ "timeoutMS": 250, ++ "minPoolSize": 1 + }, ++ "awaitMinPoolSizeMS": 10000, + "useMultipleMongoses": false, + "observeEvents": [ + "commandStartedEvent" +@@ -941,8 +951,10 @@ + "client": { + "id": "client", + "uriOptions": { +- "timeoutMS": 250 ++ "timeoutMS": 250, ++ "minPoolSize": 1 + }, ++ "awaitMinPoolSizeMS": 10000, + "useMultipleMongoses": false, + "observeEvents": [ + "commandStartedEvent" +@@ -1120,8 +1132,10 @@ + "client": { + "id": "client", + "uriOptions": { +- "timeoutMS": 250 ++ "timeoutMS": 250, ++ "minPoolSize": 1 + }, ++ "awaitMinPoolSizeMS": 10000, + "useMultipleMongoses": false, + "observeEvents": [ + "commandStartedEvent" +@@ -1305,8 +1319,10 @@ + "client": { + "id": "client", + "uriOptions": { +- "timeoutMS": 250 ++ "timeoutMS": 250, ++ "minPoolSize": 1 + }, ++ "awaitMinPoolSizeMS": 10000, + "useMultipleMongoses": false, + "observeEvents": [ + "commandStartedEvent" +@@ -1484,8 +1500,10 @@ + "client": { + "id": "client", + "uriOptions": { +- "timeoutMS": 250 ++ "timeoutMS": 250, ++ "minPoolSize": 1 + }, ++ "awaitMinPoolSizeMS": 10000, + "useMultipleMongoses": false, + "observeEvents": [ + "commandStartedEvent" +@@ -1663,8 +1681,10 @@ + "client": { + "id": "client", + "uriOptions": { +- "timeoutMS": 250 ++ "timeoutMS": 250, ++ "minPoolSize": 1 + }, ++ "awaitMinPoolSizeMS": 10000, + "useMultipleMongoses": false, + "observeEvents": [ + "commandStartedEvent" +@@ -1842,8 +1862,10 @@ + "client": { + "id": "client", + "uriOptions": { +- "timeoutMS": 250 ++ "timeoutMS": 250, ++ "minPoolSize": 1 + }, ++ "awaitMinPoolSizeMS": 10000, + "useMultipleMongoses": false, + "observeEvents": [ + "commandStartedEvent" +@@ -2021,8 +2043,10 @@ + "client": { + "id": "client", + "uriOptions": { +- "timeoutMS": 250 ++ "timeoutMS": 250, ++ "minPoolSize": 1 + }, ++ "awaitMinPoolSizeMS": 10000, + "useMultipleMongoses": false, + "observeEvents": [ + "commandStartedEvent" +@@ -2194,8 +2218,10 @@ + "client": { + "id": "client", + "uriOptions": { +- "timeoutMS": 250 ++ "timeoutMS": 250, ++ "minPoolSize": 1 + }, ++ "awaitMinPoolSizeMS": 10000, + "useMultipleMongoses": false, + "observeEvents": [ + "commandStartedEvent" +@@ -2375,8 +2401,10 @@ + "client": { + "id": "client", + "uriOptions": { +- "timeoutMS": 250 ++ "timeoutMS": 250, ++ "minPoolSize": 1 + }, ++ "awaitMinPoolSizeMS": 10000, + "useMultipleMongoses": false, + "observeEvents": [ + "commandStartedEvent" +@@ -2554,8 +2582,10 @@ + "client": { + "id": "client", + "uriOptions": { +- "timeoutMS": 250 ++ "timeoutMS": 250, ++ "minPoolSize": 1 + }, ++ "awaitMinPoolSizeMS": 10000, + "useMultipleMongoses": false, + "observeEvents": [ + "commandStartedEvent" +@@ -2733,8 +2763,10 @@ + "client": { + "id": "client", + "uriOptions": { +- "timeoutMS": 250 ++ "timeoutMS": 250, ++ "minPoolSize": 1 + }, ++ "awaitMinPoolSizeMS": 10000, + 
"useMultipleMongoses": false, + "observeEvents": [ + "commandStartedEvent" +@@ -2906,8 +2938,10 @@ + "client": { + "id": "client", + "uriOptions": { +- "timeoutMS": 250 ++ "timeoutMS": 250, ++ "minPoolSize": 1 + }, ++ "awaitMinPoolSizeMS": 10000, + "useMultipleMongoses": false, + "observeEvents": [ + "commandStartedEvent" +@@ -3079,8 +3113,10 @@ + "client": { + "id": "client", + "uriOptions": { +- "timeoutMS": 250 ++ "timeoutMS": 250, ++ "minPoolSize": 1 + }, ++ "awaitMinPoolSizeMS": 10000, + "useMultipleMongoses": false, + "observeEvents": [ + "commandStartedEvent" +@@ -3258,8 +3294,10 @@ + "client": { + "id": "client", + "uriOptions": { +- "timeoutMS": 250 ++ "timeoutMS": 250, ++ "minPoolSize": 1 + }, ++ "awaitMinPoolSizeMS": 10000, + "useMultipleMongoses": false, + "observeEvents": [ + "commandStartedEvent" +@@ -3441,8 +3479,10 @@ + "client": { + "id": "client", + "uriOptions": { +- "timeoutMS": 250 ++ "timeoutMS": 250, ++ "minPoolSize": 1 + }, ++ "awaitMinPoolSizeMS": 10000, + "useMultipleMongoses": false, + "observeEvents": [ + "commandStartedEvent" +@@ -3628,8 +3668,10 @@ + "client": { + "id": "client", + "uriOptions": { +- "timeoutMS": 250 ++ "timeoutMS": 250, ++ "minPoolSize": 1 + }, ++ "awaitMinPoolSizeMS": 10000, + "useMultipleMongoses": false, + "observeEvents": [ + "commandStartedEvent" +@@ -3807,8 +3849,10 @@ + "client": { + "id": "client", + "uriOptions": { +- "timeoutMS": 250 ++ "timeoutMS": 250, ++ "minPoolSize": 1 + }, ++ "awaitMinPoolSizeMS": 10000, + "useMultipleMongoses": false, + "observeEvents": [ + "commandStartedEvent" +@@ -3986,8 +4030,10 @@ + "client": { + "id": "client", + "uriOptions": { +- "timeoutMS": 250 ++ "timeoutMS": 250, ++ "minPoolSize": 1 + }, ++ "awaitMinPoolSizeMS": 10000, + "useMultipleMongoses": false, + "observeEvents": [ + "commandStartedEvent" +@@ -4171,8 +4217,10 @@ + "client": { + "id": "client", + "uriOptions": { +- "timeoutMS": 250 ++ "timeoutMS": 250, ++ "minPoolSize": 1 + }, ++ "awaitMinPoolSizeMS": 10000, + "useMultipleMongoses": false, + "observeEvents": [ + "commandStartedEvent" +@@ -4360,8 +4408,10 @@ + "client": { + "id": "client", + "uriOptions": { +- "timeoutMS": 250 ++ "timeoutMS": 250, ++ "minPoolSize": 1 + }, ++ "awaitMinPoolSizeMS": 10000, + "useMultipleMongoses": false, + "observeEvents": [ + "commandStartedEvent" +@@ -4549,8 +4599,10 @@ + "client": { + "id": "client", + "uriOptions": { +- "timeoutMS": 250 ++ "timeoutMS": 250, ++ "minPoolSize": 1 + }, ++ "awaitMinPoolSizeMS": 10000, + "useMultipleMongoses": false, + "observeEvents": [ + "commandStartedEvent" +@@ -4728,8 +4780,10 @@ + "client": { + "id": "client", + "uriOptions": { +- "timeoutMS": 250 ++ "timeoutMS": 250, ++ "minPoolSize": 1 + }, ++ "awaitMinPoolSizeMS": 10000, + "useMultipleMongoses": false, + "observeEvents": [ + "commandStartedEvent" +@@ -4913,8 +4967,10 @@ + "client": { + "id": "client", + "uriOptions": { +- "timeoutMS": 250 ++ "timeoutMS": 250, ++ "minPoolSize": 1 + }, ++ "awaitMinPoolSizeMS": 10000, + "useMultipleMongoses": false, + "observeEvents": [ + "commandStartedEvent" +@@ -5102,8 +5158,10 @@ + "client": { + "id": "client", + "uriOptions": { +- "timeoutMS": 250 ++ "timeoutMS": 250, ++ "minPoolSize": 1 + }, ++ "awaitMinPoolSizeMS": 10000, + "useMultipleMongoses": false, + "observeEvents": [ + "commandStartedEvent" +@@ -5297,8 +5355,10 @@ + "client": { + "id": "client", + "uriOptions": { +- "timeoutMS": 250 ++ "timeoutMS": 250, ++ "minPoolSize": 1 + }, ++ "awaitMinPoolSizeMS": 10000, + "useMultipleMongoses": false, + "observeEvents": [ + 
"commandStartedEvent" +@@ -5482,8 +5542,10 @@ + "client": { + "id": "client", + "uriOptions": { +- "timeoutMS": 250 ++ "timeoutMS": 250, ++ "minPoolSize": 1 + }, ++ "awaitMinPoolSizeMS": 10000, + "useMultipleMongoses": false, + "observeEvents": [ + "commandStartedEvent" +@@ -5677,8 +5739,10 @@ + "client": { + "id": "client", + "uriOptions": { +- "timeoutMS": 250 ++ "timeoutMS": 250, ++ "minPoolSize": 1 + }, ++ "awaitMinPoolSizeMS": 10000, + "useMultipleMongoses": false, + "observeEvents": [ + "commandStartedEvent" +diff --git a/test/csot/non-tailable-cursors.json b/test/csot/non-tailable-cursors.json +index 291c6e72..58c59cb3 100644 +--- a/test/csot/non-tailable-cursors.json ++++ b/test/csot/non-tailable-cursors.json +@@ -1,6 +1,6 @@ + { + "description": "timeoutMS behaves correctly for non-tailable cursors", +- "schemaVersion": "1.9", ++ "schemaVersion": "1.26", + "runOnRequirements": [ + { + "minServerVersion": "4.4" +@@ -17,8 +17,10 @@ + "client": { + "id": "client", + "uriOptions": { +- "timeoutMS": 200 ++ "timeoutMS": 200, ++ "minPoolSize": 1 + }, ++ "awaitMinPoolSizeMS": 10000, + "useMultipleMongoses": false, + "observeEvents": [ + "commandStartedEvent" +diff --git a/test/csot/retryability-timeoutMS.json b/test/csot/retryability-timeoutMS.json +index 9daad260..5a0c9f36 100644 +--- a/test/csot/retryability-timeoutMS.json ++++ b/test/csot/retryability-timeoutMS.json +@@ -1,6 +1,6 @@ + { + "description": "timeoutMS behaves correctly for retryable operations", +- "schemaVersion": "1.9", ++ "schemaVersion": "1.26", + "runOnRequirements": [ + { + "minServerVersion": "4.0", +@@ -26,8 +26,10 @@ + "client": { + "id": "client", + "uriOptions": { +- "timeoutMS": 100 ++ "timeoutMS": 100, ++ "minPoolSize": 1 + }, ++ "awaitMinPoolSizeMS": 10000, + "useMultipleMongoses": false, + "observeEvents": [ + "commandStartedEvent" +diff --git a/test/csot/runCursorCommand.json b/test/csot/runCursorCommand.json +index 36f774fb..e5182e33 100644 +--- a/test/csot/runCursorCommand.json ++++ b/test/csot/runCursorCommand.json +@@ -1,6 +1,6 @@ + { + "description": "runCursorCommand", +- "schemaVersion": "1.9", ++ "schemaVersion": "1.26", + "runOnRequirements": [ + { + "minServerVersion": "4.4" +@@ -16,6 +16,10 @@ + { + "client": { + "id": "commandClient", ++ "uriOptions": { ++ "minPoolSize": 1 ++ }, ++ "awaitMinPoolSizeMS": 10000, + "useMultipleMongoses": false, + "observeEvents": [ + "commandStartedEvent", +diff --git a/test/csot/sessions-inherit-timeoutMS.json b/test/csot/sessions-inherit-timeoutMS.json +index 13ea91c7..dbf163e4 100644 +--- a/test/csot/sessions-inherit-timeoutMS.json ++++ b/test/csot/sessions-inherit-timeoutMS.json +@@ -1,6 +1,6 @@ + { + "description": "sessions inherit timeoutMS from their parent MongoClient", +- "schemaVersion": "1.9", ++ "schemaVersion": "1.26", + "runOnRequirements": [ + { + "minServerVersion": "4.4", +@@ -21,8 +21,10 @@ + "client": { + "id": "client", + "uriOptions": { +- "timeoutMS": 500 ++ "timeoutMS": 500, ++ "minPoolSize": 1 + }, ++ "awaitMinPoolSizeMS": 10000, + "useMultipleMongoses": false, + "observeEvents": [ + "commandStartedEvent", diff --git a/.evergreen/sync-spawn-host.sh b/.evergreen/sync-spawn-host.sh index 4c3e276d41..61dd84ec22 100755 --- a/.evergreen/sync-spawn-host.sh +++ b/.evergreen/sync-spawn-host.sh @@ -1,4 +1,6 @@ #!/bin/bash +# Synchronize local files to a remote Evergreen spawn host. 
+set -eu

 if [ -z "$1" ]
 then
@@ -6,8 +8,13 @@ if [ -z "$1" ]
 fi

 target=$1
+user=${target%@*}
+remote_dir=/home/$user/mongo-python-driver
+echo "Copying files to $target..."
+rsync -az -e ssh --exclude '.git' --filter=':- .gitignore' -r . $target:$remote_dir
+echo "Copying files to $target... done."

 echo "Syncing files to $target..."
 # shellcheck disable=SC2034
-fswatch -o . | while read f; do rsync -hazv -e ssh --exclude '.git' --filter=':- .gitignore' -r . $target:/home/ec2-user/mongo-python-driver; done
+fswatch -o . | while read f; do rsync -hazv -e ssh --exclude '.git' --filter=':- .gitignore' -r . $target:/home/$user/mongo-python-driver; done

 echo "Syncing files to $target... done."
diff --git a/.evergreen/teardown-encryption.sh b/.evergreen/teardown-encryption.sh
deleted file mode 100755
index 88dc16bba8..0000000000
--- a/.evergreen/teardown-encryption.sh
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/bin/bash
-set -o errexit  # Exit the script with error if any of the commands fail
-set -o xtrace
-
-if [ -z "${DRIVERS_TOOLS}" ]; then
-    echo "Missing environment variable DRIVERS_TOOLS"
-fi
-
-bash ${DRIVERS_TOOLS}/.evergreen/csfle/stop-servers.sh
-rm -rf libmongocrypt/ libmongocrypt_git/ libmongocrypt.tar.gz mongocryptd.pid
diff --git a/.evergreen/utils.sh b/.evergreen/utils.sh
index d3af2dcc7a..dadb7db084 100755
--- a/.evergreen/utils.sh
+++ b/.evergreen/utils.sh
@@ -1,36 +1,34 @@
-#!/bin/bash -ex
-
-set -o xtrace
+#!/bin/bash
+# Utility functions used by pymongo evergreen scripts.
+set -eu

 find_python3() {
     PYTHON=""
-    # Add a fallback system python3 if it is available and Python 3.9+.
-    if is_python_39 "$(command -v python3)"; then
-        PYTHON="$(command -v python3)"
-    fi
     # Find a suitable toolchain version, if available.
     if [ "$(uname -s)" = "Darwin" ]; then
-        # macos 11.00
-        if [ -d "/Library/Frameworks/Python.Framework/Versions/3.10" ]; then
-            PYTHON="/Library/Frameworks/Python.Framework/Versions/3.10/bin/python3"
-        # macos 10.14
-        elif [ -d "/Library/Frameworks/Python.Framework/Versions/3.9" ]; then
-            PYTHON="/Library/Frameworks/Python.Framework/Versions/3.9/bin/python3"
-        fi
+        PYTHON="/Library/Frameworks/Python.Framework/Versions/3.10/bin/python3"
     elif [ "Windows_NT" = "${OS:-}" ]; then # Magic variable in cygwin
-        PYTHON="C:/python/Python39/python.exe"
+        PYTHON="C:/python/Python310/python.exe"
     else
-        # Prefer our own toolchain, fall back to mongodb toolchain if it has Python 3.9+.
-        if [ -f "/opt/python/3.9/bin/python3" ]; then
-            PYTHON="/opt/python/3.9/bin/python3"
-        elif is_python_39 "$(command -v /opt/mongodbtoolchain/v4/bin/python3)"; then
+        # Prefer our own toolchain, fall back to mongodb toolchain if it has Python 3.10+.
+        if [ -f "/opt/python/3.10/bin/python3" ]; then
+            PYTHON="/opt/python/3.10/bin/python3"
+        elif is_python_310 "$(command -v /opt/mongodbtoolchain/v5/bin/python3)"; then
+            PYTHON="/opt/mongodbtoolchain/v5/bin/python3"
+        elif is_python_310 "$(command -v /opt/mongodbtoolchain/v4/bin/python3)"; then
             PYTHON="/opt/mongodbtoolchain/v4/bin/python3"
-        elif is_python_39 "$(command -v /opt/mongodbtoolchain/v3/bin/python3)"; then
+        elif is_python_310 "$(command -v /opt/mongodbtoolchain/v3/bin/python3)"; then
             PYTHON="/opt/mongodbtoolchain/v3/bin/python3"
         fi
     fi
+    # Add a fallback system python3 if it is available and Python 3.10+.
     if [ -z "$PYTHON" ]; then
-        echo "Cannot test without python3.9+ installed!"
+        if is_python_310 "$(command -v python3)"; then
+            PYTHON="$(command -v python3)"
+        fi
+    fi
+    if [ -z "$PYTHON" ]; then
+        echo "Cannot test without python3.10+ installed!"
exit 1 fi echo "$PYTHON" @@ -101,17 +99,50 @@ testinstall () { fi } -# Function that returns success if the provided Python binary is version 3.9 or later +# Function that returns success if the provided Python binary is version 3.10 or later # Usage: -# is_python_39 /path/to/python +# is_python_310 /path/to/python # * param1: Python binary -is_python_39() { +is_python_310() { if [ -z "$1" ]; then return 1 - elif $1 -c "import sys; exit(sys.version_info[:2] < (3, 9))"; then - # runs when sys.version_info[:2] >= (3, 9) + elif $1 -c "import sys; exit(sys.version_info[:2] < (3, 10))"; then + # runs when sys.version_info[:2] >= (3, 10) return 0 else return 1 fi } + + +# Function that gets a python binary given a python version string. +# Versions can be of the form 3.xx or pypy3.xx. +get_python_binary() { + version=$1 + if [ "$(uname -s)" = "Darwin" ]; then + if [[ "$version" == *"t"* ]]; then + binary_name="python3t" + framework_dir="PythonT" + else + binary_name="python3" + framework_dir="Python" + fi + version=$(echo "$version" | sed 's/t//g') + PYTHON="/Library/Frameworks/$framework_dir.Framework/Versions/$version/bin/$binary_name" + elif [ "Windows_NT" = "${OS:-}" ]; then + version=$(echo $version | cut -d. -f1,2 | sed 's/\.//g; s/t//g') + if [ -n "${IS_WIN32:-}" ]; then + PYTHON="C:/python/32/Python$version/python.exe" + else + PYTHON="C:/python/Python$version/python.exe" + fi + else + PYTHON="/opt/python/$version/bin/python3" + fi + if is_python_310 "$(command -v $PYTHON)"; then + echo "$PYTHON" + else + echo "Could not find suitable python binary for '$version'" >&2 + return 1 + fi +} diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 0000000000..e21b87ddd3 --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1 @@ +* @mongodb/dbx-python diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 0000000000..8185a38836 --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,38 @@ + + +[Issue Key](https://jira.mongodb.org/browse/%7BISSUE_KEY%7D) +## Summary + + +## Changes in this PR + + +## Testing Plan + + +### Screenshots (optional) + + +## Checklist + +### Checklist for Author +- [ ] Did you update the changelog (if necessary)? +- [ ] Is the intention of the code captured in relevant tests? +- [ ] If there are new TODOs, has a related JIRA ticket been created? + +### Checklist for Reviewer {@primary_reviewer} +- [ ] Does the title of the PR reference a JIRA Ticket? +- [ ] Do you fully understand the implementation? (Would you be comfortable explaining how this code works to someone else?) +- [ ] Have you checked for spelling & grammar errors? +- [ ] Is all relevant documentation (README or docstring) updated? + +## Focus Areas for Reviewer (optional) + diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index e620cb1801..b138324bf4 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -34,17 +34,19 @@ jobs: build-mode: manual - language: python build-mode: none + - language: actions + build-mode: none steps: - name: Checkout repository - uses: actions/checkout@v4 + uses: actions/checkout@v5 with: ref: ${{ inputs.ref }} persist-credentials: false - - uses: actions/setup-python@v5 + - uses: actions/setup-python@v6 # Initializes the CodeQL tools for scanning. 
- name: Initialize CodeQL
-        uses: github/codeql-action/init@v3
+        uses: github/codeql-action/init@64d10c13136e1c5bce3e5fbde8d4906eeaafc885 # v3
        with:
          languages: ${{ matrix.language }}
          build-mode: ${{ matrix.build-mode }}
@@ -52,7 +54,6 @@
          queries: security-extended
          config: |
            paths-ignore:
-              - '.github/**'
              - 'doc/**'
              - 'tools/**'
              - 'test/**'
@@ -62,6 +63,6 @@
          pip install -e .

      - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@v3
+        uses: github/codeql-action/analyze@64d10c13136e1c5bce3e5fbde8d4906eeaafc885 # v3
        with:
          category: "/language:${{matrix.language}}"
diff --git a/.github/workflows/create-release-branch.yml b/.github/workflows/create-release-branch.yml
index f24f94179a..95a5e65c88 100644
--- a/.github/workflows/create-release-branch.yml
+++ b/.github/workflows/create-release-branch.yml
@@ -33,17 +33,19 @@ jobs:
    outputs:
      version: ${{ steps.pre-publish.outputs.version }}
    steps:
-      - uses: mongodb-labs/drivers-github-tools/secure-checkout@v2
+      - uses: mongodb-labs/drivers-github-tools/secure-checkout@v3
        with:
          app_id: ${{ vars.APP_ID }}
          private_key: ${{ secrets.APP_PRIVATE_KEY }}
-      - uses: mongodb-labs/drivers-github-tools/setup@v2
+      - uses: mongodb-labs/drivers-github-tools/setup@v3
        with:
          aws_role_arn: ${{ secrets.AWS_ROLE_ARN }}
          aws_region_name: ${{ vars.AWS_REGION_NAME }}
          aws_secret_id: ${{ secrets.AWS_SECRET_ID }}
          artifactory_username: ${{ vars.ARTIFACTORY_USERNAME }}
-      - uses: mongodb-labs/drivers-github-tools/create-branch@v2
+      - name: Get hatch
+        run: pip install hatch
+      - uses: mongodb-labs/drivers-github-tools/create-branch@v3
        id: create-branch
        with:
          branch_name: ${{ inputs.branch_name }}
diff --git a/.github/workflows/dist.yml b/.github/workflows/dist.yml
index a4c5a8279b..84bf1ba893 100644
--- a/.github/workflows/dist.yml
+++ b/.github/workflows/dist.yml
@@ -34,65 +34,62 @@ jobs:
        # Github Actions doesn't support pairing matrix values together, let's improvise
        # https://github.com/github/feedback/discussions/7835#discussioncomment-1769026
        buildplat:
-        - [ubuntu-20.04, "manylinux_x86_64", "cp3*-manylinux_x86_64"]
-        - [ubuntu-20.04, "manylinux_aarch64", "cp3*-manylinux_aarch64"]
-        - [ubuntu-20.04, "manylinux_ppc64le", "cp3*-manylinux_ppc64le"]
-        - [ubuntu-20.04, "manylinux_s390x", "cp3*-manylinux_s390x"]
-        - [ubuntu-20.04, "manylinux_i686", "cp3*-manylinux_i686"]
-        - [windows-2019, "win_amd6", "cp3*-win_amd64"]
-        - [windows-2019, "win32", "cp3*-win32"]
+        - [ubuntu-latest, "manylinux_x86_64", "cp3*-manylinux_x86_64"]
+        - [ubuntu-latest, "manylinux_aarch64", "cp3*-manylinux_aarch64"]
+        - [ubuntu-latest, "manylinux_ppc64le", "cp3*-manylinux_ppc64le"]
+        - [ubuntu-latest, "manylinux_s390x", "cp3*-manylinux_s390x"]
+        - [ubuntu-latest, "manylinux_i686", "cp3*-manylinux_i686"]
+        - [windows-2022, "win_amd64", "cp3*-win_amd64"]
+        - [windows-2022, "win32", "cp3*-win32"]
+        - [windows-11-arm, "win_arm64", "cp3*-win_arm64"]
         - [macos-14, "macos", "cp*-macosx_*"]

    steps:
      - name: Checkout pymongo
-        uses: actions/checkout@v4
+        uses: actions/checkout@v5
        with:
          fetch-depth: 0
          persist-credentials: false
          ref: ${{ inputs.ref }}

-      - uses: actions/setup-python@v5
+      - uses: actions/setup-python@v6
        with:
          cache: 'pip'
-          python-version: 3.9
+          python-version: 3.11
          cache-dependency-path: 'pyproject.toml'
          allow-prereleases: true

      - name: Set up QEMU
        if: runner.os == 'Linux'
-        uses: docker/setup-qemu-action@v3
+        uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3
        with:
+          # setup-qemu-action by default uses `tonistiigi/binfmt:latest` image,
+          # which
is out of date. This causes seg faults during build. + # Here we manually fix the version. + image: tonistiigi/binfmt:qemu-v8.1.5 platforms: all - name: Install cibuildwheel - # Note: the default manylinux is manylinux2014 + # Note: the default manylinux is manylinux_2_28 run: | python -m pip install -U pip - python -m pip install "cibuildwheel>=2.20,<3" + python -m pip install "cibuildwheel>=3.2.0,<4" - name: Build wheels env: CIBW_BUILD: ${{ matrix.buildplat[2] }} run: python -m cibuildwheel --output-dir wheelhouse - - name: Build manylinux1 wheels - if: ${{ matrix.buildplat[1] == 'manylinux_x86_64' || matrix.buildplat[1] == 'manylinux_i686' }} - env: - CIBW_MANYLINUX_X86_64_IMAGE: manylinux1 - CIBW_MANYLINUX_I686_IMAGE: manylinux1 - CIBW_BUILD: "cp39-${{ matrix.buildplat[1] }} cp39-${{ matrix.buildplat[1] }}" - run: python -m cibuildwheel --output-dir wheelhouse - - name: Assert all versions in wheelhouse if: ${{ ! startsWith(matrix.buildplat[1], 'macos') }} run: | - ls wheelhouse/*cp39*.whl ls wheelhouse/*cp310*.whl ls wheelhouse/*cp311*.whl ls wheelhouse/*cp312*.whl ls wheelhouse/*cp313*.whl + ls wheelhouse/*cp314*.whl # Free-threading builds: - ls wheelhouse/*cp313t*.whl + ls wheelhouse/*cp314t*.whl - uses: actions/upload-artifact@v4 with: @@ -102,18 +99,18 @@ jobs: make_sdist: name: Make SDist - runs-on: macos-13 + runs-on: macos-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: fetch-depth: 0 persist-credentials: false ref: ${{ inputs.ref }} - - uses: actions/setup-python@v5 + - uses: actions/setup-python@v6 with: # Build sdist on lowest supported Python - python-version: '3.9' + python-version: "3.10" - name: Build SDist run: | @@ -138,7 +135,7 @@ jobs: name: Download Wheels steps: - name: Download all workflow run artifacts - uses: actions/download-artifact@v4 + uses: actions/download-artifact@v5 - name: Flatten directory working-directory: . run: | diff --git a/.github/workflows/pull_request_template.md b/.github/workflows/pull_request_template.md deleted file mode 100644 index 852066d4b2..0000000000 --- a/.github/workflows/pull_request_template.md +++ /dev/null @@ -1,23 +0,0 @@ -# [JIRA Ticket ID](Link to Ticket) - - -# Summary - - -# Changes in this PR - - -# Test Plan - - -# Screenshots (Optional) - - -# Callouts or Follow-up items (Optional) - diff --git a/.github/workflows/release-python.yml b/.github/workflows/release-python.yml index cee222d109..6abca9e528 100644 --- a/.github/workflows/release-python.yml +++ b/.github/workflows/release-python.yml @@ -3,23 +3,25 @@ name: Release on: workflow_dispatch: inputs: - version: - description: "The new version to set" - required: true following_version: description: "The post (dev) version to set" - required: true dry_run: description: "Dry Run?" default: false type: boolean + schedule: + - cron: '30 5 * * *' env: # Changes per repo PRODUCT_NAME: PyMongo # Changes per branch - SILK_ASSET_GROUP: mongodb-python-driver EVERGREEN_PROJECT: mongo-python-driver + # Constant + # inputs will be empty on a scheduled run. so, we only set dry_run + # to 'false' when the input is set to 'false'. + DRY_RUN: ${{ ! 
contains(inputs.dry_run, 'false') }} + FOLLOWING_VERSION: ${{ inputs.following_version || '' }} defaults: run: @@ -29,27 +31,26 @@ jobs: pre-publish: environment: release runs-on: ubuntu-latest + if: github.repository_owner == 'mongodb' || github.event_name == 'workflow_dispatch' permissions: id-token: write contents: write outputs: version: ${{ steps.pre-publish.outputs.version }} steps: - - uses: mongodb-labs/drivers-github-tools/secure-checkout@v2 + - uses: mongodb-labs/drivers-github-tools/secure-checkout@v3 with: app_id: ${{ vars.APP_ID }} private_key: ${{ secrets.APP_PRIVATE_KEY }} - - uses: mongodb-labs/drivers-github-tools/setup@v2 + - uses: mongodb-labs/drivers-github-tools/setup@v3 with: aws_role_arn: ${{ secrets.AWS_ROLE_ARN }} aws_region_name: ${{ vars.AWS_REGION_NAME }} aws_secret_id: ${{ secrets.AWS_SECRET_ID }} - artifactory_username: ${{ vars.ARTIFACTORY_USERNAME }} - - uses: mongodb-labs/drivers-github-tools/python/pre-publish@v2 + - uses: mongodb-labs/drivers-github-tools/python/pre-publish@v3 id: pre-publish with: - version: ${{ inputs.version }} - dry_run: ${{ inputs.dry_run }} + dry_run: ${{ env.DRY_RUN }} build-dist: needs: [pre-publish] @@ -67,6 +68,29 @@ jobs: publish: needs: [build-dist, static-scan] + name: Upload release to PyPI + runs-on: ubuntu-latest + environment: release + permissions: + id-token: write + steps: + - name: Download all the dists + uses: actions/download-artifact@v5 + with: + name: all-dist-${{ github.run_id }} + path: dist/ + - name: Publish package distributions to TestPyPI + uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e # release/v1 + with: + repository-url: https://test.pypi.org/legacy/ + skip-existing: true + attestations: ${{ env.DRY_RUN }} + - name: Publish package distributions to PyPI + if: startsWith(env.DRY_RUN, 'false') + uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e # release/v1 + + post-publish: + needs: [publish] runs-on: ubuntu-latest environment: release permissions: @@ -75,22 +99,19 @@ jobs: attestations: write security-events: write steps: - - uses: mongodb-labs/drivers-github-tools/secure-checkout@v2 + - uses: mongodb-labs/drivers-github-tools/secure-checkout@v3 with: app_id: ${{ vars.APP_ID }} private_key: ${{ secrets.APP_PRIVATE_KEY }} - - uses: mongodb-labs/drivers-github-tools/setup@v2 + - uses: mongodb-labs/drivers-github-tools/setup@v3 with: aws_role_arn: ${{ secrets.AWS_ROLE_ARN }} aws_region_name: ${{ vars.AWS_REGION_NAME }} aws_secret_id: ${{ secrets.AWS_SECRET_ID }} - artifactory_username: ${{ vars.ARTIFACTORY_USERNAME }} - - uses: mongodb-labs/drivers-github-tools/python/publish@v2 + - uses: mongodb-labs/drivers-github-tools/python/post-publish@v3 with: - version: ${{ inputs.version }} - following_version: ${{ inputs.following_version }} + following_version: ${{ env.FOLLOWING_VERSION }} product_name: ${{ env.PRODUCT_NAME }} - silk_asset_group: ${{ env.SILK_ASSET_GROUP }} evergreen_project: ${{ env.EVERGREEN_PROJECT }} token: ${{ github.token }} - dry_run: ${{ inputs.dry_run }} + dry_run: ${{ env.DRY_RUN }} diff --git a/.github/workflows/test-python.yml b/.github/workflows/test-python.yml index 12cfaa4b27..a057570f3f 100644 --- a/.github/workflows/test-python.yml +++ b/.github/workflows/test-python.yml @@ -14,25 +14,30 @@ defaults: run: shell: bash -eux {0} +permissions: + contents: read + jobs: static: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: persist-credentials: false - - uses: actions/setup-python@v5 
+ - name: Install uv + uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6 with: - python-version: "3.9" - cache: 'pip' - cache-dependency-path: 'pyproject.toml' + enable-cache: true + python-version: "3.10" + - name: Install just + run: uv tool install rust-just - name: Install Python dependencies run: | - python -m pip install -U pip hatch + just install - name: Run linters run: | - hatch run lint:run-manual + just lint-manual - name: Run compilation run: | export PYMONGO_C_EXT_MUST_BUILD=1 @@ -40,7 +45,7 @@ jobs: python tools/fail_if_no_c.py - name: Run typecheck run: | - hatch run typing:check + just typing - run: | sudo apt-get install -y cppcheck - run: | @@ -48,155 +53,162 @@ jobs: cppcheck pymongo build: - # supercharge/mongodb-github-action requires containers so we don't test other platforms runs-on: ${{ matrix.os }} strategy: + fail-fast: false matrix: - os: [ubuntu-20.04] - python-version: ["3.9", "pypy-3.9", "3.13", "3.13t"] + # Tests currently only pass on ubuntu on GitHub Actions. + os: [ubuntu-latest] + python-version: ["3.10", "pypy-3.10", "3.13t"] + mongodb-version: ["8.0"] + name: CPython ${{ matrix.python-version }}-${{ matrix.os }} steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: persist-credentials: false - - if: ${{ matrix.python-version == '3.13t' }} - name: Setup free-threaded Python - uses: deadsnakes/action@v3.2.0 - with: - python-version: 3.13 - nogil: true - - if: ${{ matrix.python-version != '3.13t' }} - name: Setup Python - uses: actions/setup-python@v5 + - name: Install uv + uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6 with: + enable-cache: true python-version: ${{ matrix.python-version }} - cache: 'pip' - cache-dependency-path: 'pyproject.toml' - allow-prereleases: true - - name: Install dependencies - run: | - pip install -U pip - if [[ "${{ matrix.python-version }}" == "3.13" ]]; then - pip install --pre cffi setuptools - pip install --no-build-isolation hatch - elif [[ "${{ matrix.python-version }}" == "3.13t" ]]; then - # Hatch can't be installed on 3.13t, use pytest directly. - pip install . 
- pip install -r requirements/test.txt - else - pip install hatch - fi - - name: Start MongoDB - uses: supercharge/mongodb-github-action@1.11.0 - with: - mongodb-version: 6.0 + - id: setup-mongodb + uses: mongodb-labs/drivers-evergreen-tools@master + with: + version: "${{ matrix.mongodb-version }}" - name: Run tests - run: | - if [[ "${{ matrix.python-version }}" == "3.13t" ]]; then - pytest -v --durations=5 --maxfail=10 - else - hatch run test:test - fi + run: uv run --extra test pytest -v doctest: runs-on: ubuntu-latest name: DocTest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: persist-credentials: false - - name: Setup Python - uses: actions/setup-python@v5 + - name: Install uv + uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6 with: - python-version: "3.9" - cache: 'pip' - cache-dependency-path: 'pyproject.toml' - - name: Install dependencies - run: | - pip install -U hatch pip - - name: Start MongoDB - uses: supercharge/mongodb-github-action@1.11.0 + enable-cache: true + python-version: "3.10" + - name: Install just + run: uv tool install rust-just + - id: setup-mongodb + uses: mongodb-labs/drivers-evergreen-tools@master with: - mongodb-version: '8.0.0-rc4' + version: "8.0" + - name: Install dependencies + run: just install - name: Run tests - run: | - hatch run doctest:test + run: | + just setup-tests doctest + just run-tests docs: name: Docs Checks runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: persist-credentials: false - - uses: actions/setup-python@v5 + - name: Install uv + uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6 with: - cache: 'pip' - cache-dependency-path: 'pyproject.toml' - # Build docs on lowest supported Python for furo - python-version: '3.9' + enable-cache: true + python-version: "3.10" + - name: Install just + run: uv tool install rust-just - name: Install dependencies - run: | - pip install -U pip hatch + run: just install - name: Build docs - run: | - hatch run doc:build + run: just docs linkcheck: name: Link Check runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: persist-credentials: false - - uses: actions/setup-python@v5 + - name: Install uv + uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6 with: - cache: 'pip' - cache-dependency-path: 'pyproject.toml' - # Build docs on lowest supported Python for furo - python-version: '3.9' + enable-cache: true + python-version: "3.10" + - name: Install just + run: uv tool install rust-just - name: Install dependencies - run: | - pip install -U pip hatch + run: just install - name: Build docs - run: | - hatch run doc:linkcheck + run: just docs-linkcheck typing: name: Typing Tests runs-on: ubuntu-latest strategy: matrix: - python: ["3.9", "3.11"] + python: ["3.10", "3.11"] steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: persist-credentials: false - - uses: actions/setup-python@v5 + - name: Install uv + uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6 with: + enable-cache: true python-version: "${{matrix.python}}" - cache: 'pip' - cache-dependency-path: 'pyproject.toml' + - name: Install just + run: uv tool install rust-just - name: Install dependencies run: | - pip install -U pip hatch + just install - name: Run typecheck run: | - hatch run typing:check + just typing + + integration_tests: + runs-on: ubuntu-latest + name: Integration Tests + steps: + - uses: actions/checkout@v5 + with: + 
persist-credentials: false + - name: Install uv + uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6 + with: + enable-cache: true + python-version: "3.10" + - name: Install just + run: uv tool install rust-just + - name: Install dependencies + run: | + just install + - id: setup-mongodb + uses: mongodb-labs/drivers-evergreen-tools@master + - name: Run tests + run: | + just integration-tests + - id: setup-mongodb-ssl + uses: mongodb-labs/drivers-evergreen-tools@master + with: + ssl: true + - name: Run tests + run: | + just integration-tests make_sdist: runs-on: ubuntu-latest name: "Make an sdist" steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: persist-credentials: false - - uses: actions/setup-python@v5 + - uses: actions/setup-python@v6 with: cache: 'pip' cache-dependency-path: 'pyproject.toml' # Build sdist on lowest supported Python - python-version: '3.9' + python-version: "3.10" - name: Build SDist shell: bash run: | @@ -214,7 +226,9 @@ jobs: timeout-minutes: 20 steps: - name: Download sdist - uses: actions/download-artifact@v4 + uses: actions/download-artifact@v5 + with: + path: sdist/ - name: Unpack SDist shell: bash run: | @@ -223,14 +237,14 @@ jobs: mkdir test tar --strip-components=1 -zxf *.tar.gz -C ./test ls test - - uses: actions/setup-python@v5 + - uses: actions/setup-python@v6 with: cache: 'pip' cache-dependency-path: 'sdist/test/pyproject.toml' # Test sdist on lowest supported Python - python-version: '3.9' - - name: Start MongoDB - uses: supercharge/mongodb-github-action@1.11.0 + python-version: "3.10" + - id: setup-mongodb + uses: mongodb-labs/drivers-evergreen-tools@master - name: Run connect test from sdist shell: bash run: | @@ -239,3 +253,28 @@ jobs: which python pip install -e ".[test]" PYMONGO_MUST_CONNECT=1 pytest -v -k client_context + + test_minimum: + permissions: + contents: read + runs-on: ubuntu-latest + name: Test minimum dependencies and Python + steps: + - uses: actions/checkout@v5 + with: + persist-credentials: false + - name: Install uv + uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6 + with: + python-version: "3.10" + - id: setup-mongodb + uses: mongodb-labs/drivers-evergreen-tools@master + with: + version: "8.0" + - name: Run tests + shell: bash + run: | + uv venv + source .venv/bin/activate + uv pip install -e ".[test]" --resolution=lowest-direct --force-reinstall + pytest -v test/test_srv_polling.py test/test_dns.py test/asynchronous/test_srv_polling.py test/asynchronous/test_dns.py diff --git a/.github/workflows/zizmor.yml b/.github/workflows/zizmor.yml index 31afeb6655..c991de2e6d 100644 --- a/.github/workflows/zizmor.yml +++ b/.github/workflows/zizmor.yml @@ -14,19 +14,8 @@ jobs: security-events: write steps: - name: Checkout repository - uses: actions/checkout@v4 + uses: actions/checkout@v5 with: persist-credentials: false - - name: Setup Rust - uses: actions-rust-lang/setup-rust-toolchain@v1 - - name: Get zizmor - run: cargo install zizmor - name: Run zizmor 🌈 - run: zizmor --format sarif . 
> results.sarif - env: - GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - name: Upload SARIF file - uses: github/codeql-action/upload-sarif@v3 - with: - sarif_file: results.sarif - category: zizmor + uses: zizmorcore/zizmor-action@da5ac40c5419dcf7f21630fb2f95e725ae8fb9d5 diff --git a/.github/zizmor.yml b/.github/zizmor.yml new file mode 100644 index 0000000000..10fd4cdfcf --- /dev/null +++ b/.github/zizmor.yml @@ -0,0 +1,7 @@ +rules: + unpinned-uses: + config: + policies: + actions/*: ref-pin + mongodb-labs/drivers-github-tools/*: ref-pin + mongodb-labs/drivers-evergreen-tools: ref-pin diff --git a/.gitignore b/.gitignore index e4587125e8..74ed0bbb70 100644 --- a/.gitignore +++ b/.gitignore @@ -18,20 +18,26 @@ mongocryptd.pid .idea/ .vscode/ .nova/ +.temp/ venv/ secrets-export.sh libmongocrypt.tar.gz libmongocrypt/ -libmongocrypt_git/ -hatch_config.toml .venv +expansion.yml +*expansions.yml +.evergreen/scripts/env.sh +.evergreen/scripts/test-env.sh +specifications/ +results.json +.evergreen/atlas_x509_dev_client_certificate.pem # Lambda temp files test/lambda/.aws-sam -test/lambda/env.json test/lambda/mongodb/pymongo/* test/lambda/mongodb/gridfs/* test/lambda/mongodb/bson/* +test/lambda/*.json # test results and logs xunit-results/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a0b06ab0dc..d2b9d9a17a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -6,6 +6,7 @@ repos: - id: check-added-large-files - id: check-case-conflict - id: check-toml + - id: check-json - id: check-yaml exclude: template.yaml - id: debug-statements @@ -17,6 +18,14 @@ repos: exclude: .patch exclude_types: [json] +- repo: https://github.com/astral-sh/ruff-pre-commit + # Ruff version. + rev: v0.1.3 + hooks: + - id: ruff + args: ["--fix", "--show-fixes"] + - id: ruff-format + - repo: local hooks: - id: synchro @@ -29,14 +38,6 @@ repos: - ruff==0.1.3 - unasync -- repo: https://github.com/astral-sh/ruff-pre-commit - # Ruff version. - rev: v0.1.3 - hooks: - - id: ruff - args: ["--fix", "--show-fixes"] - - id: ruff-format - - repo: https://github.com/adamchainz/blacken-docs rev: "1.16.0" hooks: @@ -115,3 +116,20 @@ repos: (?x)( .evergreen/retry-with-backoff.sh ) + - id: generate-config + name: generate-config + entry: .evergreen/scripts/generate-config.sh + language: python + require_serial: true + additional_dependencies: ["shrub.py>=3.10.0", "pyyaml>=6.0.2"] + + - id: uv-lock + name: uv-lock + entry: uv lock + language: python + require_serial: true + files: ^(uv\.lock|pyproject\.toml|requirements.txt|requirements/.*\.txt)$ + pass_filenames: false + fail_fast: true + additional_dependencies: + - "uv>=0.8.4" diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 814e040048..a8881db9cb 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -16,7 +16,7 @@ be of interest or that has already been addressed. ## Supported Interpreters -PyMongo supports CPython 3.9+ and PyPy3.9+. Language features not +PyMongo supports CPython 3.10+ and PyPy3.10+. Language features not supported by all interpreters can not be used. ## Style Guide @@ -28,9 +28,10 @@ including 4 space indents and 79 character line limits. - Avoid backward breaking changes if at all possible. - Write inline documentation for new classes and methods. -- We use [hatch](https://hatch.pypa.io/dev/) for our script runner and packaging tool. +- We use [uv](https://docs.astral.sh/uv/) for python environment management and packaging. +- We use [just](https://just.systems/man/en/) as our task runner. 
- Write tests and make sure they pass (make sure you have a mongod
-  running on the default port, then execute `hatch run test:test` from the cmd
+  running on the default port, then execute `just test` from the cmd
   line to run the test suite).
 - Add yourself to doc/contributors.rst `:)`
@@ -148,17 +149,18 @@ To run `pre-commit` manually, run:

 pre-commit run --all-files
 ```

-To run a manual hook like `mypy` manually, run:
+To run a manual hook like `ruff` manually, run:

 ```bash
-pre-commit run --all-files --hook-stage manual mypy
+pre-commit run --all-files --hook-stage manual ruff
 ```

-Typically we use `hatch` to run the linters, e.g.
+Typically we use `just` to run the linters, e.g.

 ```bash
-hatch run typing:check-mypy
-hatch run lint:build-manual
+just install  # this will install a venv with pre-commit installed, and install the pre-commit hook.
+just typing-mypy
+just run lint-manual
 ```

 ## Documentation
@@ -176,52 +178,255 @@
 documentation including narrative docs, and the [Sphinx docstring format](https:

 You can build the documentation locally by running:

 ```bash
-hatch run doc:build
+just docs
 ```

 When updating docs, it can be helpful to run the live docs server as:

 ```bash
-hatch run doc:serve
+just docs-serve
 ```

 Browse to the link provided, and then as you make changes to docstrings or
 narrative docs, the pages will re-render and the browser will automatically
 refresh.

-
 ## Running Tests Locally

-- Ensure you have started the appropriate Mongo Server(s).
-- Run `pip install hatch` to use `hatch` for testing or run
-  `pip install -e ".[test]"` to run `pytest` directly.
-- Run `hatch run test:test` or `pytest` to run all of the tests.
+- Run `just install` to set up a local virtual environment, or you can manually
+  create a virtual environment and run `pytest` directly. If you want to use a specific
+  version of Python, remove the `.venv` folder and set `PYTHON_BINARY` before running `just install`.
+- Ensure you have started the appropriate Mongo Server(s). You can run `just run-server` with optional args
+  to set up the server. All given options will be passed to
+  [`run-orchestration.sh`](https://github.com/mongodb-labs/drivers-evergreen-tools/blob/master/.evergreen/run-orchestration.sh). Run `$DRIVERS_TOOLS/.evergreen/run-orchestration.sh -h`
+  for a full list of options.
+- Run `just test` or `pytest` to run all of the tests.
 - Append `test/<mod_name>.py::<class_name>::<test_name>` to run
   specific tests. You can omit the `<test_name>` to test a full class
   and the `<class_name>` to test a full module. For example:
-  `hatch run test:test -- test/test_change_stream.py::TestUnifiedChangeStreamsErrors::test_change_stream_errors_on_ElectionInProgress`.
+  `just test test/test_change_stream.py::TestUnifiedChangeStreamsErrors::test_change_stream_errors_on_ElectionInProgress`.
 - Use the `-k` argument to select tests by pattern.

-## Running Load Balancer Tests Locally
-- Install `haproxy` (available as `brew install haproxy` on macOS).
-- Clone `drivers-evergreen-tools`:
-  `git clone git@github.com:mongodb-labs/drivers-evergreen-tools.git`.
-- Start the servers using
-  `LOAD_BALANCER=true TOPOLOGY=sharded_cluster AUTH=noauth SSL=nossl MONGODB_VERSION=6.0 DRIVERS_TOOLS=$PWD/drivers-evergreen-tools MONGO_ORCHESTRATION_HOME=$PWD/drivers-evergreen-tools/.evergreen/orchestration $PWD/drivers-evergreen-tools/.evergreen/run-orchestration.sh`.
-- Start the load balancer using:
-  `MONGODB_URI='mongodb://localhost:27017,localhost:27018/' $PWD/drivers-evergreen-tools/.evergreen/run-load-balancer.sh start`.
-- Run the tests from the `pymongo` checkout directory using:
-  `TEST_LOADBALANCER=1 hatch run test:test-eg`.
-
-## Running Encryption Tests Locally
+## Running tests that require secrets, services, or other configuration
+
+### Prerequisites
+
 - Clone `drivers-evergreen-tools`:
-  `git clone git@github.com:mongodb-labs/drivers-evergreen-tools.git`.
-- Run `export DRIVERS_TOOLS=$PWD/drivers-evergreen-tools`
-- Run `AWS_PROFILE= hatch run encryption:setup` after setting up your AWS profile with `aws configure sso`.
-- Run the tests with `TEST_ENCRYPTION=1 hatch run test:test-eg`.
-- When done, run `hatch run encryption:teardown` to clean up.
+  `git clone git@github.com:mongodb-labs/drivers-evergreen-tools.git`.
+- Run `export DRIVERS_TOOLS=$PWD/drivers-evergreen-tools`. This can be put into a `.bashrc` file
+  for convenience.
+- Some tests require access to [Drivers test secrets](https://github.com/mongodb-labs/drivers-evergreen-tools/tree/master/.evergreen/secrets_handling#secrets-handling).
+
+### Usage
+
+- Run `just run-server` with optional args to set up the server.
+- Run `just setup-tests` with optional args to set up the test environment, secrets, etc.
+  See `just setup-tests -h` for a full list of available options.
+- Run `just run-tests` to run the tests in an appropriate Python environment.
+- When done, run `just teardown-tests` to clean up and `just stop-server` to stop the server.
+
+### SSL tests
+
+- Run `just run-server --ssl` to start the server with TLS enabled.
+- Run `just setup-tests --ssl`.
+- Run `just run-tests`.
+
+Note: for general testing purposes with a TLS-enabled server, you can use the following (this should ONLY be used
+for local testing):
+
+```python
+from pymongo import MongoClient
+
+client = MongoClient(
+    "mongodb://localhost:27017?tls=true&tlsAllowInvalidCertificates=true"
+)
+```
+
+If you want to use the actual certificate files, then set `tlsCertificateKeyFile` to the local path
+to `/test/certificates/client.pem` and `tlsCAFile` to the local path to `/test/certificates/ca.pem` (see the sketch below).
+
+### Encryption tests
+
+- Run `just run-server` to start the server.
+- Run `just setup-tests encryption`.
+- Run the tests with `just run-tests`.
+
+To test with `encryption` and `PyOpenSSL`, use `just setup-tests encryption pyopenssl`.
+
+### PyOpenSSL tests
+
+- Run `just run-server` to start the server.
+- Run `just setup-tests default_sync pyopenssl`.
+- Run the tests with `just run-tests`.
+
+Note: `PyOpenSSL` is not used in async tests, but you can use `just setup-tests default_async pyopenssl`
+to verify that PyMongo falls back to the standard library `ssl` module.
+
+### Load balancer tests
+
+- Install `haproxy` (available as `brew install haproxy` on macOS).
+- Start the server with `just run-server load_balancer`.
+- Set up the test with `just setup-tests load_balancer`.
+- Run the tests with `just run-tests`.
+
+### AWS auth tests
+
+- Run `just run-server auth_aws` to start the server.
+- Run `just setup-tests auth_aws <test-type>` to set up the AWS test.
+- Run the tests with `just run-tests`.
+
+### OIDC auth tests
+
+- Run `just setup-tests auth_oidc <test-type>` to set up the OIDC test.
+- Run the tests with `just run-tests`.
+
+The supported types are [`default`, `azure`, `gcp`, `eks`, `aks`, and `gke`].
+For the `eks` test, you will need to set up access to the `drivers-test-secrets-role`; see the [Wiki](https://wiki.corp.mongodb.com/spaces/DRIVERS/pages/239737385/Using+AWS+Secrets+Manager+to+Store+Testing+Secrets).
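As a companion to the SSL section above, here is a minimal sketch (not part of the original guide) that uses the repository's test certificates instead of `tlsAllowInvalidCertificates`; the relative paths are an assumption and presume you run from a `pymongo` checkout against a TLS-enabled local server:

```python
from pymongo import MongoClient

# Local-testing sketch: trust the repo's test CA and present the test client
# certificate rather than disabling certificate validation.
# The paths below are assumptions; adjust them to your checkout location.
client = MongoClient(
    "mongodb://localhost:27017/?tls=true",
    tlsCertificateKeyFile="test/certificates/client.pem",
    tlsCAFile="test/certificates/ca.pem",
)
client.admin.command("ping")  # succeeds only if the TLS handshake verified
```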
+
+### KMS tests
+
+For KMS tests that run locally and are expected to fail, in this case using `azure`:
+
+- Run `just run-server`.
+- Run `just setup-tests kms azure-fail`.
+- Run `just run-tests`.
+
+For KMS tests that run remotely and are expected to pass, in this case using `gcp`:
+
+- Run `just setup-tests kms gcp`.
+- Run `just run-tests`.
+
+### Enterprise Auth tests
+
+Note: these tests can only be run from an Evergreen host.
+
+- Run `just run-server enterprise_auth`.
+- Run `just setup-tests enterprise_auth`.
+- Run `just run-tests`.
+
+### Atlas Connect tests
+
+- Run `just setup-tests atlas_connect`.
+- Run `just run-tests`.
+
+### Search Index tests
+
+- Run `just run-server search_index`.
+- Run `just setup-tests search_index`.
+- Run `just run-tests`.
+
+### MockupDB tests
+
+- Run `just setup-tests mockupdb`.
+- Run `just run-tests`.
+
+### Doc tests
+
+The doc tests require a running server.
+
+- Run `just run-server`.
+- Run `just setup-tests doctest`.
+- Run `just run-tests`.
 
-## Re-sync Spec Tests
+### Free-threaded Python Tests
+
+In the Evergreen builds, the tests are configured to use the free-threaded Python from the toolchain.
+Locally you can run:
+
+- Run `just run-server`.
+- Run `just setup-tests`.
+- Run `UV_PYTHON=3.13t just run-tests`.
+
+### AWS Lambda tests
+
+You will need to set up access to the `drivers-test-secrets-role`; see the [Wiki](https://wiki.corp.mongodb.com/spaces/DRIVERS/pages/239737385/Using+AWS+Secrets+Manager+to+Store+Testing+Secrets).
+
+- Run `just setup-tests aws_lambda`.
+- Run `just run-tests`.
+
+### mod_wsgi tests
+
+Note: these tests can only be run from an Evergreen Linux host that has the Python toolchain.
+
+- Run `just run-server`.
+- Run `just setup-tests mod_wsgi <mode>`.
+- Run `just run-tests`.
+
+The `mode` can be `standalone` or `embedded`. For the `replica_set` version of the tests, use
+`TOPOLOGY=replica_set just run-server`.
+
+### OCSP tests
+
+- Export the orchestration file, e.g. `export ORCHESTRATION_FILE=rsa-basic-tls-ocsp-disableStapling.json`.
+This corresponds to a config file in `$DRIVERS_TOOLS/.evergreen/orchestration/configs/servers`.
+MongoDB servers on macOS and Windows do not staple OCSP responses and only support RSA.
+NOTE: because the mock OCSP responder MUST be started prior to the server starting, the OCSP tests start the server
+as part of `setup-tests`.
+
+- Run `just setup-tests ocsp <sub-test>` (options are "valid", "revoked", "valid-delegate", "revoked-delegate").
+- Run `just run-tests`.
+
+If you are running one of the `no-responder` tests, omit the `run-server` step.
+
+### Perf Tests
+
+- Start the appropriate server, e.g. `just run-server --version=v8.0-perf --ssl`.
+- Set up the tests with `sync` or `async`: `just setup-tests perf sync`.
+- Run the tests: `just run-tests`.
+
+## Enable Debug Logs
+
+- Use `-o log_cli_level="DEBUG" -o log_cli=1` with `just test` or `pytest` to output all debug logs to the terminal. **Warning**: This will output a huge amount of logs.
+- Add `log_cli=1` and `log_cli_level="DEBUG"` to the `tool.pytest.ini_options` section in `pyproject.toml` to enable debug logs in this manner by default on your machine.
+- Set `DEBUG_LOG=1` and run `just setup-tests`, `just test`, or `pytest` to enable debug logs only for failed tests.
+- Finally, you can use `just setup-tests --debug-log`.
+- For Evergreen patch builds, you can use `evergreen patch --param DEBUG_LOG=1` to enable debug logs for failed tests in the patch.
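The options above control debug logs during test runs; as a hedged companion sketch (not part of the original guide), the same logs can be enabled in a standalone script with the standard `logging` module, since PyMongo emits its logs under the `pymongo` logger namespace:

```python
import logging

from pymongo import MongoClient

# Minimal sketch: surface PyMongo's DEBUG-level logs (commands, connections,
# server selection) on the terminal outside of a pytest run.
logging.basicConfig(level=logging.WARNING)
logging.getLogger("pymongo").setLevel(logging.DEBUG)

client = MongoClient()  # assumes a local mongod on the default port
client.admin.command("ping")  # emits command started/succeeded debug logs
```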
+
+## Testing minimum dependencies
+
+To run any of the test suites with minimum supported dependencies, pass `--test-min-deps` to
+`just setup-tests`.
+
+## Adding a new test suite
+
+- If adding new test files that should only be run for that test suite, add a pytest marker to the file and add it
+  to the list of pytest markers in `pyproject.toml`. Then add the test suite to the `TEST_SUITE_MAP` in `.evergreen/scripts/utils.py`. If for some reason it is not a pytest-runnable test, add it to the list of `EXTRA_TESTS` instead.
+- If the test uses Atlas or otherwise doesn't use `run-orchestration.sh`, add it to the `NO_RUN_ORCHESTRATION` list in
+  `.evergreen/scripts/utils.py`.
+- If there is something special required to run the local server or there is an extra flag that should always be set
+  like `AUTH`, add that logic to `.evergreen/scripts/run_server.py`.
+- The bulk of the logic will typically be in `.evergreen/scripts/setup_tests.py`. This is where you should fetch secrets and make them available using `write_env`, start services, and write other env vars needed using `write_env`.
+- If there are any special test considerations, including not running `pytest` at all, handle it in `.evergreen/scripts/run_tests.py`.
+- If there are any services or Atlas clusters to tear down, handle them in `.evergreen/scripts/teardown_tests.py`.
+- Add functions to generate the test variant(s) and task(s) to `.evergreen/scripts/generate_config.py`.
+- Regenerate the test variants and tasks using `pre-commit run --all-files generate-config`.
+- Make sure to add instructions for running the test suite to `CONTRIBUTING.md`.
+
+## Handling flaky tests
+
+We have a custom `flaky` decorator in [test/asynchronous/utils.py](test/asynchronous/utils.py) that can be used for
+tests that are flaky. By default the decorator only applies when not running on CPython on Linux, since other
+runtimes tend to have more variation. When using the `flaky` decorator, open a corresponding ticket and
+use the ticket number as the "reason" parameter to the decorator, e.g. `@flaky(reason="PYTHON-1234")` (see the sketch below).
+When running tests locally (not in CI), the `flaky` decorator will be disabled unless `ENABLE_FLAKY` is set.
+To disable the `flaky` decorator in CI, you can use `evergreen patch --param DISABLE_FLAKY=1`.
+
+## Integration Tests
+
+The `integration_tests` directory has a set of scripts that verify the usage of PyMongo with downstream packages or frameworks. See the [README](./integration_tests/README.md) for more information.
+
+To run the tests, use `just integration_tests`.
+
+The tests should be able to run with and without SSL enabled.
+
+## Specification Tests
+
+The MongoDB [specifications repository](https://github.com/mongodb/specifications)
+holds in-progress and completed specifications for features of MongoDB, drivers,
+and associated products. PyMongo supports the [Unified Test Format](https://jira.mongodb.org/browse/DRIVERS-709)
+for running specification tests to confirm PyMongo behaves as expected.
+
+### Resynchronizing the Specification Tests
 
 If you would like to re-sync the copy of the specification tests in the
 PyMongo repository with that which is inside the [specifications
@@ -242,6 +447,39 @@ update in PyMongo. This is primarily helpful
 if you are implementing a new feature in PyMongo that has spec tests already
 implemented, or if you are attempting to validate new spec tests in PyMongo.
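Back-referencing the flaky-test section above, here is a minimal usage sketch; the import path follows the `test/asynchronous/utils.py` location named in that section, while the class and test names are invented for illustration:

```python
# Sketch only: PYTHON-1234 stands in for a real Jira ticket number.
from test.asynchronous.utils import flaky


class TestReconnect:
    @flaky(reason="PYTHON-1234")
    async def test_recovers_after_network_blip(self):
        ...
```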
+
+### Automated Specification Test Resyncing
+The `/.evergreen/scripts/resync-all-specs.sh` script
+automatically runs once a week to resync all the specs with the [specifications repo](https://github.com/mongodb/specifications).
+A PR will be generated by mongodb-drivers-pr-bot containing any changes picked up by this resync.
+The PR description will display the name(s) of the updated specs along
+with any errors that occurred.
+
+Spec test changes associated with a behavioral change or bugfix that has yet to be implemented in PyMongo
+must be added to a patch file in `/.evergreen/spec-patch`. Each patch
+file must be named after the associated PYTHON ticket and contain the
+test differences between PyMongo's current tests and the specification.
+All changes listed in these patch files will be *undone* by the script and won't
+be applied to PyMongo's tests.
+
+When a new test file or folder is added to the spec repo before the associated code changes are implemented, that test's path must be added to `.evergreen/remove-unimplemented-tests.sh` along with a comment indicating the associated PYTHON ticket for those changes.
+
+Any PR that implements a PYTHON ticket documented in a patch file or within `.evergreen/remove-unimplemented-tests.sh` must also remove the associated patch file or entry in `remove-unimplemented-tests.sh`.
+
+#### Adding to a patch file
+To add to or create a patch file, run `git diff` to show the desired changes to undo and copy the
+results into the patch file.
+
+For example: the imaginary, unimplemented PYTHON-1234 ticket has associated spec test changes. To add those changes to `PYTHON-1234.patch`, do the following:
+```bash
+git diff HEAD~1 path/to/file >> .evergreen/spec-patch/PYTHON-1234.patch
+```
+
+#### Running Locally
+Both `resync-all-specs.sh` and `resync-all-specs.py` can be run locally (and won't generate a PR).
+```bash
+./.evergreen/scripts/resync-all-specs.sh
+python3 ./.evergreen/scripts/resync-all-specs.py
+```
+
 ## Making a Release
 
 Follow the [Python Driver Release Process Wiki](https://wiki.corp.mongodb.com/display/DRIVERS/Python+Driver+Release+Process).
@@ -258,9 +496,35 @@ To prevent the `synchro` hook from accidentally overwriting code, it first checks
 if a file is changing and not its async counterpart, and will fail. In the unlikely
 scenario that you want to override this behavior, first export `OVERRIDE_SYNCHRO_CHECK=1`.
 
+Sometimes, the `synchro` hook will fail and introduce changes to many previously unmodified files. This is due to static
+Python errors, such as missing imports, incorrect syntax, or other fatal typos. To resolve these issues,
+run `pre-commit run --all-files --hook-stage manual ruff` and fix all reported errors before running the `synchro`
+hook again.
+
 ## Converting a test to async
+
 The `tools/convert_test_to_async.py` script takes in an existing synchronous
 test file and outputs a partially-converted asynchronous version of the same
 name to the `test/asynchronous` directory. Use this generated file as a
 starting point for the completed conversion.
 
 The script is used like so: `python tools/convert_test_to_async.py [test_file.py]`
+
+## Generating a flame graph using py-spy
+To profile a test script and generate a flame graph, follow these steps:
+1. Install `py-spy` if you haven't already:
+   ```bash
+   pip install py-spy
+   ```
+2. Inside your test script, perform any required setup and then loop over the code you want to profile for improved sampling.
+3. Run `py-spy record -o <output.svg> -r <sample-rate> -- python <path/to/test/script>` to generate a `.svg` file containing the flame graph.
+   (Note: on macOS you will need to run this command using `sudo` to allow `py-spy` to attach to the Python process.)
+4. If you need to include native code (for example the C extensions), profiling should be done on a Linux system, as macOS and Windows do not support the `--native` option of `py-spy`.
+   Creating an Ubuntu Evergreen spawn host and using `scp` to copy the flame graph `.svg` file back to your local machine is the best way to do this.
+
+## Dependabot updates
+
+Dependabot will raise PRs at most once per week, grouped by GitHub Actions updates and Python requirement
+file updates. We have a pre-commit hook that will update the `uv.lock` file when requirements change.
+To update the lock file on a failing PR, you can use a method like `gh pr checkout <pr-number>`, then run
+`just lint uv-lock` to update the lock file, and then push the changes. If a typing dependency has changed,
+also run `just typing` and handle any new findings.
diff --git a/README.md b/README.md
index bd0755620e..ba1688cb70 100644
--- a/README.md
+++ b/README.md
@@ -10,11 +10,18 @@
 The PyMongo distribution contains tools for interacting with MongoDB
 database from Python. The `bson` package is an implementation of the
 [BSON format](http://bsonspec.org) for Python. The `pymongo` package is
-a native Python driver for MongoDB. The `gridfs` package is a
+a native Python driver for MongoDB, offering both synchronous and asynchronous APIs. The `gridfs` package is a
 [gridfs](https://github.com/mongodb/specifications/blob/master/source/gridfs/gridfs-spec.md/)
 implementation on top of `pymongo`.
 
-PyMongo supports MongoDB 4.0, 4.2, 4.4, 5.0, 6.0, 7.0, and 8.0.
+PyMongo supports MongoDB 4.0, 4.2, 4.4, 5.0, 6.0, 7.0, and 8.0. PyMongo follows [semantic versioning](https://semver.org/spec/v2.0.0.html) for its releases.
+
+## Documentation
+
+Documentation is available at
+[mongodb.com](https://www.mongodb.com/docs/languages/python/pymongo-driver/current/).
+
+[API documentation](https://pymongo.readthedocs.io/en/stable/api/) and the [full changelog](https://pymongo.readthedocs.io/en/stable/changelog.html) for each release are available at [readthedocs.io](https://pymongo.readthedocs.io/en/stable/index.html).
 
 ## Support / Feedback
 
@@ -90,7 +97,7 @@ package that is incompatible with PyMongo.
 
 ## Dependencies
 
-PyMongo supports CPython 3.9+ and PyPy3.9+.
+PyMongo supports CPython 3.10+ and PyPy3.10+.
 
 Required dependencies:
 
@@ -152,11 +159,6 @@ command:
 python -m pip install "pymongo[gssapi,aws,ocsp,snappy,zstd,encryption]"
 ```
 
-Additional dependencies are:
-
-- (to generate documentation or run tests)
-  [hatch](https://hatch.pypa.io/dev/)
-
 ## Examples
 
 Here's a basic example (for more see the *examples* section of the
@@ -196,14 +198,6 @@
 ObjectId('4aba160ee23f6b543e000002')
 [8, 11]
 ```
 
-## Documentation
-
-Documentation is available at
-[pymongo.readthedocs.io](https://pymongo.readthedocs.io/en/stable/).
-
-Documentation can be generated by running **pip install hatch; hatch run doc:build**. Generated
-documentation can be found in the `doc/build/html/` directory.
-
 ## Learning Resources
 
 - MongoDB Learn - [Python
@@ -213,10 +207,11 @@ Center](https://www.mongodb.com/developer/languages/python/).
 
 ## Testing
 
-The easiest way to run the tests is to run *hatch run test:test** in the root
-of the distribution. For example,
+The easiest way to run the tests is to run the following from the repository root.
```bash -pip install hatch -hatch run test:test +pip install -e ".[test]" +pytest ``` + +For more advanced testing scenarios, see the [contributing guide](./CONTRIBUTING.md#running-tests-locally). diff --git a/_setup.py b/_setup.py index 65ae1908fe..f99e9e7dc8 100644 --- a/_setup.py +++ b/_setup.py @@ -82,6 +82,11 @@ def run(self): ) def build_extension(self, ext): + # "ProgramFiles(x86)" is not a valid environment variable in Cygwin but is needed for + # the MSVCCompiler in distutils. + if os.name == "nt": + if "ProgramFiles" in os.environ and "ProgramFiles(x86)" not in os.environ: + os.environ["ProgramFiles(x86)"] = os.environ["ProgramFiles"] + " (x86)" name = ext.name try: build_ext.build_extension(self, ext) @@ -125,7 +130,11 @@ def build_extension(self, ext): except ValueError: pass ext_modules = [] -elif sys.platform.startswith("java") or sys.platform == "cli" or "PyPy" in sys.version: +elif ( + sys.platform.startswith("java") + or sys.platform == "cli" + or sys.implementation.name in ("pypy", "graalpy") +): sys.stdout.write( """ *****************************************************\n diff --git a/bson/__init__.py b/bson/__init__.py index fc6efe0d59..d260fb876f 100644 --- a/bson/__init__.py +++ b/bson/__init__.py @@ -58,10 +58,10 @@ the microsecond field is truncated. .. [#dt2] all datetime.datetime instances are encoded as UTC. By default, they are decoded as *naive* but timezone aware datetimes are also supported. - See :doc:`/examples/datetimes` for examples. + See `Dates and Times `_ for examples. .. [#dt3] To enable decoding a bson UTC datetime to a :class:`~bson.datetime_ms.DatetimeMS` - instance see :ref:`handling-out-of-range-datetimes`. -.. [#uuid] For :py:class:`uuid.UUID` encoding and decoding behavior see :doc:`/examples/uuid`. + instance see `handling out of range datetimes `_. +.. [#uuid] For :py:class:`uuid.UUID` encoding and decoding behavior see ``_. .. [#re] :class:`~bson.regex.Regex` instances and regular expression objects from ``re.compile()`` are both saved as BSON regular expressions. 
BSON regular expressions are decoded as :class:`~bson.regex.Regex` @@ -1009,7 +1009,7 @@ def _dict_to_bson( try: elements.append(_element_to_bson(key, value, check_keys, opts)) except InvalidDocument as err: - raise InvalidDocument(f"Invalid document {doc} | {err}") from err + raise InvalidDocument(f"Invalid document: {err}", doc) from err except AttributeError: raise TypeError(f"encoder expected a mapping type but got: {doc!r}") from None @@ -1327,7 +1327,7 @@ def decode_iter( elements = data[position : position + obj_size] position += obj_size - yield _bson_to_dict(elements, opts) # type:ignore[misc] + yield _bson_to_dict(elements, opts) @overload @@ -1373,7 +1373,7 @@ def decode_file_iter( raise InvalidBSON("cut off in middle of objsize") obj_size = _UNPACK_INT_FROM(size_data, 0)[0] - 4 elements = size_data + file_obj.read(max(0, obj_size)) - yield _bson_to_dict(elements, opts) # type:ignore[arg-type, misc] + yield _bson_to_dict(elements, opts) # type:ignore[misc] def is_valid(bson: bytes) -> bool: @@ -1386,7 +1386,7 @@ def is_valid(bson: bytes) -> bool: :param bson: the data to be validated """ if not isinstance(bson, bytes): - raise TypeError("BSON data must be an instance of a subclass of bytes") + raise TypeError(f"BSON data must be an instance of a subclass of bytes, not {type(bson)}") try: _bson_to_dict(bson, DEFAULT_CODEC_OPTIONS) diff --git a/bson/_cbsonmodule.c b/bson/_cbsonmodule.c index d91c7e0536..7d184641c5 100644 --- a/bson/_cbsonmodule.c +++ b/bson/_cbsonmodule.c @@ -1644,6 +1644,54 @@ static int write_raw_doc(buffer_t buffer, PyObject* raw, PyObject* _raw_str) { return bytes_written; } + +/* Update Invalid Document error to include doc as a property. + */ +void handle_invalid_doc_error(PyObject* dict) { + PyObject *etype = NULL, *evalue = NULL, *etrace = NULL; + PyObject *msg = NULL, *new_msg = NULL, *new_evalue = NULL; + PyErr_Fetch(&etype, &evalue, &etrace); + PyObject *InvalidDocument = _error("InvalidDocument"); + if (InvalidDocument == NULL) { + goto cleanup; + } + + if (evalue && PyErr_GivenExceptionMatches(etype, InvalidDocument)) { + msg = PyObject_Str(evalue); + if (msg) { + const char * msg_utf8 = PyUnicode_AsUTF8(msg); + if (msg_utf8 == NULL) { + goto cleanup; + } + new_msg = PyUnicode_FromFormat("Invalid document: %s", msg_utf8); + if (new_msg == NULL) { + goto cleanup; + } + // Add doc to the error instance as a property. 
+ new_evalue = PyObject_CallFunctionObjArgs(InvalidDocument, new_msg, dict, NULL); + Py_DECREF(evalue); + Py_DECREF(etype); + etype = InvalidDocument; + InvalidDocument = NULL; + if (new_evalue) { + evalue = new_evalue; + new_evalue = NULL; + } else { + evalue = msg; + msg = NULL; + } + } + PyErr_NormalizeException(&etype, &evalue, &etrace); + } +cleanup: + PyErr_Restore(etype, evalue, etrace); + Py_XDECREF(msg); + Py_XDECREF(InvalidDocument); + Py_XDECREF(new_evalue); + Py_XDECREF(new_msg); +} + + /* returns the number of bytes written or 0 on failure */ int write_dict(PyObject* self, buffer_t buffer, PyObject* dict, unsigned char check_keys, @@ -1743,40 +1791,8 @@ int write_dict(PyObject* self, buffer_t buffer, while (PyDict_Next(dict, &pos, &key, &value)) { if (!decode_and_write_pair(self, buffer, key, value, check_keys, options, top_level)) { - if (PyErr_Occurred()) { - PyObject *etype = NULL, *evalue = NULL, *etrace = NULL; - PyErr_Fetch(&etype, &evalue, &etrace); - PyObject *InvalidDocument = _error("InvalidDocument"); - - if (top_level && InvalidDocument && PyErr_GivenExceptionMatches(etype, InvalidDocument)) { - - Py_DECREF(etype); - etype = InvalidDocument; - - if (evalue) { - PyObject *msg = PyObject_Str(evalue); - Py_DECREF(evalue); - - if (msg) { - // Prepend doc to the existing message - PyObject *dict_str = PyObject_Str(dict); - PyObject *new_msg = PyUnicode_FromFormat("Invalid document %s | %s", PyUnicode_AsUTF8(dict_str), PyUnicode_AsUTF8(msg)); - Py_DECREF(dict_str); - - if (new_msg) { - evalue = new_msg; - } - else { - evalue = msg; - } - } - } - PyErr_NormalizeException(&etype, &evalue, &etrace); - } - else { - Py_DECREF(InvalidDocument); - } - PyErr_Restore(etype, evalue, etrace); + if (PyErr_Occurred() && top_level) { + handle_invalid_doc_error(dict); } return 0; } @@ -1796,6 +1812,9 @@ int write_dict(PyObject* self, buffer_t buffer, } if (!decode_and_write_pair(self, buffer, key, value, check_keys, options, top_level)) { + if (PyErr_Occurred() && top_level) { + handle_invalid_doc_error(dict); + } Py_DECREF(key); Py_DECREF(value); Py_DECREF(iter); @@ -3206,11 +3225,18 @@ _cbson_exec(PyObject *m) INITERROR; } +#if PY_VERSION_HEX >= 0x030D0000 + if (PyModule_Add(m, "_C_API", c_api_object) < 0) { + Py_DECREF(m); + INITERROR; + } +# else if (PyModule_AddObject(m, "_C_API", c_api_object) < 0) { Py_DECREF(c_api_object); Py_DECREF(m); INITERROR; } +#endif return 0; } diff --git a/bson/binary.py b/bson/binary.py index 6dc5058c2c..48eb12b0ac 100644 --- a/bson/binary.py +++ b/bson/binary.py @@ -14,7 +14,7 @@ from __future__ import annotations import struct -from dataclasses import dataclass +import warnings from enum import Enum from typing import TYPE_CHECKING, Any, Optional, Sequence, Tuple, Type, Union, overload from uuid import UUID @@ -79,7 +79,7 @@ class UuidRepresentation: :class:`~bson.binary.Binary` instance will be returned instead of a :class:`uuid.UUID` instance. - See :ref:`unspecified-representation-details` for details. + See `unspecified representation details `_ for details. .. versionadded:: 3.11 """ @@ -91,7 +91,7 @@ class UuidRepresentation: and decoded from BSON binary, using RFC-4122 byte order with binary subtype :data:`UUID_SUBTYPE`. - See :ref:`standard-representation-details` for details. + See `standard representation details `_ for details. .. versionadded:: 3.11 """ @@ -103,7 +103,7 @@ class UuidRepresentation: and decoded from BSON binary, using RFC-4122 byte order with binary subtype :data:`OLD_UUID_SUBTYPE`. 
- See :ref:`python-legacy-representation-details` for details. + See `python legacy representation details `_ for details. .. versionadded:: 3.11 """ @@ -115,7 +115,7 @@ class UuidRepresentation: and decoded from BSON binary subtype :data:`OLD_UUID_SUBTYPE`, using the Java driver's legacy byte order. - See :ref:`java-legacy-representation-details` for details. + See `Java Legacy UUID `_ for details. .. versionadded:: 3.11 """ @@ -127,7 +127,7 @@ class UuidRepresentation: and decoded from BSON binary subtype :data:`OLD_UUID_SUBTYPE`, using the C# driver's legacy byte order. - See :ref:`csharp-legacy-representation-details` for details. + See `C# Legacy UUID `_ for details. .. versionadded:: 3.11 """ @@ -227,7 +227,6 @@ class BinaryVectorDtype(Enum): PACKED_BIT = b"\x10" -@dataclass class BinaryVector: """Vector of numbers along with metadata for binary interoperability. .. versionadded:: 4.10 @@ -247,6 +246,19 @@ def __init__(self, data: Sequence[float | int], dtype: BinaryVectorDtype, paddin self.dtype = dtype self.padding = padding + def __repr__(self) -> str: + return f"BinaryVector(dtype={self.dtype}, padding={self.padding}, data={self.data})" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, BinaryVector): + return False + return ( + self.dtype == other.dtype and self.padding == other.padding and self.data == other.data + ) + + def __len__(self) -> int: + return len(self.data) + class Binary(bytes): """Representation of BSON binary data. @@ -286,11 +298,11 @@ class Binary(bytes): def __new__( cls: Type[Binary], - data: Union[memoryview, bytes, _mmap, _array[Any]], + data: Union[memoryview, bytes, bytearray, _mmap, _array[Any]], subtype: int = BINARY_SUBTYPE, ) -> Binary: if not isinstance(subtype, int): - raise TypeError("subtype must be an instance of int") + raise TypeError(f"subtype must be an instance of int, not {type(subtype)}") if subtype >= 256 or subtype < 0: raise ValueError("subtype must be contained in [0, 256)") # Support any type that implements the buffer protocol. @@ -316,12 +328,12 @@ def from_uuid( :param uuid_representation: A member of :class:`~bson.binary.UuidRepresentation`. Default: :const:`~bson.binary.UuidRepresentation.STANDARD`. - See :ref:`handling-uuid-data-example` for details. + See `UUID representations `_ for details. .. versionadded:: 3.11 """ if not isinstance(uuid, UUID): - raise TypeError("uuid must be an instance of uuid.UUID") + raise TypeError(f"uuid must be an instance of uuid.UUID, not {type(uuid)}") if uuid_representation not in ALL_UUID_REPRESENTATIONS: raise ValueError( @@ -365,7 +377,7 @@ def as_uuid(self, uuid_representation: int = UuidRepresentation.STANDARD) -> UUI :param uuid_representation: A member of :class:`~bson.binary.UuidRepresentation`. Default: :const:`~bson.binary.UuidRepresentation.STANDARD`. - See :ref:`handling-uuid-data-example` for details. + See `UUID representations `_ for details. .. versionadded:: 3.11 """ @@ -405,14 +417,17 @@ def from_vector(cls: Type[Binary], vector: BinaryVector) -> Binary: @classmethod @overload def from_vector( - cls: Type[Binary], vector: list[int, float], dtype: BinaryVectorDtype, padding: int = 0 + cls: Type[Binary], + vector: Union[list[int], list[float]], + dtype: BinaryVectorDtype, + padding: int = 0, ) -> Binary: ... 
     @classmethod
     def from_vector(
         cls: Type[Binary],
-        vector: Union[BinaryVector, list[int, float]],
+        vector: Union[BinaryVector, list[int], list[float]],
         dtype: Optional[BinaryVectorDtype] = None,
         padding: Optional[int] = None,
     ) -> Binary:
@@ -428,6 +443,9 @@ def from_vector(
         :param padding: For fractional bytes, number of bits to ignore at end of vector.
         :return: Binary packed data identified by dtype and padding.
 
+        .. versionchanged:: 4.14
+           When padding is non-zero, ignored bits should be zero. Raise exception on encoding, warn on decoding.
+
         .. versionadded:: 4.10
         """
         if isinstance(vector, BinaryVector):
@@ -447,6 +465,10 @@ def from_vector(
             raise ValueError(f"padding does not apply to {dtype=}")
         elif dtype == BinaryVectorDtype.PACKED_BIT:  # pack ints in [0, 255] as unsigned uint8
             format_str = "B"
+            if not 0 <= padding <= 7:
+                raise ValueError(f"{padding=}. It must be in [0, 1, ..7].")
+            if padding and not vector:
+                raise ValueError("Empty vector with non-zero padding.")
         elif dtype == BinaryVectorDtype.FLOAT32:  # pack floats as float32
             format_str = "f"
             if padding:
@@ -456,6 +478,10 @@ def from_vector(
         metadata = struct.pack("<sB", dtype.value, padding)
         data = struct.pack(f"<{len(vector)}{format_str}", *vector)
+        if padding and data[-1] & (1 << padding) - 1 != 0:
+            raise ValueError(
+                "Vector has a padding P, but bits in the final byte lower than P are non-zero. They must be zero."
+            )
         return cls(metadata + data, subtype=VECTOR_SUBTYPE)
 
     def as_vector(self) -> BinaryVector:
@@ -467,7 +493,7 @@ def as_vector(self) -> BinaryVector:
         """
         if self.subtype != VECTOR_SUBTYPE:
-            raise ValueError(f"Cannot decode subtype {self.subtype} as a vector.")
+            raise ValueError(f"Cannot decode subtype {self.subtype} as a vector")
 
         position = 0
         dtype, padding = struct.unpack_from("<sB", self, position)
         position += 2
@@ -478,6 +504,11 @@ def as_vector(self) -> BinaryVector:
         dtype = BinaryVectorDtype(dtype)
         n_values = len(self) - position
 
+        if padding and dtype != BinaryVectorDtype.PACKED_BIT:
+            raise ValueError(
+                f"Corrupt data. Padding ({padding}) must be 0 for all but PACKED_BIT dtypes. ({dtype=})"
+            )
+
         if dtype == BinaryVectorDtype.INT8:
             dtype_format = "b"
             format_string = f"<{n_values}{dtype_format}"
@@ -495,9 +526,19 @@ def as_vector(self) -> BinaryVector:
 
         elif dtype == BinaryVectorDtype.PACKED_BIT:
             # data packed as uint8
+            if padding and not n_values:
+                raise ValueError("Corrupt data. Vector has a padding P, but no data.")
+            if padding > 7 or padding < 0:
+                raise ValueError(f"Corrupt data. Padding ({padding}) must be between 0 and 7.")
             dtype_format = "B"
             format_string = f"<{n_values}{dtype_format}"
             unpacked_uint8s = list(struct.unpack_from(format_string, self, position))
+            if padding and n_values and unpacked_uint8s[-1] & (1 << padding) - 1 != 0:
+                warnings.warn(
+                    "Vector has a padding P, but bits in the final byte lower than P are non-zero. 
For pymongo>=5.0, they must be zero.", + DeprecationWarning, + stacklevel=2, + ) return BinaryVector(unpacked_uint8s, dtype, padding) else: diff --git a/bson/code.py b/bson/code.py index 6b4541d0ff..f0523b2a95 100644 --- a/bson/code.py +++ b/bson/code.py @@ -56,7 +56,7 @@ def __new__( **kwargs: Any, ) -> Code: if not isinstance(code, str): - raise TypeError("code must be an instance of str") + raise TypeError(f"code must be an instance of str, not {type(code)}") self = str.__new__(cls, code) @@ -67,7 +67,7 @@ def __new__( if scope is not None: if not isinstance(scope, _Mapping): - raise TypeError("scope must be an instance of dict") + raise TypeError(f"scope must be an instance of dict, not {type(scope)}") if self.__scope is not None: self.__scope.update(scope) # type: ignore else: diff --git a/bson/codec_options.py b/bson/codec_options.py index 3a0b83b7be..add5416a5b 100644 --- a/bson/codec_options.py +++ b/bson/codec_options.py @@ -57,7 +57,7 @@ class TypeEncoder(abc.ABC): Codec classes must implement the ``python_type`` attribute, and the ``transform_python`` method to support encoding. - See :ref:`custom-type-type-codec` documentation for an example. + See `encode data with type codecs `_ documentation for an example. """ @abc.abstractproperty @@ -76,7 +76,7 @@ class TypeDecoder(abc.ABC): Codec classes must implement the ``bson_type`` attribute, and the ``transform_bson`` method to support decoding. - See :ref:`custom-type-type-codec` documentation for an example. + See `encode data with type codecs `_ documentation for an example. """ @abc.abstractproperty @@ -98,7 +98,7 @@ class TypeCodec(TypeEncoder, TypeDecoder): ``bson_type`` attribute, and the ``transform_bson`` method to support decoding. - See :ref:`custom-type-type-codec` documentation for an example. + See `encode data with type codecs `_ documentation for an example. """ @@ -118,7 +118,7 @@ class TypeRegistry: >>> type_registry = TypeRegistry([Codec1, Codec2, Codec3, ...], ... fallback_encoder) - See :ref:`custom-type-type-registry` documentation for an example. + See `add codec to the type registry `_ documentation for an example. :param type_codecs: iterable of type codec instances. If ``type_codecs`` contains multiple codecs that transform a single @@ -128,7 +128,7 @@ class TypeRegistry: type. :param fallback_encoder: callable that accepts a single, unencodable python value and transforms it into a type that - :mod:`bson` can encode. See :ref:`fallback-encoder-callable` + :mod:`bson` can encode. See `define a fallback encoder `_ documentation for an example. """ @@ -160,6 +160,16 @@ def __init__( f"Expected an instance of {TypeEncoder.__name__}, {TypeDecoder.__name__}, or {TypeCodec.__name__}, got {codec!r} instead" ) + @property + def codecs(self) -> list[TypeEncoder | TypeDecoder | TypeCodec]: + """The list of type codecs in this registry.""" + return self.__type_codecs + + @property + def fallback_encoder(self) -> Optional[_Fallback]: + """The fallback encoder in this registry.""" + return self._fallback_encoder + def _validate_type_encoder(self, codec: _Codec) -> None: from bson import _BUILT_IN_TYPES @@ -317,10 +327,10 @@ def __init__(self, *args, **kwargs): >>> doc._id ObjectId('5b3016359110ea14e8c58b93') - See :doc:`/examples/datetimes` for examples using the `tz_aware` and + See `Dates and Times `_ for examples using the `tz_aware` and `tzinfo` options. - See :doc:`/examples/uuid` for examples using the `uuid_representation` + See `UUID `_ for examples using the `uuid_representation` option. 
     :param document_class: BSON documents returned in queries will be decoded
@@ -334,7 +344,7 @@ def __init__(self, *args, **kwargs):
         :data:`~bson.binary.UuidRepresentation.UNSPECIFIED`.
         New applications should consider setting this to
         :data:`~bson.binary.UuidRepresentation.STANDARD` for cross language
-        compatibility. See :ref:`handling-uuid-data-example` for details.
+        compatibility. See `UUID representations `_ for details.
     :param unicode_decode_error_handler: The error handler to apply when
         a Unicode-related error occurs during BSON decoding that would
         otherwise raise :exc:`UnicodeDecodeError`. Valid options include
@@ -401,17 +411,23 @@ def __new__(
                 "uuid_representation must be a value from bson.binary.UuidRepresentation"
             )
         if not isinstance(unicode_decode_error_handler, str):
-            raise ValueError("unicode_decode_error_handler must be a string")
+            raise ValueError(
+                f"unicode_decode_error_handler must be a string, not {type(unicode_decode_error_handler)}"
+            )
         if tzinfo is not None:
             if not isinstance(tzinfo, datetime.tzinfo):
-                raise TypeError("tzinfo must be an instance of datetime.tzinfo")
+                raise TypeError(
+                    f"tzinfo must be an instance of datetime.tzinfo, not {type(tzinfo)}"
+                )
             if not tz_aware:
                 raise ValueError("cannot specify tzinfo without also setting tz_aware=True")
 
         type_registry = type_registry or TypeRegistry()
         if not isinstance(type_registry, TypeRegistry):
-            raise TypeError("type_registry must be an instance of TypeRegistry")
+            raise TypeError(
+                f"type_registry must be an instance of TypeRegistry, not {type(type_registry)}"
+            )
 
         return tuple.__new__(
             cls,
diff --git a/bson/datetime_ms.py b/bson/datetime_ms.py
index 679524cb60..2047bd30b2 100644
--- a/bson/datetime_ms.py
+++ b/bson/datetime_ms.py
@@ -51,7 +51,7 @@ def __init__(self, value: Union[int, datetime.datetime]):
         To decode UTC datetimes as a ``DatetimeMS``, `datetime_conversion` in
         :class:`~bson.codec_options.CodecOptions` must be set to 'datetime_ms' or
-        'datetime_auto'. See :ref:`handling-out-of-range-datetimes` for
+        'datetime_auto'. See `handling out of range datetimes `_ for
         details.
 
         :param value: An instance of :class:`datetime.datetime` to be
diff --git a/bson/dbref.py b/bson/dbref.py
index 6c21b8162c..40bdb73cff 100644
--- a/bson/dbref.py
+++ b/bson/dbref.py
@@ -56,9 +56,9 @@ def __init__(
         .. seealso:: The MongoDB documentation on `dbrefs `_.
         """
         if not isinstance(collection, str):
-            raise TypeError("collection must be an instance of str")
+            raise TypeError(f"collection must be an instance of str, not {type(collection)}")
         if database is not None and not isinstance(database, str):
-            raise TypeError("database must be an instance of str")
+            raise TypeError(f"database must be an instance of str, not {type(database)}")
 
         self.__collection = collection
         self.__id = id
diff --git a/bson/decimal128.py b/bson/decimal128.py
index 016afb5eb8..7480f94d0a 100644
--- a/bson/decimal128.py
+++ b/bson/decimal128.py
@@ -20,8 +20,11 @@
 
 import decimal
 import struct
+from decimal import Decimal
 from typing import Any, Sequence, Tuple, Type, Union
 
+from bson.codec_options import TypeDecoder, TypeEncoder
+
 _PACK_64 = struct.Struct("<Q")
 _UNPACK_64 = struct.Struct("<Q").unpack
@@ -40,6 +43,40 @@
+class DecimalEncoder(TypeEncoder):
+    """Converts Python :class:`decimal.Decimal` to BSON :class:`Decimal128`.
+
+    For example::
+        opts = CodecOptions(type_registry=TypeRegistry([DecimalEncoder()]))
+        bson.encode(doc, codec_options=opts)
+
+    .. versionadded:: 4.15
+    """
+
+    @property
+    def python_type(self) -> Type[Decimal]:
+        return Decimal
+
+    def transform_python(self, value: Any) -> Decimal128:
+        return Decimal128(value)
+
+
+class DecimalDecoder(TypeDecoder):
+    """Converts BSON :class:`Decimal128` to Python :class:`decimal.Decimal`.
+
+    For example::
+        opts = CodecOptions(type_registry=TypeRegistry([DecimalDecoder()]))
+        bson.decode(data, codec_options=opts)
+
+    .. versionadded:: 4.15
+    """
+
+    @property
+    def bson_type(self) -> Type[Decimal128]:
+        return Decimal128
+
+    def transform_bson(self, value: Any) -> decimal.Decimal:
+        return value.to_decimal()
+
+
 def create_decimal128_context() -> decimal.Context:
     """Returns an instance of :class:`decimal.Context` appropriate
     for working with IEEE-754 128-bit decimal floating point values.
@@ -277,7 +316,7 @@ def from_bid(cls: Type[Decimal128], value: bytes) -> Decimal128:
             point in Binary Integer Decimal (BID) format).
         """
         if not isinstance(value, bytes):
-            raise TypeError("value must be an instance of bytes")
+            raise TypeError(f"value must be an instance of bytes, not {type(value)}")
         if len(value) != 16:
             raise ValueError("value must be exactly 16 bytes")
         return cls((_UNPACK_64(value[8:])[0], _UNPACK_64(value[:8])[0]))  # type: ignore
diff --git a/bson/errors.py b/bson/errors.py
index a3699e704c..ffc117f7ac 100644
--- a/bson/errors.py
+++ b/bson/errors.py
@@ -15,6 +15,8 @@
 """Exceptions raised by the BSON package."""
 from __future__ import annotations
 
+from typing import Any, Optional
+
 
 class BSONError(Exception):
     """Base class for all BSON exceptions."""
@@ -31,6 +33,17 @@ class InvalidStringData(BSONError):
 class InvalidDocument(BSONError):
     """Raised when trying to create a BSON object from an invalid document."""
 
+    def __init__(self, message: str, document: Optional[Any] = None) -> None:
+        super().__init__(message)
+        self._document = document
+
+    @property
+    def document(self) -> Any:
+        """The invalid document that caused the error.
+
+        .. versionadded:: 4.16"""
+        return self._document
+
 
 class InvalidId(BSONError):
     """Raised when trying to create an ObjectId from invalid data."""
diff --git a/bson/json_util.py b/bson/json_util.py
index ecae103b55..8151226a26 100644
--- a/bson/json_util.py
+++ b/bson/json_util.py
@@ -281,7 +281,7 @@ def __init__(self, *args: Any, **kwargs: Any):
         return DatetimeMS objects when the underlying datetime is
         out-of-range and 'datetime_clamp' to clamp to the minimum and
         maximum possible datetimes. Defaults to 'datetime'. See
-        :ref:`handling-out-of-range-datetimes` for details.
+        `handling out of range datetimes `_ for details.
:param args: arguments to :class:`~bson.codec_options.CodecOptions` :param kwargs: arguments to :class:`~bson.codec_options.CodecOptions` @@ -844,7 +844,7 @@ def _encode_binary(data: bytes, subtype: int, json_options: JSONOptions) -> Any: return {"$binary": {"base64": base64.b64encode(data).decode(), "subType": "%02x" % subtype}} -def _encode_datetimems(obj: Any, json_options: JSONOptions) -> dict: +def _encode_datetimems(obj: Any, json_options: JSONOptions) -> dict: # type: ignore[type-arg] if ( json_options.datetime_representation == DatetimeRepresentation.ISO8601 and 0 <= int(obj) <= _MAX_UTC_MS @@ -855,7 +855,7 @@ def _encode_datetimems(obj: Any, json_options: JSONOptions) -> dict: return {"$date": {"$numberLong": str(int(obj))}} -def _encode_code(obj: Code, json_options: JSONOptions) -> dict: +def _encode_code(obj: Code, json_options: JSONOptions) -> dict: # type: ignore[type-arg] if obj.scope is None: return {"$code": str(obj)} else: @@ -873,7 +873,7 @@ def _encode_noop(obj: Any, dummy0: Any) -> Any: return obj -def _encode_regex(obj: Any, json_options: JSONOptions) -> dict: +def _encode_regex(obj: Any, json_options: JSONOptions) -> dict: # type: ignore[type-arg] flags = "" if obj.flags & re.IGNORECASE: flags += "i" @@ -918,7 +918,7 @@ def _encode_float(obj: float, json_options: JSONOptions) -> Any: return obj -def _encode_datetime(obj: datetime.datetime, json_options: JSONOptions) -> dict: +def _encode_datetime(obj: datetime.datetime, json_options: JSONOptions) -> dict: # type: ignore[type-arg] if json_options.datetime_representation == DatetimeRepresentation.ISO8601: if not obj.tzinfo: obj = obj.replace(tzinfo=utc) @@ -941,15 +941,15 @@ def _encode_datetime(obj: datetime.datetime, json_options: JSONOptions) -> dict: return {"$date": {"$numberLong": str(millis)}} -def _encode_bytes(obj: bytes, json_options: JSONOptions) -> dict: +def _encode_bytes(obj: bytes, json_options: JSONOptions) -> dict: # type: ignore[type-arg] return _encode_binary(obj, 0, json_options) -def _encode_binary_obj(obj: Binary, json_options: JSONOptions) -> dict: +def _encode_binary_obj(obj: Binary, json_options: JSONOptions) -> dict: # type: ignore[type-arg] return _encode_binary(obj, obj.subtype, json_options) -def _encode_uuid(obj: uuid.UUID, json_options: JSONOptions) -> dict: +def _encode_uuid(obj: uuid.UUID, json_options: JSONOptions) -> dict: # type: ignore[type-arg] if json_options.strict_uuid: binval = Binary.from_uuid(obj, uuid_representation=json_options.uuid_representation) return _encode_binary(binval, binval.subtype, json_options) @@ -957,27 +957,27 @@ def _encode_uuid(obj: uuid.UUID, json_options: JSONOptions) -> dict: return {"$uuid": obj.hex} -def _encode_objectid(obj: ObjectId, dummy0: Any) -> dict: +def _encode_objectid(obj: ObjectId, dummy0: Any) -> dict: # type: ignore[type-arg] return {"$oid": str(obj)} -def _encode_timestamp(obj: Timestamp, dummy0: Any) -> dict: +def _encode_timestamp(obj: Timestamp, dummy0: Any) -> dict: # type: ignore[type-arg] return {"$timestamp": {"t": obj.time, "i": obj.inc}} -def _encode_decimal128(obj: Timestamp, dummy0: Any) -> dict: +def _encode_decimal128(obj: Timestamp, dummy0: Any) -> dict: # type: ignore[type-arg] return {"$numberDecimal": str(obj)} -def _encode_dbref(obj: DBRef, json_options: JSONOptions) -> dict: +def _encode_dbref(obj: DBRef, json_options: JSONOptions) -> dict: # type: ignore[type-arg] return _json_convert(obj.as_doc(), json_options=json_options) -def _encode_minkey(dummy0: Any, dummy1: Any) -> dict: +def _encode_minkey(dummy0: Any, 
dummy1: Any) -> dict: # type: ignore[type-arg] return {"$minKey": 1} -def _encode_maxkey(dummy0: Any, dummy1: Any) -> dict: +def _encode_maxkey(dummy0: Any, dummy1: Any) -> dict: # type: ignore[type-arg] return {"$maxKey": 1} @@ -985,7 +985,7 @@ def _encode_maxkey(dummy0: Any, dummy1: Any) -> dict: # Each encoder function's signature is: # - obj: a Python data type, e.g. a Python int for _encode_int # - json_options: a JSONOptions -_ENCODERS: dict[Type, Callable[[Any, JSONOptions], Any]] = { +_ENCODERS: dict[Type, Callable[[Any, JSONOptions], Any]] = { # type: ignore[type-arg] bool: _encode_noop, bytes: _encode_bytes, datetime.datetime: _encode_datetime, @@ -1056,7 +1056,7 @@ def _get_datetime_size(obj: datetime.datetime) -> int: return 5 + len(str(obj.time())) -def _get_regex_size(obj: Regex) -> int: +def _get_regex_size(obj: Regex) -> int: # type: ignore[type-arg] return 18 + len(obj.pattern) diff --git a/bson/raw_bson.py b/bson/raw_bson.py index 2ce53143c2..9ead0765dc 100644 --- a/bson/raw_bson.py +++ b/bson/raw_bson.py @@ -60,7 +60,9 @@ def _inflate_bson( - bson_bytes: bytes, codec_options: CodecOptions[RawBSONDocument], raw_array: bool = False + bson_bytes: bytes | memoryview, + codec_options: CodecOptions[RawBSONDocument], + raw_array: bool = False, ) -> dict[str, Any]: """Inflates the top level fields of a BSON document. @@ -85,7 +87,9 @@ class RawBSONDocument(Mapping[str, Any]): __codec_options: CodecOptions[RawBSONDocument] def __init__( - self, bson_bytes: bytes, codec_options: Optional[CodecOptions[RawBSONDocument]] = None + self, + bson_bytes: bytes | memoryview, + codec_options: Optional[CodecOptions[RawBSONDocument]] = None, ) -> None: """Create a new :class:`RawBSONDocument` @@ -135,7 +139,7 @@ class from the standard library so it can be used like a read-only _get_object_size(bson_bytes, 0, len(bson_bytes)) @property - def raw(self) -> bytes: + def raw(self) -> bytes | memoryview: """The raw BSON bytes composing this document.""" return self.__raw @@ -153,7 +157,7 @@ def __inflated(self) -> Mapping[str, Any]: @staticmethod def _inflate_bson( - bson_bytes: bytes, codec_options: CodecOptions[RawBSONDocument] + bson_bytes: bytes | memoryview, codec_options: CodecOptions[RawBSONDocument] ) -> Mapping[str, Any]: return _inflate_bson(bson_bytes, codec_options) @@ -180,7 +184,7 @@ class _RawArrayBSONDocument(RawBSONDocument): @staticmethod def _inflate_bson( - bson_bytes: bytes, codec_options: CodecOptions[RawBSONDocument] + bson_bytes: bytes | memoryview, codec_options: CodecOptions[RawBSONDocument] ) -> Mapping[str, Any]: return _inflate_bson(bson_bytes, codec_options, raw_array=True) diff --git a/bson/son.py b/bson/son.py index 24275fce16..8fd4f95cd2 100644 --- a/bson/son.py +++ b/bson/son.py @@ -143,7 +143,7 @@ def popitem(self) -> Tuple[_Key, _Value]: del self[k] return (k, v) - def update(self, other: Optional[Any] = None, **kwargs: _Value) -> None: # type: ignore[override] + def update(self, other: Optional[Any] = None, **kwargs: _Value) -> None: # Make progressively weaker assumptions about "other" if other is None: pass diff --git a/bson/timestamp.py b/bson/timestamp.py index 3e76e7baad..949bd7b36c 100644 --- a/bson/timestamp.py +++ b/bson/timestamp.py @@ -58,9 +58,9 @@ def __init__(self, time: Union[datetime.datetime, int], inc: int) -> None: time = time - offset time = int(calendar.timegm(time.timetuple())) if not isinstance(time, int): - raise TypeError("time must be an instance of int") + raise TypeError(f"time must be an instance of int, not {type(time)}") if not 
isinstance(inc, int): - raise TypeError("inc must be an instance of int") + raise TypeError(f"inc must be an instance of int, not {type(inc)}") if not 0 <= time < UPPERBOUND: raise ValueError("time must be contained in [0, 2**32)") if not 0 <= inc < UPPERBOUND: diff --git a/bson/typings.py b/bson/typings.py index b80c661454..5913860556 100644 --- a/bson/typings.py +++ b/bson/typings.py @@ -28,4 +28,4 @@ _DocumentOut = Union[MutableMapping[str, Any], "RawBSONDocument"] _DocumentType = TypeVar("_DocumentType", bound=Mapping[str, Any]) _DocumentTypeArg = TypeVar("_DocumentTypeArg", bound=Mapping[str, Any]) -_ReadableBuffer = Union[bytes, memoryview, "mmap", "array"] +_ReadableBuffer = Union[bytes, memoryview, bytearray, "mmap", "array"] # type: ignore[type-arg] diff --git a/doc/api/bson/binary.rst b/doc/api/bson/binary.rst index 084fd02d50..7084a45b4e 100644 --- a/doc/api/bson/binary.rst +++ b/doc/api/bson/binary.rst @@ -16,6 +16,7 @@ .. autodata:: MD5_SUBTYPE .. autodata:: COLUMN_SUBTYPE .. autodata:: SENSITIVE_SUBTYPE + .. autodata:: VECTOR_SUBTYPE .. autodata:: USER_DEFINED_SUBTYPE .. autoclass:: UuidRepresentation diff --git a/doc/api/gridfs/asynchronous/grid_file.rst b/doc/api/gridfs/asynchronous/grid_file.rst new file mode 100644 index 0000000000..fbf34adc8a --- /dev/null +++ b/doc/api/gridfs/asynchronous/grid_file.rst @@ -0,0 +1,19 @@ +:mod:`grid_file` -- Async tools for representing files stored in GridFS +======================================================================= + +.. automodule:: gridfs.asynchronous.grid_file + :synopsis: Async tools for representing files stored in GridFS + + .. autoclass:: AsyncGridIn + :members: + + .. autoattribute:: _id + + .. autoclass:: AsyncGridOut + :members: + + .. autoattribute:: _id + .. automethod:: __aiter__ + + .. autoclass:: AsyncGridOutCursor + :members: diff --git a/doc/api/gridfs/asynchronous/index.rst b/doc/api/gridfs/asynchronous/index.rst new file mode 100644 index 0000000000..7b6ebb28b8 --- /dev/null +++ b/doc/api/gridfs/asynchronous/index.rst @@ -0,0 +1,14 @@ +:mod:`gridfs async` -- Async tools for working with GridFS +========================================================== + + +.. automodule:: gridfs.asynchronous + :synopsis: Async tools for working with GridFS + :members: AsyncGridFS, AsyncGridFSBucket + +Sub-modules: + +.. toctree:: + :maxdepth: 2 + + grid_file diff --git a/doc/api/gridfs/index.rst b/doc/api/gridfs/index.rst index b81fbde782..190c561d05 100644 --- a/doc/api/gridfs/index.rst +++ b/doc/api/gridfs/index.rst @@ -8,7 +8,8 @@ Sub-modules: .. toctree:: - :maxdepth: 2 + :maxdepth: 3 + asynchronous/index errors grid_file diff --git a/doc/api/index.rst b/doc/api/index.rst index 437f2cc6a6..339f5843bf 100644 --- a/doc/api/index.rst +++ b/doc/api/index.rst @@ -3,7 +3,7 @@ API Documentation The PyMongo distribution contains three top-level packages for interacting with MongoDB. :mod:`bson` is an implementation of the -`BSON format `_, :mod:`pymongo` is a +`BSON format `_, :mod:`pymongo` is a full-featured driver for MongoDB, and :mod:`gridfs` is a set of tools for working with the `GridFS `_ storage diff --git a/doc/api/pymongo/asynchronous/change_stream.rst b/doc/api/pymongo/asynchronous/change_stream.rst index df4f5dee41..1b506fdb55 100644 --- a/doc/api/pymongo/asynchronous/change_stream.rst +++ b/doc/api/pymongo/asynchronous/change_stream.rst @@ -1,10 +1,6 @@ :mod:`change_stream` -- Watch changes on a collection, database, or cluster =========================================================================== -.. 
warning:: This API is currently in beta, meaning the classes, methods, - and behaviors described within may change before the full release. - If you come across any bugs during your use of this API, - please file a Jira ticket in the "Python Driver" project at https://jira.mongodb.org/browse/PYTHON. .. automodule:: pymongo.asynchronous.change_stream :members: diff --git a/doc/api/pymongo/asynchronous/client_session.rst b/doc/api/pymongo/asynchronous/client_session.rst index c4bbd8edd2..d8403325d7 100644 --- a/doc/api/pymongo/asynchronous/client_session.rst +++ b/doc/api/pymongo/asynchronous/client_session.rst @@ -1,10 +1,6 @@ :mod:`client_session` -- Logical sessions for sequential operations =================================================================== -.. warning:: This API is currently in beta, meaning the classes, methods, - and behaviors described within may change before the full release. - If you come across any bugs during your use of this API, - please file a Jira ticket in the "Python Driver" project at https://jira.mongodb.org/browse/PYTHON. .. automodule:: pymongo.asynchronous.client_session :members: diff --git a/doc/api/pymongo/asynchronous/collection.rst b/doc/api/pymongo/asynchronous/collection.rst index ce1fe3ca04..779295ced1 100644 --- a/doc/api/pymongo/asynchronous/collection.rst +++ b/doc/api/pymongo/asynchronous/collection.rst @@ -1,10 +1,6 @@ :mod:`collection` -- Collection level operations ================================================ -.. warning:: This API is currently in beta, meaning the classes, methods, - and behaviors described within may change before the full release. - If you come across any bugs during your use of this API, - please file a Jira ticket in the "Python Driver" project at https://jira.mongodb.org/browse/PYTHON. .. automodule:: pymongo.asynchronous.collection :synopsis: Collection level operations diff --git a/doc/api/pymongo/asynchronous/command_cursor.rst b/doc/api/pymongo/asynchronous/command_cursor.rst index 7058563eee..1f94c6e525 100644 --- a/doc/api/pymongo/asynchronous/command_cursor.rst +++ b/doc/api/pymongo/asynchronous/command_cursor.rst @@ -1,10 +1,6 @@ :mod:`command_cursor` -- Tools for iterating over MongoDB command results ========================================================================= -.. warning:: This API is currently in beta, meaning the classes, methods, - and behaviors described within may change before the full release. - If you come across any bugs during your use of this API, - please file a Jira ticket in the "Python Driver" project at https://jira.mongodb.org/browse/PYTHON. .. automodule:: pymongo.asynchronous.command_cursor :synopsis: Tools for iterating over MongoDB command results diff --git a/doc/api/pymongo/asynchronous/cursor.rst b/doc/api/pymongo/asynchronous/cursor.rst index d357b84514..f511734de4 100644 --- a/doc/api/pymongo/asynchronous/cursor.rst +++ b/doc/api/pymongo/asynchronous/cursor.rst @@ -1,10 +1,6 @@ :mod:`cursor` -- Tools for iterating over MongoDB query results =============================================================== -.. warning:: This API is currently in beta, meaning the classes, methods, - and behaviors described within may change before the full release. - If you come across any bugs during your use of this API, - please file a Jira ticket in the "Python Driver" project at https://jira.mongodb.org/browse/PYTHON. .. 
automodule:: pymongo.asynchronous.cursor :synopsis: Tools for iterating over MongoDB query results diff --git a/doc/api/pymongo/asynchronous/database.rst b/doc/api/pymongo/asynchronous/database.rst index b45fe457e7..7b043ab0d1 100644 --- a/doc/api/pymongo/asynchronous/database.rst +++ b/doc/api/pymongo/asynchronous/database.rst @@ -1,10 +1,6 @@ :mod:`database` -- Database level operations ============================================ -.. warning:: This API is currently in beta, meaning the classes, methods, - and behaviors described within may change before the full release. - If you come across any bugs during your use of this API, - please file a Jira ticket in the "Python Driver" project at https://jira.mongodb.org/browse/PYTHON. .. automodule:: pymongo.asynchronous.database :synopsis: Database level operations diff --git a/doc/api/pymongo/asynchronous/index.rst b/doc/api/pymongo/asynchronous/index.rst index 1b41fb8222..b7fc985415 100644 --- a/doc/api/pymongo/asynchronous/index.rst +++ b/doc/api/pymongo/asynchronous/index.rst @@ -1,10 +1,6 @@ :mod:`pymongo async` -- Async Python driver for MongoDB ======================================================= -.. warning:: This API is currently in beta, meaning the classes, methods, - and behaviors described within may change before the full release. - If you come across any bugs during your use of this API, - please file a Jira ticket in the "Python Driver" project at https://jira.mongodb.org/browse/PYTHON. .. automodule:: pymongo.asynchronous :synopsis: Asynchronous Python driver for MongoDB diff --git a/doc/api/pymongo/asynchronous/mongo_client.rst b/doc/api/pymongo/asynchronous/mongo_client.rst index d0729da78b..899ca687d5 100644 --- a/doc/api/pymongo/asynchronous/mongo_client.rst +++ b/doc/api/pymongo/asynchronous/mongo_client.rst @@ -1,10 +1,6 @@ :mod:`mongo_client` -- Tools for connecting to MongoDB ====================================================== -.. warning:: This API is currently in beta, meaning the classes, methods, - and behaviors described within may change before the full release. - If you come across any bugs during your use of this API, - please file a Jira ticket in the "Python Driver" project at https://jira.mongodb.org/browse/PYTHON. .. automodule:: pymongo.asynchronous.mongo_client :synopsis: Tools for connecting to MongoDB diff --git a/doc/async-tutorial.rst b/doc/async-tutorial.rst deleted file mode 100644 index 2ccf011d8e..0000000000 --- a/doc/async-tutorial.rst +++ /dev/null @@ -1,422 +0,0 @@ -Async Tutorial -============== - -.. warning:: This API is currently in beta, meaning the classes, methods, - and behaviors described within may change before the full release. - If you come across any bugs during your use of this API, - please file a Jira ticket in the "Python Driver" project at https://jira.mongodb.org/browse/PYTHON. - -.. code-block:: pycon - - from pymongo import AsyncMongoClient - - client = AsyncMongoClient() - await client.drop_database("test-database") - -This tutorial is intended as an introduction to working with -**MongoDB** and **PyMongo** using the asynchronous API. - -Prerequisites -------------- -Before we start, make sure that you have the **PyMongo** distribution -:doc:`installed `. In the Python shell, the following -should run without raising an exception: - -.. code-block:: pycon - - >>> import pymongo - -This tutorial also assumes that a MongoDB instance is running on the -default host and port. Assuming you have `downloaded and installed -`_ MongoDB, you -can start it like so: - -.. 
code-block:: bash - - $ mongod - -Making a Connection with AsyncMongoClient ------------------------------------------ -The first step when working with **PyMongo** is to create a -:class:`~pymongo.asynchronous.mongo_client.AsyncMongoClient` to the running **mongod** -instance. Doing so is easy: - -.. code-block:: pycon - - >>> from pymongo import AsyncMongoClient - >>> client = AsyncMongoClient() - -The above code will connect on the default host and port. We can also -specify the host and port explicitly, as follows: - -.. code-block:: pycon - - >>> client = AsyncMongoClient("localhost", 27017) - -Or use the MongoDB URI format: - -.. code-block:: pycon - - >>> client = AsyncMongoClient("mongodb://localhost:27017/") - -By default, :class:`~pymongo.asynchronous.mongo_client.AsyncMongoClient` only connects to the database on its first operation. -To explicitly connect before performing an operation, use :meth:`~pymongo.asynchronous.mongo_client.AsyncMongoClient.aconnect`: - -.. code-block:: pycon - - >>> client = await AsyncMongoClient().aconnect() - -Getting a Database ------------------- -A single instance of MongoDB can support multiple independent -`databases `_. When -working with PyMongo you access databases using attribute style access -on :class:`~pymongo.asynchronous.mongo_client.AsyncMongoClient` instances: - -.. code-block:: pycon - - >>> db = client.test_database - -If your database name is such that using attribute style access won't -work (like ``test-database``), you can use dictionary style access -instead: - -.. code-block:: pycon - - >>> db = client["test-database"] - -Getting a Collection --------------------- -A `collection `_ is a -group of documents stored in MongoDB, and can be thought of as roughly -the equivalent of a table in a relational database. Getting a -collection in PyMongo works the same as getting a database: - -.. code-block:: pycon - - >>> collection = db.test_collection - -or (using dictionary style access): - -.. code-block:: pycon - - >>> collection = db["test-collection"] - -An important note about collections (and databases) in MongoDB is that -they are created lazily - none of the above commands have actually -performed any operations on the MongoDB server. Collections and -databases are created when the first document is inserted into them. - -Documents ---------- -Data in MongoDB is represented (and stored) using JSON-style -documents. In PyMongo we use dictionaries to represent documents. As -an example, the following dictionary might be used to represent a blog -post: - -.. code-block:: pycon - - >>> import datetime - >>> post = { - ... "author": "Mike", - ... "text": "My first blog post!", - ... "tags": ["mongodb", "python", "pymongo"], - ... "date": datetime.datetime.now(tz=datetime.timezone.utc), - ... } - -Note that documents can contain native Python types (like -:class:`datetime.datetime` instances) which will be automatically -converted to and from the appropriate `BSON -`_ types. - -Inserting a Document --------------------- -To insert a document into a collection we can use the -:meth:`~pymongo.asynchronous.collection.AsyncCollection.insert_one` method: - -.. code-block:: pycon - - >>> posts = db.posts - >>> post_id = (await posts.insert_one(post)).inserted_id - >>> post_id - ObjectId('...') - -When a document is inserted a special key, ``"_id"``, is automatically -added if the document doesn't already contain an ``"_id"`` key. The value -of ``"_id"`` must be unique across the -collection. 
:meth:`~pymongo.asynchronous.collection.AsyncCollection.insert_one` returns an -instance of :class:`~pymongo.results.InsertOneResult`. For more information -on ``"_id"``, see the `documentation on _id -`_. - -After inserting the first document, the *posts* collection has -actually been created on the server. We can verify this by listing all -of the collections in our database: - -.. code-block:: pycon - - >>> await db.list_collection_names() - ['posts'] - -Getting a Single Document With :meth:`~pymongo.asynchronous.collection.AsyncCollection.find_one` ------------------------------------------------------------------------------------------------- -The most basic type of query that can be performed in MongoDB is -:meth:`~pymongo.asynchronous.collection.AsyncCollection.find_one`. This method returns a -single document matching a query (or ``None`` if there are no -matches). It is useful when you know there is only one matching -document, or are only interested in the first match. Here we use -:meth:`~pymongo.asynchronous.collection.AsyncCollection.find_one` to get the first -document from the posts collection: - -.. code-block:: pycon - - >>> import pprint - >>> pprint.pprint(await posts.find_one()) - {'_id': ObjectId('...'), - 'author': 'Mike', - 'date': datetime.datetime(...), - 'tags': ['mongodb', 'python', 'pymongo'], - 'text': 'My first blog post!'} - -The result is a dictionary matching the one that we inserted previously. - -.. note:: The returned document contains an ``"_id"``, which was - automatically added on insert. - -:meth:`~pymongo.asynchronous.collection.AsyncCollection.find_one` also supports querying -on specific elements that the resulting document must match. To limit -our results to a document with author "Mike" we do: - -.. code-block:: pycon - - >>> pprint.pprint(await posts.find_one({"author": "Mike"})) - {'_id': ObjectId('...'), - 'author': 'Mike', - 'date': datetime.datetime(...), - 'tags': ['mongodb', 'python', 'pymongo'], - 'text': 'My first blog post!'} - -If we try with a different author, like "Eliot", we'll get no result: - -.. code-block:: pycon - - >>> await posts.find_one({"author": "Eliot"}) - >>> - -.. _async-querying-by-objectid: - -Querying By ObjectId --------------------- -We can also find a post by its ``_id``, which in our example is an ObjectId: - -.. code-block:: pycon - - >>> post_id - ObjectId(...) - >>> pprint.pprint(await posts.find_one({"_id": post_id})) - {'_id': ObjectId('...'), - 'author': 'Mike', - 'date': datetime.datetime(...), - 'tags': ['mongodb', 'python', 'pymongo'], - 'text': 'My first blog post!'} - -Note that an ObjectId is not the same as its string representation: - -.. code-block:: pycon - - >>> post_id_as_str = str(post_id) - >>> await posts.find_one({"_id": post_id_as_str}) # No result - >>> - -A common task in web applications is to get an ObjectId from the -request URL and find the matching document. It's necessary in this -case to **convert the ObjectId from a string** before passing it to -``find_one``:: - - from bson.objectid import ObjectId - - # The web framework gets post_id from the URL and passes it as a string - async def get(post_id): - # Convert from string to ObjectId: - document = await client.db.collection.find_one({'_id': ObjectId(post_id)}) - -.. seealso:: :ref:`web-application-querying-by-objectid` - -Bulk Inserts ------------- -In order to make querying a little more interesting, let's insert a -few more documents. 
In addition to inserting a single document, we can -also perform *bulk insert* operations, by passing a list as the -first argument to :meth:`~pymongo.asynchronous.collection.AsyncCollection.insert_many`. -This will insert each document in the list, sending only a single -command to the server: - -.. code-block:: pycon - - >>> new_posts = [ - ... { - ... "author": "Mike", - ... "text": "Another post!", - ... "tags": ["bulk", "insert"], - ... "date": datetime.datetime(2009, 11, 12, 11, 14), - ... }, - ... { - ... "author": "Eliot", - ... "title": "MongoDB is fun", - ... "text": "and pretty easy too!", - ... "date": datetime.datetime(2009, 11, 10, 10, 45), - ... }, - ... ] - >>> result = await posts.insert_many(new_posts) - >>> result.inserted_ids - [ObjectId('...'), ObjectId('...')] - -There are a couple of interesting things to note about this example: - - - The result from :meth:`~pymongo.asynchronous.collection.AsyncCollection.insert_many` now - returns two :class:`~bson.objectid.ObjectId` instances, one for - each inserted document. - - ``new_posts[1]`` has a different "shape" than the other posts - - there is no ``"tags"`` field and we've added a new field, - ``"title"``. This is what we mean when we say that MongoDB is - *schema-free*. - -Querying for More Than One Document ------------------------------------ -To get more than a single document as the result of a query we use the -:meth:`~pymongo.asynchronous.collection.AsyncCollection.find` -method. :meth:`~pymongo.asynchronous.collection.AsyncCollection.find` returns a -:class:`~pymongo.asynchronous.cursor.AsyncCursor` instance, which allows us to iterate -over all matching documents. For example, we can iterate over every -document in the ``posts`` collection: - -.. code-block:: pycon - - >>> async for post in posts.find(): - ... pprint.pprint(post) - ... - {'_id': ObjectId('...'), - 'author': 'Mike', - 'date': datetime.datetime(...), - 'tags': ['mongodb', 'python', 'pymongo'], - 'text': 'My first blog post!'} - {'_id': ObjectId('...'), - 'author': 'Mike', - 'date': datetime.datetime(...), - 'tags': ['bulk', 'insert'], - 'text': 'Another post!'} - {'_id': ObjectId('...'), - 'author': 'Eliot', - 'date': datetime.datetime(...), - 'text': 'and pretty easy too!', - 'title': 'MongoDB is fun'} - -Just like we did with :meth:`~pymongo.asynchronous.collection.AsyncCollection.find_one`, -we can pass a document to :meth:`~pymongo.asynchronous.collection.AsyncCollection.find` -to limit the returned results. Here, we get only those documents whose -author is "Mike": - -.. code-block:: pycon - - >>> async for post in posts.find({"author": "Mike"}): - ... pprint.pprint(post) - ... - {'_id': ObjectId('...'), - 'author': 'Mike', - 'date': datetime.datetime(...), - 'tags': ['mongodb', 'python', 'pymongo'], - 'text': 'My first blog post!'} - {'_id': ObjectId('...'), - 'author': 'Mike', - 'date': datetime.datetime(...), - 'tags': ['bulk', 'insert'], - 'text': 'Another post!'} - -Counting --------- -If we just want to know how many documents match a query we can -perform a :meth:`~pymongo.asynchronous.collection.AsyncCollection.count_documents` operation -instead of a full query. We can get a count of all of the documents -in a collection: - -.. code-block:: pycon - - >>> await posts.count_documents({}) - 3 - -or just of those documents that match a specific query: - -.. code-block:: pycon - - >>> await posts.count_documents({"author": "Mike"}) - 2 - -Range Queries -------------- -MongoDB supports many different types of `advanced queries -`_. 
As an -example, let's perform a query where we limit results to posts older -than a certain date, but also sort the results by author: - -.. code-block:: pycon - - >>> d = datetime.datetime(2009, 11, 12, 12) - >>> async for post in posts.find({"date": {"$lt": d}}).sort("author"): - ... pprint.pprint(post) - ... - {'_id': ObjectId('...'), - 'author': 'Eliot', - 'date': datetime.datetime(...), - 'text': 'and pretty easy too!', - 'title': 'MongoDB is fun'} - {'_id': ObjectId('...'), - 'author': 'Mike', - 'date': datetime.datetime(...), - 'tags': ['bulk', 'insert'], - 'text': 'Another post!'} - -Here we use the special ``"$lt"`` operator to do a range query, and -also call :meth:`~pymongo.asynchronous.cursor.AsyncCursor.sort` to sort the results -by author. - -Indexing -------- - -Adding indexes can help accelerate certain queries and can also add additional -functionality to querying and storing documents. In this example, we'll -demonstrate how to create a `unique index -`_ on a key that rejects -documents whose value for that key already exists in the index. - -First, we'll need to create the index: - -.. code-block:: pycon - - >>> result = await db.profiles.create_index([("user_id", pymongo.ASCENDING)], unique=True) - >>> sorted(list(await db.profiles.index_information())) - ['_id_', 'user_id_1'] - -Notice that we have two indexes now: one is the index on ``_id`` that MongoDB -creates automatically, and the other is the index on ``user_id`` we just -created. - -Now let's set up some user profiles: - -.. code-block:: pycon - - >>> user_profiles = [{"user_id": 211, "name": "Luke"}, {"user_id": 212, "name": "Ziltoid"}] - >>> result = await db.profiles.insert_many(user_profiles) - -The index prevents us from inserting a document whose ``user_id`` is already in -the collection: - -.. code-block:: pycon - - >>> new_profile = {"user_id": 213, "name": "Drew"} - >>> duplicate_profile = {"user_id": 212, "name": "Tommy"} - >>> result = await db.profiles.insert_one(new_profile) # This is fine. - >>> result = await db.profiles.insert_one(duplicate_profile) - Traceback (most recent call last): - DuplicateKeyError: E11000 duplicate key error index: test_database.profiles.$user_id_1 dup key: { : 212 } - -.. seealso:: The MongoDB documentation on `indexes `_ diff --git a/doc/atlas.rst b/doc/atlas.rst deleted file mode 100644 index 19ba9732f2..0000000000 --- a/doc/atlas.rst +++ /dev/null @@ -1,43 +0,0 @@ -Using PyMongo with MongoDB Atlas -================================ - -`Atlas `_ is MongoDB, Inc.'s hosted MongoDB as a -service offering. To connect to Atlas, pass the connection string provided by -Atlas to :class:`~pymongo.mongo_client.MongoClient`:: - - client = pymongo.MongoClient() - -Connections to Atlas require TLS/SSL. - -.. warning:: Industry best practices recommend, and some regulations require, - the use of TLS 1.1 or newer. Though no application changes are required for - PyMongo to make use of the newest protocols, some operating systems or - versions may not provide an OpenSSL version new enough to support them. - - Users of macOS older than 10.13 (High Sierra) will need to install Python - from `python.org`_, `homebrew`_, `macports`_, or another similar source. - - Users of Linux or other non-macOS Unix can check their OpenSSL version like - this:: - - $ openssl version - - If the version number is less than 1.0.1, support for TLS 1.1 or newer is not - available. Contact your operating system vendor for a solution or upgrade to - a newer distribution.
- - You can check your Python interpreter by installing the `requests`_ module - and executing the following command:: - - python -c "import requests; print(requests.get('https://www.howsmyssl.com/a/check', verify=False).json()['tls_version'])" - - You should see "TLS 1.X" where X is >= 1. - - You can read more about TLS versions and their security implications here: - - ``_ - -.. _python.org: https://www.python.org/downloads/ -.. _homebrew: https://brew.sh/ -.. _macports: https://www.macports.org/ -.. _requests: https://pypi.python.org/pypi/requests diff --git a/doc/changelog.rst b/doc/changelog.rst index d9e6cc3f5b..f3eb4f6f23 100644 --- a/doc/changelog.rst +++ b/doc/changelog.rst @@ -1,20 +1,313 @@ Changelog ========= -Changes in Version 4.11.0 (YYYY/MM/DD) +Changes in Version 4.16.0 (XXXX/XX/XX) -------------------------------------- -.. warning:: PyMongo 4.11 drops support for Python 3.8: Python 3.9+ or PyPy 3.9+ is now required. +PyMongo 4.16 brings a number of changes including: + +.. warning:: PyMongo 4.16 drops support for Python 3.9: Python 3.10+ is now required. + +- Dropped support for Python 3.9. +- Removed invalid documents from :class:`bson.errors.InvalidDocument` error messages because + including them may leak sensitive user data. + Instead, invalid documents are stored in :attr:`bson.errors.InvalidDocument.document`. +- PyMongo now requires ``dnspython>=2.6.1``, since ``dnspython`` 1.0 is no longer maintained and is incompatible with + Python 3.10+. The minimum version is ``2.6.1`` to account for `CVE-2023-29483 `_. +- Removed support for Eventlet. + Eventlet is actively being sunset by its maintainers and has compatibility issues with PyMongo's dnspython dependency. + +Changes in Version 4.15.3 (2025/10/07) +-------------------------------------- + +Version 4.15.3 is a bug fix release. + +- Fixed a memory leak when raising :class:`bson.errors.InvalidDocument` with C extensions. +- Fixed the return type of the :meth:`~pymongo.asynchronous.collection.AsyncCollection.distinct`, + :meth:`~pymongo.synchronous.collection.Collection.distinct`, :meth:`pymongo.asynchronous.cursor.AsyncCursor.distinct`, + and :meth:`pymongo.synchronous.cursor.Cursor.distinct` methods. + +Issues Resolved +............... + +See the `PyMongo 4.15.3 release notes in JIRA`_ for the list of resolved issues +in this release. + +.. _PyMongo 4.15.3 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=47293 + +Changes in Version 4.15.2 (2025/10/01) +-------------------------------------- + +Version 4.15.2 is a bug fix release. + +- Added wheels for Python 3.14 and 3.14t that were missing from the 4.15.0 release. Dropped the 3.13t wheel. + +Issues Resolved +............... + +See the `PyMongo 4.15.2 release notes in JIRA`_ for the list of resolved issues +in this release. + +.. _PyMongo 4.15.2 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=47186 + +Changes in Version 4.15.1 (2025/09/16) +-------------------------------------- + +Version 4.15.1 is a bug fix release. + +- Fixed a bug in :meth:`~pymongo.synchronous.encryption.ClientEncryption.encrypt` + and :meth:`~pymongo.asynchronous.encryption.AsyncClientEncryption.encrypt` + that would cause a ``TypeError`` when using ``pymongocrypt<1.16`` by passing + an unsupported ``type_opts`` parameter even if the Queryable Encryption text + queries beta was not used.
+ +- Fixed a bug in ``AsyncMongoClient`` that caused a ``ServerSelectionTimeoutError`` + when used with ``uvicorn``, ``FastAPI``, or ``uvloop``. + +Issues Resolved +............... + +See the `PyMongo 4.15.1 release notes in JIRA`_ for the list of resolved issues +in this release. + +.. _PyMongo 4.15.1 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=46486 + +Changes in Version 4.15.0 (2025/09/10) +-------------------------------------- + +PyMongo 4.15 brings a number of changes including: + +- Added :class:`~pymongo.encryption_options.TextOpts`, + :attr:`~pymongo.encryption.Algorithm.TEXTPREVIEW`, + :attr:`~pymongo.encryption.QueryType.PREFIXPREVIEW`, + :attr:`~pymongo.encryption.QueryType.SUFFIXPREVIEW`, + :attr:`~pymongo.encryption.QueryType.SUBSTRINGPREVIEW`, + as part of the experimental Queryable Encryption text queries beta. + ``pymongocrypt>=1.16`` is required for text query support. +- Added :class:`bson.decimal128.DecimalEncoder` and + :class:`bson.decimal128.DecimalDecoder` + to support encoding and decoding of BSON Decimal128 values to + decimal.Decimal values using the TypeRegistry API. +- Added support for Windows ``arm64`` wheels. + +Changes in Version 4.14.1 (2025/08/19) +-------------------------------------- + +Version 4.14.1 is a bug fix release. + +- Fixed a bug in ``MongoClient.append_metadata()`` and + ``AsyncMongoClient.append_metadata()`` + that allowed duplicate ``DriverInfo.name`` to be appended to the metadata. + +Issues Resolved +............... + +See the `PyMongo 4.14.1 release notes in JIRA`_ for the list of resolved issues +in this release. + +.. _PyMongo 4.14.1 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=45256 + +Changes in Version 4.14.0 (2025/08/06) +-------------------------------------- + +.. warning:: PyMongo 4.14 drops support for MongoDB 4.0. PyMongo now supports + MongoDB 4.2+. + +PyMongo 4.14 brings a number of changes including: + +- Dropped support for MongoDB 4.0. +- Added preliminary support for Python 3.14 and the free-threaded 3.14t. We do + not yet support the following with Python 3.14: + + - Subinterpreters (``concurrent.interpreters``) + - Free-threading with Encryption + - mod_wsgi + +- Removed experimental support for free-threading in Python 3.13. +- Added :attr:`bson.codec_options.TypeRegistry.codecs` and + :attr:`bson.codec_options.TypeRegistry.fallback_encoder` properties + to allow users to directly access the type codecs and fallback encoder for a + given :class:`bson.codec_options.TypeRegistry`. +- Added + :meth:`pymongo.asynchronous.mongo_client.AsyncMongoClient.append_metadata` and + :meth:`pymongo.mongo_client.MongoClient.append_metadata` to allow instantiated + MongoClients to send client metadata on demand. +- Improved performance of selecting a server with the Primary selector. +- Introduced a minor breaking change. When encoding + :class:`bson.binary.BinaryVector`, a ``ValueError`` will be raised if the + 'padding' metadata field is < 0 or > 7, or non-zero for any type other than + PACKED_BIT. +- Changed :meth:`~pymongo.uri_parser.parse_uri`'s ``options`` return value to be + type ``dict`` instead of ``_CaseInsensitiveDictionary``. + +Issues Resolved +............... + +See the `PyMongo 4.14 release notes in JIRA`_ for the list of resolved issues +in this release. + +..
_PyMongo 4.14 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=43041 + +Changes in Version 4.13.2 (2025/06/17) +-------------------------------------- + +Version 4.13.2 is a bug fix release. + +- Fixed a bug where ``AsyncMongoClient`` would block the event loop while creating new connections, + potentially significantly increasing latency for ongoing operations. + +Issues Resolved +............... + +See the `PyMongo 4.13.2 release notes in JIRA`_ for the list of resolved issues +in this release. + +.. _PyMongo 4.13.2 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=43937 + + +Changes in Version 4.13.1 (2025/06/10) +-------------------------------------- + +Version 4.13.1 is a bug fix release. + +- Fixed a bug that could raise ``ServerSelectionTimeoutError`` when using timeouts with ``AsyncMongoClient``. +- Fixed a bug that could raise ``NetworkTimeout`` errors on Windows. + +Issues Resolved +............... + +See the `PyMongo 4.13.1 release notes in JIRA`_ for the list of resolved issues +in this release. + +.. _PyMongo 4.13.1 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=43924 + +Changes in Version 4.13.0 (2025/05/14) +-------------------------------------- + +PyMongo 4.13 brings a number of changes including: + +- The asynchronous API is now stable and no longer in beta. + See the :mod:`pymongo.asynchronous` docs + or the `migration guide `_ for more information. +- Fixed a bug where :class:`pymongo.write_concern.WriteConcern` repr was not eval-able + when using ``w="majority"``. +- When padding is set, ignored bits in a BSON BinaryVector of PACKED_BIT dtype should be set to zero. + When encoding, this is enforced and is a breaking change. + It is not yet enforced when decoding, so reading from the database will not fail; however, a warning will be triggered. + From PyMongo 5.0, this rule will be enforced for both encoding and decoding. + +Issues Resolved +............... + +See the `PyMongo 4.13 release notes in JIRA`_ for the list of resolved issues +in this release. + +.. _PyMongo 4.13 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=42509 + +Changes in Version 4.12.1 (2025/04/29) +-------------------------------------- + +Version 4.12.1 is a bug fix release. + +- Fixed a bug that could raise ``UnboundLocalError`` when creating asynchronous connections over SSL. +- Fixed a bug causing SRV hostname validation to fail when resolver and resolved hostnames are identical with three domain levels. +- Fixed a bug that caused direct use of ``pymongo.uri_parser`` to raise an ``AttributeError``. +- Fixed a bug where clients created with connect=False and a "mongodb+srv://" connection string + could cause public ``pymongo.MongoClient`` and ``pymongo.AsyncMongoClient`` attributes (topology_description, + nodes, address, primary, secondaries, arbiters) to incorrectly return a Database, leading to type + errors such as: "NotImplementedError: Database objects do not implement truth value testing or bool()". +- Removed Eventlet testing against Python versions newer than 3.9 since + Eventlet is actively being sunset by its maintainers and has compatibility issues with PyMongo's dnspython dependency. +- Fixed a bug where MongoDB cluster topology changes could cause asynchronous operations to take much longer to complete + due to holding the Topology lock while closing stale connections.
+- Fixed a bug that would cause AsyncMongoClient to attempt to use PyOpenSSL when available, resulting in errors such as + "pymongo.errors.ServerSelectionTimeoutError: 'SSLContext' object has no attribute 'wrap_bio'". + +Issues Resolved +............... + +See the `PyMongo 4.12.1 release notes in JIRA`_ for the list of resolved issues +in this release. + +.. _PyMongo 4.12.1 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=43094 + +Changes in Version 4.12.0 (2025/04/08) +-------------------------------------- + +.. warning:: Driver support for MongoDB 4.0 reached end of life in April 2025. + PyMongo 4.12 will be the last release to support MongoDB 4.0. + +PyMongo 4.12 brings a number of changes including: + +- Support for configuring DEK cache lifetime via the ``key_expiration_ms`` argument to + :class:`~pymongo.encryption_options.AutoEncryptionOpts`. +- Support for $lookup in CSFLE and QE on MongoDB 8.1+. +- pymongocrypt>=1.13 is now required for `In-Use Encryption `_ support. +- Added :meth:`gridfs.asynchronous.grid_file.AsyncGridFSBucket.rename_by_name` and :meth:`gridfs.grid_file.GridFSBucket.rename_by_name` + for more performant renaming of a file with multiple revisions. +- Added :meth:`gridfs.asynchronous.grid_file.AsyncGridFSBucket.delete_by_name` and :meth:`gridfs.grid_file.GridFSBucket.delete_by_name` + for more performant deletion of a file with multiple revisions. +- AsyncMongoClient no longer performs DNS resolution for "mongodb+srv://" connection strings on creation. + To avoid blocking the asyncio loop, the resolution is now deferred until the client is first connected. +- Added index hinting support to the + :meth:`~pymongo.asynchronous.collection.AsyncCollection.distinct` and + :meth:`~pymongo.collection.Collection.distinct` commands. +- Deprecated the ``hedge`` parameter for + :class:`~pymongo.read_preferences.PrimaryPreferred`, + :class:`~pymongo.read_preferences.Secondary`, + :class:`~pymongo.read_preferences.SecondaryPreferred`, + :class:`~pymongo.read_preferences.Nearest`. Support for ``hedge`` will be removed in PyMongo 5.0. +- Removed PyOpenSSL support from the asynchronous API due to limitations of the CPython asyncio.Protocol SSL implementation. +- Allow valid SRV hostnames with fewer than 3 parts. + +Issues Resolved +............... + +See the `PyMongo 4.12 release notes in JIRA`_ for the list of resolved issues +in this release. + +.. _PyMongo 4.12 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=41916 + +Changes in Version 4.11.2 (2025/03/05) +-------------------------------------- + +Version 4.11.2 is a bug fix release. + +- Fixed a bug where :meth:`~pymongo.database.Database.command` would fail when attempting to run the bulkWrite command. + +Issues Resolved +............... + +See the `PyMongo 4.11.2 release notes in JIRA`_ for the list of resolved issues in this release. + +.. _PyMongo 4.11.2 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=42506 + +Changes in Version 4.11.1 (2025/02/10) +-------------------------------------- + +- Fixed support for prebuilt ``ppc64le`` and ``s390x`` wheels. + +Changes in Version 4.11.0 (2025/01/28) +-------------------------------------- + +.. warning:: PyMongo 4.11 drops support for Python 3.8 and PyPy 3.9: Python 3.9+ or PyPy 3.10+ is now required. .. warning:: PyMongo 4.11 drops support for MongoDB 3.6. PyMongo now supports MongoDB 4.0+.
Driver support for MongoDB 3.6 reached end of life in April 2024. +.. warning:: Driver support for MongoDB 4.0 reaches end of life in April 2025. + A future minor release of PyMongo will raise the minimum supported MongoDB Server version from 4.0 to 4.2. + This is in accordance with `MongoDB Software Lifecycle Schedules <https://www.mongodb.com/legal/support-policy/lifecycles>`_. + **Support for MongoDB Server 4.0 will be dropped in a future release!** +.. warning:: This version does not include wheels for ``ppc64le`` or ``s390x`` architectures; see `PYTHON-5058`_ for more information. PyMongo 4.11 brings a number of changes including: -- Dropped support for Python 3.8. +- Dropped support for Python 3.8 and PyPy 3.9. - Dropped support for MongoDB 3.6. - Dropped support for the MONGODB-CR authentication mechanism, which is no longer supported by MongoDB 4.0+. +- pymongocrypt>=1.12 is now required for `In-Use Encryption `_ support. - Added support for free-threaded Python with the GIL disabled. For more information see: `Free-threaded CPython `_. + We do not yet support free-threaded Python on Windows (`PYTHON-5027`_) or with In-Use Encryption (`PYTHON-5024`_). - :attr:`~pymongo.asynchronous.mongo_client.AsyncMongoClient.address` and :attr:`~pymongo.mongo_client.MongoClient.address` now correctly block when called on unconnected clients until either connection succeeds or a server selection timeout error is raised. @@ -41,6 +334,9 @@ See the `PyMongo 4.11 release notes in JIRA`_ for the list of resolved issues in this release. .. _PyMongo 4.11 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=40784 +.. _PYTHON-5027: https://jira.mongodb.org/browse/PYTHON-5027 +.. _PYTHON-5024: https://jira.mongodb.org/browse/PYTHON-5024 +.. _PYTHON-5058: https://jira.mongodb.org/browse/PYTHON-5058 Changes in Version 4.10.1 (2024/10/01) -------------------------------------- @@ -130,7 +426,7 @@ PyMongo 4.9 brings a number of improvements including: ``sparsity`` and ``trim_factor`` are now optional in :class:`~pymongo.encryption_options.RangeOpts`. - Added support for the "delegated" option for the KMIP ``master_key`` in :meth:`~pymongo.encryption.ClientEncryption.create_data_key`. -- pymongocrypt>=1.10 is now required for :ref:`In-Use Encryption` support. +- pymongocrypt>=1.10 is now required for `In-Use Encryption `_ support. - Added :meth:`~pymongo.cursor.Cursor.to_list` to :class:`~pymongo.cursor.Cursor`, :class:`~pymongo.command_cursor.CommandCursor`, :class:`~pymongo.asynchronous.cursor.AsyncCursor`, @@ -140,7 +436,7 @@ PyMongo 4.9 brings a number of improvements including: and :class:`~pymongo.asynchronous.mongo_client.AsyncMongoClient`, enabling users to perform insert, update, and delete operations against mixed namespaces in a minimized number of round trips. - Please see :doc:`examples/client_bulk` for more information. + Please see `Client Bulk Write `_ for more information. - Added support for the ``namespace`` parameter to the :class:`~pymongo.operations.InsertOne`, :class:`~pymongo.operations.ReplaceOne`, @@ -170,7 +466,7 @@ PyMongo 4.9 brings a number of improvements including: function-as-a-service (FaaS) like AWS Lambda, Google Cloud Functions, and Microsoft Azure Functions. On some FaaS systems, there is a ``fork()`` operation at function startup. By delaying the connection to the first operation, we avoid a deadlock. See - `Is PyMongo Fork-Safe`_ for more information. + `multiple forks `_ for more information.
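To make the ``to_list`` helper added in this release concrete, here is a minimal sketch (illustrative only; the ``test`` database and ``docs`` collection names are placeholders, not part of the release):

.. code-block:: pycon

   >>> from pymongo import MongoClient
   >>> client = MongoClient()
   >>> coll = client.test.docs
   >>> result = coll.insert_many([{"x": i} for i in range(3)])
   >>> coll.find({}, {"_id": 0}).to_list()
   [{'x': 0}, {'x': 1}, {'x': 2}]

On the asynchronous cursor classes, the equivalent call is awaited.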
Issues Resolved @@ -179,7 +475,6 @@ Issues Resolved See the `PyMongo 4.9 release notes in JIRA`_ for the list of resolved issues in this release. -.. _Is PyMongo Fork-Safe : https://www.mongodb.com/docs/languages/python/pymongo-driver/current/faq/#is-pymongo-fork-safe- .. _PyMongo 4.9 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=39940 @@ -278,10 +573,10 @@ PyMongo 4.7 brings a number of improvements including: using an OpenID Connect (OIDC) access token. The driver supports OIDC for workload identity, defined as an identity you assign to a software workload (such as an application, service, script, or container) to authenticate and access other services and resources. - Please see :doc:`examples/authentication` for more information. + Please see `Authentication `_ for more information. - Added support for Python's `native logging library `_, enabling developers to customize the verbosity of log messages for their applications. - Please see :doc:`examples/logging` for more information. + Please see `Logging `_ for more information. - Significantly improved the performance of encoding BSON documents to JSON. - Added support for named KMS providers for client side field level encryption. Previously supported KMS providers were only: aws, azure, gcp, kmip, and local. @@ -440,7 +735,7 @@ PyMongo 4.6 brings a number of improvements including: "mongodb://example.com?tls=true" is now a valid URI. - Fixed a bug where PyMongo would incorrectly promote all cursors to exhaust cursors when connected to load balanced MongoDB clusters or Serverless clusters. -- Added the :ref:`network-compression-example` documentation page. +- Added the `network compression `_ documentation page. - Added more timeout information to network errors. Issues Resolved @@ -465,7 +760,7 @@ PyMongo 4.5 brings a number of improvements including: - Added :meth:`~pymongo.database.Database.cursor_command` and :meth:`~pymongo.command_cursor.CommandCursor.try_next` to support executing an arbitrary command that returns a cursor. -- ``cryptography`` 2.5 or later is now required for :ref:`OCSP` support. +- ``cryptography`` 2.5 or later is now required for `OCSP `_ support. - Improved bson encoding and decoding performance by up to 134% (`PYTHON-3729`_, `PYTHON-3797`_, `PYTHON-3816`_, `PYTHON-3817`_, `PYTHON-3820`_, `PYTHON-3824`_, and `PYTHON-3846`_). .. warning:: PyMongo no longer supports PyPy3 versions older than 3.8. Users @@ -526,7 +821,7 @@ PyMongo 4.4 brings a number of improvements including: :class:`~pymongo.encryption_options.RangeOpts`, and :attr:`~pymongo.encryption.Algorithm.RANGEPREVIEW` as part of the experimental Queryable Encryption beta. -- pymongocrypt 1.6.0 or later is now required for :ref:`In-Use Encryption` support. MongoDB +- pymongocrypt 1.6.0 or later is now required for `In-Use Encryption `_ support. MongoDB Server 7.0 introduced a backwards-breaking change to the QE protocol. Users taking advantage of the Queryable Encryption beta must now upgrade to MongoDB 7.0+ and PyMongo 4.4+. @@ -554,9 +849,9 @@ Changes in Version 4.3.3 (2022/11/17) Version 4.3.3 documents support for the following: -- :ref:`CSFLE on-demand credentials` for cloud KMS providers. -- Authentication support for :ref:`EKS Clusters`. -- Added the :ref:`timeout-example` example page to improve the documentation +- `CSFLE on-demand credentials `_ for cloud KMS providers. +- Authentication support for `EKS Clusters `_.
+- Added the `timeout `_ example page to improve the documentation for :func:`pymongo.timeout`. Bug Fixes @@ -591,7 +886,7 @@ PyMongo 4.3 brings a number of improvements including: - Added support for decoding BSON datetimes outside of the range supported by Python's :class:`~datetime.datetime` builtin. See - :ref:`handling-out-of-range-datetimes` for examples, as well as + `handling out of range datetimes `_ for examples, as well as :class:`bson.datetime_ms.DatetimeMS`, :class:`bson.codec_options.DatetimeConversion`, and :class:`bson.codec_options.CodecOptions`'s ``datetime_conversion`` @@ -600,7 +895,7 @@ PyMongo 4.3 brings a number of improvements including: after a :py:func:`os.fork` to reduce the frequency of deadlocks. Note that deadlocks are still possible because libraries that PyMongo depends on, like OpenSSL, cannot be made fork() safe in multithreaded applications. - (`PYTHON-2484`_). For more info see :ref:`pymongo-fork-safe`. + (`PYTHON-2484`_). For more info see `multiple forks `_. - When used with MongoDB 6.0+, :class:`~pymongo.change_stream.ChangeStream` s now allow for new types of events (such as DDL and C2C replication events) to be recorded with the new parameter ``show_expanded_events`` @@ -610,7 +905,7 @@ PyMongo 4.3 brings a number of improvements including: credentials expire or an error is encountered. - When using the ``MONGODB-AWS`` authentication mechanism with the ``aws`` extra, the behavior of credential fetching has changed with - ``pymongo_auth_aws>=1.1.0``. Please see :doc:`examples/authentication` for + ``pymongo_auth_aws>=1.1.0``. Please see `Authentication `_ for more information. Bug fixes @@ -643,9 +938,9 @@ PyMongo 4.2 brings a number of improvements including: - Support for MongoDB 6.0. - Support for the Queryable Encryption beta with MongoDB 6.0. Note that backwards-breaking - changes may be made before the final release. See :ref:`automatic-queryable-client-side-encryption` for example usage. + changes may be made before the final release. See `automatic queryable client-side encryption `_ for example usage. - Provisional (beta) support for :func:`pymongo.timeout` to apply a single timeout - to an entire block of pymongo operations. See :ref:`timeout-example` for examples. + to an entire block of pymongo operations. See `timeout `_ for examples. - Added the ``timeoutMS`` URI and keyword argument to :class:`~pymongo.mongo_client.MongoClient`. - Added the :attr:`pymongo.errors.PyMongoError.timeout` property which is ``True`` when the error was caused by a timeout. @@ -693,7 +988,7 @@ Unavoidable breaking changes encryption support. - :meth:`~pymongo.collection.Collection.estimated_document_count` now always uses the `count`_ command. Due to an oversight in versions 5.0.0-5.0.8 of MongoDB, - the count command was not included in V1 of the :ref:`versioned-api-ref`. + the count command was not included in V1 of the `Stable API `_. Users of the Stable API with estimated_document_count are recommended to upgrade their server version to 5.0.9+ or set :attr:`pymongo.server_api.ServerApi.strict` to ``False`` to avoid encountering errors (`PYTHON-3167`_). @@ -756,7 +1051,7 @@ Changes in Version 4.1 (2021/12/07) PyMongo 4.1 brings a number of improvements including: -- Type Hinting support (formerly provided by `pymongo-stubs`_). See `Type Hints `_ for more information. - Added support for the ``comment`` parameter to all helpers.
For example see :meth:`~pymongo.collection.Collection.insert_one`. - Added support for the ``let`` parameter to @@ -845,7 +1140,7 @@ Breaking Changes in 4.0 :data:`bson.binary.UuidRepresentation.PYTHON_LEGACY` to :data:`bson.binary.UuidRepresentation.UNSPECIFIED`. Attempting to encode a :class:`uuid.UUID` instance to BSON or JSON now produces an error by default. - See :ref:`handling-uuid-data-example` for details. + See `UUID representations `_ for details. - Removed the ``waitQueueMultiple`` keyword argument to :class:`~pymongo.mongo_client.MongoClient` and removed :exc:`pymongo.errors.ExceededMaxWaiters`. @@ -1184,7 +1479,7 @@ Notable improvements - Added support for MongoDB 5.0. - Support for MongoDB Stable API; see :class:`~pymongo.server_api.ServerApi`. -- Support for snapshot reads on secondaries (see :ref:`snapshot-reads-ref`). +- Support for snapshot reads on secondaries (see `snapshot reads `_). - Support for Azure and GCP KMS providers for client side field level encryption. See the docstring for :class:`~pymongo.mongo_client.MongoClient`, :class:`~pymongo.encryption_options.AutoEncryptionOpts`, @@ -1241,7 +1536,7 @@ Deprecations same API. - Deprecated the :mod:`pymongo.message` module. - Deprecated the ``ssl_keyfile`` and ``ssl_certfile`` URI options in favor - of ``tlsCertificateKeyFile`` (see :doc:`examples/tls`). + of ``tlsCertificateKeyFile`` (see `TLS `_). .. _PYTHON-2466: https://jira.mongodb.org/browse/PYTHON-2466 .. _PYTHON-1690: https://jira.mongodb.org/browse/PYTHON-1690 @@ -1339,12 +1634,12 @@ Changes in Version 3.11.0 (2020/07/30) Version 3.11 adds support for MongoDB 4.4 and includes a number of bug fixes. Highlights include: -- Support for :ref:`OCSP` (Online Certificate Status Protocol). +- Support for `OCSP `_ (Online Certificate Status Protocol). - Support for `PyOpenSSL `_ as an - alternative TLS implementation. PyOpenSSL is required for :ref:`OCSP` + alternative TLS implementation. PyOpenSSL is required for `OCSP `_ support. It will also be installed when using the "tls" extra if the version of Python in use is older than 2.7.9. -- Support for the :ref:`MONGODB-AWS` authentication mechanism. +- Support for the `MONGODB-AWS `_ authentication mechanism. - Support for the ``directConnection`` URI option and kwarg to :class:`~pymongo.mongo_client.MongoClient`. - Support for speculative authentication attempts in connection handshakes @@ -1370,7 +1665,7 @@ Highlights include: - Added support for :data:`bson.binary.UuidRepresentation.UNSPECIFIED` and ``MongoClient(uuidRepresentation='unspecified')`` which will become the default UUID representation starting in PyMongo 4.0. See - :ref:`handling-uuid-data-example` for details. + `UUID representations `_ for details. - New methods :meth:`bson.binary.Binary.from_uuid` and :meth:`bson.binary.Binary.as_uuid`. - Added the ``background`` parameter to @@ -1454,7 +1749,7 @@ Version 3.10 includes a number of improvements and bug fixes. Highlights include: - Support for Client-Side Field Level Encryption with MongoDB 4.2. See - `Client-Side Field Level Encryption `_ for examples. - Support for Python 3.8. - Added :attr:`pymongo.client_session.ClientSession.in_transaction`. - Do not hold the Topology lock while creating connections in a MongoClient's @@ -1480,7 +1775,7 @@ Changes in Version 3.9.0 (2019/08/13) Version 3.9 adds support for MongoDB 4.2. Highlights include: - Support for MongoDB 4.2 sharded transactions.
Sharded transactions have - the same API as replica set transactions. See :ref:`transactions-ref`. + the same API as replica set transactions. See `Transactions `_. - New method :meth:`pymongo.client_session.ClientSession.with_transaction` to support conveniently running a transaction in a session with automatic retries and at-most-once semantics. @@ -1582,8 +1877,7 @@ Changes in Version 3.8.0 (2019/04/22) ------------------------------------- .. warning:: PyMongo no longer supports Python 2.6. RHEL 6 users should install - Python 2.7 or newer from `Red Hat Software Collections - `_. + Python 2.7 or newer from Red Hat Software Collections. CentOS 6 users should install Python 2.7 or newer from `SCL `_ @@ -1609,7 +1903,7 @@ Changes in Version 3.8.0 (2019/04/22) - Custom types can now be directly encoded to, and decoded from MongoDB using the :class:`~bson.codec_options.TypeCodec` and :class:`~bson.codec_options.TypeRegistry` APIs. For more information, see - the :doc:`custom type example `. + `Custom Types `_. - Attempting a multi-document transaction on a sharded cluster now raises a :exc:`~pymongo.errors.ConfigurationError`. - :meth:`pymongo.cursor.Cursor.distinct` and @@ -1639,7 +1933,7 @@ Changes in Version 3.8.0 (2019/04/22) - Iterating over a :class:`~bson.raw_bson.RawBSONDocument` now maintains the same field order of the underlying raw BSON document. - Applications can now register a custom server selector. For more information - see the :doc:`server selector example `. + see `Customize Server Selection `_. - The connection pool now implements a LIFO policy. Unavoidable breaking changes: @@ -1707,9 +2001,9 @@ Changes in Version 3.7.0 (2018/06/26) Version 3.7 adds support for MongoDB 4.0. Highlights include: - Support for single replica set multi-document ACID transactions. - See :ref:`transactions-ref`. + See `transactions `_. - Support for wire protocol compression via the new ``compressors`` URI and keyword argument to - :meth:`~pymongo.mongo_client.MongoClient`. See :ref:`network-compression-example` for details. + :meth:`~pymongo.mongo_client.MongoClient`. See `network compression `_ for details. - Support for Python 3.7. - New count methods, :meth:`~pymongo.collection.Collection.count_documents` and :meth:`~pymongo.collection.Collection.estimated_document_count`. @@ -1730,9 +2024,9 @@ Version 3.7 adds support for MongoDB 4.0. Highlights include: the following features and changes allow PyMongo to function when MD5 support is disabled in OpenSSL by the FIPS Object Module: - - Support for the :ref:`SCRAM-SHA-256 ` - authentication mechanism. The :ref:`GSSAPI `, - :ref:`PLAIN `, and :ref:`MONGODB-X509 ` + - Support for the `SCRAM-SHA-256 `_ + authentication mechanism. The `GSSAPI `_, + `PLAIN `_, and `MONGODB-X509 `_ mechanisms can also be used to avoid issues with OpenSSL in FIPS environments. - MD5 checksums are now optional in GridFS. See the ``disable_md5`` option @@ -1750,7 +2044,7 @@ Version 3.7 adds support for MongoDB 4.0. Highlights include: class which is a subclass of :class:`~pymongo.change_stream.ChangeStream`. - SCRAM client and server keys are cached for improved performance, following `RFC 5802 `_. -- If not specified, the authSource for the :ref:`PLAIN ` +- If not specified, the authSource for the `PLAIN `_ authentication mechanism defaults to $external. - wtimeoutMS is once again supported as a URI option. 
- When using unacknowledged write concern and connected to MongoDB server @@ -2000,7 +2294,7 @@ Changes and Deprecations: consistent across all MongoDB versions. - In Python 3, :meth:`~bson.json_util.loads` now automatically decodes JSON $binary with a subtype of 0 into :class:`bytes` instead of - :class:`~bson.binary.Binary`. See the :doc:`/python3` for more details. + :class:`~bson.binary.Binary`. - :meth:`~bson.json_util.loads` now raises ``TypeError`` or ``ValueError`` when parsing JSON type wrappers with values of the wrong type or any extra keys. @@ -2029,7 +2323,7 @@ Highlights include: - Complete support for MongoDB 3.4: - - Unicode aware string comparison using :doc:`examples/collations`. + - Unicode aware string comparison using `Collation `_. - Support for the new :class:`~bson.decimal128.Decimal128` BSON type. - A new maxStalenessSeconds read preference option. - A username is no longer required for the MONGODB-X509 authentication @@ -2367,7 +2661,7 @@ In PyMongo 3.0, the ``use_greenlets`` option is gone. To use PyMongo with Gevent simply call ``gevent.monkey.patch_all()``. For more information, -see :doc:`PyMongo's Gevent documentation `. +see `Gevent `_. :class:`~pymongo.mongo_client.MongoClient` changes .................................................. @@ -2411,7 +2705,7 @@ the list, and used it until a network error prompted it to re-evaluate all mongoses' latencies and reconnect to one of them. In PyMongo 3, the client monitors its network latency to all the mongoses continuously, and distributes operations evenly among those with the lowest latency. -See :ref:`mongos-load-balancing` for more information. +See `load balancing `_ for more information. The client methods ``start_request``, ``in_request``, and ``end_request`` are removed, and so is the ``auto_start_request`` option. Requests were @@ -2419,7 +2713,7 @@ designed to make read-your-writes consistency more likely with the ``w=0`` write concern. Additionally, a thread in a request used the same member for all secondary reads in a replica set. To ensure read-your-writes consistency in PyMongo 3.0, do not override the default write concern with ``w=0``, and -do not override the default :ref:`read preference ` of +do not override the default `read preference `_ of PRIMARY. Support for the ``slaveOk`` (or ``slave_okay``), ``safe``, and @@ -2433,8 +2727,7 @@ The ``max_pool_size`` option has been removed. It is replaced by the ``maxPoolSize`` MongoDB URI option. ``maxPoolSize`` is now a supported URI option in PyMongo and can be passed as a keyword argument. -The ``copy_database`` method is removed, see the -:doc:`copy_database examples ` for alternatives. +The ``copy_database`` method is removed, see `Copy and Clone Databases `_ for alternatives. The ``disconnect`` method is removed. Use :meth:`~pymongo.mongo_client.MongoClient.close` instead. @@ -2771,7 +3064,7 @@ Version 2.9.4 fixes issues reported since the release of 2.9.3. - Fixed :class:`~pymongo.mongo_replica_set_client.MongoReplicaSetClient` handling of uuidRepresentation. - Fixed building and testing the documentation with python 3.x. -- New documentation for :doc:`examples/tls` and :doc:`atlas`. +- New documentation for `TLS `_ and `Atlas `_. Issues Resolved ............... @@ -3010,7 +3303,7 @@ PyMongo 2.7 is a major release with a large number of new features and bug fixes. Highlights include: - Full support for MongoDB 2.6. -- A new :doc:`bulk write operations API `. +- A new `bulk write operations API `_. 
- Support for server side query timeouts using :meth:`~pymongo.cursor.Cursor.max_time_ms`. - Support for writing :meth:`~pymongo.collection.Collection.aggregate` @@ -3021,9 +3314,9 @@ fixes. Highlights include: error details from the server. - A new GridFS :meth:`~gridfs.GridFS.find` method that returns a :class:`~gridfs.grid_file.GridOutCursor`. -- Greatly improved :doc:`support for mod_wsgi ` when using +- Greatly improved `support for mod_wsgi `_ when using PyMongo's C extensions. Read `Jesse's blog post - `_ for details. + `_ for details. - Improved C extension support for ARM little endian. Breaking changes @@ -3101,14 +3394,14 @@ Important new features: ``waitQueueTimeoutMS`` is set, an operation that blocks waiting for a socket will raise :exc:`~pymongo.errors.ConnectionFailure` after the timeout. By default ``waitQueueTimeoutMS`` is not set. - See :ref:`connection-pooling` for more information. + See `connection pooling `_ for more information. - The :meth:`~pymongo.collection.Collection.insert` method automatically splits large batches of documents into multiple insert messages based on :attr:`~pymongo.mongo_client.MongoClient.max_message_size` - Support for the exhaust cursor flag. See :meth:`~pymongo.collection.Collection.find` for details and caveats. - Support for the PLAIN and MONGODB-X509 authentication mechanisms. - See :doc:`the authentication docs ` for more + See `the authentication docs `_ for more information. - Support aggregation output as a :class:`~pymongo.cursor.Cursor`. See :meth:`~pymongo.collection.Collection.aggregate` for details. @@ -3121,7 +3414,7 @@ Important new features: to having a ``max_pool_size`` larger than necessary. Err towards a larger value.) If your application accepts the default, continue to do so. - See :ref:`connection-pooling` for more information. + See `connection pooling `_ for more information. Issues Resolved ............... @@ -3167,7 +3460,7 @@ Version 2.5 includes changes to support new features in MongoDB 2.4. Important new features: -- Support for :ref:`GSSAPI (Kerberos) authentication `. +- Support for `GSSAPI (Kerberos) `_. - Support for SSL certificate validation with hostname matching. - Support for delegated and role based authentication. - New GEOSPHERE (2dsphere) and HASHED index constants. @@ -3274,11 +3567,11 @@ Version 2.3 adds support for new features and behavior changes in MongoDB Important New Features: - Support for expanded read preferences including directing reads to tagged - servers - See :ref:`secondary-reads` for more information. + servers - See `secondary reads `_ for more information. - Support for mongos failover. - A new :meth:`~pymongo.collection.Collection.aggregate` method to support MongoDB's new `aggregation framework - `_. + `_. - Support for legacy Java and C# byte order when encoding and decoding UUIDs. - Support for connecting directly to an arbiter. @@ -3328,10 +3621,10 @@ to this release. Important New Features: -- Support for Python 3 - - See the :doc:`python3` for more information. +- Support for Python 3. + See `Python 3 `_ for more information. - Support for Gevent - - See :doc:`examples/gevent` for more information. + See `Gevent `_ for more information. - Improved connection pooling. See `PYTHON-287 `_. @@ -3642,7 +3935,7 @@ Changes in Version 1.9 (2010/09/28) Version 1.9 adds a new package to the PyMongo distribution, :mod:`bson`. 
:mod:`bson` contains all of the `BSON -`_ encoding and decoding logic, and the BSON +`_ encoding and decoding logic, and the BSON types that were formerly in the :mod:`pymongo` package. The following modules have been renamed: @@ -3775,7 +4068,7 @@ Changes in Version 1.7 (2010/06/17) Version 1.7 is a recommended upgrade for all PyMongo users. The full release notes are below, and some more in depth discussion of the highlights is `here -`_. +`_. - no longer attempt to build the C extension on big-endian systems. - added :class:`~bson.min_key.MinKey` and @@ -3826,7 +4119,7 @@ The biggest change in version 1.6 is a complete re-implementation of :mod:`gridfs` with a lot of improvements over the old implementation. There are many details and examples of using the new API in `this blog post -`_. The +`_. The old API has been removed in this version, so existing code will need to be modified before upgrading to 1.6. @@ -3937,7 +4230,7 @@ Other changes: - clean up all cases where :class:`~pymongo.errors.ConnectionFailure` is raised. - simplification of connection pooling - makes driver ~2x faster for - simple benchmarks. see :ref:`connection-pooling` for more information. + simple benchmarks. see `connection pooling `_ for more information. - DEPRECATED ``pool_size``, ``auto_start_request`` and ``timeout`` parameters to :class:`~pymongo.connection.Connection`. DEPRECATED :meth:`~pymongo.connection.Connection.start_request`. @@ -4004,7 +4297,7 @@ Changes in Version 1.2 (2009/12/09) get around some issues with queries on fields named ``query`` - enforce 4MB document limit on the client side - added :meth:`~pymongo.collection.Collection.map_reduce` helper - see - :doc:`example ` + `Aggregation `_ - added :meth:`~pymongo.cursor.Cursor.distinct` method on :class:`~pymongo.cursor.Cursor` instances to allow distinct with queries diff --git a/doc/common-issues.rst b/doc/common-issues.rst deleted file mode 100644 index b300bac784..0000000000 --- a/doc/common-issues.rst +++ /dev/null @@ -1,96 +0,0 @@ -Frequently Encountered Issues -============================= - -Also see the :ref:`TLSErrors` section. - -Server reports wire version X, PyMongo requires Y -------------------------------------------------- - -When one attempts to connect to a <=3.6 version server, PyMongo will raise the following error:: - - >>> client.admin.command('ping') - ... - pymongo.errors.ConfigurationError: Server at localhost:27017 reports wire version 6, but this version of PyMongo requires at least 7 (MongoDB 4.0). - -This is caused by the driver being too new for the server it is being run against. -To resolve this issue, either upgrade your database to version >= 4.0 or downgrade to an earlier version of PyMongo which supports MongoDB < 4.0. - - -'Cursor' object has no attribute '_Cursor__killed' -------------------------------------------------- - -On versions of PyMongo <3.9, when supplying invalid arguments to the constructor of Cursor, -a TypeError is raised, and an AttributeError is printed to ``stderr``. The AttributeError is not relevant; -instead, look at the TypeError for debugging information:: - - >>> coll.find(wrong=1) - Exception ignored in: - ... - AttributeError: 'Cursor' object has no attribute '_Cursor__killed' - ... - TypeError: __init__() got an unexpected keyword argument 'wrong' - -To fix this, make sure that you are supplying the correct keyword arguments. -In addition, you can also upgrade to PyMongo >=3.9, which will remove the spurious error.
- - -MongoClient fails ConfigurationError ------------------------------------- - -This is a common issue stemming from using incorrect keyword argument names. - - >>> client = MongoClient(wrong=1) - ... - pymongo.errors.ConfigurationError: Unknown option wrong - -To fix this, check your spelling and make sure that the keyword argument you are specifying exists. - - -DeprecationWarning: count is deprecated --------------------------------------- - -PyMongo no longer supports :meth:`pymongo.cursor.count`. -Instead, use :meth:`pymongo.collection.count_documents`:: - - >>> client = MongoClient() - >>> d = datetime.datetime(2009, 11, 12, 12) - >>> list(client.db.coll.find({"date": {"$lt": d}}, limit=2)) - [{'_id': ObjectId('6247b058cebb8b179b7039f8'), 'date': datetime.datetime(1, 1, 1, 0, 0)}, {'_id': ObjectId('6247b059cebb8b179b7039f9'), 'date': datetime.datetime(1, 1, 1, 0, 0)}] - >>> client.db.coll.count_documents({"date": {"$lt": d}}, limit=2) - 2 - -Note that this is NOT the same as ``Cursor.count_documents`` (which does not exist); -this is a method of the Collection class, so you must call it on a collection object -or you will receive the following error:: - - >>> Cursor(MongoClient().db.coll).count() - Traceback (most recent call last): - File "", line 1, in - AttributeError: 'Cursor' object has no attribute 'count' - >>> - -Timeout when accessing MongoDB from PyMongo with tunneling ----------------------------------------------------------- - -When attempting to connect to a replica set MongoDB instance over an SSH tunnel, you -will receive the following error:: - - File "/Library/Python/2.7/site-packages/pymongo/collection.py", line 1560, in count - return self._count(cmd, collation, session) - File "/Library/Python/2.7/site-packages/pymongo/collection.py", line 1504, in _count - with self._socket_for_reads() as (connection, slave_ok): - File "/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/contextlib.py", line 17, in __enter__ - return self.gen.next() - File "/Library/Python/2.7/site-packages/pymongo/mongo_client.py", line 982, in _socket_for_reads - server = topology.select_server(read_preference) - File "/Library/Python/2.7/site-packages/pymongo/topology.py", line 224, in select_server - address)) - File "/Library/Python/2.7/site-packages/pymongo/topology.py", line 183, in select_servers - selector, server_timeout, address) - File "/Library/Python/2.7/site-packages/pymongo/topology.py", line 199, in _select_servers_loop - self._error_message(selector)) - pymongo.errors.ServerSelectionTimeoutError: localhost:27017: timed out - -This is because PyMongo discovers replica set members using the response from the isMaster command, which -contains the addresses and ports of the other members. However, these addresses and ports will not be accessible through the SSH tunnel. Thus, this behavior is unsupported. -You can, however, connect directly to a single MongoDB node using the directConnection=True option with SSH tunneling. diff --git a/doc/compatibility-policy.rst b/doc/compatibility-policy.rst deleted file mode 100644 index 834f86ce54..0000000000 --- a/doc/compatibility-policy.rst +++ /dev/null @@ -1,62 +0,0 @@ -Compatibility Policy -==================== - -Semantic Versioning -------------------- - -PyMongo's version numbers follow `semantic versioning`_: each version number -is structured "major.minor.patch".
-add features (and may fix bugs), and major releases include API changes that
-break backwards compatibility (and may add features and fix bugs).
-
-Deprecation
------------
-
-Before we remove a feature in a major release, PyMongo's maintainers make an
-effort to release at least one minor version that *deprecates* it. We add
-"**DEPRECATED**" to the feature's documentation, and update the code to raise a
-`DeprecationWarning`_. You can ensure your code is future-proof by running it
-with the latest PyMongo release and looking for DeprecationWarnings.
-
-The interpreter silences DeprecationWarnings by default. For example, the
-following code uses the deprecated ``insert`` method but does not raise any
-warning:
-
-.. code-block:: python
-
-    # "insert.py" (with PyMongo 3.X)
-    from pymongo import MongoClient
-
-    client = MongoClient()
-    client.test.test.insert({})
-
-To print deprecation warnings to stderr, run python with "-Wd"::
-
-    $ python3 -Wd insert.py
-    insert.py:4: DeprecationWarning: insert is deprecated. Use insert_one or insert_many instead.
-      client.test.test.insert({})
-
-You can turn warnings into exceptions with "python -We"::
-
-    $ python3 -We insert.py
-    Traceback (most recent call last):
-      File "insert.py", line 4, in
-        client.test.test.insert({})
-      File "/home/durin/work/mongo-python-driver/pymongo/collection.py", line 2906, in insert
-        "instead.", DeprecationWarning, stacklevel=2)
-    DeprecationWarning: insert is deprecated. Use insert_one or insert_many instead.
-
-If your own code's test suite passes with "python -We", then it uses no
-deprecated PyMongo features.
-
-.. seealso:: The Python documentation on `the warnings module`_,
-   and `the -W command line option`_.
-
-.. _semantic versioning: http://semver.org/
-
-.. _DeprecationWarning:
-   https://docs.python.org/3/library/exceptions.html#DeprecationWarning
-
-.. _the warnings module: https://docs.python.org/3/library/warnings.html
-
-.. _the -W command line option: https://docs.python.org/3/using/cmdline.html#cmdoption-W
diff --git a/doc/conf.py b/doc/conf.py
index f82c719361..063429cd98 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -82,15 +82,21 @@
 # Options for link checking
 # The anchors on the rendered markdown page are created after the fact,
 # so those links result in a 404.
-# wiki.centos.org has been flakey.
+# wiki.centos.org has been flaky.
 # sourceforge.net is giving a 403 error, but is still accessible from the browser.
+# Links to release notes in jira give 401 error: unauthorized. PYTHON-5585
 linkcheck_ignore = [
     "https://github.com/mongodb/specifications/blob/master/source/server-discovery-and-monitoring/server-monitoring.md#requesting-an-immediate-check",
+    "https://github.com/mongodb/specifications/blob/master/source/transactions-convenient-api/transactions-convenient-api.md#handling-errors-inside-the-callback",
     "https://github.com/mongodb/libmongocrypt/blob/master/bindings/python/README.rst#installing-from-source",
     r"https://wiki.centos.org/[\w/]*",
-    r"http://sourceforge.net/",
+    r"https://sourceforge.net/",
+    r"https://jira\.mongodb\.org/secure/ReleaseNote\.jspa.*",
 ]
+# Allow for flaky links.
+linkcheck_retries = 3 + # -- Options for extensions ---------------------------------------------------- autoclass_content = "init" @@ -105,6 +111,7 @@ client = MongoClient() client.drop_database("doctest_test") db = client.doctest_test +server_major_version = int(client.server_info()['version'].split()[-1][0]) """ # -- Options for HTML output --------------------------------------------------- diff --git a/doc/contributors.rst b/doc/contributors.rst index 4a7f5424b1..08296e9595 100644 --- a/doc/contributors.rst +++ b/doc/contributors.rst @@ -103,3 +103,7 @@ The following is a list of people who have contributed to - Terry Patterson - Romain Morotti - Navjot Singh (navjots18) +- Jib Adegunloye (Jibola) +- Jeffrey A. Clark (aclark4life) +- Steven Silvester (blink1073) +- Noah Stapp (NoahStapp) diff --git a/doc/developer/index.rst b/doc/developer/index.rst deleted file mode 100644 index 2ce1e0536c..0000000000 --- a/doc/developer/index.rst +++ /dev/null @@ -1,9 +0,0 @@ -Developer Guide -=============== - -Technical guide for contributors to PyMongo. - -.. toctree:: - :maxdepth: 1 - - periodic_executor diff --git a/doc/developer/periodic_executor.rst b/doc/developer/periodic_executor.rst deleted file mode 100644 index 67eaa89f10..0000000000 --- a/doc/developer/periodic_executor.rst +++ /dev/null @@ -1,113 +0,0 @@ -Periodic Executors -================== - -.. currentmodule:: pymongo - -PyMongo implements a :class:`~periodic_executor.PeriodicExecutor` for two -purposes: as the background thread for :class:`~monitor.Monitor`, and to -regularly check if there are ``OP_KILL_CURSORS`` messages that must be sent to the server. - -Killing Cursors ---------------- - -An incompletely iterated :class:`~cursor.Cursor` on the client represents an -open cursor object on the server. In code like this, we lose a reference to -the cursor before finishing iteration:: - - for doc in collection.find(): - raise Exception() - -We try to send an ``OP_KILL_CURSORS`` to the server to tell it to clean up the -server-side cursor. But we must not take any locks directly from the cursor's -destructor (see `PYTHON-799`_), so we cannot safely use the PyMongo data -structures required to send a message. The solution is to add the cursor's id -to an array on the :class:`~mongo_client.MongoClient` without taking any locks. - -Each client has a :class:`~periodic_executor.PeriodicExecutor` devoted to -checking the array for cursor ids. Any it sees are the result of cursors that -were freed while the server-side cursor was still open. The executor can safely -take the locks it needs in order to send the ``OP_KILL_CURSORS`` message. - -.. _PYTHON-799: https://jira.mongodb.org/browse/PYTHON-799 - -Stopping Executors ------------------- - -Just as :class:`~cursor.Cursor` must not take any locks from its destructor, -neither can :class:`~mongo_client.MongoClient` and :class:`~topology.Topology`. -Thus, although the client calls :meth:`close` on its kill-cursors thread, and -the topology calls :meth:`close` on all its monitor threads, the :meth:`close` -method cannot actually call :meth:`wake` on the executor, since :meth:`wake` -takes a lock. - -Instead, executors wake periodically to check if ``self.close`` is set, -and if so they exit. - -A thread can log spurious errors if it wakes late in the Python interpreter's -shutdown sequence, so we try to join threads before then. 
Each periodic -executor (either a monitor or a kill-cursors thread) adds a weakref to itself -to a set called ``_EXECUTORS``, in the ``periodic_executor`` module. - -An `exit handler`_ runs on shutdown and tells all executors to stop, then -tries (with a short timeout) to join all executor threads. - -.. _exit handler: https://docs.python.org/2/library/atexit.html - -Monitoring ----------- - -For each server in the topology, :class:`~topology.Topology` uses a periodic -executor to launch a monitor thread. This thread must not prevent the topology -from being freed, so it weakrefs the topology. Furthermore, it uses a weakref -callback to terminate itself soon after the topology is freed. - -Solid lines represent strong references, dashed lines weak ones: - -.. generated with graphviz: "dot -Tpng periodic-executor-refs.dot > periodic-executor-refs.png" - -.. image:: ../static/periodic-executor-refs.png - -See `Stopping Executors`_ above for an explanation of the ``_EXECUTORS`` set. - -It is a requirement of the `Server Discovery And Monitoring Spec`_ that a -sleeping monitor can be awakened early. Aside from infrequent wakeups to do -their appointed chores, and occasional interruptions, periodic executors also -wake periodically to check if they should terminate. - -Our first implementation of this idea was the obvious one: use the Python -standard library's threading.Condition.wait with a timeout. Another thread -wakes the executor early by signaling the condition variable. - -A topology cannot signal the condition variable to tell the executor to -terminate, because it would risk a deadlock in the garbage collector: no -destructor or weakref callback can take a lock to signal the condition variable -(see `PYTHON-863`_); thus the only way for a dying object to terminate a -periodic executor is to set its "stopped" flag and let the executor see the -flag next time it wakes. - -We erred on the side of prompt cleanup, and set the check interval at 100ms. We -assumed that checking a flag and going back to sleep 10 times a second was -cheap on modern machines. - -Starting in Python 3.2, the builtin C implementation of lock.acquire takes a -timeout parameter, so Python 3.2+ Condition variables sleep simply by calling -lock.acquire; they are implemented as efficiently as expected. - -But in Python 2, lock.acquire has no timeout. To wait with a timeout, a Python -2 condition variable sleeps a millisecond, tries to acquire the lock, sleeps -twice as long, and tries again. This exponential backoff reaches a maximum -sleep time of 50ms. - -If PyMongo calls the condition variable's "wait" method with a short timeout, -the exponential backoff is restarted frequently. Overall, the condition variable -is not waking a few times a second, but hundreds of times. (See `PYTHON-983`_.) - -Thus the current design of periodic executors is surprisingly simple: they -do a simple ``time.sleep`` for a half-second, check if it is time to wake or -terminate, and sleep again. - -.. _Server Discovery And Monitoring Spec: https://github.com/mongodb/specifications/blob/master/source/server-discovery-and-monitoring/server-monitoring.md#requesting-an-immediate-check - -.. _PYTHON-863: https://jira.mongodb.org/browse/PYTHON-863 - -.. 
_PYTHON-983: https://jira.mongodb.org/browse/PYTHON-983
diff --git a/doc/examples/aggregation.rst b/doc/examples/aggregation.rst
deleted file mode 100644
index 9b1a89fba7..0000000000
--- a/doc/examples/aggregation.rst
+++ /dev/null
@@ -1,90 +0,0 @@
-Aggregation Examples
-====================
-
-There are several methods of performing aggregations in MongoDB. These
-examples cover the aggregation framework.
-
-.. testsetup::
-
-    from pymongo import MongoClient
-
-    client = MongoClient()
-    client.drop_database("aggregation_example")
-
-Setup
------
-To start, we'll insert some example data which we can perform
-aggregations on:
-
-.. doctest::
-
-    >>> from pymongo import MongoClient
-    >>> db = MongoClient().aggregation_example
-    >>> result = db.things.insert_many(
-    ...     [
-    ...         {"x": 1, "tags": ["dog", "cat"]},
-    ...         {"x": 2, "tags": ["cat"]},
-    ...         {"x": 2, "tags": ["mouse", "cat", "dog"]},
-    ...         {"x": 3, "tags": []},
-    ...     ]
-    ... )
-    >>> result.inserted_ids
-    [ObjectId('...'), ObjectId('...'), ObjectId('...'), ObjectId('...')]
-
-.. _aggregate-examples:
-
-Aggregation Framework
----------------------
-
-This example shows how to use the
-:meth:`~pymongo.collection.Collection.aggregate` method to use the aggregation
-framework. We'll perform a simple aggregation to count the number of
-occurrences for each tag in the ``tags`` array, across the entire collection.
-To achieve this we need to pass in three operations to the pipeline.
-First, we need to unwind the ``tags`` array, then group by the tags and
-sum them up, and finally sort by count.
-
-Python dictionaries prior to Python 3.7 don't maintain order. On older
-Python versions, use :class:`~bson.son.SON` or :class:`collections.OrderedDict`
-where explicit ordering is required, e.g. for ``"$sort"``:
-
-.. note::
-
-   aggregate requires server version **>= 2.1.0**.
-
-.. doctest::
-
-    >>> from bson.son import SON
-    >>> pipeline = [
-    ...     {"$unwind": "$tags"},
-    ...     {"$group": {"_id": "$tags", "count": {"$sum": 1}}},
-    ...     {"$sort": SON([("count", -1), ("_id", -1)])},
-    ... ]
-    >>> import pprint
-    >>> pprint.pprint(list(db.things.aggregate(pipeline)))
-    [{'_id': 'cat', 'count': 3},
-     {'_id': 'dog', 'count': 2},
-     {'_id': 'mouse', 'count': 1}]
-
-To run an explain plan for this aggregation use
-`PyMongoExplain `_,
-a companion library for PyMongo. It allows you to explain any CRUD operation
-by providing a few convenience classes::
-
-    >>> from pymongoexplain import ExplainableCollection
-    >>> ExplainableCollection(collection).aggregate(pipeline)
-    {'ok': 1.0, 'queryPlanner': [...]}
-
-Or, use the :meth:`~pymongo.database.Database.command` method::
-
-    >>> db.command('aggregate', 'things', pipeline=pipeline, explain=True)
-    {'ok': 1.0, 'stages': [...]}
-
-As well as simple aggregations the aggregation framework provides projection
-capabilities to reshape the returned data. Using projections and aggregation,
-you can add computed fields, create new virtual sub-objects, and extract
-sub-fields into the top-level of results.
-
-.. seealso:: The full documentation for MongoDB's `aggregation framework
-   `_
diff --git a/doc/examples/authentication.rst b/doc/examples/authentication.rst
deleted file mode 100644
index a92222bafc..0000000000
--- a/doc/examples/authentication.rst
+++ /dev/null
@@ -1,528 +0,0 @@
-Authentication Examples
-=======================
-
-MongoDB supports several different authentication mechanisms. These examples
-cover all authentication methods currently supported by PyMongo, documenting
-Python module and MongoDB version dependencies.
-
-.. _percent escaped:
-
-Percent-Escaping Username and Password
---------------------------------------
-
-Username and password must be percent-escaped with
-:py:func:`urllib.parse.quote_plus` before being used in a MongoDB URI. For
-example::
-
-    >>> from pymongo import MongoClient
-    >>> import urllib.parse
-    >>> username = urllib.parse.quote_plus('user')
-    >>> username
-    'user'
-    >>> password = urllib.parse.quote_plus('pass/word')
-    >>> password
-    'pass%2Fword'
-    >>> MongoClient('mongodb://%s:%s@127.0.0.1' % (username, password))
-    ...
-
-.. _scram_sha_256:
-
-SCRAM-SHA-256 (RFC 7677)
-------------------------
-.. versionadded:: 3.7
-
-SCRAM-SHA-256 is the default authentication mechanism supported by a cluster
-configured for authentication with MongoDB 4.0 or later. Authentication
-requires a username, a password, and a database name. The default database
-name is "admin"; this can be overridden with the ``authSource`` option.
-Credentials can be specified as arguments to
-:class:`~pymongo.mongo_client.MongoClient`::
-
-    >>> from pymongo import MongoClient
-    >>> client = MongoClient('example.com',
-    ...                      username='user',
-    ...                      password='password',
-    ...                      authSource='the_database',
-    ...                      authMechanism='SCRAM-SHA-256')
-
-Or through the MongoDB URI::
-
-    >>> uri = "mongodb://user:password@example.com/?authSource=the_database&authMechanism=SCRAM-SHA-256"
-    >>> client = MongoClient(uri)
-
-SCRAM-SHA-1 (RFC 5802)
-----------------------
-.. versionadded:: 2.8
-
-SCRAM-SHA-1 is the default authentication mechanism supported by a cluster
-configured for authentication with MongoDB 3.0 or later. Authentication
-requires a username, a password, and a database name. The default database
-name is "admin"; this can be overridden with the ``authSource`` option.
-Credentials can be specified as arguments to
-:class:`~pymongo.mongo_client.MongoClient`::
-
-    >>> from pymongo import MongoClient
-    >>> client = MongoClient('example.com',
-    ...                      username='user',
-    ...                      password='password',
-    ...                      authSource='the_database',
-    ...                      authMechanism='SCRAM-SHA-1')
-
-Or through the MongoDB URI::
-
-    >>> uri = "mongodb://user:password@example.com/?authSource=the_database&authMechanism=SCRAM-SHA-1"
-    >>> client = MongoClient(uri)
-
-For best performance on Python versions older than 2.7.8, install
-`backports.pbkdf2`_.
-
-.. _backports.pbkdf2: https://pypi.python.org/pypi/backports.pbkdf2/
-
-Default Authentication Mechanism
---------------------------------
-
-If no mechanism is specified, PyMongo automatically negotiates the mechanism
-to use (SCRAM-SHA-1 or SCRAM-SHA-256) with the MongoDB server.
-
-Default Database and "authSource"
----------------------------------
-
-You can specify both a default database and the authentication database in the
-URI::
-
-    >>> uri = "mongodb://user:password@example.com/default_db?authSource=admin"
-    >>> client = MongoClient(uri)
-
-PyMongo will authenticate on the "admin" database, but the default database
-will be "default_db"::
-
-    >>> # get_database with no "name" argument chooses the DB from the URI
-    >>> db = MongoClient(uri).get_database()
-    >>> print(db.name)
-    default_db
-
-.. _mongodb_x509:
-
-MONGODB-X509
-------------
-.. versionadded:: 2.6
-
-The MONGODB-X509 mechanism authenticates via the X.509 certificate presented
-by the driver during TLS/SSL negotiation. This authentication method requires
-the use of TLS/SSL connections with certificate validation::
-
-    >>> from pymongo import MongoClient
-    >>> client = MongoClient('example.com',
-    ...                      authMechanism="MONGODB-X509",
-    ...                      tls=True,
-    ...                      tlsCertificateKeyFile='/path/to/client.pem',
-    ...                      tlsCAFile='/path/to/ca.pem')
-
-MONGODB-X509 authenticates against the $external virtual database, so you
-do not have to specify a database in the URI::
-
-    >>> uri = "mongodb://example.com/?authMechanism=MONGODB-X509"
-    >>> client = MongoClient(uri,
-    ...                      tls=True,
-    ...                      tlsCertificateKeyFile='/path/to/client.pem',
-    ...                      tlsCAFile='/path/to/ca.pem')
-    >>>
-
-.. _gssapi:
-
-GSSAPI (Kerberos)
------------------
-.. versionadded:: 2.5
-
-GSSAPI (Kerberos) authentication is available in the Enterprise Edition of
-MongoDB.
-
-Unix
-~~~~
-
-To authenticate using GSSAPI, you must first install the Python `kerberos`_ or
-`pykerberos`_ module using pip. Make sure you run kinit before
-using the following authentication methods::
-
-    $ kinit mongodbuser@EXAMPLE.COM
-    mongodbuser@EXAMPLE.COM's Password:
-    $ klist
-    Credentials cache: FILE:/tmp/krb5cc_1000
-        Principal: mongodbuser@EXAMPLE.COM
-
-      Issued                Expires               Principal
-    Feb  9 13:48:51 2013  Feb  9 23:48:51 2013  krbtgt/EXAMPLE.COM@EXAMPLE.COM
-
-Now authenticate using the MongoDB URI. GSSAPI authenticates against the
-$external virtual database, so you do not have to specify a database in the
-URI::
-
-    >>> # Note: the kerberos principal must be URL-encoded.
-    >>> from pymongo import MongoClient
-    >>> uri = "mongodb://mongodbuser%40EXAMPLE.COM@mongo-server.example.com/?authMechanism=GSSAPI"
-    >>> client = MongoClient(uri)
-    >>>
-
-The default service name used by MongoDB and PyMongo is ``mongodb``. You can
-specify a custom service name with the ``authMechanismProperties`` option::
-
-    >>> from pymongo import MongoClient
-    >>> uri = "mongodb://mongodbuser%40EXAMPLE.COM@mongo-server.example.com/?authMechanism=GSSAPI&authMechanismProperties=SERVICE_NAME:myservicename"
-    >>> client = MongoClient(uri)
-
-Windows (SSPI)
-~~~~~~~~~~~~~~
-.. versionadded:: 3.3
-
-First install the `winkerberos`_ module. Unlike authentication on Unix, kinit
-is not used. If the user to authenticate is different from the user that owns
-the application process, provide a password to authenticate::
-
-    >>> uri = "mongodb://mongodbuser%40EXAMPLE.COM:mongodbuserpassword@example.com/?authMechanism=GSSAPI"
-
-Two extra ``authMechanismProperties`` are supported on Windows platforms:
-
-- CANONICALIZE_HOST_NAME - Uses the fully qualified domain name (FQDN) of the
-  MongoDB host for the server principal (GSSAPI libraries on Unix do this by
-  default)::
-
-    >>> uri = "mongodb://mongodbuser%40EXAMPLE.COM@example.com/?authMechanism=GSSAPI&authMechanismProperties=CANONICALIZE_HOST_NAME:true"
-
-- SERVICE_REALM - This is used when the user's realm is different from the
-  service's realm::
-
-    >>> uri = "mongodb://mongodbuser%40EXAMPLE.COM@example.com/?authMechanism=GSSAPI&authMechanismProperties=SERVICE_REALM:otherrealm"
-
-
-.. _kerberos: http://pypi.python.org/pypi/kerberos
-.. _pykerberos: https://pypi.python.org/pypi/pykerberos
-.. _winkerberos: https://pypi.python.org/pypi/winkerberos/
-
-.. _sasl_plain:
-
-SASL PLAIN (RFC 4616)
----------------------
-.. versionadded:: 2.6
-
-MongoDB Enterprise Edition version 2.6 and newer support the SASL PLAIN
-authentication mechanism, initially intended for delegating authentication
-to an LDAP server.
-These examples use the $external virtual database for LDAP support::
-
-    >>> from pymongo import MongoClient
-    >>> uri = "mongodb://user:password@example.com/?authMechanism=PLAIN"
-    >>> client = MongoClient(uri)
-    >>>
-
-SASL PLAIN is a clear-text authentication mechanism. We **strongly** recommend
-that you connect to MongoDB using TLS/SSL with certificate validation when
-using the SASL PLAIN mechanism::
-
-    >>> from pymongo import MongoClient
-    >>> uri = "mongodb://user:password@example.com/?authMechanism=PLAIN"
-    >>> client = MongoClient(uri,
-    ...                      tls=True,
-    ...                      tlsCertificateKeyFile='/path/to/client.pem',
-    ...                      tlsCAFile='/path/to/ca.pem')
-    >>>
-
-.. _MONGODB-AWS:
-
-MONGODB-AWS
------------
-.. versionadded:: 3.11
-
-The MONGODB-AWS authentication mechanism is available in MongoDB 4.4+ and
-requires extra pymongo dependencies. To use it, install pymongo with the
-``aws`` extra::
-
-    $ python -m pip install 'pymongo[aws]'
-
-The MONGODB-AWS mechanism authenticates using AWS IAM credentials (an access
-key ID and a secret access key), `temporary AWS IAM credentials`_ obtained
-from an `AWS Security Token Service (STS)`_ `Assume Role`_ request,
-AWS Lambda `environment variables`_, or temporary AWS IAM credentials assigned
-to an `EC2 instance`_ or ECS task. The use of temporary credentials, in
-addition to an access key ID and a secret access key, also requires a
-security (or session) token.
-
-Credentials can be configured through the MongoDB URI, environment variables,
-or the local EC2 or ECS endpoint. The order in which the client searches for
-`credentials`_ is the same as the one used by the AWS ``boto3`` library
-when using ``pymongo_auth_aws>=1.1.0``.
-
-Because we are now using ``boto3`` to handle credentials, the order and
-locations of credentials are slightly different from before. In particular,
-if you have a shared AWS credentials or config file,
-then those credentials will be used by default if AWS auth environment
-variables are not set. To override this behavior, set
-``AWS_SHARED_CREDENTIALS_FILE=""`` in your shell or add
-``os.environ["AWS_SHARED_CREDENTIALS_FILE"] = ""`` to your script or
-application. Alternatively, you can create an AWS profile specifically for
-your MongoDB credentials and set ``AWS_PROFILE`` to that profile name.
-
-MONGODB-AWS authenticates against the "$external" virtual database, so none of
-the URIs in this section need to include the ``authSource`` URI option.
-
-.. _credentials: https://boto3.amazonaws.com/v1/documentation/api/latest/guide/credentials.html
-
-AWS IAM credentials
-~~~~~~~~~~~~~~~~~~~
-
-Applications can authenticate using AWS IAM credentials by providing a valid
-access key ID and secret access key pair as the username and password,
-respectively, in the MongoDB URI. A sample URI would be::
-
-    >>> from pymongo import MongoClient
-    >>> uri = "mongodb+srv://<access_key_id>:<secret_access_key>@example.mongodb.net/?authMechanism=MONGODB-AWS"
-    >>> client = MongoClient(uri)
-
-.. note:: The access_key_id and secret_access_key passed into the URI MUST
-   be `percent escaped`_.
-
-AssumeRole
-~~~~~~~~~~
-
-Applications can authenticate using temporary credentials returned from an
-assume role request. These temporary credentials consist of an access key
-ID, a secret access key, and a security token passed into the URI.
-A sample URI would be::
-
-    >>> from pymongo import MongoClient
-    >>> uri = "mongodb+srv://<access_key_id>:<secret_access_key>@example.mongodb.net/?authMechanism=MONGODB-AWS&authMechanismProperties=AWS_SESSION_TOKEN:<session_token>"
-    >>> client = MongoClient(uri)
-
-.. note:: The access_key_id, secret_access_key, and session_token passed into
-   the URI MUST be `percent escaped`_.
-
-
-AWS Lambda (Environment Variables)
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-When the username and password are not provided and the MONGODB-AWS mechanism
-is set, the client will fall back to using the `environment variables`_
-``AWS_ACCESS_KEY_ID``, ``AWS_SECRET_ACCESS_KEY``, and ``AWS_SESSION_TOKEN``
-for the access key ID, secret access key, and session token, respectively::
-
-    $ export AWS_ACCESS_KEY_ID=<access_key_id>
-    $ export AWS_SECRET_ACCESS_KEY=<secret_access_key>
-    $ export AWS_SESSION_TOKEN=<session_token>
-    $ python
-    >>> from pymongo import MongoClient
-    >>> uri = "mongodb+srv://example.mongodb.net/?authMechanism=MONGODB-AWS"
-    >>> client = MongoClient(uri)
-
-.. note:: No username, password, or session token is passed into the URI.
-   PyMongo will use credentials set via the environment variables.
-   These environment variables MUST NOT be `percent escaped`_.
-
-
-.. _EKS Clusters:
-
-EKS Clusters
-~~~~~~~~~~~~
-
-Applications using the `Authenticating users for your cluster from an OpenID Connect identity provider `_ capability on EKS can
-use the provided credentials by giving the associated IAM user
-`sts:AssumeRoleWithWebIdentity `_
-permission.
-
-When the username and password are not provided, the MONGODB-AWS mechanism
-is set, and ``AWS_WEB_IDENTITY_TOKEN_FILE``, ``AWS_ROLE_ARN``, and the
-optional ``AWS_ROLE_SESSION_NAME`` are available, the driver will use
-an ``AssumeRoleWithWebIdentity`` call to retrieve temporary credentials.
-The application must be using ``pymongo_auth_aws`` >= 1.1.0 for EKS support.
-
-ECS Container
-~~~~~~~~~~~~~
-
-Applications can authenticate from an ECS container via temporary
-credentials assigned to the machine. A sample URI on an ECS container
-would be::
-
-    >>> from pymongo import MongoClient
-    >>> uri = "mongodb+srv://example.mongodb.com/?authMechanism=MONGODB-AWS"
-    >>> client = MongoClient(uri)
-
-.. note:: No username, password, or session token is passed into the URI.
-   PyMongo will query the ECS container endpoint to obtain these
-   credentials.
-
-EC2 Instance
-~~~~~~~~~~~~
-
-Applications can authenticate from an EC2 instance via temporary
-credentials assigned to the machine. A sample URI on an EC2 machine
-would be::
-
-    >>> from pymongo import MongoClient
-    >>> uri = "mongodb+srv://example.mongodb.com/?authMechanism=MONGODB-AWS"
-    >>> client = MongoClient(uri)
-
-.. note:: No username, password, or session token is passed into the URI.
-   PyMongo will query the EC2 instance endpoint to obtain these
-   credentials.
-
-.. _temporary AWS IAM credentials: https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp.html
-.. _AWS Security Token Service (STS): https://docs.aws.amazon.com/STS/latest/APIReference/Welcome.html
-.. _Assume Role: https://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRole.html
-.. _EC2 instance: https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use_switch-role-ec2.html
-.. _environment variables: https://docs.aws.amazon.com/lambda/latest/dg/configuration-envvars.html#configuration-envvars-runtime
-
-MONGODB-OIDC
-------------
-.. versionadded:: 4.7
-
-The `MONGODB-OIDC authentication mechanism`_ is available in MongoDB 7.0+ on
-Linux platforms.
-
-The MONGODB-OIDC mechanism authenticates using an OpenID Connect (OIDC)
-access token.
-The driver supports OIDC for workload identity, defined as an identity you
-assign to a software workload (such as an application, service, script, or
-container) to authenticate and access other services and resources.
-
-Credentials can be configured through the MongoDB URI or as arguments to
-:class:`~pymongo.mongo_client.MongoClient`.
-
-Built-in Support
-~~~~~~~~~~~~~~~~
-
-The driver has built-in support for Azure IMDS and GCP IMDS environments.
-Other environments are supported with `Custom Callbacks`_.
-
-Azure IMDS
-^^^^^^^^^^
-
-For an application running on an Azure VM or otherwise using the `Azure Internal Metadata Service`_,
-you can use the built-in support for Azure. If using an Azure managed identity, the "<client_id>" is
-the client ID. If using a service principal to represent an enterprise application, the "<client_id>" is
-the application ID of the service principal. The ``<audience>`` value is the ``audience``
-`configured on your MongoDB deployment`_.
-
-.. code-block:: python
-
-    import os
-
-    uri = os.environ["MONGODB_URI"]
-
-    props = {"ENVIRONMENT": "azure", "TOKEN_RESOURCE": "<audience>"}
-    c = MongoClient(
-        uri,
-        username="<client_id>",
-        authMechanism="MONGODB-OIDC",
-        authMechanismProperties=props,
-    )
-    c.test.test.insert_one({})
-    c.close()
-
-If the application is running on an Azure VM and only one managed identity is
-associated with the VM, ``username`` can be omitted.
-
-If providing the ``TOKEN_RESOURCE`` as part of a connection string, it can be
-given as follows. If the ``TOKEN_RESOURCE`` contains any of the following
-characters [``,``, ``+``, ``&``], then it MUST be URL-encoded.
-
-.. code-block:: python
-
-    import os
-
-    uri = f'{os.environ["MONGODB_URI"]}?authMechanism=MONGODB-OIDC&authMechanismProperties=ENVIRONMENT:azure,TOKEN_RESOURCE:<audience>'
-    c = MongoClient(uri)
-    c.test.test.insert_one({})
-    c.close()
-
-GCP IMDS
-^^^^^^^^
-
-For an application running on a GCP VM or otherwise using the `GCP Internal Metadata Service`_,
-you can use the built-in support for GCP, where ``<audience>`` below is the ``audience``
-`configured on your MongoDB deployment`_.
-
-.. code-block:: python
-
-    import os
-
-    uri = os.environ["MONGODB_URI"]
-
-    props = {"ENVIRONMENT": "gcp", "TOKEN_RESOURCE": "<audience>"}
-    c = MongoClient(uri, authMechanism="MONGODB-OIDC", authMechanismProperties=props)
-    c.test.test.insert_one({})
-    c.close()
-
-If providing the ``TOKEN_RESOURCE`` as part of a connection string, it can be
-given as follows. If the ``TOKEN_RESOURCE`` contains any of the following
-characters [``,``, ``+``, ``&``], then it MUST be URL-encoded.
-
-.. code-block:: python
-
-    import os
-
-    uri = f'{os.environ["MONGODB_URI"]}?authMechanism=MONGODB-OIDC&authMechanismProperties=ENVIRONMENT:gcp,TOKEN_RESOURCE:<audience>'
-    c = MongoClient(uri)
-    c.test.test.insert_one({})
-    c.close()
-
-Custom Callbacks
-~~~~~~~~~~~~~~~~
-
-For environments that are not directly supported by the driver, you can use
-:class:`~pymongo.auth_oidc.OIDCCallback`. Some examples are given below.
-
-Other Azure Environments
-^^^^^^^^^^^^^^^^^^^^^^^^
-
-For applications running on Azure Functions, App Service Environment (ASE), or
-Azure Kubernetes Service (AKS), you can use the `azure-identity package`_
-to fetch the credentials. This example assumes you have set environment
-variables for the ``audience`` `configured on your MongoDB deployment`_, and
-for the client ID of the Azure managed identity.
-
-..
code-block:: python - - import os - from azure.identity import DefaultAzureCredential - from pymongo import MongoClient - from pymongo.auth_oidc import OIDCCallback, OIDCCallbackContext, OIDCCallbackResult - - audience = os.environ["AZURE_AUDIENCE"] - client_id = os.environ["AZURE_IDENTITY_CLIENT_ID"] - uri = os.environ["MONGODB_URI"] - - - class MyCallback(OIDCCallback): - def fetch(self, context: OIDCCallbackContext) -> OIDCCallbackResult: - credential = DefaultAzureCredential(managed_identity_client_id=client_id) - token = credential.get_token(f"{audience}/.default").token - return OIDCCallbackResult(access_token=token) - - - props = {"OIDC_CALLBACK": MyCallback()} - c = MongoClient(uri, authMechanism="MONGODB-OIDC", authMechanismProperties=props) - c.test.test.insert_one({}) - c.close() - -GCP GKE -^^^^^^^ - -For a Google Kubernetes Engine cluster with a `configured service account`_, the token can be read from the standard -service account token file location. - -.. code-block:: python - - import os - from pymongo.auth_oidc import OIDCCallback, OIDCCallbackContext, OIDCCallbackResult - - - class MyCallback(OIDCCallback): - def fetch(self, context: OIDCCallbackContext) -> OIDCCallbackResult: - with open("/var/run/secrets/kubernetes.io/serviceaccount/token") as fid: - token = fid.read() - return OIDCCallbackResult(access_token=token) - - - uri = os.environ["MONGODB_URI"] - props = {"OIDC_CALLBACK": MyCallback()} - c = MongoClient(uri, authMechanism="MONGODB-OIDC", authMechanismProperties=props) - c.test.test.insert_one({}) - c.close() - -.. _MONGODB-OIDC authentication mechanism: https://www.mongodb.com/docs/manual/core/security-oidc/ -.. _Azure Internal Metadata Service: https://learn.microsoft.com/en-us/azure/virtual-machines/instance-metadata-service -.. _configured on your MongoDB deployment: https://www.mongodb.com/docs/manual/reference/parameters/#mongodb-parameter-param.oidcIdentityProviders -.. _GCP Internal Metadata Service: https://cloud.google.com/compute/docs/metadata/querying-metadata -.. _azure-identity package: https://pypi.org/project/azure-identity/ -.. _configured service account: https://cloud.google.com/kubernetes-engine/docs/how-to/service-accounts diff --git a/doc/examples/bulk.rst b/doc/examples/bulk.rst deleted file mode 100644 index 3ed8e09645..0000000000 --- a/doc/examples/bulk.rst +++ /dev/null @@ -1,184 +0,0 @@ -Bulk Write Operations -===================== - -.. testsetup:: - - from pymongo import MongoClient - - client = MongoClient() - client.drop_database("bulk_example") - -This tutorial explains how to take advantage of PyMongo's bulk -write operation features. Executing write operations in batches -reduces the number of network round trips, increasing write -throughput. - -Bulk Insert ------------ - -.. versionadded:: 2.6 - -A batch of documents can be inserted by passing a list to the -:meth:`~pymongo.collection.Collection.insert_many` method. PyMongo -will automatically split the batch into smaller sub-batches based on -the maximum message size accepted by MongoDB, supporting very large -bulk insert operations. - -.. doctest:: - - >>> import pymongo - >>> db = pymongo.MongoClient().bulk_example - >>> db.test.insert_many([{"i": i} for i in range(10000)]).inserted_ids - [...] - >>> db.test.count_documents({}) - 10000 - -Mixed Bulk Write Operations ---------------------------- - -.. versionadded:: 2.7 - -PyMongo also supports executing mixed bulk write operations. 
A batch -of insert, update, and remove operations can be executed together using -the bulk write operations API. - -.. _ordered_bulk: - -Ordered Bulk Write Operations -............................. - -Ordered bulk write operations are batched and sent to the server in the -order provided for serial execution. The return value is an instance of -:class:`~pymongo.results.BulkWriteResult` describing the type and count -of operations performed. - -.. doctest:: - :options: +NORMALIZE_WHITESPACE - - >>> from pprint import pprint - >>> from pymongo import InsertOne, DeleteMany, ReplaceOne, UpdateOne - >>> result = db.test.bulk_write( - ... [ - ... DeleteMany({}), # Remove all documents from the previous example. - ... InsertOne({"_id": 1}), - ... InsertOne({"_id": 2}), - ... InsertOne({"_id": 3}), - ... UpdateOne({"_id": 1}, {"$set": {"foo": "bar"}}), - ... UpdateOne({"_id": 4}, {"$inc": {"j": 1}}, upsert=True), - ... ReplaceOne({"j": 1}, {"j": 2}), - ... ] - ... ) - >>> pprint(result.bulk_api_result) - {'nInserted': 3, - 'nMatched': 2, - 'nModified': 2, - 'nRemoved': 10000, - 'nUpserted': 1, - 'upserted': [{'_id': 4, 'index': 5}], - 'writeConcernErrors': [], - 'writeErrors': []} - -The first write failure that occurs (e.g. duplicate key error) aborts the -remaining operations, and PyMongo raises -:class:`~pymongo.errors.BulkWriteError`. The :attr:`details` attribute of -the exception instance provides the execution results up until the failure -occurred and details about the failure - including the operation that caused -the failure. - -.. doctest:: - :options: +NORMALIZE_WHITESPACE - - >>> from pymongo import InsertOne, DeleteOne, ReplaceOne - >>> from pymongo.errors import BulkWriteError - >>> requests = [ - ... ReplaceOne({"j": 2}, {"i": 5}), - ... InsertOne({"_id": 4}), # Violates the unique key constraint on _id. - ... DeleteOne({"i": 5}), - ... ] - >>> try: - ... db.test.bulk_write(requests) - ... except BulkWriteError as bwe: - ... pprint(bwe.details) - ... - {'nInserted': 0, - 'nMatched': 1, - 'nModified': 1, - 'nRemoved': 0, - 'nUpserted': 0, - 'upserted': [], - 'writeConcernErrors': [], - 'writeErrors': [{'code': 11000, - 'errmsg': '...E11000...duplicate key error...', - 'index': 1,... - 'op': {'_id': 4}}]} - -.. _unordered_bulk: - -Unordered Bulk Write Operations -............................... - -Unordered bulk write operations are batched and sent to the server in -**arbitrary order** where they may be executed in parallel. Any errors -that occur are reported after all operations are attempted. - -In the next example the first and third operations fail due to the unique -constraint on _id. Since we are doing unordered execution the second -and fourth operations succeed. - -.. doctest:: - :options: +NORMALIZE_WHITESPACE - - >>> requests = [ - ... InsertOne({"_id": 1}), - ... DeleteOne({"_id": 2}), - ... InsertOne({"_id": 3}), - ... ReplaceOne({"_id": 4}, {"i": 1}), - ... ] - >>> try: - ... db.test.bulk_write(requests, ordered=False) - ... except BulkWriteError as bwe: - ... pprint(bwe.details) - ... - {'nInserted': 0, - 'nMatched': 1, - 'nModified': 1, - 'nRemoved': 1, - 'nUpserted': 0, - 'upserted': [], - 'writeConcernErrors': [], - 'writeErrors': [{'code': 11000, - 'errmsg': '...E11000...duplicate key error...', - 'index': 0,... - 'op': {'_id': 1}}, - {'code': 11000, - 'errmsg': '...', - 'index': 2,... - 'op': {'_id': 3}}]} - -Write Concern -............. 
- -Bulk operations are executed with the -:attr:`~pymongo.collection.Collection.write_concern` of the collection they -are executed against. Write concern errors (e.g. wtimeout) will be reported -after all operations are attempted, regardless of execution order. - -:: - >>> from pymongo import WriteConcern - >>> coll = db.get_collection( - ... 'test', write_concern=WriteConcern(w=3, wtimeout=1)) - >>> try: - ... coll.bulk_write([InsertOne({'a': i}) for i in range(4)]) - ... except BulkWriteError as bwe: - ... pprint(bwe.details) - ... - {'nInserted': 4, - 'nMatched': 0, - 'nModified': 0, - 'nRemoved': 0, - 'nUpserted': 0, - 'upserted': [], - 'writeConcernErrors': [{'code': 64... - 'errInfo': {'wtimeout': True}, - 'errmsg': 'waiting for replication timed out'}], - 'writeErrors': []} diff --git a/doc/examples/client_bulk.rst b/doc/examples/client_bulk.rst deleted file mode 100644 index 447f09688f..0000000000 --- a/doc/examples/client_bulk.rst +++ /dev/null @@ -1,188 +0,0 @@ -Client Bulk Write Operations -============================= - -.. testsetup:: - - from pymongo import MongoClient - - client = MongoClient() - client.drop_database("client_bulk_example") - db = client.client_bulk_example - client.db.drop_collection("test_one") - client.db.drop_collection("test_two") - client.db.drop_collection("test_three") - client.db.drop_collection("test_four") - client.db.drop_collection("test_five") - client.db.drop_collection("test_six") - -The :meth:`~pymongo.mongo_client.MongoClient.bulk_write` -method has been added to :class:`~pymongo.mongo_client.MongoClient` in PyMongo 4.9. -This method enables users to perform batches of write operations **across -multiple namespaces** in a minimized number of round trips, and -to receive detailed results for each operation performed. - -.. note:: This method requires MongoDB server version 8.0+. - -Basic Usage ------------- - -A list of insert, update, and delete operations can be passed into the -:meth:`~pymongo.mongo_client.MongoClient.bulk_write` method. Each request -must include the namespace on which to perform the operation. - -PyMongo will automatically split the given requests into smaller sub-batches based on -the maximum message size accepted by MongoDB, supporting very large bulk write operations. - -The return value is an instance of -:class:`~pymongo.results.ClientBulkWriteResult`. - -.. _summary_client_bulk: - -Summary Results -................. - -By default, the returned :class:`~pymongo.results.ClientBulkWriteResult` instance will contain a -summary of the types of operations performed in the bulk write, along with their respective counts. - -.. doctest:: - :options: +NORMALIZE_WHITESPACE - - >>> from pymongo import InsertOne, DeleteOne, UpdateOne - >>> models = [ - ... InsertOne(namespace="db.test_one", document={"_id": 1}), - ... InsertOne(namespace="db.test_two", document={"_id": 2}), - ... DeleteOne(namespace="db.test_one", filter={"_id": 1}), - ... UpdateOne( - ... namespace="db.test_two", - ... filter={"_id": 4}, - ... update={"$inc": {"j": 1}}, - ... upsert=True, - ... ), - ... ] - >>> result = client.bulk_write(models) - >>> result.inserted_count - 2 - >>> result.deleted_count - 1 - >>> result.modified_count - 0 - >>> result.upserted_count - 1 - -.. _verbose_client_bulk: - -Verbose Results -................. 
- -If the ``verbose_results`` parameter is set to True, the returned :class:`~pymongo.results.ClientBulkWriteResult` -instance will also include detailed results about each successful operation performed as part of the bulk write. - -.. doctest:: - :options: +NORMALIZE_WHITESPACE - - >>> from pymongo import InsertOne, DeleteMany, ReplaceOne, UpdateMany - >>> models = [ - ... DeleteMany( - ... namespace="db.test_two", filter={} - ... ), # Delete all documents from the previous example - ... InsertOne(namespace="db.test_one", document={"_id": 1}), - ... InsertOne(namespace="db.test_one", document={"_id": 2}), - ... InsertOne(namespace="db.test_two", document={"_id": 3}), - ... UpdateMany(namespace="db.test_one", filter={}, update={"$set": {"foo": "bar"}}), - ... ReplaceOne( - ... namespace="db.test_two", filter={"j": 1}, replacement={"_id": 4}, upsert=True - ... ), - ... ] - >>> result = client.bulk_write(models, verbose_results=True) - >>> result.delete_results - {0: DeleteResult({'ok': 1.0, 'idx': 0, 'n': 2}, ...)} - >>> result.insert_results - {1: InsertOneResult(1, ...), - 2: InsertOneResult(2, ...), - 3: InsertOneResult(3, ...)} - >>> result.update_results - {4: UpdateResult({'ok': 1.0, 'idx': 4, 'n': 2, 'nModified': 2}, ...), - 5: UpdateResult({'ok': 1.0, 'idx': 5, 'n': 1, 'nModified': 0, 'upserted': {'_id': 4}}, ...)} - - -Handling Errors ----------------- - -If any errors occur during the bulk write, a :class:`~pymongo.errors.ClientBulkWriteException` will be raised. -If a server, connection, or network error occurred, the ``error`` field of the exception will contain -that error. - -Individual write errors or write concern errors get recorded in the ``write_errors`` and ``write_concern_errors`` fields of the exception. -The ``partial_result`` field gets populated with the results of any operations that were successfully completed before the exception was raised. - -.. _ordered_client_bulk: - -Ordered Operations -.................... - -In an ordered bulk write (the default), if an individual write fails, no further operations will get executed. -For example, a duplicate key error on the third operation below aborts the remaining two operations. - -.. doctest:: - :options: +NORMALIZE_WHITESPACE - - >>> from pymongo import InsertOne, DeleteOne - >>> from pymongo.errors import ClientBulkWriteException - >>> models = [ - ... InsertOne(namespace="db.test_three", document={"_id": 3}), - ... InsertOne(namespace="db.test_four", document={"_id": 4}), - ... InsertOne(namespace="db.test_three", document={"_id": 3}), # Duplicate _id - ... InsertOne(namespace="db.test_four", document={"_id": 5}), - ... DeleteOne(namespace="db.test_three", filter={"_id": 3}), - ... ] - >>> try: - ... client.bulk_write(models) - ... except ClientBulkWriteException as cbwe: - ... exception = cbwe - ... - >>> exception.write_errors - [{'ok': 0.0, - 'idx': 2, - 'code': 11000, - 'errmsg': 'E11000 duplicate key error ... dup key: { _id: 3 }', ... - 'op': {'insert': 0, 'document': {'_id': 3}}}] - >>> exception.partial_result.inserted_count - 2 - >>> exception.partial_result.deleted_count - 0 - -.. _unordered_client_bulk: - -Unordered Operations -..................... - -If the ``ordered`` parameter is set to False, all operations in the bulk write will be attempted, regardless of any individual write errors that occur. -For example, the fourth and fifth write operations below get executed successfully, despite the duplicate key error on the third operation. - -.. 
doctest:: - :options: +NORMALIZE_WHITESPACE - - >>> from pymongo import InsertOne, DeleteOne - >>> from pymongo.errors import ClientBulkWriteException - >>> models = [ - ... InsertOne(namespace="db.test_five", document={"_id": 5}), - ... InsertOne(namespace="db.test_six", document={"_id": 6}), - ... InsertOne(namespace="db.test_five", document={"_id": 5}), # Duplicate _id - ... InsertOne(namespace="db.test_six", document={"_id": 7}), - ... DeleteOne(namespace="db.test_five", filter={"_id": 5}), - ... ] - >>> try: - ... client.bulk_write(models, ordered=False) - ... except ClientBulkWriteException as cbwe: - ... exception = cbwe - ... - >>> exception.write_errors - [{'ok': 0.0, - 'idx': 2, - 'code': 11000, - 'errmsg': 'E11000 duplicate key error ... dup key: { _id: 5 }', ... - 'op': {'insert': 0, 'document': {'_id': 5}}}] - >>> exception.partial_result.inserted_count - 3 - >>> exception.partial_result.deleted_count - 1 diff --git a/doc/examples/collations.rst b/doc/examples/collations.rst deleted file mode 100644 index 45e647d816..0000000000 --- a/doc/examples/collations.rst +++ /dev/null @@ -1,134 +0,0 @@ -Collations -========== - -.. seealso:: The API docs for :mod:`~pymongo.collation`. - -Collations are a new feature in MongoDB version 3.4. They provide a set of rules -to use when comparing strings that comply with the conventions of a particular -language, such as Spanish or German. If no collation is specified, the server -sorts strings based on a binary comparison. Many languages have specific -ordering rules, and collations allow users to build applications that adhere to -language-specific comparison rules. - -In French, for example, the last accent in a given word determines the sorting -order. The correct sorting order for the following four words in French is:: - - cote < côte < coté < côté - -Specifying a French collation allows users to sort string fields using the -French sort order. - -Usage ------ - -Users can specify a collation for a -:ref:`collection`, an -:ref:`index`, or a -:ref:`CRUD command `. - -Collation Parameters: -~~~~~~~~~~~~~~~~~~~~~ - -Collations can be specified with the :class:`~pymongo.collation.Collation` model -or with plain Python dictionaries. The structure is the same:: - - Collation(locale=, - caseLevel=, - caseFirst=, - strength=, - numericOrdering=, - alternate=, - maxVariable=, - backwards=) - -The only required parameter is ``locale``, which the server parses as -an `ICU format locale ID `_. -For example, set ``locale`` to ``en_US`` to represent US English -or ``fr_CA`` to represent Canadian French. - -For a complete description of the available parameters, see the MongoDB `manual -`_. - -.. COMMENT add link for manual entry. - -.. _collation-on-collection: - -Assign a Default Collation to a Collection -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -The following example demonstrates how to create a new collection called -``contacts`` and assign a default collation with the ``fr_CA`` locale. This -operation ensures that all queries that are run against the ``contacts`` -collection use the ``fr_CA`` collation unless another collation is explicitly -specified:: - - from pymongo import MongoClient - from pymongo.collation import Collation - - db = MongoClient().test - collection = db.create_collection('contacts', - collation=Collation(locale='fr_CA')) - -.. _collation-on-index: - -Assign a Default Collation to an Index -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -When creating a new index, you can specify a default collation. 
- -The following example shows how to create an index on the ``name`` -field of the ``contacts`` collection, with the ``unique`` parameter -enabled and a default collation with ``locale`` set to ``fr_CA``:: - - from pymongo import MongoClient - from pymongo.collation import Collation - - contacts = MongoClient().test.contacts - contacts.create_index('name', - unique=True, - collation=Collation(locale='fr_CA')) - -.. _collation-on-operation: - -Specify a Collation for a Query -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Individual queries can specify a collation to use when sorting -results. The following example demonstrates a query that runs on the -``contacts`` collection in database ``test``. It matches on -documents that contain ``New York`` in the ``city`` field, -and sorts on the ``name`` field with the ``fr_CA`` collation:: - - from pymongo import MongoClient - from pymongo.collation import Collation - - collection = MongoClient().test.contacts - docs = collection.find({'city': 'New York'}).sort('name').collation( - Collation(locale='fr_CA')) - -Other Query Types -~~~~~~~~~~~~~~~~~ - -You can use collations to control document matching rules for several different -types of queries. All the various update and delete methods -(:meth:`~pymongo.collection.Collection.update_one`, -:meth:`~pymongo.collection.Collection.update_many`, -:meth:`~pymongo.collection.Collection.delete_one`, etc.) support collation, and -you can create query filters which employ collations to comply with any of the -languages and variants available to the ``locale`` parameter. - -The following example uses a collation with ``strength`` set to -:const:`~pymongo.collation.CollationStrength.SECONDARY`, which considers only -the base character and character accents in string comparisons, but not case -sensitivity, for example. All documents in the ``contacts`` collection with -``jürgen`` (case-insensitive) in the ``first_name`` field are updated:: - - from pymongo import MongoClient - from pymongo.collation import Collation, CollationStrength - - contacts = MongoClient().test.contacts - result = contacts.update_many( - {'first_name': 'jürgen'}, - {'$set': {'verified': 1}}, - collation=Collation(locale='de', - strength=CollationStrength.SECONDARY)) diff --git a/doc/examples/copydb.rst b/doc/examples/copydb.rst deleted file mode 100644 index b37677b5c2..0000000000 --- a/doc/examples/copydb.rst +++ /dev/null @@ -1,73 +0,0 @@ -Copying a Database -================== - -MongoDB >= 4.2 --------------- - -Starting in MongoDB version 4.2, the server removes the deprecated ``copydb`` command. -As an alternative, users can use ``mongodump`` and ``mongorestore`` (with the ``mongorestore`` -options ``--nsFrom`` and ``--nsTo``). - -For example, to copy the ``test`` database from a local instance running on the -default port 27017 to the ``examples`` database on the same instance, you can: - -#. Use ``mongodump`` to dump the test database to an archive ``mongodump-test-db``:: - - mongodump --archive="mongodump-test-db" --db=test - -#. Use ``mongorestore`` with ``--nsFrom`` and ``--nsTo`` to restore (with database name change) - from the archive:: - - mongorestore --archive="mongodump-test-db" --nsFrom='test.*' --nsTo='examples.*' - -Include additional options as necessary, such as to specify the uri or host, username, -password and authentication database. - -For more info about using ``mongodump`` and ``mongorestore`` see the `Copy a Database`_ example -in the official ``mongodump`` documentation. 
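-
-For example, a dump from a password-protected source might combine the options
-above (a sketch only; the host and credentials here are placeholders, not
-values from the original steps)::
-
-    mongodump --host=source.example.com --port=27017 \
-      --username=administrator --password=pwd --authenticationDatabase=admin \
-      --archive="mongodump-test-db" --db=test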
-
-MongoDB <= 4.0
---------------
-
-When using MongoDB <= 4.0, it is possible to use the deprecated ``copydb`` command
-to copy a database. To copy a database within a single ``mongod`` process, or
-between ``mongod`` servers, connect to the target ``mongod`` and use the
-:meth:`~pymongo.database.Database.command` method::
-
-    >>> from pymongo import MongoClient
-    >>> client = MongoClient('target.example.com')
-    >>> client.admin.command('copydb',
-                             fromdb='source_db_name',
-                             todb='target_db_name')
-
-To copy from a different mongod server that is not password-protected::
-
-    >>> client.admin.command('copydb',
-                             fromdb='source_db_name',
-                             todb='target_db_name',
-                             fromhost='source.example.com')
-
-If the target server is password-protected, authenticate to the "admin"
-database::
-
-    >>> client = MongoClient('target.example.com',
-    ...                      username='administrator',
-    ...                      password='pwd')
-    >>> client.admin.command('copydb',
-                             fromdb='source_db_name',
-                             todb='target_db_name',
-                             fromhost='source.example.com')
-
-See the :doc:`authentication examples `.
-
-If the **source** server is password-protected, use the `copyDatabase
-function in the mongo shell`_.
-
-Versions of PyMongo before 3.0 included a ``copy_database`` helper method,
-but it has been removed.
-
-.. _copyDatabase function in the mongo shell:
-   http://mongodb.com/docs/manual/reference/method/db.copyDatabase/
-
-.. _Copy a Database:
-   https://www.mongodb.com/docs/database-tools/mongodump/mongodump-examples/#copy-and-clone-databases
diff --git a/doc/examples/custom_type.rst b/doc/examples/custom_type.rst
deleted file mode 100644
index acf706deba..0000000000
--- a/doc/examples/custom_type.rst
+++ /dev/null
@@ -1,436 +0,0 @@
-Custom Type Example
-===================
-
-This is an example of using a custom type with PyMongo. The example here shows
-how to subclass :class:`~bson.codec_options.TypeCodec` to write a type
-codec, which is used to populate a :class:`~bson.codec_options.TypeRegistry`.
-The type registry can then be used to create a custom-type-aware
-:class:`~pymongo.collection.Collection`. Read and write operations
-issued against the resulting collection object transparently manipulate
-documents as they are saved to or retrieved from MongoDB.
-
-
-Setting Up
-----------
-
-We'll start by getting a clean database to use for the example:
-
-.. doctest::
-
-    >>> from pymongo import MongoClient
-    >>> client = MongoClient()
-    >>> client.drop_database("custom_type_example")
-    >>> db = client.custom_type_example
-
-
-Since the purpose of the example is to demonstrate working with custom types,
-we'll need a custom data type to use. For this example, we will be working with
-the :py:class:`~decimal.Decimal` type from Python's standard library. Since the
-BSON library's :class:`~bson.decimal128.Decimal128` type (which implements
-the IEEE 754 decimal128 decimal-based floating-point format) is
-distinct from Python's built-in :py:class:`~decimal.Decimal` type, attempting
-to save an instance of ``Decimal`` with PyMongo results in an
-:exc:`~bson.errors.InvalidDocument` exception.
-
-.. doctest::
-
-    >>> from decimal import Decimal
-    >>> num = Decimal("45.321")
-    >>> db.test.insert_one({"num": num})
-    Traceback (most recent call last):
-    ...
-    bson.errors.InvalidDocument: cannot encode object: Decimal('45.321'), of type:
-
-
-.. _custom-type-type-codec:
-
-The :class:`~bson.codec_options.TypeCodec` Class
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-..
versionadded:: 3.8 - -In order to encode a custom type, we must first define a **type codec** for -that type. A type codec describes how an instance of a custom type can be -*transformed* to and/or from one of the types :mod:`~bson` already understands. -Depending on the desired functionality, users must choose from the following -base classes when defining type codecs: - -* :class:`~bson.codec_options.TypeEncoder`: subclass this to define a codec that - encodes a custom Python type to a known BSON type. Users must implement the - ``python_type`` property/attribute and the ``transform_python`` method. -* :class:`~bson.codec_options.TypeDecoder`: subclass this to define a codec that - decodes a specified BSON type into a custom Python type. Users must implement - the ``bson_type`` property/attribute and the ``transform_bson`` method. -* :class:`~bson.codec_options.TypeCodec`: subclass this to define a codec that - can both encode and decode a custom type. Users must implement the - ``python_type`` and ``bson_type`` properties/attributes, as well as the - ``transform_python`` and ``transform_bson`` methods. - - -The type codec for our custom type simply needs to define how a -:py:class:`~decimal.Decimal` instance can be converted into a -:class:`~bson.decimal128.Decimal128` instance and vice-versa. Since we are -interested in both encoding and decoding our custom type, we use the -``TypeCodec`` base class to define our codec: - -.. doctest:: - - >>> from bson.decimal128 import Decimal128 - >>> from bson.codec_options import TypeCodec - >>> class DecimalCodec(TypeCodec): - ... python_type = Decimal # the Python type acted upon by this type codec - ... bson_type = Decimal128 # the BSON type acted upon by this type codec - ... def transform_python(self, value): - ... """Function that transforms a custom type value into a type - ... that BSON can encode.""" - ... return Decimal128(value) - ... def transform_bson(self, value): - ... """Function that transforms a vanilla BSON type value into our - ... custom type.""" - ... return value.to_decimal() - ... - >>> decimal_codec = DecimalCodec() - - -.. _custom-type-type-registry: - -The :class:`~bson.codec_options.TypeRegistry` Class -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. versionadded:: 3.8 - -Before we can begin encoding and decoding our custom type objects, we must -first inform PyMongo about the corresponding codec. This is done by creating -a :class:`~bson.codec_options.TypeRegistry` instance: - -.. doctest:: - - >>> from bson.codec_options import TypeRegistry - >>> type_registry = TypeRegistry([decimal_codec]) - - -Note that type registries can be instantiated with any number of type codecs. -Once instantiated, registries are immutable and the only way to add codecs -to a registry is to create a new one. - - -Putting It Together -------------------- - -Finally, we can define a :class:`~bson.codec_options.CodecOptions` instance -with our ``type_registry`` and use it to get a -:class:`~pymongo.collection.Collection` object that understands the -:py:class:`~decimal.Decimal` data type: - -.. doctest:: - - >>> from bson.codec_options import CodecOptions - >>> codec_options = CodecOptions(type_registry=type_registry) - >>> collection = db.get_collection("test", codec_options=codec_options) - - -Now, we can seamlessly encode and decode instances of -:py:class:`~decimal.Decimal`: - -.. 
doctest:: - - >>> collection.insert_one({"num": Decimal("45.321")}) - InsertOneResult(ObjectId('...'), acknowledged=True) - >>> mydoc = collection.find_one() - >>> import pprint - >>> pprint.pprint(mydoc) - {'_id': ObjectId('...'), 'num': Decimal('45.321')} - - -We can see what's actually being saved to the database by creating a fresh -collection object without the customized codec options and using that to query -MongoDB: - -.. doctest:: - - >>> vanilla_collection = db.get_collection("test") - >>> pprint.pprint(vanilla_collection.find_one()) - {'_id': ObjectId('...'), 'num': Decimal128('45.321')} - - -Encoding Subtypes -^^^^^^^^^^^^^^^^^ - -Consider the situation where, in addition to encoding -:py:class:`~decimal.Decimal`, we also need to encode a type that subclasses -``Decimal``. PyMongo does this automatically for types that inherit from -Python types that are BSON-encodable by default, but the type codec system -described above does not offer the same flexibility. - -Consider this subtype of ``Decimal`` that has a method to return its value as -an integer: - -.. doctest:: - - >>> class DecimalInt(Decimal): - ... def my_method(self): - ... """Method implementing some custom logic.""" - ... return int(self) - ... - -If we try to save an instance of this type without first registering a type -codec for it, we get an error: - -.. doctest:: - - >>> collection.insert_one({"num": DecimalInt("45.321")}) - Traceback (most recent call last): - ... - bson.errors.InvalidDocument: cannot encode object: Decimal('45.321'), of type: <class 'DecimalInt'> - -In order to proceed further, we must define a type codec for ``DecimalInt``. -This is trivial to do since the same transformation as the one used for -``Decimal`` is adequate for encoding ``DecimalInt`` as well: - -.. doctest:: - - >>> class DecimalIntCodec(DecimalCodec): - ... @property - ... def python_type(self): - ... """The Python type acted upon by this type codec.""" - ... return DecimalInt - ... - >>> decimalint_codec = DecimalIntCodec() - - -.. note:: - - No attempt is made to modify decoding behavior because without additional - information, it is impossible to discern which incoming - :class:`~bson.decimal128.Decimal128` value needs to be decoded as ``Decimal`` - and which needs to be decoded as ``DecimalInt``. This example only considers - the situation where a user wants to *encode* documents containing either - of these types. - -After creating a new codec options object and using it to get a collection -object, we can seamlessly encode instances of ``DecimalInt``: - -.. doctest:: - - >>> type_registry = TypeRegistry([decimal_codec, decimalint_codec]) - >>> codec_options = CodecOptions(type_registry=type_registry) - >>> collection = db.get_collection("test", codec_options=codec_options) - >>> collection.drop() - >>> collection.insert_one({"num": DecimalInt("45.321")}) - InsertOneResult(ObjectId('...'), acknowledged=True) - >>> mydoc = collection.find_one() - >>> pprint.pprint(mydoc) - {'_id': ObjectId('...'), 'num': Decimal('45.321')} - -Note that the ``transform_bson`` method of the base codec class results in -these values being decoded as ``Decimal`` (and not ``DecimalInt``). - - -.. _decoding-binary-types: - -Decoding :class:`~bson.binary.Binary` Types -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -The decoding treatment of :class:`~bson.binary.Binary` types having -``subtype = 0`` by the :mod:`bson` module varies slightly depending on the -version of the Python runtime in use. 
This must be taken into account while -writing a ``TypeDecoder`` that modifies how this datatype is decoded. - -On Python 3.x, :class:`~bson.binary.Binary` data (``subtype = 0``) is decoded -as a ``bytes`` instance: - -.. code-block:: pycon - - >>> # On Python 3.x. - >>> from bson.binary import Binary - >>> newcoll = db.get_collection("new") - >>> newcoll.insert_one({"_id": 1, "data": Binary(b"123", subtype=0)}) - >>> doc = newcoll.find_one() - >>> type(doc["data"]) - bytes - - -On Python 2.7.x, the same data is decoded as a :class:`~bson.binary.Binary` -instance: - -.. code-block:: pycon - - >>> # On Python 2.7.x - >>> newcoll = db.get_collection("new") - >>> doc = newcoll.find_one() - >>> type(doc["data"]) - bson.binary.Binary - - -As a consequence of this disparity, users must set the ``bson_type`` attribute -on their :class:`~bson.codec_options.TypeDecoder` classes differently, -depending on the Python version in use. - - -.. note:: - - For codebases requiring compatibility with both Python 2 and 3, type - decoders will have to be registered for both possible ``bson_type`` values. - - -.. _fallback-encoder-callable: - -The ``fallback_encoder`` Callable ---------------------------------- - -.. versionadded:: 3.8 - - -In addition to type codecs, users can also register a callable to encode types -that BSON doesn't recognize and for which no type codec has been registered. -This callable is the **fallback encoder** and, like the ``transform_python`` -method, it accepts an unencodable value as a parameter and returns a -BSON-encodable value. The following fallback encoder encodes Python's -:py:class:`~decimal.Decimal` type to a :class:`~bson.decimal128.Decimal128`: - -.. doctest:: - - >>> def fallback_encoder(value): - ... if isinstance(value, Decimal): - ... return Decimal128(value) - ... return value - ... - -After declaring the callback, we must create a type registry and codec options -with this fallback encoder before it can be used for initializing a collection: - -.. doctest:: - - >>> type_registry = TypeRegistry(fallback_encoder=fallback_encoder) - >>> codec_options = CodecOptions(type_registry=type_registry) - >>> collection = db.get_collection("test", codec_options=codec_options) - >>> collection.drop() - -We can now seamlessly encode instances of :py:class:`~decimal.Decimal`: - -.. doctest:: - - >>> collection.insert_one({"num": Decimal("45.321")}) - InsertOneResult(ObjectId('...'), acknowledged=True) - >>> mydoc = collection.find_one() - >>> pprint.pprint(mydoc) - {'_id': ObjectId('...'), 'num': Decimal128('45.321')} - - -.. note:: - - Fallback encoders are invoked *after* attempts to encode the given value - with standard BSON encoders and any configured type encoders have failed. - Therefore, in a type registry configured with a type encoder and fallback - encoder that both target the same custom type, the behavior specified in - the type encoder will prevail. - - -Because fallback encoders don't need to declare the types that they encode -beforehand, they can be used to support interesting use-cases that cannot be -serviced by ``TypeEncoder``. One such use-case is described in the next -section. - - -Encoding Unknown Types -^^^^^^^^^^^^^^^^^^^^^^ - -In this example, we demonstrate how a fallback encoder can be used to save -arbitrary objects to the database. We will use the standard library's -:py:mod:`pickle` module to serialize the unknown types, so naturally this -approach only works for types that are picklable. 
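Before wiring :py:mod:`pickle` into the codec machinery, it may help to see the round trip in isolation. A minimal sketch (the ``Point`` class is a hypothetical stand-in for any picklable object):

.. code-block:: python

    import pickle


    class Point:
        def __init__(self, x, y):
            self.x, self.y = x, y


    # pickle.dumps() produces bytes, which can be wrapped in a BSON Binary;
    # pickle.loads() restores an equivalent Python object.
    raw = pickle.dumps(Point(1, 2))
    restored = pickle.loads(raw)
    assert (restored.x, restored.y) == (1, 2)

The fallback encoder below does exactly this dumps-and-wrap step for every value it receives.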
- -We start by defining some arbitrary custom types: - -.. code-block:: python - - class MyStringType(object): - def __init__(self, value): - self.__value = value - - def __repr__(self): - return "MyStringType('%s')" % (self.__value,) - - - class MyNumberType(object): - def __init__(self, value): - self.__value = value - - def __repr__(self): - return "MyNumberType(%s)" % (self.__value,) - -We also define a fallback encoder that pickles whatever objects it receives -and returns them as :class:`~bson.binary.Binary` instances with a custom -subtype. The custom subtype, in turn, allows us to write a TypeDecoder that -identifies pickled artifacts upon retrieval and transparently decodes them -back into Python objects: - -.. code-block:: python - - import pickle - from bson.binary import Binary, USER_DEFINED_SUBTYPE - - - def fallback_pickle_encoder(value): - return Binary(pickle.dumps(value), USER_DEFINED_SUBTYPE) - - - class PickledBinaryDecoder(TypeDecoder): - bson_type = Binary - - def transform_bson(self, value): - if value.subtype == USER_DEFINED_SUBTYPE: - return pickle.loads(value) - return value - - -.. note:: - - The above example is written assuming the use of Python 3. If you are using - Python 2, ``bson_type`` must be set to ``Binary``. See the - :ref:`decoding-binary-types` section for a detailed explanation. - - -Finally, we create a ``CodecOptions`` instance: - -.. code-block:: python - - codec_options = CodecOptions( - type_registry=TypeRegistry( - [PickledBinaryDecoder()], fallback_encoder=fallback_pickle_encoder - ) - ) - -We can now round trip our custom objects to MongoDB: - -.. code-block:: python - - collection = db.get_collection("test_fe", codec_options=codec_options) - collection.insert_one( - {"_id": 1, "str": MyStringType("hello world"), "num": MyNumberType(2)} - ) - mydoc = collection.find_one() - assert isinstance(mydoc["str"], MyStringType) - assert isinstance(mydoc["num"], MyNumberType) - - -Limitations ----------- - -PyMongo's type codec and fallback encoder features have the following -limitations: - -#. Users cannot customize the encoding behavior of Python types that PyMongo - already understands like ``int`` and ``str`` (the 'built-in types'). - Attempting to instantiate a type registry with one or more codecs that act - upon a built-in type results in a ``TypeError``. This limitation extends - to all subtypes of the standard types. -#. Chaining type encoders is not supported. A custom type value, once - transformed by a codec's ``transform_python`` method, *must* result in a - type that is either BSON-encodable by default, or can be - transformed by the fallback encoder into something BSON-encodable--it - *cannot* be transformed a second time by a different type codec. -#. The :meth:`~pymongo.database.Database.command` method does not apply the - user's TypeDecoders while decoding the command response document. -#. :mod:`gridfs` does not apply custom type encoding or decoding to any - documents received from or returned to the user. diff --git a/doc/examples/datetimes.rst b/doc/examples/datetimes.rst deleted file mode 100644 index a8c0476903..0000000000 --- a/doc/examples/datetimes.rst +++ /dev/null @@ -1,177 +0,0 @@ -Datetimes and Timezones -======================= - -.. 
testsetup:: - - import datetime - from pymongo import MongoClient - from bson.codec_options import CodecOptions - - client = MongoClient() - client.drop_database("dt_example") - db = client.dt_example - -These examples show how to handle Python :class:`datetime.datetime` objects -correctly in PyMongo. - -Basic Usage ----------- - -PyMongo uses :class:`datetime.datetime` objects for representing dates and times -in MongoDB documents. Because MongoDB assumes that dates and times are in UTC, -care should be taken to ensure that dates and times written to the database -reflect UTC. For example, the following code stores the current UTC date and -time into MongoDB: - -.. doctest:: - - >>> result = db.objects.insert_one( - ... {"last_modified": datetime.datetime.now(tz=datetime.timezone.utc)} - ... ) - -Always use :meth:`datetime.datetime.now(tz=datetime.timezone.utc)`, which explicitly returns the current time in -UTC, instead of :meth:`datetime.datetime.now` with no arguments, which returns the current local -time. Avoid doing this: - -.. doctest:: - - >>> result = db.objects.insert_one({"last_modified": datetime.datetime.now()}) - -The value for ``last_modified`` is very different between these two examples, even -though both documents were stored at around the same local time. This will be -confusing to the application that reads them: - -.. doctest:: - - >>> [doc["last_modified"] for doc in db.objects.find()] # doctest: +SKIP - [datetime.datetime(2015, 7, 8, 18, 17, 28, 324000), - datetime.datetime(2015, 7, 8, 11, 17, 42, 911000)] - -:class:`bson.codec_options.CodecOptions` has a ``tz_aware`` option that enables -"aware" :class:`datetime.datetime` objects, i.e., datetimes that know what -timezone they're in. By default, PyMongo retrieves naive datetimes: - -.. doctest:: - - >>> result = db.tzdemo.insert_one({"date": datetime.datetime(2002, 10, 27, 6, 0, 0)}) - >>> db.tzdemo.find_one()["date"] - datetime.datetime(2002, 10, 27, 6, 0) - >>> options = CodecOptions(tz_aware=True) - >>> db.get_collection("tzdemo", codec_options=options).find_one()["date"] # doctest: +SKIP - datetime.datetime(2002, 10, 27, 6, 0, - tzinfo=<bson.tz_util.FixedOffset object at 0x...>) - -Saving Datetimes with Timezones ------------------------------- - -When storing :class:`datetime.datetime` objects that specify a timezone -(i.e. they have a ``tzinfo`` property that isn't ``None``), PyMongo will convert -those datetimes to UTC automatically: - -.. doctest:: - - >>> from zoneinfo import ZoneInfo - >>> from datetime import datetime - >>> aware_datetime = datetime(2002, 10, 27, 6, 0, 0, tzinfo=ZoneInfo("US/Pacific")) - >>> result = db.times.insert_one({"date": aware_datetime}) - >>> db.times.find_one()["date"] - datetime.datetime(2002, 10, 27, 14, 0) - -Reading Time ------------ - -As previously mentioned, by default all :class:`datetime.datetime` objects -returned by PyMongo will be naive but reflect UTC (i.e. the time as stored in -MongoDB). By setting the ``tz_aware`` option on -:class:`~bson.codec_options.CodecOptions`, :class:`datetime.datetime` objects -will be timezone-aware and have a ``tzinfo`` property that reflects the UTC -timezone. - -PyMongo 3.1 introduced a ``tzinfo`` property that can be set on -:class:`~bson.codec_options.CodecOptions` to convert :class:`datetime.datetime` -objects to local time automatically. 
For example, if we wanted to read all times -out of MongoDB in US/Pacific time:: - - >>> from bson.codec_options import CodecOptions - >>> db.times.find_one()['date'] - datetime.datetime(2002, 10, 27, 14, 0) - >>> aware_times = db.times.with_options(codec_options=CodecOptions( - ... tz_aware=True, - ... tzinfo=ZoneInfo("US/Pacific"))) - >>> aware_times.find_one()['date'] - datetime.datetime(2002, 10, 27, 6, 0, # doctest: +NORMALIZE_WHITESPACE - tzinfo=zoneinfo.ZoneInfo(key='US/Pacific')) - -.. _handling-out-of-range-datetimes: - -Handling out of range datetimes ------------------------------- - -Python's :class:`~datetime.datetime` can only represent datetimes within the -range allowed by -:attr:`~datetime.datetime.min` and :attr:`~datetime.datetime.max`, whereas -the range of datetimes allowed in BSON can represent any 64-bit number -of milliseconds from the Unix epoch. To deal with this, we can use the -:class:`bson.datetime_ms.DatetimeMS` object, which is a wrapper for the -:class:`int` built-in. - -To decode UTC datetime values as :class:`~bson.datetime_ms.DatetimeMS`, -:class:`~bson.codec_options.CodecOptions` should have its -``datetime_conversion`` parameter set to one of the options available in -:class:`bson.datetime_ms.DatetimeConversion`. These include -:attr:`~bson.datetime_ms.DatetimeConversion.DATETIME`, -:attr:`~bson.datetime_ms.DatetimeConversion.DATETIME_MS`, -:attr:`~bson.datetime_ms.DatetimeConversion.DATETIME_AUTO`, and -:attr:`~bson.datetime_ms.DatetimeConversion.DATETIME_CLAMP`. -:attr:`~bson.datetime_ms.DatetimeConversion.DATETIME` is the default -option and raises an :class:`OverflowError` upon -attempting to decode an out-of-range date. -:attr:`~bson.datetime_ms.DatetimeConversion.DATETIME_MS` will only return -:class:`~bson.datetime_ms.DatetimeMS` objects, regardless of whether the -represented datetime is in- or out-of-range: - -.. doctest:: - - >>> from datetime import datetime - >>> from bson import encode, decode - >>> from bson.datetime_ms import DatetimeMS - >>> from bson.codec_options import CodecOptions, DatetimeConversion - >>> x = encode({"x": datetime(1970, 1, 1)}) - >>> codec_ms = CodecOptions(datetime_conversion=DatetimeConversion.DATETIME_MS) - >>> decode(x, codec_options=codec_ms) - {'x': DatetimeMS(0)} - -:attr:`~bson.datetime_ms.DatetimeConversion.DATETIME_AUTO` will return -:class:`~datetime.datetime` if the underlying UTC datetime is within range, -or :class:`~bson.datetime_ms.DatetimeMS` if the underlying datetime -cannot be represented using the builtin Python :class:`~datetime.datetime`: - -.. doctest:: - - >>> x = encode({"x": datetime(1970, 1, 1)}) - >>> y = encode({"x": DatetimeMS(-(2**62))}) - >>> codec_auto = CodecOptions(datetime_conversion=DatetimeConversion.DATETIME_AUTO) - >>> decode(x, codec_options=codec_auto) - {'x': datetime.datetime(1970, 1, 1, 0, 0)} - >>> decode(y, codec_options=codec_auto) - {'x': DatetimeMS(-4611686018427387904)} - -:attr:`~bson.datetime_ms.DatetimeConversion.DATETIME_CLAMP` will clamp -resulting :class:`~datetime.datetime` objects to be within -:attr:`~datetime.datetime.min` and :attr:`~datetime.datetime.max` -(trimmed to ``999000`` microseconds): - -.. 
doctest:: - - >>> x = encode({"x": DatetimeMS(2**62)}) - >>> y = encode({"x": DatetimeMS(-(2**62))}) - >>> codec_clamp = CodecOptions(datetime_conversion=DatetimeConversion.DATETIME_CLAMP) - >>> decode(x, codec_options=codec_clamp) - {'x': datetime.datetime(9999, 12, 31, 23, 59, 59, 999000)} - >>> decode(y, codec_options=codec_clamp) - {'x': datetime.datetime(1, 1, 1, 0, 0)} - -:class:`~bson.datetime_ms.DatetimeMS` objects support rich comparison -methods against other instances of :class:`~bson.datetime_ms.DatetimeMS`. -They can also be converted to :class:`~datetime.datetime` objects with -:meth:`~bson.datetime_ms.DatetimeMS.to_datetime()`. diff --git a/doc/examples/encryption.rst b/doc/examples/encryption.rst deleted file mode 100644 index 338b177be3..0000000000 --- a/doc/examples/encryption.rst +++ /dev/null @@ -1,844 +0,0 @@ -.. _In-Use Encryption: - -In-Use Encryption ================= - -.. _Client-Side Field Level Encryption: - -Client-Side Field Level Encryption ---------------------------------- - -New in MongoDB 4.2, client-side field level encryption allows an application -to encrypt specific data fields in addition to pre-existing MongoDB -encryption features such as `Encryption at Rest -`_ and -`TLS/SSL (Transport Encryption) -`_. - -With field level encryption, applications can encrypt fields in documents -*prior* to transmitting data over the wire to the server. Client-side field -level encryption supports workloads where applications must guarantee that -unauthorized parties, including server administrators, cannot read the -encrypted data. - -.. seealso:: The MongoDB documentation on `Client Side Field Level Encryption `_. - -Dependencies -~~~~~~~~~~~~ - -To get started using client-side field level encryption in your project, -you will need to install the -`pymongocrypt `_ and -`pymongo-auth-aws `_ libraries -as well as the driver itself. Install both the driver and a compatible -version of the dependencies like this:: - - $ python -m pip install 'pymongo[encryption]' - -Note that installing on Linux requires pip 19 or later for manylinux2010 wheel -support. For more information about installing pymongocrypt see -`the installation instructions on the project's PyPI page -`_. - -Additionally, either `crypt_shared`_ or `mongocryptd`_ is required in order -to use *automatic* client-side encryption. - -crypt_shared ```````````` - -The Automatic Encryption Shared Library (crypt_shared) provides the same -functionality as `mongocryptd`_, but does not require you to spawn another -process to perform automatic encryption. - -By default, pymongo attempts to load crypt_shared from the system and, if -found, uses it automatically. To load crypt_shared from another location, -use the ``crypt_shared_lib_path`` argument to -:class:`~pymongo.encryption_options.AutoEncryptionOpts`. -If pymongo cannot load crypt_shared it will attempt to fall back to using -`mongocryptd`_ by default. Set ``crypt_shared_lib_required=True`` to make -the app always use crypt_shared and fail if it cannot be loaded. - -For detailed installation instructions see -`the MongoDB documentation on Automatic Encryption Shared Library -`_. - -mongocryptd ``````````` - -The ``mongocryptd`` binary is required for automatic client-side encryption -and is included as a component in the `MongoDB Enterprise Server package -`_. -For detailed installation instructions see -`the MongoDB documentation on mongocryptd -`_. 
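Tying the two options together: the following is a minimal sketch of forcing the crypt_shared library described above, so that the driver never falls back to spawning ``mongocryptd``. The library path is hypothetical, and ``kms_providers`` and ``key_vault_namespace`` are assumed to be configured as in the examples later in this document:

.. code-block:: python

    from pymongo.encryption_options import AutoEncryptionOpts

    auto_encryption_opts = AutoEncryptionOpts(
        kms_providers,
        key_vault_namespace,
        # Load crypt_shared from an explicit location instead of the
        # system default search path (hypothetical path).
        crypt_shared_lib_path="/opt/mongo/lib/mongo_crypt_v1.so",
        # Fail fast if crypt_shared cannot be loaded, rather than
        # silently falling back to spawning mongocryptd.
        crypt_shared_lib_required=True,
    )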
- -``mongocryptd`` performs the following: - -- Parses the automatic encryption rules specified to the database connection. - If the JSON schema contains invalid automatic encryption syntax or any - document validation syntax, ``mongocryptd`` returns an error. -- Uses the specified automatic encryption rules to mark fields in read and - write operations for encryption. -- Rejects read/write operations that may return unexpected or incorrect results - when applied to an encrypted field. For supported and unsupported operations, - see `Read/Write Support with Automatic Field Level Encryption -`_. - -A MongoClient configured with auto encryption will automatically spawn the -``mongocryptd`` process from the application's ``PATH``. Applications can -control the spawning behavior as part of the automatic encryption options. -For example, to set the path to the ``mongocryptd`` process:: - - auto_encryption_opts = AutoEncryptionOpts( - ..., - mongocryptd_spawn_path='/path/to/mongocryptd') - -To control the logging output of ``mongocryptd``, pass options using -``mongocryptd_spawn_args``:: - - auto_encryption_opts = AutoEncryptionOpts( - ..., - mongocryptd_spawn_args=['--logpath=/path/to/mongocryptd.log', '--logappend']) - -If your application wishes to manage the ``mongocryptd`` process manually, -it is possible to disable spawning ``mongocryptd``:: - - auto_encryption_opts = AutoEncryptionOpts( - ..., - mongocryptd_bypass_spawn=True, - # URI of the local ``mongocryptd`` process. - mongocryptd_uri='mongodb://localhost:27020') - -``mongocryptd`` is only responsible for supporting automatic client-side field -level encryption and does not itself perform any encryption or decryption. - -.. _automatic-client-side-encryption: - -Automatic Client-Side Field Level Encryption -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Automatic client-side field level encryption is enabled by creating a -:class:`~pymongo.mongo_client.MongoClient` with the ``auto_encryption_opts`` -option set to an instance of -:class:`~pymongo.encryption_options.AutoEncryptionOpts`. The following -examples show how to set up automatic client-side field level encryption -using :class:`~pymongo.encryption.ClientEncryption` to create a new -encryption data key. - -.. note:: Automatic client-side field level encryption requires MongoDB >=4.2 - enterprise or a MongoDB >=4.2 Atlas cluster. The community version of the - server supports automatic decryption as well as - :ref:`explicit-client-side-encryption`. - -Providing Local Automatic Encryption Rules `````````````````````````````````````````` - -The following example shows how to specify automatic encryption rules via the -``schema_map`` option. The automatic encryption rules are expressed using a -`strict subset of the JSON Schema syntax -`_. - -Supplying a ``schema_map`` provides more security than relying on -JSON Schemas obtained from the server. It protects against a -malicious server advertising a false JSON Schema, which could trick -the client into sending unencrypted data that should be encrypted. - -JSON Schemas supplied in the ``schema_map`` only apply to configuring -automatic client-side field level encryption. Other validation -rules in the JSON schema will not be enforced by the driver and -will result in an error. - -.. 
code-block:: python - - import os - from bson.codec_options import CodecOptions - from bson import json_util - from pymongo import MongoClient - from pymongo.encryption import Algorithm, ClientEncryption - from pymongo.encryption_options import AutoEncryptionOpts - - - def create_json_schema_file(kms_providers, key_vault_namespace, key_vault_client): - client_encryption = ClientEncryption( - kms_providers, - key_vault_namespace, - key_vault_client, - # The CodecOptions class used for encrypting and decrypting. - # This should be the same CodecOptions instance you have configured - # on MongoClient, Database, or Collection. We will not be calling - # encrypt() or decrypt() in this example so we can use any - # CodecOptions. - CodecOptions(), - ) - - # Create a new data key and json schema for the encryptedField. - # https://dochub.mongodb.org/core/client-side-field-level-encryption-automatic-encryption-rules - data_key_id = client_encryption.create_data_key( - "local", key_alt_names=["pymongo_encryption_example_1"] - ) - schema = { - "properties": { - "encryptedField": { - "encrypt": { - "keyId": [data_key_id], - "bsonType": "string", - "algorithm": Algorithm.AEAD_AES_256_CBC_HMAC_SHA_512_Deterministic, - } - } - }, - "bsonType": "object", - } - # Use CANONICAL_JSON_OPTIONS so that other drivers and tools will be - # able to parse the MongoDB extended JSON file. - json_schema_string = json_util.dumps( - schema, json_options=json_util.CANONICAL_JSON_OPTIONS - ) - - with open("jsonSchema.json", "w") as file: - file.write(json_schema_string) - - - def main(): - # The MongoDB namespace (db.collection) used to store the - # encrypted documents in this example. - encrypted_namespace = "test.coll" - - # This must be the same master key that was used to create - # the encryption key. - local_master_key = os.urandom(96) - kms_providers = {"local": {"key": local_master_key}} - - # The MongoDB namespace (db.collection) used to store - # the encryption data keys. - key_vault_namespace = "encryption.__pymongoTestKeyVault" - key_vault_db_name, key_vault_coll_name = key_vault_namespace.split(".", 1) - - # The MongoClient used to access the key vault (key_vault_namespace). - key_vault_client = MongoClient() - key_vault = key_vault_client[key_vault_db_name][key_vault_coll_name] - # Ensure that two data keys cannot share the same keyAltName. - key_vault.drop() - key_vault.create_index( - "keyAltNames", - unique=True, - partialFilterExpression={"keyAltNames": {"$exists": True}}, - ) - - create_json_schema_file(kms_providers, key_vault_namespace, key_vault_client) - - # Load the JSON Schema and construct the local schema_map option. 
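        # (Added note: the schema_map keys are full "database.collection"
        # namespaces, "test.coll" here, so each collection can carry its
        # own automatic encryption rules.)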
- with open("jsonSchema.json", "r") as file: - json_schema_string = file.read() - json_schema = json_util.loads(json_schema_string) - schema_map = {encrypted_namespace: json_schema} - - auto_encryption_opts = AutoEncryptionOpts( - kms_providers, key_vault_namespace, schema_map=schema_map - ) - - client = MongoClient(auto_encryption_opts=auto_encryption_opts) - db_name, coll_name = encrypted_namespace.split(".", 1) - coll = client[db_name][coll_name] - # Clear old data - coll.drop() - - coll.insert_one({"encryptedField": "123456789"}) - print("Decrypted document: %s" % (coll.find_one(),)) - unencrypted_coll = MongoClient()[db_name][coll_name] - print("Encrypted document: %s" % (unencrypted_coll.find_one(),)) - - - if __name__ == "__main__": - main() - - -Server-Side Field Level Encryption Enforcement `````````````````````````````````````````````` - -MongoDB >=4.2 servers support using schema validation to enforce encryption -of specific fields in a collection. This schema validation will prevent an -application from inserting unencrypted values for any fields marked with the -``"encrypt"`` JSON schema keyword. - -The following example shows how to set up automatic client-side field level -encryption using -:class:`~pymongo.encryption.ClientEncryption` to create a new encryption -data key and create a collection with the -`Automatic Encryption JSON Schema Syntax -`_: - -.. code-block:: python - - import os - - from bson.codec_options import CodecOptions - from bson.binary import STANDARD - - from pymongo import MongoClient - from pymongo.encryption import Algorithm, ClientEncryption - from pymongo.encryption_options import AutoEncryptionOpts - from pymongo.errors import OperationFailure - from pymongo.write_concern import WriteConcern - - - def main(): - # The MongoDB namespace (db.collection) used to store the - # encrypted documents in this example. - encrypted_namespace = "test.coll" - - # This must be the same master key that was used to create - # the encryption key. - local_master_key = os.urandom(96) - kms_providers = {"local": {"key": local_master_key}} - - # The MongoDB namespace (db.collection) used to store - # the encryption data keys. - key_vault_namespace = "encryption.__pymongoTestKeyVault" - key_vault_db_name, key_vault_coll_name = key_vault_namespace.split(".", 1) - - # The MongoClient used to access the key vault (key_vault_namespace). - key_vault_client = MongoClient() - key_vault = key_vault_client[key_vault_db_name][key_vault_coll_name] - # Ensure that two data keys cannot share the same keyAltName. - key_vault.drop() - key_vault.create_index( - "keyAltNames", - unique=True, - partialFilterExpression={"keyAltNames": {"$exists": True}}, - ) - - client_encryption = ClientEncryption( - kms_providers, - key_vault_namespace, - key_vault_client, - # The CodecOptions class used for encrypting and decrypting. - # This should be the same CodecOptions instance you have configured - # on MongoClient, Database, or Collection. We will not be calling - # encrypt() or decrypt() in this example so we can use any - # CodecOptions. - CodecOptions(), - ) - - # Create a new data key and json schema for the encryptedField. 
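        # (Added note: key_alt_names gives the new key a stable, human-readable
        # alias; the unique partial index created on the key vault above is
        # what guarantees an alias cannot be claimed by two keys.)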
- data_key_id = client_encryption.create_data_key( - "local", key_alt_names=["pymongo_encryption_example_2"] - ) - json_schema = { - "properties": { - "encryptedField": { - "encrypt": { - "keyId": [data_key_id], - "bsonType": "string", - "algorithm": Algorithm.AEAD_AES_256_CBC_HMAC_SHA_512_Deterministic, - } - } - }, - "bsonType": "object", - } - - auto_encryption_opts = AutoEncryptionOpts(kms_providers, key_vault_namespace) - client = MongoClient(auto_encryption_opts=auto_encryption_opts) - db_name, coll_name = encrypted_namespace.split(".", 1) - db = client[db_name] - # Clear old data - db.drop_collection(coll_name) - # Create the collection with the encryption JSON Schema. - db.create_collection( - coll_name, - # uuid_representation=STANDARD is required to ensure that any - # UUIDs in the $jsonSchema document are encoded to BSON Binary - # with the standard UUID subtype 4. This is only needed when - # running the "create" collection command with an encryption - # JSON Schema. - codec_options=CodecOptions(uuid_representation=STANDARD), - write_concern=WriteConcern(w="majority"), - validator={"$jsonSchema": json_schema}, - ) - coll = client[db_name][coll_name] - - coll.insert_one({"encryptedField": "123456789"}) - print("Decrypted document: %s" % (coll.find_one(),)) - unencrypted_coll = MongoClient()[db_name][coll_name] - print("Encrypted document: %s" % (unencrypted_coll.find_one(),)) - try: - unencrypted_coll.insert_one({"encryptedField": "123456789"}) - except OperationFailure as exc: - print("Unencrypted insert failed: %s" % (exc.details,)) - - - if __name__ == "__main__": - main() - - -.. _explicit-client-side-encryption: - -Explicit Encryption -~~~~~~~~~~~~~~~~~~~ - -Explicit encryption is a MongoDB community feature and does not use the -``mongocryptd`` process. Explicit encryption is provided by the -:class:`~pymongo.encryption.ClientEncryption` class, for example: - -.. code-block:: python - - import os - - from pymongo import MongoClient - from pymongo.encryption import Algorithm, ClientEncryption - - - def main(): - # This must be the same master key that was used to create - # the encryption key. - local_master_key = os.urandom(96) - kms_providers = {"local": {"key": local_master_key}} - - # The MongoDB namespace (db.collection) used to store - # the encryption data keys. - key_vault_namespace = "encryption.__pymongoTestKeyVault" - key_vault_db_name, key_vault_coll_name = key_vault_namespace.split(".", 1) - - # The MongoClient used to read/write application data. - client = MongoClient() - coll = client.test.coll - # Clear old data - coll.drop() - - # Set up the key vault (key_vault_namespace) for this example. - key_vault = client[key_vault_db_name][key_vault_coll_name] - # Ensure that two data keys cannot share the same keyAltName. - key_vault.drop() - key_vault.create_index( - "keyAltNames", - unique=True, - partialFilterExpression={"keyAltNames": {"$exists": True}}, - ) - - client_encryption = ClientEncryption( - kms_providers, - key_vault_namespace, - # The MongoClient to use for reading/writing to the key vault. - # This can be the same MongoClient used by the main application. - client, - # The CodecOptions class used for encrypting and decrypting. - # This should be the same CodecOptions instance you have configured - # on MongoClient, Database, or Collection. - coll.codec_options, - ) - - # Create a new data key for the encryptedField. 
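        # (Added note: create_data_key() returns the _id of the key document
        # it stores in the key vault, a BSON Binary UUID; that value is what
        # encrypt() expects as key_id below.)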
- data_key_id = client_encryption.create_data_key( - "local", key_alt_names=["pymongo_encryption_example_3"] - ) - - # Explicitly encrypt a field: - encrypted_field = client_encryption.encrypt( - "123456789", - Algorithm.AEAD_AES_256_CBC_HMAC_SHA_512_Deterministic, - key_id=data_key_id, - ) - coll.insert_one({"encryptedField": encrypted_field}) - doc = coll.find_one() - print("Encrypted document: %s" % (doc,)) - - # Explicitly decrypt the field: - doc["encryptedField"] = client_encryption.decrypt(doc["encryptedField"]) - print("Decrypted document: %s" % (doc,)) - - # Cleanup resources. - client_encryption.close() - client.close() - - - if __name__ == "__main__": - main() - - -Explicit Encryption with Automatic Decryption -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Although automatic encryption requires MongoDB >=4.2 enterprise or a -MongoDB >=4.2 Atlas cluster, automatic *decryption* is supported for all users. -To configure automatic *decryption* without automatic *encryption*, set -``bypass_auto_encryption=True`` in -:class:`~pymongo.encryption_options.AutoEncryptionOpts`: - -.. code-block:: python - - import os - - from pymongo import MongoClient - from pymongo.encryption import Algorithm, ClientEncryption - from pymongo.encryption_options import AutoEncryptionOpts - - - def main(): - # This must be the same master key that was used to create - # the encryption key. - local_master_key = os.urandom(96) - kms_providers = {"local": {"key": local_master_key}} - - # The MongoDB namespace (db.collection) used to store - # the encryption data keys. - key_vault_namespace = "encryption.__pymongoTestKeyVault" - key_vault_db_name, key_vault_coll_name = key_vault_namespace.split(".", 1) - - # bypass_auto_encryption=True disables automatic encryption but keeps - # the automatic _decryption_ behavior. bypass_auto_encryption will - # also disable spawning mongocryptd. - auto_encryption_opts = AutoEncryptionOpts( - kms_providers, key_vault_namespace, bypass_auto_encryption=True - ) - - client = MongoClient(auto_encryption_opts=auto_encryption_opts) - coll = client.test.coll - # Clear old data - coll.drop() - - # Set up the key vault (key_vault_namespace) for this example. - key_vault = client[key_vault_db_name][key_vault_coll_name] - # Ensure that two data keys cannot share the same keyAltName. - key_vault.drop() - key_vault.create_index( - "keyAltNames", - unique=True, - partialFilterExpression={"keyAltNames": {"$exists": True}}, - ) - - client_encryption = ClientEncryption( - kms_providers, - key_vault_namespace, - # The MongoClient to use for reading/writing to the key vault. - # This can be the same MongoClient used by the main application. - client, - # The CodecOptions class used for encrypting and decrypting. - # This should be the same CodecOptions instance you have configured - # on MongoClient, Database, or Collection. - coll.codec_options, - ) - - # Create a new data key for the encryptedField. - data_key_id = client_encryption.create_data_key( - "local", key_alt_names=["pymongo_encryption_example_4"] - ) - - # Explicitly encrypt a field: - encrypted_field = client_encryption.encrypt( - "123456789", - Algorithm.AEAD_AES_256_CBC_HMAC_SHA_512_Deterministic, - key_alt_name="pymongo_encryption_example_4", - ) - coll.insert_one({"encryptedField": encrypted_field}) - # Automatically decrypts any encrypted fields. 
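        # (Added note: no ClientEncryption.decrypt() call is needed for the
        # read below; bypass_auto_encryption=True disables automatic
        # encryption only, so results are still decrypted transparently.)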
- doc = coll.find_one() - print("Decrypted document: %s" % (doc,)) - unencrypted_coll = MongoClient().test.coll - print("Encrypted document: %s" % (unencrypted_coll.find_one(),)) - - # Cleanup resources. - client_encryption.close() - client.close() - - - if __name__ == "__main__": - main() - - -.. _CSFLE on-demand credentials: - - -CSFLE on-demand credentials ~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -``pymongocrypt`` 1.4 adds support for fetching on-demand KMS credentials for -AWS, GCP, and Azure cloud environments. - -To have the driver obtain credentials from the environment, add the appropriate key ("aws", "gcp", or "azure") with an empty map to -"kms_providers" in either :class:`~pymongo.encryption_options.AutoEncryptionOpts` or :class:`~pymongo.encryption.ClientEncryption` options. - -An application using AWS credentials would look like this: - -.. code-block:: python - - from pymongo import MongoClient - from pymongo.encryption import ClientEncryption - - client = MongoClient() - client_encryption = ClientEncryption( - # The empty dictionary enables on-demand credentials. - kms_providers={"aws": {}}, - key_vault_namespace="keyvault.datakeys", - key_vault_client=client, - codec_options=client.codec_options, - ) - master_key = { - "region": "us-east-1", - "key": ("arn:aws:kms:us-east-1:123456789:key/89fcc2c4-08b0-4bd9-9f25-e30687b580d0"), - } - client_encryption.create_data_key("aws", master_key) - -The above enables the same behavior for obtaining AWS credentials from the environment as is used for :ref:`MONGODB-AWS` authentication, including -caching to avoid rate limiting. - -An application using GCP credentials would look like this: - -.. code-block:: python - - from pymongo import MongoClient - from pymongo.encryption import ClientEncryption - - client = MongoClient() - client_encryption = ClientEncryption( - # The empty dictionary enables on-demand credentials. - kms_providers={"gcp": {}}, - key_vault_namespace="keyvault.datakeys", - key_vault_client=client, - codec_options=client.codec_options, - ) - master_key = { - "projectId": "my-project", - "location": "global", - "keyRing": "key-ring-csfle", - "keyName": "key-name-csfle", - } - client_encryption.create_data_key("gcp", master_key) - -The driver will query the `VM instance metadata `_ to obtain credentials. - -An application using Azure credentials would look like the following, this time using -:class:`~pymongo.encryption_options.AutoEncryptionOpts`: - -.. code-block:: python - - from pymongo import MongoClient - from pymongo.encryption_options import AutoEncryptionOpts - - # The empty dictionary enables on-demand credentials. - kms_providers = {"azure": {}} - key_vault_namespace = "keyvault.datakeys" - auto_encryption_opts = AutoEncryptionOpts(kms_providers, key_vault_namespace) - client = MongoClient(auto_encryption_opts=auto_encryption_opts) - coll = client.test.coll - coll.insert_one({"encryptedField": "123456789"}) - -The driver will `acquire an access token `_ from the Azure VM. - -.. _Queryable Encryption: - -Queryable Encryption ------------------- - -.. _automatic-queryable-client-side-encryption: - -Automatic Queryable Encryption ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Automatic Queryable Encryption requires MongoDB 7.0+ Enterprise or a MongoDB 7.0+ Atlas cluster. - -Queryable Encryption is the second version of Client-Side Field Level Encryption. -Data is encrypted client-side. Queryable Encryption supports indexed encrypted fields, -which are further processed server-side. 
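As an aside on what "indexed" means here: each entry in an ``encrypted_fields`` mapping declares one encrypted field, and the presence of a ``queries`` clause is what makes the field indexed (and therefore equality-queryable) server-side. A standalone sketch of the shape, with a hypothetical namespace and a randomly generated stand-in for a real data key ID:

.. code-block:: python

    import uuid

    from bson.binary import Binary

    # Stand-in for a real key; in practice this comes from
    # ClientEncryption.create_data_key(), as in the example below.
    fake_key_id = Binary.from_uuid(uuid.uuid4())

    encrypted_fields_map = {
        "mydb.people": {  # hypothetical "database.collection" namespace
            "fields": [
                {
                    "path": "ssn",
                    "bsonType": "string",
                    "keyId": fake_key_id,
                    # Declaring queries makes this field indexed for
                    # server-side equality matching on encrypted values.
                    "queries": [{"queryType": "equality"}],
                },
            ],
        }
    }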
- -Automatic encryption in Queryable Encryption is configured with an ``encrypted_fields`` mapping, -as demonstrated by the following example: - -.. code-block:: python - - import os - from bson.codec_options import CodecOptions - from pymongo import MongoClient - from pymongo.encryption import Algorithm, ClientEncryption, QueryType - from pymongo.encryption_options import AutoEncryptionOpts - - local_master_key = os.urandom(96) - kms_providers = {"local": {"key": local_master_key}} - key_vault_namespace = "keyvault.datakeys" - key_vault_client = MongoClient() - client_encryption = ClientEncryption( - kms_providers, key_vault_namespace, key_vault_client, CodecOptions() - ) - key_vault = key_vault_client["keyvault"]["datakeys"] - key_vault.drop() - # Ensure that two data keys cannot share the same keyAltName. - key_vault.create_index( - "keyAltNames", - unique=True, - partialFilterExpression={"keyAltNames": {"$exists": True}}, - ) - key1_id = client_encryption.create_data_key("local", key_alt_names=["firstName"]) - key2_id = client_encryption.create_data_key("local", key_alt_names=["lastName"]) - - encrypted_fields_map = { - "default.encryptedCollection": { - "escCollection": "encryptedCollection.esc", - "ecocCollection": "encryptedCollection.ecoc", - "fields": [ - { - "path": "firstName", - "bsonType": "string", - "keyId": key1_id, - "queries": [{"queryType": "equality"}], - }, - { - "path": "lastName", - "bsonType": "string", - "keyId": key2_id, - }, - ], - } - } - - auto_encryption_opts = AutoEncryptionOpts( - kms_providers, - key_vault_namespace, - encrypted_fields_map=encrypted_fields_map, - ) - client = MongoClient(auto_encryption_opts=auto_encryption_opts) - client.default.drop_collection("encryptedCollection") - coll = client.default.create_collection("encryptedCollection") - coll.insert_one({"_id": 1, "firstName": "Jane", "lastName": "Doe"}) - docs = list(coll.find({"firstName": "Jane"})) - print(docs) - -In the above example, the ``firstName`` and ``lastName`` fields are -automatically encrypted and decrypted. - -Explicit Queryable Encryption -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Explicit Queryable Encryption requires MongoDB 7.0+. - -Queryable Encryption is the second version of Client-Side Field Level Encryption. -Data is encrypted client-side. Queryable Encryption supports indexed encrypted fields, -which are further processed server-side. - -Explicit encryption in Queryable Encryption is performed using the ``encrypt`` and ``decrypt`` -methods. Automatic encryption (to allow the ``find_one`` to automatically decrypt) is configured -using an ``encrypted_fields`` mapping, as demonstrated by the following example: - -.. code-block:: python - - import os - from pymongo import MongoClient - from pymongo.encryption import ( - Algorithm, - AutoEncryptionOpts, - ClientEncryption, - QueryType, - ) - - - def main(): - # This must be the same master key that was used to create - # the encryption key. - local_master_key = os.urandom(96) - kms_providers = {"local": {"key": local_master_key}} - - # The MongoDB namespace (db.collection) used to store - # the encryption data keys. - key_vault_namespace = "encryption.__pymongoTestKeyVault" - key_vault_db_name, key_vault_coll_name = key_vault_namespace.split(".", 1) - - # Set up the key vault (key_vault_namespace) for this example. - client = MongoClient() - key_vault = client[key_vault_db_name][key_vault_coll_name] - - # Ensure that two data keys cannot share the same keyAltName. 
- key_vault.drop() - key_vault.create_index( - "keyAltNames", - unique=True, - partialFilterExpression={"keyAltNames": {"$exists": True}}, - ) - - client_encryption = ClientEncryption( - kms_providers, - key_vault_namespace, - # The MongoClient to use for reading/writing to the key vault. - # This can be the same MongoClient used by the main application. - client, - # The CodecOptions class used for encrypting and decrypting. - # This should be the same CodecOptions instance you have configured - # on MongoClient, Database, or Collection. - client.codec_options, - ) - - # Create a new data key for the encryptedField. - indexed_key_id = client_encryption.create_data_key("local") - unindexed_key_id = client_encryption.create_data_key("local") - - encrypted_fields = { - "escCollection": "enxcol_.default.esc", - "ecocCollection": "enxcol_.default.ecoc", - "fields": [ - { - "keyId": indexed_key_id, - "path": "encryptedIndexed", - "bsonType": "string", - "queries": {"queryType": "equality"}, - }, - { - "keyId": unindexed_key_id, - "path": "encryptedUnindexed", - "bsonType": "string", - }, - ], - } - - opts = AutoEncryptionOpts( - {"local": {"key": local_master_key}}, - key_vault.full_name, - bypass_query_analysis=True, - key_vault_client=client, - ) - - # The MongoClient used to read/write application data. - encrypted_client = MongoClient(auto_encryption_opts=opts) - encrypted_client.drop_database("test") - db = encrypted_client.test - - # Create the collection with encrypted fields. - coll = db.create_collection("coll", encryptedFields=encrypted_fields) - - # Create and encrypt an indexed and unindexed value. - val = "encrypted indexed value" - unindexed_val = "encrypted unindexed value" - insert_payload_indexed = client_encryption.encrypt( - val, Algorithm.INDEXED, indexed_key_id, contention_factor=1 - ) - insert_payload_unindexed = client_encryption.encrypt( - unindexed_val, Algorithm.UNINDEXED, unindexed_key_id - ) - - # Insert the payloads. - coll.insert_one( - { - "encryptedIndexed": insert_payload_indexed, - "encryptedUnindexed": insert_payload_unindexed, - } - ) - - # Encrypt our find payload using QueryType.EQUALITY. - # The value of "indexed_key_id" must be the same as used to encrypt - # the values above. - find_payload = client_encryption.encrypt( - val, - Algorithm.INDEXED, - indexed_key_id, - query_type=QueryType.EQUALITY, - contention_factor=1, - ) - - # Find the document we inserted using the encrypted payload. - # The returned document is automatically decrypted. - doc = coll.find_one({"encryptedIndexed": find_payload}) - print("Returned document: %s" % (doc,)) - - # Cleanup resources. - client_encryption.close() - encrypted_client.close() - client.close() - - - if __name__ == "__main__": - main() diff --git a/doc/examples/geo.rst b/doc/examples/geo.rst deleted file mode 100644 index e7da156720..0000000000 --- a/doc/examples/geo.rst +++ /dev/null @@ -1,109 +0,0 @@ -Geospatial Indexing Example -=========================== - -.. testsetup:: - - from pymongo import MongoClient - - client = MongoClient() - client.drop_database("geo_example") - -This example shows how to create and use a :data:`~pymongo.GEO2D` -index in PyMongo. To create a spherical (earth-like) geospatial index use :data:`~pymongo.GEOSPHERE` instead. - -.. seealso:: The MongoDB documentation on `Geospatial Indexes `_. - -Creating a Geospatial Index ---------------------------- - -Creating a geospatial index in pymongo is easy: - -.. 
doctest:: - - >>> from pymongo import MongoClient, GEO2D - >>> db = MongoClient().geo_example - >>> db.places.create_index([("loc", GEO2D)]) - 'loc_2d' - -Inserting Places ----------------- - -Locations in MongoDB are represented using either embedded documents -or lists where the first two elements are coordinates. Here, we'll -insert a couple of example locations: - -.. doctest:: - - >>> result = db.places.insert_many( - ... [{"loc": [2, 5]}, {"loc": [30, 5]}, {"loc": [1, 2]}, {"loc": [4, 4]}] - ... ) - >>> result.inserted_ids - [ObjectId('...'), ObjectId('...'), ObjectId('...'), ObjectId('...')] - -.. note:: If specifying latitude and longitude coordinates in :data:`~pymongo.GEOSPHERE`, list the **longitude** first and then **latitude**. - -Querying --------- - -Using the geospatial index we can find documents near another point: - -.. doctest:: - - >>> import pprint - >>> for doc in db.places.find({"loc": {"$near": [3, 6]}}).limit(3): - ... pprint.pprint(doc) - ... - {'_id': ObjectId('...'), 'loc': [2, 5]} - {'_id': ObjectId('...'), 'loc': [4, 4]} - {'_id': ObjectId('...'), 'loc': [1, 2]} - -.. note:: If using :data:`pymongo.GEOSPHERE`, using $nearSphere is recommended. - -The $maxDistance operator requires the use of :class:`~bson.son.SON`: - -.. doctest:: - - >>> from bson.son import SON - >>> query = {"loc": SON([("$near", [3, 6]), ("$maxDistance", 100)])} - >>> for doc in db.places.find(query).limit(3): - ... pprint.pprint(doc) - ... - {'_id': ObjectId('...'), 'loc': [2, 5]} - {'_id': ObjectId('...'), 'loc': [4, 4]} - {'_id': ObjectId('...'), 'loc': [1, 2]} - -It's also possible to query for all items within a given rectangle -(specified by lower-left and upper-right coordinates): - -.. doctest:: - - >>> query = {"loc": {"$within": {"$box": [[2, 2], [5, 6]]}}} - >>> for doc in db.places.find(query).sort("_id"): - ... pprint.pprint(doc) - ... - {'_id': ObjectId('...'), 'loc': [2, 5]} - {'_id': ObjectId('...'), 'loc': [4, 4]} - -Or circle (specified by center point and radius): - -.. doctest:: - - >>> query = {"loc": {"$within": {"$center": [[0, 0], 6]}}} - >>> for doc in db.places.find(query).sort("_id"): - ... pprint.pprint(doc) - ... - {'_id': ObjectId('...'), 'loc': [2, 5]} - {'_id': ObjectId('...'), 'loc': [1, 2]} - {'_id': ObjectId('...'), 'loc': [4, 4]} - -geoNear queries are also supported using :class:`~bson.son.SON`:: - - >>> from bson.son import SON - >>> db.command(SON([('geoNear', 'places'), ('near', [1, 2])])) - {'ok': 1.0, 'stats': ...} - -.. warning:: Starting in MongoDB version 4.0, MongoDB deprecates the **geoNear** command. Use one of the following operations instead. - - * $geoNear - aggregation stage. - * $near - query operator. - * $nearSphere - query operator. diff --git a/doc/examples/gevent.rst b/doc/examples/gevent.rst deleted file mode 100644 index 0ab41c1ec6..0000000000 --- a/doc/examples/gevent.rst +++ /dev/null @@ -1,52 +0,0 @@ -Gevent -====== - -PyMongo supports `Gevent `_. Simply call Gevent's -``monkey.patch_all()`` before loading any other modules: - -.. code-block:: pycon - - >>> # You must call patch_all() *before* importing any other modules - >>> from gevent import monkey - >>> _ = monkey.patch_all() - >>> from pymongo import MongoClient - >>> client = MongoClient() - -PyMongo uses thread and socket functions from the Python standard library. -Gevent's monkey-patching replaces those standard functions so that PyMongo -does asynchronous I/O with non-blocking sockets, and schedules operations -on greenlets instead of threads. 
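To make that concrete, here is a minimal sketch (assuming a mongod listening on the default localhost port) that runs several commands concurrently on greenlets:

.. code-block:: python

    # patch_all() must run before pymongo (or anything else that uses
    # sockets or threads) is imported.
    from gevent import monkey

    monkey.patch_all()

    import gevent
    from pymongo import MongoClient

    client = MongoClient()


    def ping(n):
        # With patched sockets, each blocking call yields to the gevent
        # hub instead of tying up an OS thread.
        return n, client.admin.command("ping")["ok"]


    jobs = [gevent.spawn(ping, n) for n in range(5)]
    gevent.joinall(jobs)
    print(sorted(job.value for job in jobs))

    # Close the client so its background greenlets exit; see the next
    # section for why this matters at shutdown.
    client.close()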
- -Avoid blocking in Hub.join -------------------------- - -By default, PyMongo uses threads to discover and monitor your servers' topology -(see :ref:`health-monitoring`). If you execute ``monkey.patch_all()`` when -your application first begins, PyMongo automatically uses greenlets instead -of threads. - -When shutting down, if your application calls :meth:`~gevent.hub.Hub.join` on -Gevent's :class:`~gevent.hub.Hub` without first terminating these background -greenlets, the call to :meth:`~gevent.hub.Hub.join` blocks indefinitely. You -therefore **must close or dereference** any active -:class:`~pymongo.mongo_client.MongoClient` before exiting. - -An example solution to this issue in some application frameworks is a signal -handler to end background greenlets when your application receives SIGHUP: - -.. code-block:: python - - import signal - - - def graceful_reload(signum, traceback): - """Explicitly close some global MongoClient object.""" - client.close() - - - signal.signal(signal.SIGHUP, graceful_reload) - -Applications using uWSGI prior to 1.9.16, or newer uWSGI versions run with -the ``--gevent-wait-for-hub`` option, are affected by this issue. -See `the uWSGI changelog for details -`_. diff --git a/doc/examples/gridfs.rst b/doc/examples/gridfs.rst deleted file mode 100644 index 5f40805d79..0000000000 --- a/doc/examples/gridfs.rst +++ /dev/null @@ -1,84 +0,0 @@ -GridFS Example ============== - -.. testsetup:: - - from pymongo import MongoClient - - client = MongoClient() - client.drop_database("gridfs_example") - -This example shows how to use :mod:`gridfs` to store large binary -objects (e.g. files) in MongoDB. - -.. seealso:: The API docs for :mod:`gridfs`. - -.. seealso:: `This blog post -`_ - for some motivation behind this API. - -Setup ----- - -We start by creating a :class:`~gridfs.GridFS` instance to use: - -.. doctest:: - - >>> from pymongo import MongoClient - >>> import gridfs - >>> - >>> db = MongoClient().gridfs_example - >>> fs = gridfs.GridFS(db) - -Every :class:`~gridfs.GridFS` instance is created with and will -operate on a specific :class:`~pymongo.database.Database` instance. - -Saving and Retrieving Data -------------------------- - -The simplest way to work with :mod:`gridfs` is to use its key/value -interface (the :meth:`~gridfs.GridFS.put` and -:meth:`~gridfs.GridFS.get` methods). To write data to GridFS, use -:meth:`~gridfs.GridFS.put`: - -.. doctest:: - - >>> a = fs.put(b"hello world") - -:meth:`~gridfs.GridFS.put` creates a new file in GridFS, and returns -the value of the file document's ``"_id"`` key. Given that ``"_id"``, -we can use :meth:`~gridfs.GridFS.get` to get back the contents of the -file: - -.. doctest:: - - >>> fs.get(a).read() - b'hello world' - -:meth:`~gridfs.GridFS.get` returns a file-like object, so we get the -file's contents by calling :meth:`~gridfs.grid_file.GridOut.read`. - -In addition to putting :class:`bytes` as a GridFS file, we can also -put any file-like object (an object with a :meth:`read` -method). GridFS will handle reading the file in chunk-sized segments -automatically. We can also add additional attributes to the file as -keyword arguments: - -.. doctest:: - - >>> b = fs.put(fs.get(a), filename="foo", bar="baz") - >>> out = fs.get(b) - >>> out.read() - b'hello world' - >>> out.filename - 'foo' - >>> out.bar - 'baz' - >>> out.upload_date - datetime.datetime(...) - -The attributes we set in :meth:`~gridfs.GridFS.put` are stored in the -file document, and retrievable after calling -:meth:`~gridfs.GridFS.get`. 
Some attributes (like ``"filename"``) are -special and are defined in the GridFS specification - see that -document for more details. diff --git a/doc/examples/high_availability.rst b/doc/examples/high_availability.rst deleted file mode 100644 index 8f94aba074..0000000000 --- a/doc/examples/high_availability.rst +++ /dev/null @@ -1,367 +0,0 @@ -High Availability and PyMongo ============================= - -PyMongo makes it easy to write highly available applications whether -you use a `single replica set `_ -or a `large sharded cluster -`_. - -Connecting to a Replica Set --------------------------- - -PyMongo makes working with `replica sets -`_ easy. Here we'll launch a new -replica set and show how to handle both initialization and normal -connections with PyMongo. - -.. seealso:: The MongoDB documentation on `replication `_. - -Starting a Replica Set ~~~~~~~~~~~~~~~~~~~~~~ - -The main `replica set documentation -`_ contains extensive information -about setting up a new replica set or migrating an existing MongoDB -setup; be sure to check it out. Here, we'll just do the bare minimum -to get a three-node replica set set up locally. - -.. warning:: Replica sets should always use multiple nodes in - production - putting all set members on the same physical node is - only recommended for testing and development. - -We start three ``mongod`` processes, each on a different port and with -a different dbpath, but all using the same replica set name "foo". - -.. code-block:: bash - - $ mkdir -p /data/db0 /data/db1 /data/db2 - $ mongod --port 27017 --dbpath /data/db0 --replSet foo - -.. code-block:: bash - - $ mongod --port 27018 --dbpath /data/db1 --replSet foo - -.. code-block:: bash - - $ mongod --port 27019 --dbpath /data/db2 --replSet foo - -Initializing the Set ~~~~~~~~~~~~~~~~~~~~ - -At this point all of our nodes are up and running, but the set has yet -to be initialized. Until the set is initialized no node will become -the primary, and things are essentially "offline". - -To initialize the set we need to connect directly to a single node and run the -initiate command using the ``directConnection`` option:: - - >>> from pymongo import MongoClient - >>> c = MongoClient('localhost', 27017, directConnection=True) - -.. note:: We could have connected to any of the other nodes instead, - but only the node we initiate from is allowed to contain any - initial data. - -After connecting, we run the initiate command to get things started:: - - >>> config = {'_id': 'foo', 'members': [ - ... {'_id': 0, 'host': 'localhost:27017'}, - ... {'_id': 1, 'host': 'localhost:27018'}, - ... {'_id': 2, 'host': 'localhost:27019'}]} - >>> c.admin.command("replSetInitiate", config) - {'ok': 1.0, ...} - -The three ``mongod`` servers we started earlier will now coordinate -and come online as a replica set. - -Connecting to a Replica Set ~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -The initial connection as made above is a special case for an -uninitialized replica set. Normally we'll want to connect -differently. A connection to a replica set can be made using the -:meth:`~pymongo.mongo_client.MongoClient` constructor, specifying -one or more members of the set and optionally the replica set name. -Any of the following connects to the replica set we just created:: - - >>> MongoClient('localhost') - MongoClient(host=['localhost:27017'], ...) - >>> MongoClient('localhost', replicaset='foo') - MongoClient(host=['localhost:27017'], replicaset='foo', ...) 
- >>> MongoClient('localhost:27018', replicaset='foo') - MongoClient(['localhost:27018'], replicaset='foo', ...) - >>> MongoClient('localhost', 27019, replicaset='foo') - MongoClient(['localhost:27019'], replicaset='foo', ...) - >>> MongoClient('mongodb://localhost:27017,localhost:27018/') - MongoClient(['localhost:27017', 'localhost:27018'], ...) - >>> MongoClient('mongodb://localhost:27017,localhost:27018/?replicaSet=foo') - MongoClient(['localhost:27017', 'localhost:27018'], replicaset='foo', ...) - -The addresses passed to :meth:`~pymongo.mongo_client.MongoClient` are called -the *seeds*. As long as at least one of the seeds is online, MongoClient -discovers all the members in the replica set, and determines which is the -current primary and which are secondaries or arbiters. Each seed must be the -address of a single mongod. Multihomed and round robin DNS addresses are -**not** supported. - -The :class:`~pymongo.mongo_client.MongoClient` constructor is non-blocking: -the constructor returns immediately while the client connects to the replica -set using background threads. Note how, if you create a client and immediately -print the string representation of its -:attr:`~pymongo.mongo_client.MongoClient.nodes` attribute, the list may be -empty initially. If you wait a moment, MongoClient discovers the whole replica -set:: - - >>> from time import sleep - >>> c = MongoClient(replicaset='foo'); print(c.nodes); sleep(0.1); print(c.nodes) - frozenset([]) - frozenset([('localhost', 27019), ('localhost', 27017), ('localhost', 27018)]) - -You need not wait for replica set discovery in your application, however. -If you need to do any operation with a MongoClient, such as a -:meth:`~pymongo.collection.Collection.find` or an -:meth:`~pymongo.collection.Collection.insert_one`, the client waits to discover -a suitable member before it attempts the operation. - -Handling Failover -~~~~~~~~~~~~~~~~~ - -When a failover occurs, PyMongo will automatically attempt to find the -new primary node and perform subsequent operations on that node. This -can't happen completely transparently, however. Here we'll perform an -example failover to illustrate how everything behaves. First, we'll -connect to the replica set and perform a couple of basic operations:: - - >>> db = MongoClient("localhost", replicaSet='foo').test - >>> db.test.insert_one({"x": 1}).inserted_id - ObjectId('...') - >>> db.test.find_one() - {'x': 1, '_id': ObjectId('...')} - -By checking the host and port, we can see that we're connected to -*localhost:27017*, which is the current primary:: - - >>> db.client.address - ('localhost', 27017) - -Now let's bring down that node and see what happens when we run our -query again:: - - >>> db.test.find_one() - Traceback (most recent call last): - pymongo.errors.AutoReconnect: ... - -We get an :class:`~pymongo.errors.AutoReconnect` exception. This means -that the driver was not able to connect to the old primary (which -makes sense, as we killed the server), but that it will attempt to -automatically reconnect on subsequent operations. When this exception -is raised our application code needs to decide whether to retry the -operation or to simply continue, accepting the fact that the operation -might have failed. - -On subsequent attempts to run the query we might continue to see this -exception. Eventually, however, the replica set will failover and -elect a new primary (this should take no more than a couple of seconds in -general). 
At that point the driver will connect to the new primary and -the operation will succeed:: - - >>> db.test.find_one() - {'x': 1, '_id': ObjectId('...')} - >>> db.client.address - ('localhost', 27018) - -Bring the former primary back up. It will rejoin the set as a secondary. -Now we can move to the next section: distributing reads to secondaries. - -.. _secondary-reads: - -Secondary Reads -~~~~~~~~~~~~~~~ - -By default an instance of MongoClient sends queries to -the primary member of the replica set. To use secondaries for queries -we have to change the read preference:: - - >>> client = MongoClient( - ... 'localhost:27017', - ... replicaSet='foo', - ... readPreference='secondaryPreferred') - >>> client.read_preference - SecondaryPreferred(tag_sets=None) - -Now all queries will be sent to the secondary members of the set. If there are -no secondary members the primary will be used as a fallback. If you have -queries you would prefer to never send to the primary you can specify that -using the ``secondary`` read preference. - -By default the read preference of a :class:`~pymongo.database.Database` is -inherited from its MongoClient, and the read preference of a -:class:`~pymongo.collection.Collection` is inherited from its Database. To use -a different read preference use the -:meth:`~pymongo.mongo_client.MongoClient.get_database` method, or the -:meth:`~pymongo.database.Database.get_collection` method:: - - >>> from pymongo import ReadPreference - >>> client.read_preference - SecondaryPreferred(tag_sets=None) - >>> db = client.get_database('test', read_preference=ReadPreference.SECONDARY) - >>> db.read_preference - Secondary(tag_sets=None) - >>> coll = db.get_collection('test', read_preference=ReadPreference.PRIMARY) - >>> coll.read_preference - Primary() - -You can also change the read preference of an existing -:class:`~pymongo.collection.Collection` with the -:meth:`~pymongo.collection.Collection.with_options` method:: - - >>> coll2 = coll.with_options(read_preference=ReadPreference.NEAREST) - >>> coll.read_preference - Primary() - >>> coll2.read_preference - Nearest(tag_sets=None) - -Note that since most database commands can only be sent to the primary of a -replica set, the :meth:`~pymongo.database.Database.command` method does not obey -the Database's :attr:`~pymongo.database.Database.read_preference`, but you can -pass an explicit read preference to the method:: - - >>> db.command('dbstats', read_preference=ReadPreference.NEAREST) - {...} - -Reads are configured using three options: **read preference**, **tag sets**, -and **local threshold**. - -**Read preference**: - -Read preference is configured using one of the classes from -:mod:`~pymongo.read_preferences` (:class:`~pymongo.read_preferences.Primary`, -:class:`~pymongo.read_preferences.PrimaryPreferred`, -:class:`~pymongo.read_preferences.Secondary`, -:class:`~pymongo.read_preferences.SecondaryPreferred`, or -:class:`~pymongo.read_preferences.Nearest`). For convenience, we also provide -:class:`~pymongo.read_preferences.ReadPreference` with the following -attributes: - -- ``PRIMARY``: Read from the primary. This is the default read preference, - and provides the strongest consistency. If no primary is available, raise - :class:`~pymongo.errors.AutoReconnect`. - -- ``PRIMARY_PREFERRED``: Read from the primary if available, otherwise read - from a secondary. - -- ``SECONDARY``: Read from a secondary. If no matching secondary is available, - raise :class:`~pymongo.errors.AutoReconnect`. 
- -- ``SECONDARY_PREFERRED``: Read from a secondary if available, otherwise - from the primary. - -- ``NEAREST``: Read from any available member. - -**Tag sets**: - -Replica-set members can be `tagged -`_ according to any -criteria you choose. By default, PyMongo ignores tags when -choosing a member to read from, but your read preference can be configured with -a ``tag_sets`` parameter. ``tag_sets`` must be a list of dictionaries, each -dict providing tag values that the replica set member must match. -PyMongo tries each set of tags in turn until it finds a set of -tags with at least one matching member. For example, to prefer reads from the -New York data center, but fall back to the San Francisco data center, tag your -replica set members according to their location and create a -MongoClient like so:: - - >>> from pymongo.read_preferences import Secondary - >>> db = client.get_database( - ... 'test', read_preference=Secondary([{'dc': 'ny'}, {'dc': 'sf'}])) - >>> db.read_preference - Secondary(tag_sets=[{'dc': 'ny'}, {'dc': 'sf'}]) - -MongoClient tries to find secondaries in New York, then San Francisco, -and raises :class:`~pymongo.errors.AutoReconnect` if none are available. As an -additional fallback, specify a final, empty tag set, ``{}``, which means "read -from any member that matches the mode, ignoring tags." - -See :mod:`~pymongo.read_preferences` for more information. - -.. _distributes reads to secondaries: - -**Local threshold**: - -If multiple members match the read preference and tag sets, PyMongo reads -from among the nearest members, chosen according to ping time. By default, -only members whose ping times are within 15 milliseconds of the nearest -are used for queries. You can choose to distribute reads among members with -higher latencies by setting ``localThresholdMS`` to a larger -number:: - - >>> client = pymongo.MongoClient( - ... replicaSet='repl0', - ... readPreference='secondaryPreferred', - ... localThresholdMS=35) - -In this case, PyMongo distributes reads among matching members within 35 -milliseconds of the closest member's ping time. - -.. note:: ``localThresholdMS`` is ignored when talking to a - replica set *through* a mongos. The equivalent is the localThreshold_ command - line option. - -.. _localThreshold: https://mongodb.com/docs/manual/reference/program/mongos/#std-option-mongos.--localThreshold - -.. _health-monitoring: - -Health Monitoring -''''''''''''''''' - -When MongoClient is initialized it launches background threads to -monitor the replica set for changes in: - -* Health: detect when a member goes down or comes up, or if a different member - becomes primary -* Configuration: detect when members are added or removed, and detect changes - in members' tags -* Latency: track a moving average of each member's ping time - -Replica-set monitoring ensures queries are continually routed to the proper -members as the state of the replica set changes. - -.. _mongos-load-balancing: - -mongos Load Balancing ---------------------- - -An instance of :class:`~pymongo.mongo_client.MongoClient` can be configured -with a list of addresses of mongos servers: - - >>> client = MongoClient('mongodb://host1,host2,host3') - -Each member of the list must be a single mongos server. Multihomed and round -robin DNS addresses are **not** supported. The client continuously -monitors all the mongoses' availability, and its network latency to each. 
PyMongo distributes operations evenly among the set of mongoses within its
``localThresholdMS`` (similar to how it `distributes reads to secondaries`_
in a replica set). By default the threshold is 15 ms.

The lowest-latency server, and all servers with latencies no more than
``localThresholdMS`` beyond the lowest-latency server's, receive
operations equally. For example, if we have three mongoses:

- host1: 20 ms
- host2: 35 ms
- host3: 40 ms

By default the ``localThresholdMS`` is 15 ms, so PyMongo uses host1 and host2
evenly. It uses host1 because its network latency to the driver is shortest. It
uses host2 because its latency is within 15 ms of the lowest-latency server's.
But it excludes host3: host3 is 20 ms beyond the lowest-latency server.

If we set ``localThresholdMS`` to 30 ms all servers are within the threshold:

    >>> client = MongoClient('mongodb://host1,host2,host3/?localThresholdMS=30')

.. warning:: Do **not** connect PyMongo to a pool of mongos instances through a
   load balancer. A single socket connection must always be routed to the same
   mongos instance for proper cursor support.

diff --git a/doc/examples/index.rst b/doc/examples/index.rst
deleted file mode 100644
index ac450470ef..0000000000
--- a/doc/examples/index.rst
+++ /dev/null
@@ -1,40 +0,0 @@
Examples
========

The examples in this section are intended to give in-depth overviews
of how to accomplish specific tasks with MongoDB and PyMongo.

Unless otherwise noted, all examples assume that a MongoDB instance is
running on the default host and port. Assuming you have `downloaded
and installed `_
MongoDB, you can start it like so:

.. code-block:: bash

    $ mongod

.. toctree::
   :maxdepth: 1

   aggregation
   authentication
   collations
   copydb
   custom_type
   bulk
   client_bulk
   datetimes
   geo
   gevent
   gridfs
   high_availability
   logging
   mod_wsgi
   network_compression
   server_selection
   tailable
   timeouts
   tls
   type_hints
   encryption
   uuid

diff --git a/doc/examples/logging.rst b/doc/examples/logging.rst
deleted file mode 100644
index 0cbc8eff09..0000000000
--- a/doc/examples/logging.rst
+++ /dev/null
@@ -1,63 +0,0 @@
Logging
=======

Starting in 4.8, **PyMongo** supports `Python's native logging library `_,
enabling developers to customize the verbosity of log messages for their applications.

Components
----------
There are currently three different **PyMongo** components with logging support:
``pymongo.command``, ``pymongo.connection``, and ``pymongo.serverSelection``.
These components deal with command operations, connection management, and
server selection, respectively. Each can be configured separately or they can
all be configured together.

Configuration
-------------
Currently, the above components each support ``DEBUG`` logging. To enable a
single component, do the following::

    import logging
    logging.getLogger('pymongo.<component>').setLevel(logging.DEBUG)

For example, to enable command logging::

    import logging
    logging.getLogger('pymongo.command').setLevel(logging.DEBUG)

You can also enable all ``DEBUG`` logs at once::

    import logging
    logging.getLogger('pymongo').setLevel(logging.DEBUG)

Truncation
----------
When ``pymongo.command`` debug logs are enabled, every command sent to the
server and every response sent back will be included as part of the logs. By
default, these command and response documents are truncated after 1000 bytes.
- -You can configure a higher truncation limit by setting the ``MONGOB_LOG_MAX_DOCUMENT_LENGTH`` environment variable to your desired length. - -Note that by default, only sensitive authentication command contents are redacted. -All commands containing user data will be logged, including the actual contents of your queries. -To prevent this behavior, set ``MONGOB_LOG_MAX_DOCUMENT_LENGTH`` to 0. This will omit the command and response bodies from the logs. - -Example -------------- -Here's a simple example that enables ``pymongo.command`` debug logs and performs two database operations:: - - import logging - import pymongo - - # Automatically writes all logs to stdout - logging.basicConfig() - logging.getLogger('pymongo.command').setLevel(logging.DEBUG) - - client = pymongo.MongoClient() - client.db.test.insert_one({"x": 1}) - client.db.test.find_one({"x": 1}) - --------------------------------- - DEBUG:pymongo.command:{"clientId": {"$oid": "65cbe82614be1fc2beb4e4a9"}, "message": "Command started", "command": "{\"insert\": \"test\", \"ordered\": true, \"lsid\": {\"id\": {\"$binary\": {\"base64\": \"GI7ubVhPSsWd7+OwHEFx6Q==\", \"subType\": \"04\"}}}, \"$db\": \"db\", \"documents\": [{\"x\": 1, \"_id\": {\"$oid\": \"65cbe82614be1fc2beb4e4aa\"}}]}", "commandName": "insert", "databaseName": "db", "requestId": 1144108930, "operationId": 1144108930, "driverConnectionId": 1, "serverConnectionId": 3554, "serverHost": "localhost", "serverPort": 27017} - DEBUG:pymongo.command:{"clientId": {"$oid": "65cbe82614be1fc2beb4e4a9"}, "message": "Command succeeded", "durationMS": 0.515, "reply": "{\"n\": 1, \"ok\": 1.0}", "commandName": "insert", "databaseName": "db", "requestId": 1144108930, "operationId": 1144108930, "driverConnectionId": 1, "serverConnectionId": 3554, "serverHost": "localhost", "serverPort": 27017} - DEBUG:pymongo.command:{"clientId": {"$oid": "65cbe82614be1fc2beb4e4a9"}, "message": "Command started", "command": "{\"find\": \"test\", \"filter\": {\"x\": 1}, \"limit\": 1, \"singleBatch\": true, \"lsid\": {\"id\": {\"$binary\": {\"base64\": \"GI7ubVhPSsWd7+OwHEFx6Q==\", \"subType\": \"04\"}}}, \"$db\": \"db\"}", "commandName": "find", "databaseName": "db", "requestId": 470211272, "operationId": 470211272, "driverConnectionId": 1, "serverConnectionId": 3554, "serverHost": "localhost", "serverPort": 27017} - DEBUG:pymongo.command:{"clientId": {"$oid": "65cbe82614be1fc2beb4e4a9"}, "message": "Command succeeded", "durationMS": 0.621, "reply": "{\"cursor\": {\"firstBatch\": [{\"_id\": {\"$oid\": \"65cbdf391a957ed280001417\"}, \"x\": 1}], \"ns\": \"db.test\"}, \"ok\": 1.0}", "commandName": "find", "databaseName": "db", "requestId": 470211272, "operationId": 470211272, "driverConnectionId": 1, "serverConnectionId": 3554, "serverHost": "localhost", "serverPort": 27017} diff --git a/doc/examples/mod_wsgi.rst b/doc/examples/mod_wsgi.rst deleted file mode 100644 index 96d6ce892f..0000000000 --- a/doc/examples/mod_wsgi.rst +++ /dev/null @@ -1,64 +0,0 @@ -.. _pymongo-and-mod_wsgi: - -PyMongo and mod_wsgi -==================== - -To run your application under `mod_wsgi `_, -follow these guidelines: - -* Run ``mod_wsgi`` in daemon mode with the ``WSGIDaemonProcess`` directive. -* Assign each application to a separate daemon with ``WSGIProcessGroup``. -* Use ``WSGIApplicationGroup %{GLOBAL}`` to ensure your application is running - in the daemon's main Python interpreter, not a sub interpreter. 
For example, this ``mod_wsgi`` configuration ensures an application runs in the
main interpreter::

    <VirtualHost *>
        WSGIDaemonProcess my_process
        WSGIScriptAlias /my_app /path/to/app.wsgi
        WSGIProcessGroup my_process
        WSGIApplicationGroup %{GLOBAL}
    </VirtualHost>

If you have multiple applications that use PyMongo, put each in a separate
daemon, still in the global application group::

    <VirtualHost *>
        WSGIDaemonProcess my_process
        WSGIScriptAlias /my_app /path/to/app.wsgi
        <Location /my_app>
            WSGIProcessGroup my_process
        </Location>

        WSGIDaemonProcess my_other_process
        WSGIScriptAlias /my_other_app /path/to/other_app.wsgi
        <Location /my_other_app>
            WSGIProcessGroup my_other_process
        </Location>

        WSGIApplicationGroup %{GLOBAL}
    </VirtualHost>

Background: ``mod_wsgi`` can run in "embedded" mode when only WSGIScriptAlias
is set, or "daemon" mode with WSGIDaemonProcess. In daemon mode, ``mod_wsgi``
can run your application in the Python main interpreter, or in sub interpreters.
The correct way to run a PyMongo application is in daemon mode, using the main
interpreter.

Python C extensions in general have issues running in multiple
Python sub interpreters. These difficulties are explained in the documentation for
`Py_NewInterpreter `_
and in the `Multiple Python Sub Interpreters
`_
section of the ``mod_wsgi`` documentation.

Beginning with PyMongo 2.7, the C extension for BSON detects when it is running
in a sub interpreter and activates a workaround, which adds a small cost to
BSON decoding. To avoid this cost, use ``WSGIApplicationGroup %{GLOBAL}`` to
ensure your application runs in the main interpreter.

Since your program runs in the main interpreter it should not share its
process with any other applications, lest they interfere with each other's
state. Each application should have its own daemon process, as shown in the
example above.

diff --git a/doc/examples/network_compression.rst b/doc/examples/network_compression.rst
deleted file mode 100644
index c270dff4b3..0000000000
--- a/doc/examples/network_compression.rst
+++ /dev/null
@@ -1,39 +0,0 @@

.. _network-compression-example:

Network Compression
===================

PyMongo supports network compression, where network traffic between the client
and MongoDB server is compressed, reducing the amount of data passed over the
network. By default no compression is used.

The driver supports the following algorithms:

- `snappy `_ available in MongoDB 3.4 and later.
- :mod:`zlib` available in MongoDB 3.6 and later.
- `zstandard `_ available in MongoDB 4.2 and later.

.. note:: snappy and zstandard compression require additional dependencies. See :ref:`optional-deps`.

Applications can enable wire protocol compression via the ``compressors`` URI
option and keyword argument to :class:`~pymongo.mongo_client.MongoClient`. For
example::

    >>> client = MongoClient(compressors='zlib')

When multiple compression algorithms are given, the driver selects the first one in the
list supported by the MongoDB instance to which it is connected. For example::

    >>> client = MongoClient(compressors='snappy,zstandard,zlib')

The ``compressors`` option can also be set via the URI::

    >>> client = MongoClient('mongodb://example.com/?compressors=snappy,zstandard,zlib')

Additionally, zlib compression allows specifying a compression level with supported values from
-1 to 9::

    >>> client = MongoClient(compressors='zlib', zlibCompressionLevel=-1)

The ``zlibCompressionLevel`` is passed as the ``level`` argument to :func:`zlib.compress`.

..
seealso:: The MongoDB documentation on `network compression URI options `_. diff --git a/doc/examples/server_selection.rst b/doc/examples/server_selection.rst deleted file mode 100644 index 227e849df3..0000000000 --- a/doc/examples/server_selection.rst +++ /dev/null @@ -1,108 +0,0 @@ -Server Selector Example -======================= - -Users can exert fine-grained control over the `server selection algorithm`_ -by setting the ``server_selector`` option on the :class:`~pymongo.MongoClient` -to an appropriate callable. This example shows how to use this functionality -to prefer servers running on ``localhost``. - - -.. warning:: - - Use of custom server selector functions is a power user feature. Misusing - custom server selectors can have unintended consequences such as degraded - read/write performance. - - -.. testsetup:: - - from pymongo import MongoClient - - -.. _server selection algorithm: https://mongodb.com/docs/manual/core/read-preference-mechanics/ - - -Example: Selecting Servers Running on ``localhost`` ---------------------------------------------------- - -To start, we need to write the server selector function that will be used. -The server selector function should accept a list of -:class:`~pymongo.server_description.ServerDescription` objects and return a -list of server descriptions that are suitable for the read or write operation. -A server selector must not create or modify -:class:`~pymongo.server_description.ServerDescription` objects, and must return -the selected instances unchanged. - -In this example, we write a server selector that prioritizes servers running on -``localhost``. This can be desirable when using a sharded cluster with multiple -``mongos``, as locally run queries are likely to see lower latency and higher -throughput. Please note, however, that it is highly dependent on the -application if preferring ``localhost`` is beneficial or not. - -In addition to comparing the hostname with ``localhost``, our server selector -function accounts for the edge case when no servers are running on -``localhost``. In this case, we allow the default server selection logic to -prevail by passing through the received server description list unchanged. -Failure to do this would render the client unable to communicate with MongoDB -in the event that no servers were running on ``localhost``. - - -The described server selection logic is implemented in the following server -selector function: - - -.. doctest:: - - >>> def server_selector(server_descriptions): - ... servers = [ - ... server for server in server_descriptions if server.address[0] == "localhost" - ... ] - ... if not servers: - ... return server_descriptions - ... return servers - ... - - - -Finally, we can create a :class:`~pymongo.MongoClient` instance with this -server selector. - - -.. doctest:: - - >>> client = MongoClient(server_selector=server_selector) - - - -Server Selection Process ------------------------- - -This section dives deeper into the server selection process for reads and -writes. In the case of a write, the driver performs the following operations -(in order) during the selection process: - - -#. Select all writeable servers from the list of known hosts. For a replica set - this is the primary, while for a sharded cluster this is all the known mongoses. - -#. Apply the user-defined server selector function. Note that the custom server - selector is **not** called if there are no servers left from the previous - filtering stage. - -#. 
Apply the ``localThresholdMS`` setting to the list of remaining hosts. This - whittles the host list down to only contain servers whose latency is at most - ``localThresholdMS`` milliseconds higher than the lowest observed latency. - -#. Select a server at random from the remaining host list. The desired - operation is then performed against the selected server. - - -In the case of **reads** the process is identical except for the first step. -Here, instead of selecting all writeable servers, we select all servers -matching the user's :class:`~pymongo.read_preferences.ReadPreference` from the -list of known hosts. As an example, for a 3-member replica set with a -:class:`~pymongo.read_preferences.Secondary` read preference, we would select -all available secondaries. - - -.. _server selection algorithm: https://mongodb.com/docs/manual/core/read-preference-mechanics/ diff --git a/doc/examples/tailable.rst b/doc/examples/tailable.rst deleted file mode 100644 index 79458dc2ff..0000000000 --- a/doc/examples/tailable.rst +++ /dev/null @@ -1,42 +0,0 @@ -Tailable Cursors -================ - -By default, MongoDB will automatically close a cursor when the client has -exhausted all results in the cursor. However, for `capped collections -`_ you may -use a `tailable cursor -`_ -that remains open after the client exhausts the results in the initial cursor. - -The following is a basic example of using a tailable cursor to tail the oplog -of a replica set member:: - - import time - - import pymongo - - client = pymongo.MongoClient() - oplog = client.local.oplog.rs - first = oplog.find().sort('$natural', pymongo.ASCENDING).limit(-1).next() - print(first) - ts = first['ts'] - - while True: - # For a regular capped collection CursorType.TAILABLE_AWAIT is the - # only option required to create a tailable cursor. When querying the - # oplog, the oplog_replay option enables an optimization to quickly - # find the 'ts' value we're looking for. The oplog_replay option - # can only be used when querying the oplog. Starting in MongoDB 4.4 - # this option is ignored by the server as queries against the oplog - # are optimized automatically by the MongoDB query engine. - cursor = oplog.find({'ts': {'$gt': ts}}, - cursor_type=pymongo.CursorType.TAILABLE_AWAIT, - oplog_replay=True) - while cursor.alive: - for doc in cursor: - ts = doc['ts'] - print(doc) - # We end up here if the find() returned no documents or if the - # tailable cursor timed out (no new documents were added to the - # collection for more than 1 second). - time.sleep(1) diff --git a/doc/examples/timeouts.rst b/doc/examples/timeouts.rst deleted file mode 100644 index 5171588962..0000000000 --- a/doc/examples/timeouts.rst +++ /dev/null @@ -1,162 +0,0 @@ - -.. _timeout-example: - -Client Side Operation Timeout -============================= - -PyMongo 4.2 introduced :meth:`~pymongo.timeout` and the ``timeoutMS`` -URI and keyword argument to :class:`~pymongo.mongo_client.MongoClient`. -These features allow applications to more easily limit the amount of time that -one or more operations can execute before control is returned to the app. This -timeout applies to all of the work done to execute the operation, including -but not limited to server selection, connection checkout, serialization, and -server-side execution. 
- -Basic Usage ------------ - -The following example uses :meth:`~pymongo.timeout` to configure a 10-second -timeout for an :meth:`~pymongo.collection.Collection.insert_one` operation:: - - import pymongo - with pymongo.timeout(10): - coll.insert_one({"name": "Nunu"}) - -The :meth:`~pymongo.timeout` applies to all pymongo operations within the block. -The following example ensures that both the ``insert`` and the ``find`` complete -within 10 seconds total, or raise a timeout error:: - - with pymongo.timeout(10): - coll.insert_one({"name": "Nunu"}) - coll.find_one({"name": "Nunu"}) - -When nesting :func:`~pymongo.timeout`, the nested deadline is capped by the outer -deadline. The deadline can only be shortened, not extended. -When exiting the block, the previous deadline is restored:: - - with pymongo.timeout(5): - coll.find_one() # Uses the 5 second deadline. - with pymongo.timeout(3): - coll.find_one() # Uses the 3 second deadline. - coll.find_one() # Uses the original 5 second deadline. - with pymongo.timeout(10): - coll.find_one() # Still uses the original 5 second deadline. - coll.find_one() # Uses the original 5 second deadline. - -Timeout errors --------------- - -When the :meth:`~pymongo.timeout` with-statement is entered, a deadline is set -for the entire block. When that deadline is exceeded, any blocking pymongo operation -will raise a timeout exception. For example:: - - try: - with pymongo.timeout(10): - coll.insert_one({"name": "Nunu"}) - time.sleep(10) - # The deadline has now expired, the next operation will raise - # a timeout exception. - coll.find_one({"name": "Nunu"}) - except PyMongoError as exc: - if exc.timeout: - print(f"block timed out: {exc!r}") - else: - print(f"failed with non-timeout error: {exc!r}") - -The :attr:`pymongo.errors.PyMongoError.timeout` property (added in PyMongo 4.2) -will be ``True`` when the error was caused by a timeout and ``False`` otherwise. - -The timeoutMS URI option ------------------------- - -PyMongo 4.2 also added support for the ``timeoutMS`` URI and keyword argument to -:class:`~pymongo.mongo_client.MongoClient`. When this option is configured, the -client will automatically apply the timeout to each API call. For example:: - - client = MongoClient("mongodb://localhost/?timeoutMS=10000") - coll = client.test.test - coll.insert_one({"name": "Nunu"}) # Uses a 10-second timeout. - coll.find_one({"name": "Nunu"}) # Also uses a 10-second timeout. - -The above is roughly equivalent to:: - - client = MongoClient() - coll = client.test.test - with pymongo.timeout(10): - coll.insert_one({"name": "Nunu"}) - with pymongo.timeout(10): - coll.find_one({"name": "Nunu"}) - -pymongo.timeout overrides timeoutMS ------------------------------------ - -:meth:`~pymongo.timeout` overrides ``timeoutMS``; within a -:meth:`~pymongo.timeout` block a client's ``timeoutMS`` option is ignored:: - - client = MongoClient("mongodb://localhost/?timeoutMS=10000") - coll = client.test.test - coll.insert_one({"name": "Nunu"}) # Uses the client's 10-second timeout. - # pymongo.timeout overrides the client's timeoutMS. - with pymongo.timeout(20): - coll.insert_one({"name": "Nunu"}) # Uses the 20-second timeout. - with pymongo.timeout(5): - coll.find_one({"name": "Nunu"}) # Uses the 5-second timeout. - -pymongo.timeout is thread safe ------------------------------- - -:meth:`~pymongo.timeout` is thread safe; the timeout only applies to current -thread and multiple threads can configure different timeouts in parallel. 
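For example, here is a minimal sketch of two threads applying different
deadlines concurrently (it assumes a reachable MongoDB instance on the default
host and port)::

    import threading

    import pymongo

    client = pymongo.MongoClient()
    coll = client.test.test


    def short_deadline():
        # Operations in this thread must finish within 5 seconds.
        with pymongo.timeout(5):
            coll.find_one({"name": "Nunu"})


    def long_deadline():
        # This 30-second deadline is independent of the other thread's.
        with pymongo.timeout(30):
            coll.find_one({"name": "Nunu"})


    threads = [
        threading.Thread(target=short_deadline),
        threading.Thread(target=long_deadline),
    ]
    for t in threads:
        t.start()
    for t in threads:
        t.join()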
pymongo.timeout is asyncio safe
-------------------------------

:meth:`~pymongo.timeout` is asyncio safe; the timeout only applies to the
current Task, and multiple Tasks can configure different timeouts concurrently.
:meth:`~pymongo.timeout` can be used identically in
`Motor `_, for example::

    import pymongo
    import motor.motor_asyncio

    client = motor.motor_asyncio.AsyncIOMotorClient()
    coll = client.test.test
    with pymongo.timeout(10):
        await coll.insert_one({"name": "Nunu"})
        await coll.find_one({"name": "Nunu"})

Troubleshooting
---------------

There are many timeout errors that can be raised depending on when the timeout
expires. In code, these can be identified with the :attr:`pymongo.errors.PyMongoError.timeout`
property. Some specific timeout error examples are described below.

When the client was unable to find an available server to run the operation
within the given timeout::

    pymongo.errors.ServerSelectionTimeoutError: No servers found yet, Timeout: -0.00202266700216569s, Topology Description: <TopologyDescription ...>

When either the client was unable to establish a connection within the given
timeout or the operation was sent but the server was not able to respond in time::

    pymongo.errors.NetworkTimeout: localhost:27017: timed out

When the server cancelled the operation because it exceeded the given timeout.
Note that the operation may have partially completed on the server (depending
on the operation)::

    pymongo.errors.ExecutionTimeout: operation exceeded time limit, full error: {'ok': 0.0, 'errmsg': 'operation exceeded time limit', 'code': 50, 'codeName': 'MaxTimeMSExpired'}

When the client cancelled the operation because it was not possible to complete
within the given timeout::

    pymongo.errors.ExecutionTimeout: operation would exceed time limit, remaining timeout:0.00196 <= network round trip time:0.00427

When the client attempted a write operation but the server could not replicate
that write (according to the configured write concern) within the given timeout::

    pymongo.errors.WTimeoutError: operation exceeded time limit, full error: {'code': 50, 'codeName': 'MaxTimeMSExpired', 'errmsg': 'operation exceeded time limit', 'errInfo': {'writeConcern': {'w': 1, 'wtimeout': 0}}}

The same error as above but for :meth:`~pymongo.collection.Collection.insert_many`
or :meth:`~pymongo.collection.Collection.bulk_write`::

    pymongo.errors.BulkWriteError: batch op errors occurred, full error: {'writeErrors': [], 'writeConcernErrors': [{'code': 50, 'codeName': 'MaxTimeMSExpired', 'errmsg': 'operation exceeded time limit', 'errInfo': {'writeConcern': {'w': 1, 'wtimeout': 0}}}], 'nInserted': 2, 'nUpserted': 0, 'nMatched': 0, 'nModified': 0, 'nRemoved': 0, 'upserted': []}

diff --git a/doc/examples/tls.rst b/doc/examples/tls.rst
deleted file mode 100644
index 9241ac23e7..0000000000
--- a/doc/examples/tls.rst
+++ /dev/null
@@ -1,234 +0,0 @@
TLS/SSL and PyMongo
===================

PyMongo supports connecting to MongoDB over TLS/SSL. This guide covers the
configuration options supported by PyMongo. See `the server documentation
`_ to configure MongoDB.

.. warning:: Industry best practices recommend, and some regulations require,
   the use of TLS 1.1 or newer. Though no application changes are required for
   PyMongo to make use of the newest protocols, some operating systems or
   versions may not provide an OpenSSL version new enough to support them.
- - Users of macOS older than 10.13 (High Sierra) will need to install Python - from `python.org`_, `homebrew`_, `macports`_, or another similar source. - - Users of Linux or other non-macOS Unix can check their OpenSSL version like - this:: - - $ openssl version - - If the version number is less than 1.0.1 support for TLS 1.1 or newer is not - available. Contact your operating system vendor for a solution or upgrade to - a newer distribution. - - You can check your Python interpreter by installing the `requests`_ module - and executing the following command:: - - python -c "import requests; print(requests.get('https://www.howsmyssl.com/a/check', verify=False).json()['tls_version'])" - - You should see "TLS 1.X" where X is >= 1. - - You can read more about TLS versions and their security implications here: - - ``_ - -.. _python.org: https://www.python.org/downloads/ -.. _homebrew: https://brew.sh/ -.. _macports: https://www.macports.org/ -.. _requests: https://pypi.python.org/pypi/requests - -Basic configuration -................... - -In many cases connecting to MongoDB over TLS/SSL requires nothing more than -passing ``tls=True`` as a keyword argument to -:class:`~pymongo.mongo_client.MongoClient`:: - - >>> client = pymongo.MongoClient('example.com', tls=True) - -Or passing ``tls=true`` in the URI:: - - >>> client = pymongo.MongoClient('mongodb://example.com/?tls=true') - -This configures PyMongo to connect to the server using TLS, verify the server's -certificate and verify that the host you are attempting to connect to is listed -by that certificate. - -Certificate verification policy -............................... - -By default, PyMongo is configured to require a certificate from the server when -TLS is enabled. This is configurable using the ``tlsAllowInvalidCertificates`` -option. To disable this requirement pass ``tlsAllowInvalidCertificates=True`` -as a keyword parameter:: - - >>> client = pymongo.MongoClient('example.com', - ... tls=True, - ... tlsAllowInvalidCertificates=True) - -Or, in the URI:: - - >>> uri = 'mongodb://example.com/?tls=true&tlsAllowInvalidCertificates=true' - >>> client = pymongo.MongoClient(uri) - -Specifying a CA file -.................... - -In some cases you may want to configure PyMongo to use a specific set of CA -certificates. This is most often the case when you are acting as your own -certificate authority rather than using server certificates signed by a well -known authority. The ``tlsCAFile`` option takes a path to a CA file. It can be -passed as a keyword argument:: - - >>> client = pymongo.MongoClient('example.com', - ... tls=True, - ... tlsCAFile='/path/to/ca.pem') - -Or, in the URI:: - - >>> uri = 'mongodb://example.com/?tls=true&tlsCAFile=/path/to/ca.pem' - >>> client = pymongo.MongoClient(uri) - -Specifying a certificate revocation list -........................................ - -The ``tlsCRLFile`` option takes a path to a CRL file. It can be passed -as a keyword argument:: - - >>> client = pymongo.MongoClient('example.com', - ... tls=True, - ... tlsCRLFile='/path/to/crl.pem') - -Or, in the URI:: - - >>> uri = 'mongodb://example.com/?tls=true&tlsCRLFile=/path/to/crl.pem' - >>> client = pymongo.MongoClient(uri) - -.. note:: Certificate revocation lists and :ref:`OCSP` cannot be used together. - -Client certificates -................... - -PyMongo can be configured to present a client certificate using the -``tlsCertificateKeyFile`` option:: - - >>> client = pymongo.MongoClient('example.com', - ... tls=True, - ... 
tlsCertificateKeyFile='/path/to/client.pem') - -If the private key for the client certificate is stored in a separate file, -it should be concatenated with the certificate file. For example, to -concatenate a PEM-formatted certificate file ``cert.pem`` and a PEM-formatted -keyfile ``key.pem`` into a single file ``combined.pem``, on Unix systems, -users can run:: - - $ cat key.pem cert.pem > combined.pem - -PyMongo can be configured with the concatenated certificate keyfile using the -``tlsCertificateKeyFile`` option:: - - >>> client = pymongo.MongoClient('example.com', - ... tls=True, - ... tlsCertificateKeyFile='/path/to/combined.pem') - -If the private key contained in the certificate keyfile is encrypted, users -can provide a password or passphrase to decrypt the encrypted private keys -using the ``tlsCertificateKeyFilePassword`` option:: - - >>> client = pymongo.MongoClient('example.com', - ... tls=True, - ... tlsCertificateKeyFile='/path/to/combined.pem', - ... tlsCertificateKeyFilePassword=) - -These options can also be passed as part of the MongoDB URI. - -.. _OCSP: - -OCSP -.... - -Starting with PyMongo 3.11, if PyMongo was installed with the "ocsp" extra:: - - python -m pip install pymongo[ocsp] - -certificate revocation checking is enabled by way of `OCSP (Online Certification -Status Protocol) `_. -MongoDB 4.4+ `staples OCSP responses `_ -to the TLS handshake which PyMongo will verify, failing the TLS handshake if -the stapled OCSP response is invalid or indicates that the peer certificate is -revoked. - -When connecting to a server version older than 4.4, or when a 4.4+ version of -MongoDB does not staple an OCSP response, PyMongo will attempt to connect -directly to an OCSP endpoint if the peer certificate specified one. The TLS -handshake will only fail in this case if the response indicates that the -certificate is revoked. Invalid or malformed responses will be ignored, -favoring availability over maximum security. - -.. _TLSErrors: - -Troubleshooting TLS Errors -.......................... - -TLS errors often fall into three categories - certificate verification failure, -protocol version mismatch or certificate revocation checking failure. An error -message similar to the following means that OpenSSL was not able to verify the -server's certificate:: - - [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed - -This often occurs because OpenSSL does not have access to the system's -root certificates or the certificates are out of date. Linux users should -ensure that they have the latest root certificate updates installed from -their Linux vendor. macOS users using Python 3.7 or newer downloaded -from python.org `may have to run a script included with python -`_ to install -root certificates:: - - open "/Applications/Python /Install Certificates.command" - -Users of older PyPy portable versions may have to `set an environment -variable `_ to tell -OpenSSL where to find root certificates. This is easily done using the `certifi -module `_ from pypi:: - - $ pypy -m pip install certifi - $ export SSL_CERT_FILE=$(pypy -c "import certifi; print(certifi.where())") - -An error message similar to the following message means that the OpenSSL -version used by Python does not support a new enough TLS protocol to connect -to the server:: - - [SSL: TLSV1_ALERT_PROTOCOL_VERSION] tlsv1 alert protocol version - -Industry best practices recommend, and some regulations require, that older -TLS protocols be disabled in some MongoDB deployments. 
Some deployments may disable TLS 1.0; others may disable both TLS 1.0 and
TLS 1.1. See the warning earlier in this document for troubleshooting steps
and solutions.

An error message similar to the following means that certificate revocation
checking failed::

    [('SSL routines', 'tls_process_initial_server_flight', 'invalid status response')]

See :ref:`OCSP` for more details.

Python 3.10+ incompatibilities with TLS/SSL on MongoDB <= 4.0
.............................................................

Note that `changes made to the ssl module in Python 3.10+
`_ may cause incompatibilities
with MongoDB <= 4.0. The following are some example errors that may occur with this
combination::

    SSL handshake failed: localhost:27017: [SSL: SSLV3_ALERT_HANDSHAKE_FAILURE] sslv3 alert handshake failure (_ssl.c:997)
    SSL handshake failed: localhost:27017: EOF occurred in violation of protocol (_ssl.c:997)

The MongoDB server logs may show the following error::

    2021-06-30T21:22:44.917+0100 E NETWORK [conn16] SSL: error:1408A0C1:SSL routines:ssl3_get_client_hello:no shared cipher

To resolve this issue, use Python 3.9 or older, upgrade to MongoDB 4.2+, or
install pymongo with the :ref:`OCSP` extra, which relies on PyOpenSSL.

diff --git a/doc/examples/type_hints.rst b/doc/examples/type_hints.rst
deleted file mode 100644
index 375ad14330..0000000000
--- a/doc/examples/type_hints.rst
+++ /dev/null
@@ -1,332 +0,0 @@

.. _type_hints-example:

Type Hints
==========

As of version 4.1, PyMongo ships with `type hints`_. With type hints, Python
type checkers can easily find bugs before they reveal themselves in your code.

If your IDE is configured to use type hints, it can suggest more appropriate
completions and highlight errors in your code. Some examples include
`PyCharm`_, `Sublime Text`_, and `Visual Studio Code`_.

You can also use the `mypy`_ tool from your command line or in Continuous
Integration tests.

All of the public APIs in PyMongo are fully type hinted, and several of them
support generic parameters for the type of document object returned when
decoding BSON documents.

Due to `limitations in mypy`_, the default values for generic document types
are not yet provided (they will eventually be ``Dict[str, Any]``).

For a larger set of examples that use types, see the PyMongo `test_typing module`_.

If you would like to opt out of using the provided types, add the following to
your `mypy config`_::

    [mypy-pymongo]
    follow_imports = False

Basic Usage
-----------

Note that a type for :class:`~pymongo.mongo_client.MongoClient` must be
specified. Here we use the default, unspecified document type:

.. doctest::

    >>> from pymongo import MongoClient
    >>> client: MongoClient = MongoClient()
    >>> collection = client.test.test
    >>> inserted = collection.insert_one({"x": 1, "tags": ["dog", "cat"]})
    >>> retrieved = collection.find_one({"x": 1})
    >>> assert isinstance(retrieved, dict)

For more accurate typing of the document type, you can use:

.. doctest::

    >>> from typing import Any, Dict
    >>> from pymongo import MongoClient
    >>> client: MongoClient[Dict[str, Any]] = MongoClient()
    >>> collection = client.test.test
    >>> inserted = collection.insert_one({"x": 1, "tags": ["dog", "cat"]})
    >>> retrieved = collection.find_one({"x": 1})
    >>> assert isinstance(retrieved, dict)

Typed Client
------------

:class:`~pymongo.mongo_client.MongoClient` is generic on the document type
used to decode BSON documents.
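As a minimal sketch of what this buys you, the declared document type flows
through to query results. Note that ``reveal_type`` is a mypy-only construct,
so this snippet is meant to be type checked, not executed::

    from typing import Any, Dict

    from pymongo import MongoClient

    client: MongoClient[Dict[str, Any]] = MongoClient()
    result = client.test.test.find_one({"x": 1})
    # Since find_one() may return None, mypy reveals roughly:
    # Union[builtins.dict[builtins.str, Any], None]
    reveal_type(result)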
- -You can specify a :class:`~bson.raw_bson.RawBSONDocument` document type: - -.. doctest:: - - >>> from pymongo import MongoClient - >>> from bson.raw_bson import RawBSONDocument - >>> client = MongoClient(document_class=RawBSONDocument) - >>> collection = client.test.test - >>> inserted = collection.insert_one({"x": 1, "tags": ["dog", "cat"]}) - >>> result = collection.find_one({"x": 1}) - >>> assert isinstance(result, RawBSONDocument) - -Subclasses of :py:class:`collections.abc.Mapping` can also be used, such as :class:`~bson.son.SON`: - -.. doctest:: - - >>> from bson import SON - >>> from pymongo import MongoClient - >>> client = MongoClient(document_class=SON[str, int]) - >>> collection = client.test.test - >>> inserted = collection.insert_one({"x": 1, "y": 2}) - >>> result = collection.find_one({"x": 1}) - >>> assert result is not None - >>> assert result["x"] == 1 - -Note that when using :class:`~bson.son.SON`, the key and value types must be given, e.g. ``SON[str, Any]``. - - -Typed Collection ----------------- - -You can use :py:class:`~typing.TypedDict` (Python 3.8+) when using a well-defined schema for the data in a -:class:`~pymongo.collection.Collection`. Note that all `schema validation`_ for inserts and updates is done on the server. -These methods automatically add an "_id" field. - -.. doctest:: - :pyversion: >= 3.8 - - >>> from typing import TypedDict - >>> from pymongo import MongoClient - >>> from pymongo.collection import Collection - >>> class Movie(TypedDict): - ... name: str - ... year: int - ... - >>> client: MongoClient = MongoClient() - >>> collection: Collection[Movie] = client.test.test - >>> inserted = collection.insert_one(Movie(name="Jurassic Park", year=1993)) - >>> result = collection.find_one({"name": "Jurassic Park"}) - >>> assert result is not None - >>> assert result["year"] == 1993 - >>> # This will raise a type-checking error, despite being present, because it is added by PyMongo. - >>> assert result["_id"] # type:ignore[typeddict-item] - -This same typing scheme works for all of the insert methods (:meth:`~pymongo.collection.Collection.insert_one`, -:meth:`~pymongo.collection.Collection.insert_many`, and :meth:`~pymongo.collection.Collection.bulk_write`). -For ``bulk_write`` both :class:`~pymongo.operations.InsertOne` and :class:`~pymongo.operations.ReplaceOne` operators are generic. - -.. doctest:: - :pyversion: >= 3.8 - - >>> from typing import TypedDict - >>> from pymongo import MongoClient - >>> from pymongo.operations import InsertOne - >>> from pymongo.collection import Collection - >>> client: MongoClient = MongoClient() - >>> collection: Collection[Movie] = client.test.test - >>> inserted = collection.bulk_write([InsertOne(Movie(name="Jurassic Park", year=1993))]) - >>> result = collection.find_one({"name": "Jurassic Park"}) - >>> assert result is not None - >>> assert result["year"] == 1993 - >>> # This will raise a type-checking error, despite being present, because it is added by PyMongo. - >>> assert result["_id"] # type:ignore[typeddict-item] - -Modeling Document Types with TypedDict --------------------------------------- - -You can use :py:class:`~typing.TypedDict` (Python 3.8+) to model structured data. -As noted above, PyMongo will automatically add an ``_id`` field if it is not present. This also applies to TypedDict. -There are three approaches to this: - - 1. Do not specify ``_id`` at all. It will be inserted automatically, and can be retrieved at run-time, but will yield a type-checking error unless explicitly ignored. - - 2. 
Specify ``_id`` explicitly. This will mean that every instance of your custom TypedDict class will have to pass a value for ``_id``. - - 3. Make use of :py:class:`~typing.NotRequired`. This has the flexibility of option 1, but with the ability to access the ``_id`` field without causing a type-checking error. - -Note: to use :py:class:`~typing.TypedDict` and :py:class:`~typing.NotRequired` in earlier versions of Python (<3.8, <3.11), use the ``typing_extensions`` package. - -.. doctest:: typed-dict-example - :pyversion: >= 3.11 - - >>> from typing import TypedDict, NotRequired - >>> from pymongo import MongoClient - >>> from pymongo.collection import Collection - >>> from bson import ObjectId - >>> class Movie(TypedDict): - ... name: str - ... year: int - ... - >>> class ExplicitMovie(TypedDict): - ... _id: ObjectId - ... name: str - ... year: int - ... - >>> class NotRequiredMovie(TypedDict): - ... _id: NotRequired[ObjectId] - ... name: str - ... year: int - ... - >>> client: MongoClient = MongoClient() - >>> collection: Collection[Movie] = client.test.test - >>> inserted = collection.insert_one(Movie(name="Jurassic Park", year=1993)) - >>> result = collection.find_one({"name": "Jurassic Park"}) - >>> assert result is not None - >>> # This will yield a type-checking error, despite being present, because it is added by PyMongo. - >>> assert result["_id"] # type:ignore[typeddict-item] - >>> collection: Collection[ExplicitMovie] = client.test.test - >>> # Note that the _id keyword argument must be supplied - >>> inserted = collection.insert_one( - ... ExplicitMovie(_id=ObjectId(), name="Jurassic Park", year=1993) - ... ) - >>> result = collection.find_one({"name": "Jurassic Park"}) - >>> assert result is not None - >>> # This will not raise a type-checking error. - >>> assert result["_id"] - >>> collection: Collection[NotRequiredMovie] = client.test.test - >>> # Note the lack of _id, similar to the first example - >>> inserted = collection.insert_one(NotRequiredMovie(name="Jurassic Park", year=1993)) - >>> result = collection.find_one({"name": "Jurassic Park"}) - >>> assert result is not None - >>> # This will not raise a type-checking error, despite not being provided explicitly. - >>> assert result["_id"] - - -Typed Database --------------- - -While less common, you could specify that the documents in an entire database -match a well-defined schema using :py:class:`~typing.TypedDict` (Python 3.8+). - - -.. doctest:: - - >>> from typing import TypedDict - >>> from pymongo import MongoClient - >>> from pymongo.database import Database - >>> class Movie(TypedDict): - ... name: str - ... year: int - ... - >>> client: MongoClient = MongoClient() - >>> db: Database[Movie] = client.test - >>> collection = db.test - >>> inserted = collection.insert_one({"name": "Jurassic Park", "year": 1993}) - >>> result = collection.find_one({"name": "Jurassic Park"}) - >>> assert result is not None - >>> assert result["year"] == 1993 - -Typed Command -------------- -When using the :meth:`~pymongo.database.Database.command`, you can specify the document type by providing a custom :class:`~bson.codec_options.CodecOptions`: - -.. 
doctest:: - - >>> from pymongo import MongoClient - >>> from bson.raw_bson import RawBSONDocument - >>> from bson import CodecOptions - >>> client: MongoClient = MongoClient() - >>> options = CodecOptions(RawBSONDocument) - >>> result = client.admin.command("ping", codec_options=options) - >>> assert isinstance(result, RawBSONDocument) - -Custom :py:class:`collections.abc.Mapping` subclasses and :py:class:`~typing.TypedDict` (Python 3.8+) are also supported. -For :py:class:`~typing.TypedDict`, use the form: ``options: CodecOptions[MyTypedDict] = CodecOptions(...)``. - -Typed BSON Decoding -------------------- -You can specify the document type returned by :mod:`bson` decoding functions by providing :class:`~bson.codec_options.CodecOptions`: - -.. doctest:: - - >>> from typing import Any, Dict - >>> from bson import CodecOptions, encode, decode - >>> class MyDict(Dict[str, Any]): - ... def foo(self): - ... return "bar" - ... - >>> options = CodecOptions(document_class=MyDict) - >>> doc = {"x": 1, "y": 2} - >>> bsonbytes = encode(doc, codec_options=options) - >>> rt_document = decode(bsonbytes, codec_options=options) - >>> assert rt_document.foo() == "bar" - -:class:`~bson.raw_bson.RawBSONDocument` and :py:class:`~typing.TypedDict` (Python 3.8+) are also supported. -For :py:class:`~typing.TypedDict`, use the form: ``options: CodecOptions[MyTypedDict] = CodecOptions(...)``. - - -Troubleshooting ---------------- - -Client Type Annotation -~~~~~~~~~~~~~~~~~~~~~~ -If you forget to add a type annotation for a :class:`~pymongo.mongo_client.MongoClient` object you may get the following ``mypy`` error:: - - from pymongo import MongoClient - client = MongoClient() # error: Need type annotation for "client" - -The solution is to annotate the type as ``client: MongoClient`` or ``client: MongoClient[Dict[str, Any]]``. See `Basic Usage`_. - -Incompatible Types -~~~~~~~~~~~~~~~~~~ -If you use the generic form of :class:`~pymongo.mongo_client.MongoClient` you -may encounter a ``mypy`` error like:: - - from pymongo import MongoClient - - client: MongoClient = MongoClient() - client.test.test.insert_many( - {"a": 1} - ) # error: Dict entry 0 has incompatible type "str": "int"; - # expected "Mapping[str, Any]": "int" - - -The solution is to use ``client: MongoClient[Dict[str, Any]]`` as used in -`Basic Usage`_ . - -Actual Type Errors -~~~~~~~~~~~~~~~~~~ - -Other times ``mypy`` will catch an actual error, like the following code:: - - from pymongo import MongoClient - from typing import Mapping - client: MongoClient = MongoClient() - client.test.test.insert_one( - [{}] - ) # error: Argument 1 to "insert_one" of "Collection" has - # incompatible type "List[Dict[, ]]"; - # expected "Mapping[str, Any]" - -In this case the solution is to use ``insert_one({})``, passing a document instead of a list. - -Another example is trying to set a value on a :class:`~bson.raw_bson.RawBSONDocument`, which is read-only.:: - - from bson.raw_bson import RawBSONDocument - from pymongo import MongoClient - - client = MongoClient(document_class=RawBSONDocument) - coll = client.test.test - doc = {"my": "doc"} - coll.insert_one(doc) - retrieved = coll.find_one({"_id": doc["_id"]}) - assert retrieved is not None - assert len(retrieved.raw) > 0 - retrieved[ - "foo" - ] = "bar" # error: Unsupported target for indexed assignment - # ("RawBSONDocument") [index] - -.. _PyCharm: https://www.jetbrains.com/help/pycharm/type-hinting-in-product.html -.. _Visual Studio Code: https://code.visualstudio.com/docs/languages/python -.. 
_Sublime Text: https://github.com/sublimelsp/LSP-pyright -.. _type hints: https://docs.python.org/3/library/typing.html -.. _mypy: https://mypy.readthedocs.io/en/stable/cheat_sheet_py3.html -.. _limitations in mypy: https://github.com/python/mypy/issues/3737 -.. _mypy config: https://mypy.readthedocs.io/en/stable/config_file.html -.. _test_typing module: https://github.com/mongodb/mongo-python-driver/blob/master/test/test_typing.py -.. _schema validation: https://www.mongodb.com/docs/manual/core/schema-validation/#when-to-use-schema-validation diff --git a/doc/examples/uuid.rst b/doc/examples/uuid.rst deleted file mode 100644 index 350db14d9a..0000000000 --- a/doc/examples/uuid.rst +++ /dev/null @@ -1,512 +0,0 @@ - -.. _handling-uuid-data-example: - -Handling UUID Data -================== - -PyMongo ships with built-in support for dealing with UUID types. -It is straightforward to store native :class:`uuid.UUID` objects -to MongoDB and retrieve them as native :class:`uuid.UUID` objects:: - - from pymongo import MongoClient - from bson.binary import UuidRepresentation - from uuid import uuid4 - - # use the 'standard' representation for cross-language compatibility. - client = MongoClient(uuidRepresentation='standard') - collection = client.get_database('uuid_db').get_collection('uuid_coll') - - # remove all documents from collection - collection.delete_many({}) - - # create a native uuid object - uuid_obj = uuid4() - - # save the native uuid object to MongoDB - collection.insert_one({'uuid': uuid_obj}) - - # retrieve the stored uuid object from MongoDB - document = collection.find_one({}) - - # check that the retrieved UUID matches the inserted UUID - assert document['uuid'] == uuid_obj - -Native :class:`uuid.UUID` objects can also be used as part of MongoDB -queries:: - - document = collection.find({'uuid': uuid_obj}) - assert document['uuid'] == uuid_obj - -The above examples illustrate the simplest of use-cases - one where the -UUID is generated by, and used in the same application. However, -the situation can be significantly more complex when dealing with a MongoDB -deployment that contains UUIDs created by other drivers as the Java and CSharp -drivers have historically encoded UUIDs using a byte-order that is different -from the one used by PyMongo. Applications that require interoperability across -these drivers must specify the appropriate -:class:`~bson.binary.UuidRepresentation`. - -In the following sections, we describe how drivers have historically differed -in their encoding of UUIDs, and how applications can use the -:class:`~bson.binary.UuidRepresentation` configuration option to maintain -cross-language compatibility. - -.. attention:: New applications that do not share a MongoDB deployment with - any other application and that have never stored UUIDs in MongoDB - should use the ``standard`` UUID representation for cross-language - compatibility. See :ref:`configuring-uuid-representation` for details - on how to configure the :class:`~bson.binary.UuidRepresentation`. - -.. _example-legacy-uuid: - -Legacy Handling of UUID Data ----------------------------- - -Historically, MongoDB Drivers have used different byte-ordering -while serializing UUID types to :class:`~bson.binary.Binary`. 
-Consider, for instance, a UUID with the following canonical textual -representation:: - - 00112233-4455-6677-8899-aabbccddeeff - -This UUID would historically be serialized by the Python driver as:: - - 00112233-4455-6677-8899-aabbccddeeff - -The same UUID would historically be serialized by the C# driver as:: - - 33221100-5544-7766-8899-aabbccddeeff - -Finally, the same UUID would historically be serialized by the Java driver as:: - - 77665544-3322-1100-ffee-ddccbbaa9988 - -.. note:: For in-depth information about the byte-order historically - used by different drivers, see the `Handling of Native UUID Types - Specification - `_. - -This difference in the byte-order of UUIDs encoded by different drivers can -result in highly unintuitive behavior in some scenarios. We detail two such -scenarios in the next sections. - -Scenario 1: Applications Share a MongoDB Deployment -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Consider the following situation: - -* Application ``C`` written in C# generates a UUID and uses it as the ``_id`` - of a document that it proceeds to insert into the ``uuid_test`` collection of - the ``example_db`` database. Let's assume that the canonical textual - representation of the generated UUID is:: - - 00112233-4455-6677-8899-aabbccddeeff - -* Application ``P`` written in Python attempts to ``find`` the document - written by application ``C`` in the following manner:: - - from uuid import UUID - collection = client.example_db.uuid_test - result = collection.find_one({'_id': UUID('00112233-4455-6677-8899-aabbccddeeff')}) - - In this instance, ``result`` will never be the document that - was inserted by application ``C`` in the previous step. This is because of - the different byte-order used by the C# driver for representing UUIDs as - BSON Binary. The following query, on the other hand, will successfully find - this document:: - - result = collection.find_one({'_id': UUID('33221100-5544-7766-8899-aabbccddeeff')}) - -This example demonstrates how the differing byte-order used by different -drivers can hamper interoperability. To work around this problem, users should -configure their ``MongoClient`` with the appropriate -:class:`~bson.binary.UuidRepresentation` (in this case, ``client`` in application -``P`` can be configured to use the -:data:`~bson.binary.UuidRepresentation.CSHARP_LEGACY` representation to -avoid the unintuitive behavior) as described in -:ref:`configuring-uuid-representation`. - -Scenario 2: Round-Tripping UUIDs -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -In the following examples, we see how using a misconfigured -:class:`~bson.binary.UuidRepresentation` can cause an application -to inadvertently change the :class:`~bson.binary.Binary` subtype, and in some -cases, the bytes of the :class:`~bson.binary.Binary` field itself when -round-tripping documents containing UUIDs.
- -Consider the following situation:: - - from bson.codec_options import CodecOptions, DEFAULT_CODEC_OPTIONS - from bson.binary import Binary, UuidRepresentation - from uuid import uuid4 - - # Using UuidRepresentation.PYTHON_LEGACY stores a Binary subtype-3 UUID - python_opts = CodecOptions(uuid_representation=UuidRepresentation.PYTHON_LEGACY) - input_uuid = uuid4() - collection = client.testdb.get_collection('test', codec_options=python_opts) - collection.insert_one({'_id': 'foo', 'uuid': input_uuid}) - assert collection.find_one({'uuid': Binary(input_uuid.bytes, 3)})['_id'] == 'foo' - - # Retrieving this document using UuidRepresentation.STANDARD returns a Binary instance - std_opts = CodecOptions(uuid_representation=UuidRepresentation.STANDARD) - std_collection = client.testdb.get_collection('test', codec_options=std_opts) - doc = std_collection.find_one({'_id': 'foo'}) - assert isinstance(doc['uuid'], Binary) - - # Round-tripping the retrieved document yields the exact same document - std_collection.replace_one({'_id': 'foo'}, doc) - round_tripped_doc = collection.find_one({'uuid': Binary(input_uuid.bytes, 3)}) - assert doc == round_tripped_doc - - -In this example, round-tripping the document using the incorrect -:class:`~bson.binary.UuidRepresentation` (``STANDARD`` instead of -``PYTHON_LEGACY``) changes the :class:`~bson.binary.Binary` subtype as a -side-effect. **Note that this can also happen when the situation is reversed - -i.e. when the original document is written using ``STANDARD`` representation -and then round-tripped using the ``PYTHON_LEGACY`` representation.** - -In the next example, we see the consequences of incorrectly using a -representation that modifies byte-order (``CSHARP_LEGACY`` or ``JAVA_LEGACY``) -when round-tripping documents:: - - from bson.codec_options import CodecOptions, DEFAULT_CODEC_OPTIONS - from bson.binary import Binary, UuidRepresentation - from uuid import uuid4 - - # Using UuidRepresentation.STANDARD stores a Binary subtype-4 UUID - std_opts = CodecOptions(uuid_representation=UuidRepresentation.STANDARD) - input_uuid = uuid4() - collection = client.testdb.get_collection('test', codec_options=std_opts) - collection.insert_one({'_id': 'baz', 'uuid': input_uuid}) - assert collection.find_one({'uuid': Binary(input_uuid.bytes, 4)})['_id'] == 'baz' - - # Retrieving this document using UuidRepresentation.JAVA_LEGACY returns a native UUID - # without modifying the UUID byte-order - java_opts = CodecOptions(uuid_representation=UuidRepresentation.JAVA_LEGACY) - java_collection = client.testdb.get_collection('test', codec_options=java_opts) - doc = java_collection.find_one({'_id': 'baz'}) - assert doc['uuid'] == input_uuid - - # Round-tripping the retrieved document silently changes the Binary bytes and subtype - java_collection.replace_one({'_id': 'baz'}, doc) - assert collection.find_one({'uuid': Binary(input_uuid.bytes, 3)}) is None - assert collection.find_one({'uuid': Binary(input_uuid.bytes, 4)}) is None - round_tripped_doc = collection.find_one({'_id': 'baz'}) - assert round_tripped_doc['uuid'] == Binary(input_uuid.bytes, 3).as_uuid(UuidRepresentation.JAVA_LEGACY) - - -In this case, using the incorrect :class:`~bson.binary.UuidRepresentation` -(``JAVA_LEGACY`` instead of ``STANDARD``) changes the -:class:`~bson.binary.Binary` bytes and subtype as a side-effect. -**Note that this happens when any representation that -manipulates byte-order (``CSHARP_LEGACY`` or ``JAVA_LEGACY``) is incorrectly -used to round-trip UUIDs written with ``STANDARD``. 
When the situation is -reversed - i.e. when the original document is written using ``CSHARP_LEGACY`` -or ``JAVA_LEGACY`` and then round-tripped using ``STANDARD`` - -only the :class:`~bson.binary.Binary` subtype is changed.** - -.. note:: Starting in PyMongo 4.0, these issues are resolved as - the ``STANDARD`` representation decodes Binary subtype 3 fields as - :class:`~bson.binary.Binary` objects of subtype 3 (instead of - :class:`uuid.UUID`), and each of the ``LEGACY_*`` representations - decodes Binary subtype 4 fields to :class:`~bson.binary.Binary` objects of - subtype 4 (instead of :class:`uuid.UUID`). - -.. _configuring-uuid-representation: - -Configuring a UUID Representation ---------------------------------- - -Users can work around the problems described above by configuring their -applications with the appropriate :class:`~bson.binary.UuidRepresentation`. -Configuring the representation modifies PyMongo's behavior while -encoding :class:`uuid.UUID` objects to BSON and decoding -Binary subtype 3 and 4 fields from BSON. - -Applications can set the UUID representation in one of the following ways: - -#. At the ``MongoClient`` level using the ``uuidRepresentation`` URI option, - e.g.:: - - client = MongoClient("mongodb://a:27017/?uuidRepresentation=standard") - - Valid values are: - - .. list-table:: - :header-rows: 1 - - * - Value - - UUID Representation - - * - ``unspecified`` - - :ref:`unspecified-representation-details` - - * - ``standard`` - - :ref:`standard-representation-details` - - * - ``pythonLegacy`` - - :ref:`python-legacy-representation-details` - - * - ``javaLegacy`` - - :ref:`java-legacy-representation-details` - - * - ``csharpLegacy`` - - :ref:`csharp-legacy-representation-details` - -#. At the ``MongoClient`` level using the ``uuidRepresentation`` kwarg - option, e.g.:: - - from bson.binary import UuidRepresentation - client = MongoClient(uuidRepresentation=UuidRepresentation.STANDARD) - -#. At the ``Database`` or ``Collection`` level by supplying a suitable - :class:`~bson.codec_options.CodecOptions` instance, e.g.:: - - from bson.codec_options import CodecOptions - csharp_opts = CodecOptions(uuid_representation=UuidRepresentation.CSHARP_LEGACY) - java_opts = CodecOptions(uuid_representation=UuidRepresentation.JAVA_LEGACY) - - # Get database/collection from client with csharpLegacy UUID representation - csharp_database = client.get_database('csharp_db', codec_options=csharp_opts) - csharp_collection = client.testdb.get_collection('csharp_coll', codec_options=csharp_opts) - - # Get database/collection from existing database/collection with javaLegacy UUID representation - java_database = csharp_database.with_options(codec_options=java_opts) - java_collection = csharp_collection.with_options(codec_options=java_opts) - -Supported UUID Representations ------------------------------- - -.. list-table:: - :header-rows: 1 - - * - UUID Representation - - Default?
- - Encode :class:`uuid.UUID` to - - Decode :class:`~bson.binary.Binary` subtype 4 to - - Decode :class:`~bson.binary.Binary` subtype 3 to - - * - :ref:`standard-representation-details` - - No - - :class:`~bson.binary.Binary` subtype 4 - - :class:`uuid.UUID` - - :class:`~bson.binary.Binary` subtype 3 - - * - :ref:`unspecified-representation-details` - - Yes, in PyMongo>=4 - - Raise :exc:`ValueError` - - :class:`~bson.binary.Binary` subtype 4 - - :class:`~bson.binary.Binary` subtype 3 - - * - :ref:`python-legacy-representation-details` - - No - - :class:`~bson.binary.Binary` subtype 3 with standard byte-order - - :class:`~bson.binary.Binary` subtype 4 - - :class:`uuid.UUID` - - * - :ref:`java-legacy-representation-details` - - No - - :class:`~bson.binary.Binary` subtype 3 with Java legacy byte-order - - :class:`~bson.binary.Binary` subtype 4 - - :class:`uuid.UUID` - - * - :ref:`csharp-legacy-representation-details` - - No - - :class:`~bson.binary.Binary` subtype 3 with C# legacy byte-order - - :class:`~bson.binary.Binary` subtype 4 - - :class:`uuid.UUID` - -We now detail the behavior and use case for each supported UUID -representation. - -.. _unspecified-representation-details: - -``UNSPECIFIED`` -^^^^^^^^^^^^^^^ - -.. attention:: Starting in PyMongo 4.0, - :data:`~bson.binary.UuidRepresentation.UNSPECIFIED` is the default - UUID representation used by PyMongo. - -The :data:`~bson.binary.UuidRepresentation.UNSPECIFIED` representation -prevents the incorrect interpretation of UUID bytes by stopping short of -automatically converting UUID fields in BSON to native UUID types. Decoding -a UUID when using this representation returns a :class:`~bson.binary.Binary` -object instead. If required, users can coerce the decoded -:class:`~bson.binary.Binary` objects into native UUIDs using the -:meth:`~bson.binary.Binary.as_uuid` method and specifying the appropriate -representation format. The following example shows -what this might look like for a UUID stored by the C# driver:: - - from bson.codec_options import CodecOptions, DEFAULT_CODEC_OPTIONS - from bson.binary import Binary, UuidRepresentation - from uuid import uuid4 - - # Using UuidRepresentation.CSHARP_LEGACY - csharp_opts = CodecOptions(uuid_representation=UuidRepresentation.CSHARP_LEGACY) - - # Store a legacy C#-formatted UUID - input_uuid = uuid4() - collection = client.testdb.get_collection('test', codec_options=csharp_opts) - collection.insert_one({'_id': 'foo', 'uuid': input_uuid}) - - # Using UuidRepresentation.UNSPECIFIED - unspec_opts = CodecOptions(uuid_representation=UuidRepresentation.UNSPECIFIED) - unspec_collection = client.testdb.get_collection('test', codec_options=unspec_opts) - - # UUID fields are decoded as Binary when UuidRepresentation.UNSPECIFIED is configured - document = unspec_collection.find_one({'_id': 'foo'}) - decoded_field = document['uuid'] - assert isinstance(decoded_field, Binary) - - # Binary.as_uuid() can be used to coerce the decoded value to a native UUID - decoded_uuid = decoded_field.as_uuid(UuidRepresentation.CSHARP_LEGACY) - assert decoded_uuid == input_uuid - -Native :class:`uuid.UUID` objects cannot directly be encoded to -:class:`~bson.binary.Binary` when the UUID representation is ``UNSPECIFIED``, -and attempting to do so will result in an exception:: - - unspec_collection.insert_one({'_id': 'bar', 'uuid': uuid4()}) - Traceback (most recent call last): - ... - ValueError: cannot encode native uuid.UUID with UuidRepresentation.UNSPECIFIED.
UUIDs can be manually converted to bson.Binary instances using bson.Binary.from_uuid() or a different UuidRepresentation can be configured. See the documentation for UuidRepresentation for more information. - -Instead, applications using :data:`~bson.binary.UuidRepresentation.UNSPECIFIED` -must explicitly coerce a native UUID using the -:meth:`~bson.binary.Binary.from_uuid` method:: - - explicit_binary = Binary.from_uuid(uuid4(), UuidRepresentation.STANDARD) - unspec_collection.insert_one({'_id': 'bar', 'uuid': explicit_binary}) - -.. _standard-representation-details: - -``STANDARD`` -^^^^^^^^^^^^ - -.. attention:: This UUID representation should be used by new applications or - applications that are encoding and/or decoding UUIDs in MongoDB for the - first time. - -The :data:`~bson.binary.UuidRepresentation.STANDARD` representation -enables cross-language compatibility by ensuring the same byte-ordering -when encoding UUIDs from all drivers. UUIDs written by a driver with this -representation configured will be handled correctly by every other driver, provided -it is also configured with the ``STANDARD`` representation. - -``STANDARD`` encodes native :class:`uuid.UUID` objects to -:class:`~bson.binary.Binary` subtype 4 objects. - -.. _python-legacy-representation-details: - -``PYTHON_LEGACY`` -^^^^^^^^^^^^^^^^^ - -.. attention:: This UUID representation should be used when reading UUIDs - generated by existing applications that use the Python driver - but **don't** explicitly set a UUID representation. - -.. attention:: :data:`~bson.binary.UuidRepresentation.PYTHON_LEGACY` - was the default UUID representation in PyMongo 3. - -The :data:`~bson.binary.UuidRepresentation.PYTHON_LEGACY` representation -corresponds to the legacy representation of UUIDs used by PyMongo. This -representation conforms with -`RFC 4122 Section 4.1.2 `_. - -The following example illustrates the use of this representation:: - - from bson.codec_options import CodecOptions, DEFAULT_CODEC_OPTIONS - from bson.binary import Binary, UuidRepresentation - from uuid import uuid4 - - # No configured UUID representation - collection = client.python_legacy.get_collection('test', codec_options=DEFAULT_CODEC_OPTIONS) - - # Using UuidRepresentation.PYTHON_LEGACY - pylegacy_opts = CodecOptions(uuid_representation=UuidRepresentation.PYTHON_LEGACY) - pylegacy_collection = client.python_legacy.get_collection('test', codec_options=pylegacy_opts) - - # UUIDs written by PyMongo 3 with no UuidRepresentation configured - # (or PyMongo 4.0 with PYTHON_LEGACY) can be queried using PYTHON_LEGACY - uuid_1 = uuid4() - pylegacy_collection.insert_one({'uuid': uuid_1}) - document = pylegacy_collection.find_one({'uuid': uuid_1}) - -``PYTHON_LEGACY`` encodes native :class:`uuid.UUID` objects to -:class:`~bson.binary.Binary` subtype 3 objects, preserving the same -byte-order as :attr:`~uuid.UUID.bytes`:: - - from bson.binary import Binary - - document = collection.find_one({'uuid': Binary(uuid_1.bytes, subtype=3)}) - assert document['uuid'] == Binary(uuid_1.bytes, subtype=3) - -.. _java-legacy-representation-details: - -``JAVA_LEGACY`` -^^^^^^^^^^^^^^^ - -.. attention:: This UUID representation should be used when reading UUIDs - written to MongoDB by legacy applications (i.e. applications that don't - use the ``STANDARD`` representation) using the Java driver. - -The :data:`~bson.binary.UuidRepresentation.JAVA_LEGACY` representation -corresponds to the legacy representation of UUIDs used by the MongoDB Java -Driver. - -..
note:: The ``JAVA_LEGACY`` representation reverses the order of bytes 0-7, - and bytes 8-15. - -As an example, consider the same UUID described in :ref:`example-legacy-uuid`. -Let us assume that an application used the Java driver without an explicitly -specified UUID representation to insert the example UUID -``00112233-4455-6677-8899-aabbccddeeff`` into MongoDB. If we try to read this -value using ``PYTHON_LEGACY``, we end up with an entirely different UUID:: - - UUID('77665544-3322-1100-ffee-ddccbbaa9988') - -However, if we explicitly set the representation to -:data:`~bson.binary.UuidRepresentation.JAVA_LEGACY`, we get the correct result:: - - UUID('00112233-4455-6677-8899-aabbccddeeff') - -PyMongo uses the specified UUID representation to reorder the BSON bytes and -load them correctly. ``JAVA_LEGACY`` encodes native :class:`uuid.UUID` objects -to :class:`~bson.binary.Binary` subtype 3 objects, while performing the same -byte-reordering as the legacy Java driver's UUID to BSON encoder. - -.. _csharp-legacy-representation-details: - -``CSHARP_LEGACY`` -^^^^^^^^^^^^^^^^^ - -.. attention:: This UUID representation should be used when reading UUIDs - written to MongoDB by legacy applications (i.e. applications that don't - use the ``STANDARD`` representation) using the C# driver. - -The :data:`~bson.binary.UuidRepresentation.CSHARP_LEGACY` representation -corresponds to the legacy representation of UUIDs used by the MongoDB C# -Driver. - -.. note:: The ``CSHARP_LEGACY`` representation reverses the order of bytes 0-3, - bytes 4-5, and bytes 6-7. - -As an example, consider the same UUID described in :ref:`example-legacy-uuid`. -Let us assume that an application used the C# driver without an explicitly -specified UUID representation to insert the example UUID -``00112233-4455-6677-8899-aabbccddeeff`` into MongoDB. If we try to read this -value using ``PYTHON_LEGACY``, we end up with an entirely different UUID:: - - UUID('33221100-5544-7766-8899-aabbccddeeff') - -However, if we explicitly set the representation to -:data:`~bson.binary.UuidRepresentation.CSHARP_LEGACY`, we get the correct result:: - - UUID('00112233-4455-6677-8899-aabbccddeeff') - -PyMongo uses the specified UUID representation to reorder the BSON bytes and -load them correctly. ``CSHARP_LEGACY`` encodes native :class:`uuid.UUID` -objects to :class:`~bson.binary.Binary` subtype 3 objects, while performing -the same byte-reordering as the legacy C# driver's UUID to BSON encoder. diff --git a/doc/faq.rst b/doc/faq.rst deleted file mode 100644 index 15950e7716..0000000000 --- a/doc/faq.rst +++ /dev/null @@ -1,598 +0,0 @@ -Frequently Asked Questions -========================== - -Is PyMongo thread-safe? ------------------------ - -PyMongo is thread-safe and provides built-in connection pooling -for threaded applications. - -.. _pymongo-fork-safe: - -Is PyMongo fork-safe? ---------------------- - -PyMongo is not fork-safe. Care must be taken when using instances of -:class:`~pymongo.mongo_client.MongoClient` with ``fork()``. Specifically, -instances of MongoClient must not be copied from a parent process to -a child process. Instead, the parent process and each child process must -create their own instances of MongoClient. Instances of MongoClient copied from -the parent process have a high probability of deadlock in the child process due -to the inherent incompatibilities between ``fork()``, threads, and locks -described :ref:`below `. PyMongo will attempt to -issue a warning if there is a chance of this deadlock occurring.
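- -For illustration, a minimal sketch of the safe pattern, using :py:func:`os.fork` directly (the same idea applies to :mod:`multiprocessing`; see :ref:`multiprocessing`), is to construct the client only after forking:: - - import os - - from pymongo import MongoClient - - pid = os.fork() - # Parent and child each construct their own MongoClient *after* fork(), - # so neither inherits the other's locked mutexes or monitor threads. - client = MongoClient() - if pid == 0: - # Child: do some work with its own client, then exit. - # (The database and collection names here are arbitrary.) - client.db.coll.insert_one({'pid': os.getpid()}) - os._exit(0)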
- -.. _pymongo-fork-safe-details: - -MongoClient spawns multiple threads to run background tasks such as monitoring -connected servers. These threads share state that is protected by instances of -:class:`~threading.Lock`, which are themselves `not fork-safe`_. The -driver is therefore subject to the same limitations as any other multithreaded -code that uses :class:`~threading.Lock` (and mutexes in general). One of these -limitations is that the locks become useless after ``fork()``. During the fork, -all locks are copied over to the child process in the same state as they were -in the parent: if they were locked, the copied locks are also locked. The child -created by ``fork()`` only has one thread, so any locks that were taken out by -other threads in the parent will never be released in the child. The next time -the child process attempts to acquire one of these locks, deadlock occurs. - -Starting in version 4.3, PyMongo utilizes :py:func:`os.register_at_fork` to -reset its locks and other shared state in the child process after an -:py:func:`os.fork` to reduce the frequency of deadlocks. However, deadlocks -are still possible because libraries that PyMongo depends on, like `OpenSSL`_ -and `getaddrinfo(3)`_ (on some platforms), are not fork() safe in a -multithreaded application. Linux also imposes the restriction that: - - After a `fork()`_ in a multithreaded program, the child can - safely call only async-signal-safe functions (see - `signal-safety(7)`_) until such time as it calls `execve(2)`_. - -PyMongo relies on functions that are *not* `async-signal-safe`_ and hence the -child process can experience deadlocks or crashes when attempting to call -a non-`async-signal-safe`_ function. For examples of deadlocks or crashes -that could occur, see `PYTHON-3406`_. - -For a long but interesting read about the problems of Python locks in -multithreaded contexts with ``fork()``, see http://bugs.python.org/issue6721. - -.. _not fork-safe: http://bugs.python.org/issue6721 -.. _OpenSSL: https://github.com/openssl/openssl/issues/19066 -.. _fork(): https://man7.org/linux/man-pages/man2/fork.2.html -.. _signal-safety(7): https://man7.org/linux/man-pages/man7/signal-safety.7.html -.. _async-signal-safe: https://man7.org/linux/man-pages/man7/signal-safety.7.html -.. _execve(2): https://man7.org/linux/man-pages/man2/execve.2.html -.. _getaddrinfo(3): https://man7.org/linux/man-pages/man3/gai_strerror.3.html -.. _PYTHON-3406: https://jira.mongodb.org/browse/PYTHON-3406 - -.. _connection-pooling: - -Can PyMongo help me load the results of my query as a Pandas ``DataFrame``? ---------------------------------------------------------------------------- - -While PyMongo itself does not provide any APIs for working with -numerical or columnar data, -`PyMongoArrow `_ -is a companion library to PyMongo that makes it easy to load MongoDB query result sets as -`Pandas DataFrames `_, -`NumPy ndarrays `_, or -`Apache Arrow Tables `_. - -How does connection pooling work in PyMongo? --------------------------------------------- - -Every :class:`~pymongo.mongo_client.MongoClient` instance has a built-in -connection pool per server in your MongoDB topology. These pools open sockets -on demand to support the number of concurrent MongoDB operations that your -multi-threaded application requires. There is no thread-affinity for sockets. - -The size of each connection pool is capped at ``maxPoolSize``, which defaults -to 100.
If there are ``maxPoolSize`` connections to a server and all are in -use, the next request to that server will wait until one of the connections -becomes available. - -The client instance opens two additional sockets per server in your MongoDB -topology for monitoring the server's state. - -For example, a client connected to a 3-node replica set opens 6 monitoring -sockets. It also opens as many sockets as needed to support a multi-threaded -application's concurrent operations on each server, up to ``maxPoolSize``. With -a ``maxPoolSize`` of 100, if the application only uses the primary (the -default), then only the primary connection pool grows and the total number of connections -is at most 106. If the application uses a -:class:`~pymongo.read_preferences.ReadPreference` to query the secondaries, -their pools also grow and the total number of connections can reach 306. - -Additionally, the pools are rate-limited such that each connection pool can -only create at most 2 connections in parallel at any time. The connection -creation covers all the work required to set up a new connection, -including DNS, TCP, SSL/TLS, MongoDB handshake, and MongoDB authentication. -For example, if three threads concurrently attempt to check out a connection -from an empty pool, the first two threads will begin creating new connections -while the third thread will wait. The third thread stops waiting when either: - -- one of the first two threads finishes creating a connection, or -- an existing connection is checked back into the pool. - -Rate limiting concurrent connection creation reduces the likelihood of -connection storms and improves the driver's ability to reuse existing -connections. - -It is possible to set the minimum number of concurrent connections to each -server with ``minPoolSize``, which defaults to 0. The connection pool will be -initialized with this number of sockets. If sockets are closed due to any -network errors, causing the total number of sockets (both in use and idle) to -drop below the minimum, more sockets are opened until the minimum is reached. - -The maximum number of milliseconds that a connection can remain idle in the -pool before being removed and replaced can be set with ``maxIdleTimeMS``, which -defaults to ``None`` (no limit). - -The default configuration for a :class:`~pymongo.mongo_client.MongoClient` -works for most applications:: - - client = MongoClient(host, port) - -Create this client **once** for each process, and reuse it for all -operations. It is a common mistake to create a new client for each request, -which is very inefficient. - -To support extremely high numbers of concurrent MongoDB operations within one -process, increase ``maxPoolSize``:: - - client = MongoClient(host, port, maxPoolSize=200) - -... or make it unbounded:: - - client = MongoClient(host, port, maxPoolSize=None) - -Once the pool reaches its maximum size, additional threads have to wait for -sockets to become available. PyMongo does not limit the number of threads -that can wait for sockets to become available, and it is the application's -responsibility to limit the size of its thread pool to bound queuing during a -load spike. Threads are allowed to wait for any length of time unless -``waitQueueTimeoutMS`` is defined:: - - client = MongoClient(host, port, waitQueueTimeoutMS=100) - -A thread that waits more than 100ms (in this example) for a socket raises -:exc:`~pymongo.errors.ConnectionFailure`.
Use this option if it is more -important to bound the duration of operations during a load spike than it is to -complete every operation. - -When :meth:`~pymongo.mongo_client.MongoClient.close` is called by any thread, -all idle sockets are closed, and all sockets that are in use will be closed as -they are returned to the pool. - -Does PyMongo support Python 3? ------------------------------- - -PyMongo supports CPython 3.9+ and PyPy3.9+. See :doc:`python3` for details. - -Does PyMongo support asynchronous frameworks like Gevent, asyncio, Tornado, or Twisted? ---------------------------------------------------------------------------------------- - -PyMongo fully supports :doc:`Gevent `. - -To use MongoDB with `asyncio `_ -or `Tornado `_, see the -`Motor `_ project. - -For `Twisted `_, see `TxMongo -`_. Its stated mission is to keep feature -parity with PyMongo. - -.. _writes-and-ids: - -Why does PyMongo add an _id field to all of my documents? ---------------------------------------------------------- - -When a document is inserted into MongoDB using -:meth:`~pymongo.collection.Collection.insert_one`, -:meth:`~pymongo.collection.Collection.insert_many`, or -:meth:`~pymongo.collection.Collection.bulk_write`, and that document does not -include an ``_id`` field, PyMongo automatically adds one for you, set to an -instance of :class:`~bson.objectid.ObjectId`. For example:: - - >>> my_doc = {'x': 1} - >>> collection.insert_one(my_doc) - InsertOneResult(ObjectId('560db337fba522189f171720'), acknowledged=True) - >>> my_doc - {'x': 1, '_id': ObjectId('560db337fba522189f171720')} - -Users often discover this behavior when a call to -:meth:`~pymongo.collection.Collection.insert_many` with a list of references -to a single document raises :exc:`~pymongo.errors.BulkWriteError`. Several -Python idioms lead to this pitfall:: - - >>> doc = {} - >>> collection.insert_many(doc for _ in range(10)) - Traceback (most recent call last): - ... - pymongo.errors.BulkWriteError: batch op errors occurred - >>> doc - {'_id': ObjectId('560f171cfba52279f0b0da0c')} - - >>> docs = [{}] - >>> collection.insert_many(docs * 10) - Traceback (most recent call last): - ... - pymongo.errors.BulkWriteError: batch op errors occurred - >>> docs - [{'_id': ObjectId('560f1933fba52279f0b0da0e')}] - -PyMongo adds an ``_id`` field in this manner for a few reasons: - -- All MongoDB documents are required to have an ``_id`` field. -- If PyMongo were to insert a document without an ``_id``, MongoDB would add one - itself, but it would not report the value back to PyMongo. -- Copying the document to insert before adding the ``_id`` field would be - prohibitively expensive for most high write volume applications. - -If you don't want PyMongo to add an ``_id`` to your documents, insert only -documents that already have an ``_id`` field, added by your application. - -Key order in subdocuments -- why does my query work in the shell but not PyMongo? ---------------------------------------------------------------------------------- - -.. - Note: We should rework this section now that Python 3.6+ has ordered dict. - -.. testsetup:: key-order - - from bson.son import SON - from pymongo.mongo_client import MongoClient - - collection = MongoClient().test.collection - collection.drop() - collection.insert_one({"_id": 1.0, "subdocument": SON([("b", 1.0), ("a", 1.0)])}) - -The key-value pairs in a BSON document can have any order (except that ``_id`` -is always first). The mongo shell preserves key order when reading and writing -data.
Observe that "b" comes before "a" when we create the document and when it -is displayed: - -.. code-block:: javascript - - > // mongo shell. - > db.collection.insertOne( { "_id" : 1, "subdocument" : { "b" : 1, "a" : 1 } } ) - WriteResult({ "nInserted" : 1 }) - > db.collection.findOne() - { "_id" : 1, "subdocument" : { "b" : 1, "a" : 1 } } - -PyMongo represents BSON documents as Python dicts by default, and the order -of keys in dicts is not defined. That is, a dict declared with the "a" key -first is the same, to Python, as one with "b" first: - - >>> print({'a': 1.0, 'b': 1.0}) - {'a': 1.0, 'b': 1.0} - >>> print({'b': 1.0, 'a': 1.0}) - {'a': 1.0, 'b': 1.0} - -Therefore, Python dicts are not guaranteed to show keys in the order they are -stored in BSON. Here, "a" is shown before "b": - - >>> print(collection.find_one()) - {'_id': 1.0, 'subdocument': {'a': 1.0, 'b': 1.0}} - -To preserve order when reading BSON, use the :class:`~bson.son.SON` class, -which is a dict that remembers its key order. First, get a handle to the -collection, configured to use :class:`~bson.son.SON` instead of dict: - -.. doctest:: key-order - :options: +NORMALIZE_WHITESPACE - - >>> from bson import CodecOptions, SON - >>> opts = CodecOptions(document_class=SON) - >>> opts - CodecOptions(document_class=...SON..., tz_aware=False, uuid_representation=UuidRepresentation.UNSPECIFIED, unicode_decode_error_handler='strict', tzinfo=None, type_registry=TypeRegistry(type_codecs=[], fallback_encoder=None), datetime_conversion=DatetimeConversion.DATETIME) - >>> collection_son = collection.with_options(codec_options=opts) - -Now, documents and subdocuments in query results are represented with -:class:`~bson.son.SON` objects: - -.. doctest:: key-order - - >>> print(collection_son.find_one()) - SON([('_id', 1.0), ('subdocument', SON([('b', 1.0), ('a', 1.0)]))]) - -The subdocument's actual storage layout is now visible: "b" is before "a". - -Because a dict's key order is not defined, you cannot predict how it will be -serialized **to** BSON. But MongoDB considers subdocuments equal only if their -keys have the same order. So if you use a dict to query on a subdocument it may -not match: - - >>> collection.find_one({'subdocument': {'a': 1.0, 'b': 1.0}}) is None - True - -Swapping the key order in your query makes no difference: - - >>> collection.find_one({'subdocument': {'b': 1.0, 'a': 1.0}}) is None - True - -... because, as we saw above, Python considers the two dicts the same. - -There are two solutions. First, you can match the subdocument field-by-field: - - >>> collection.find_one({'subdocument.a': 1.0, - ... 'subdocument.b': 1.0}) - {'_id': 1.0, 'subdocument': {'a': 1.0, 'b': 1.0}} - -The query matches any subdocument with an "a" of 1.0 and a "b" of 1.0, -regardless of the order you specify them in Python or the order they are stored -in BSON. Additionally, this query now matches subdocuments with additional -keys besides "a" and "b", whereas the previous query required an exact match. - -The second solution is to use a :class:`~bson.son.SON` to specify the key order: - - >>> query = {'subdocument': SON([('b', 1.0), ('a', 1.0)])} - >>> collection.find_one(query) - {'_id': 1.0, 'subdocument': {'a': 1.0, 'b': 1.0}} - -The key order you use when you create a :class:`~bson.son.SON` is preserved -when it is serialized to BSON and used as a query. Thus you can create a -subdocument that exactly matches the subdocument in the collection. - -.. seealso:: `MongoDB Manual entry on subdocument matching - `_. 
- -What does *CursorNotFound* cursor id not valid at server mean? --------------------------------------------------------------- -Cursors in MongoDB can time out on the server if they've been open for -a long time without any operations being performed on them. This can -lead to an :class:`~pymongo.errors.CursorNotFound` exception being -raised when attempting to iterate the cursor. - -How do I change the timeout value for cursors? ----------------------------------------------- -MongoDB doesn't support custom timeouts for cursors, but cursor -timeouts can be turned off entirely. Pass ``no_cursor_timeout=True`` to -:meth:`~pymongo.collection.Collection.find`. - -How can I store :mod:`decimal.Decimal` instances? ------------------------------------------------- - -PyMongo >= 3.4 supports the Decimal128 BSON type introduced in MongoDB 3.4. -See :mod:`~bson.decimal128` for more information. - -MongoDB <= 3.2 only supports IEEE 754 floating points - the same as the -Python float type. The only way PyMongo could store Decimal instances to -these versions of MongoDB would be to convert them to this standard, so -you'd really only be storing floats anyway - we force users to do this -conversion explicitly so that they are aware that it is happening. - -I'm saving ``9.99`` but when I query my document contains ``9.9900000000000002`` - what's going on here? --------------------------------------------------------------------------------------------------------- -The database representation is ``9.99`` as an IEEE floating point (which -is common to MongoDB and Python as well as most other modern -languages). The problem is that ``9.99`` cannot be represented exactly -with a double precision floating point - this is true in some versions of -Python as well: - - >>> 9.99 - 9.9900000000000002 - -The result that you get when you save ``9.99`` with PyMongo is exactly the -same as the result you'd get saving it with the JavaScript shell or -any of the other languages (and as the data you're working with when -you type ``9.99`` into a Python program). - -Can you add attribute-style access for documents? ------------------------------------------------- -This request has come up a number of times but we've decided not to -implement anything like this. The relevant `jira case -`_ has some information -about the decision, but here is a brief summary: - -1. This will pollute the attribute namespace for documents, so could - lead to subtle bugs / confusing errors when using a key with the - same name as a dictionary method. - -2. The only reason we even use SON objects instead of regular - dictionaries is to maintain key ordering, since the server - requires this for certain operations. So we're hesitant to - needlessly complicate SON (at some point it's hypothetically - possible we might want to revert back to using dictionaries alone, - without breaking backwards compatibility for everyone). - -3. It's easy (and Pythonic) for new users to deal with documents, - since they behave just like dictionaries. If we start changing - their behavior, it adds a barrier to entry for new users - another - class to learn. - -What is the correct way to handle time zones with PyMongo? ----------------------------------------------------------- - -See :doc:`examples/datetimes` for examples on how to handle -:class:`~datetime.datetime` objects correctly. - -How can I save a :mod:`datetime.date` instance?
----------------------------------------------- -PyMongo doesn't support saving :mod:`datetime.date` instances, since -there is no BSON type for dates without times. Rather than having the -driver enforce a convention for converting :mod:`datetime.date` -instances to :mod:`datetime.datetime` instances for you, any -conversion should be performed in your client code. - -.. _web-application-querying-by-objectid: - -When I query for a document by ObjectId in my web application I get no result ------------------------------------------------------------------------------ -It's common in web applications to encode documents' ObjectIds in URLs, like:: - - "/posts/50b3bda58a02fb9a84d8991e" - -Your web framework will pass the ObjectId portion of the URL to your request -handler as a string, so it must be converted to :class:`~bson.objectid.ObjectId` -before it is passed to :meth:`~pymongo.collection.Collection.find_one`. It is a -common mistake to forget to do this conversion. Here's how to do it correctly -in Flask_ (other web frameworks are similar):: - - from pymongo import MongoClient - from bson.objectid import ObjectId - - from flask import Flask, render_template - - client = MongoClient() - app = Flask(__name__) - - @app.route("/posts/<_id>") - def show_post(_id): - # NOTE!: converting _id from string to ObjectId before passing to find_one - post = client.db.posts.find_one({'_id': ObjectId(_id)}) - return render_template('post.html', post=post) - - if __name__ == "__main__": - app.run() - -.. _Flask: http://flask.pocoo.org/ - -.. seealso:: :ref:`querying-by-objectid` - -How can I use PyMongo from Django? ----------------------------------- -`Django `_ is a popular Python web -framework. Django includes an ORM, :mod:`django.db`. Currently, -there's no official MongoDB backend for Django. - -`django-mongodb-engine `_ -is an unofficial MongoDB backend that supports Django aggregations, (atomic) -updates, embedded objects, Map/Reduce and GridFS. It allows you to use most -of Django's built-in features, including the ORM, admin, authentication, site -and session frameworks and caching. - -However, it's easy to use MongoDB (and PyMongo) from Django -without using a Django backend. Certain features of Django that require -:mod:`django.db` (admin, authentication and sessions) will not work -using just MongoDB, but most of what Django provides can still be -used. - -One project which should make working with MongoDB and Django easier -is `mango `_. Mango is a set of -MongoDB backends for Django sessions and authentication (bypassing -:mod:`django.db` entirely). - -.. _using-with-mod-wsgi: - -Does PyMongo work with **mod_wsgi**? ------------------------------------- -Yes. See the configuration guide for :ref:`pymongo-and-mod_wsgi`. - -Does PyMongo work with PythonAnywhere? --------------------------------------- -No. PyMongo creates Python threads which -`PythonAnywhere `_ does not support. For more -information, see `PYTHON-1495 `_. - -How can I use something like Python's ``json`` module to encode my documents to JSON? -------------------------------------------------------------------------------------- -:mod:`~bson.json_util` is PyMongo's built-in, flexible tool for using -Python's :mod:`json` module with BSON documents and `MongoDB Extended JSON -`_. The -:mod:`json` module won't work out of the box with all documents from PyMongo, -as PyMongo supports some special types (like :class:`~bson.objectid.ObjectId` -and :class:`~bson.dbref.DBRef`) that are not supported in JSON.
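- -For example, a minimal sketch (the document and field names are arbitrary) that round-trips an :class:`~bson.objectid.ObjectId` through MongoDB Extended JSON with :mod:`~bson.json_util`:: - - from bson import ObjectId - from bson.json_util import dumps, loads - - doc = {'_id': ObjectId()} - json_str = dumps(doc) # e.g. '{"_id": {"$oid": "..."}}' - assert loads(json_str) == doc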
- -`python-bsonjs `_ is a fast -BSON to MongoDB Extended JSON converter built on top of -`libbson `_. ``python-bsonjs`` does not -depend on PyMongo and can offer a nice performance improvement over -:mod:`~bson.json_util`. ``python-bsonjs`` works best with PyMongo when using -:class:`~bson.raw_bson.RawBSONDocument`. - -Why do I get OverflowError decoding dates stored by another language's driver? ------------------------------------------------------------------------------- -PyMongo decodes BSON datetime values to instances of Python's -:class:`datetime.datetime`. Instances of :class:`datetime.datetime` are -limited to years between :data:`datetime.MINYEAR` (usually 1) and -:data:`datetime.MAXYEAR` (usually 9999). Some MongoDB drivers (e.g. the PHP -driver) can store BSON datetimes with year values far outside those supported -by :class:`datetime.datetime`. - -There are a few ways to work around this issue. Starting with PyMongo 4.3, -:func:`bson.decode` can decode BSON datetimes in one of four ways, and can -be specified using the ``datetime_conversion`` parameter of -:class:`~bson.codec_options.CodecOptions`. - -The default option is -:attr:`~bson.codec_options.DatetimeConversion.DATETIME`, which will -attempt to decode as a :class:`datetime.datetime`, allowing -:exc:`OverflowError` to occur upon out-of-range dates. -:attr:`~bson.codec_options.DatetimeConversion.DATETIME_AUTO` alters -this behavior to instead return :class:`~bson.datetime_ms.DatetimeMS` when -representations are out-of-range, while returning :class:`~datetime.datetime` -objects as before: - -.. doctest:: - - >>> from datetime import datetime - >>> from bson.datetime_ms import DatetimeMS - >>> from bson.codec_options import DatetimeConversion - >>> from pymongo import MongoClient - >>> client = MongoClient(datetime_conversion=DatetimeConversion.DATETIME_AUTO) - >>> client.db.collection.insert_one({"x": datetime(1970, 1, 1)}) - InsertOneResult(ObjectId('...'), acknowledged=True) - >>> client.db.collection.insert_one({"x": DatetimeMS(2**62)}) - InsertOneResult(ObjectId('...'), acknowledged=True) - >>> for x in client.db.collection.find(): - ... print(x) - ... - {'_id': ObjectId('...'), 'x': datetime.datetime(1970, 1, 1, 0, 0)} - {'_id': ObjectId('...'), 'x': DatetimeMS(4611686018427387904)} - -For other options, please refer to -:class:`~bson.codec_options.DatetimeConversion`. - -Another option that does not involve setting ``datetime_conversion`` is to -filter out documents with values outside of the range supported by -:class:`~datetime.datetime`: - - >>> from datetime import datetime - >>> coll = client.test.dates - >>> cur = coll.find({'dt': {'$gte': datetime.min, '$lte': datetime.max}}) - -Another option, assuming you don't need the datetime field, is to filter out -just that field:: - - >>> cur = coll.find({}, projection={'dt': False}) - -.. _multiprocessing: - -Using PyMongo with Multiprocessing ----------------------------------- - -On Unix systems, the multiprocessing module spawns processes using ``fork()``. -Care must be taken when using instances of -:class:`~pymongo.mongo_client.MongoClient` with ``fork()``. Specifically, -instances of MongoClient must not be copied from a parent process to a child -process. Instead, the parent process and each child process must create their -own instances of MongoClient. For example:: - - import multiprocessing - - import pymongo - - # Each process creates its own instance of MongoClient. - def func(): - db = pymongo.MongoClient().mydb - # Do something with db.
- - proc = multiprocessing.Process(target=func) - proc.start() - -**Never do this**:: - - client = pymongo.MongoClient() - - # Each child process attempts to copy a global MongoClient - # created in the parent process. Never do this. - def func(): - db = client.mydb - # Do something with db. - - proc = multiprocessing.Process(target=func) - proc.start() - -Instances of MongoClient copied from the parent process have a high probability -of deadlock in the child process due to -:ref:`inherent incompatibilities between fork(), threads, and locks -`. PyMongo will attempt to issue a warning if there -is a chance of this deadlock occurring. - -.. seealso:: :ref:`pymongo-fork-safe` diff --git a/doc/index.rst b/doc/index.rst index 0ac8bdec6e..85812d1b14 100644 --- a/doc/index.rst +++ b/doc/index.rst @@ -3,70 +3,21 @@ PyMongo |release| Documentation .. note:: The PyMongo documentation has been migrated to the `MongoDB Documentation site `_. - As of PyMongo 4.10, the ReadTheDocs site will contain the detailed changelog and API docs, while the - rest of the documentation will only appear on the MongoDB Documentation site. + This site contains only the detailed changelog and API docs, while the + rest of the documentation appears on the MongoDB Documentation site. Overview -------- **PyMongo** is a Python distribution containing tools for working with -`MongoDB `_, and is the recommended way to -work with MongoDB from Python. This documentation attempts to explain -everything you need to know to use **PyMongo**. - -.. todo:: a list of PyMongo's features - -:doc:`installation` - Instructions on how to get the distribution. - -:doc:`tutorial` - Start here for a quick overview. - -:doc:`async-tutorial` - Start here for a quick overview of the asynchronous API. - -:doc:`examples/index` - Examples of how to perform specific tasks. - -:doc:`atlas` - Using PyMongo with MongoDB Atlas. - -:doc:`examples/tls` - Using PyMongo with TLS / SSL. - -:doc:`examples/encryption` - Using PyMongo with In-Use Encryption. - -:doc:`examples/type_hints` - Using PyMongo with type hints. - -:doc:`examples/logging` - Using PyMongo's logging capabilities. - -:doc:`faq` - Some questions that come up often. - -:doc:`migrate-to-pymongo4` - A PyMongo 3.x to 4.x migration guide. - -:doc:`python3` - Frequently asked questions about python 3 support. - -:doc:`compatibility-policy` - Explanation of deprecations, and how to keep pace with changes in PyMongo's - API. +`MongoDB `_, and is the recommended way to +work with MongoDB from Python. :doc:`api/index` The complete API documentation, organized by module. -:doc:`tools` - A listing of Python tools and libraries that have been written for - MongoDB. - -:doc:`developer/index` - Developer guide for contributors to PyMongo. +:doc:`changelog` + A full list of changes to PyMongo. -:doc:`common-issues` - Common issues encountered when using PyMongo. Getting Help ------------ @@ -81,7 +32,7 @@ Issues ------ All issues should be reported (and can be tracked / voted for / commented on) at the main `MongoDB JIRA bug tracker -`_, in the "Python Driver" +`_, in the "Python Driver" project. Feature Requests / Feedback @@ -94,25 +45,16 @@ Contributing **PyMongo** has a large :doc:`community ` and contributions are always encouraged. Contributions can be as simple as minor tweaks to this documentation. To contribute, fork the project on -`GitHub `_ and send a +`GitHub `_ and send a pull request. -Changes -------- -See the :doc:`changelog` for a full list of changes to PyMongo. 
- -About This Documentation ------------------------ This documentation is generated using the `Sphinx `_ documentation generator. The source files for the documentation are located in the *doc/* directory of the -**PyMongo** distribution. To generate the docs locally run the -following command from the root directory of the **PyMongo** source: - -.. code-block:: bash - - $ pip install hatch - $ hatch run doc:build +**PyMongo** distribution. See the PyMongo `contributing guide `_ +for instructions on building the docs from source. Indices and tables ------------------ @@ -124,18 +66,6 @@ Indices and tables .. toctree:: :hidden: - atlas - installation - tutorial - async-tutorial - examples/index - faq - compatibility-policy api/index - tools - contributors changelog - python3 - migrate-to-pymongo4 - developer/index - common-issues + contributors diff --git a/doc/installation.rst b/doc/installation.rst deleted file mode 100644 index f21a3792ad..0000000000 --- a/doc/installation.rst +++ /dev/null @@ -1,197 +0,0 @@ -Installing / Upgrading ====================== -.. highlight:: bash - -**PyMongo** is in the `Python Package Index -`_. - -.. warning:: **Do not install the "bson" package from pypi.** PyMongo comes - with its own bson package; doing "pip install bson" - installs a third-party package that is incompatible with PyMongo. - -Installing with pip ------------------- - -We recommend using `pip `_ -to install pymongo on all platforms:: - - $ python3 -m pip install pymongo - -To get a specific version of pymongo:: - - $ python3 -m pip install pymongo==3.5.1 - -To upgrade using pip:: - - $ python3 -m pip install --upgrade pymongo - -Dependencies ------------ - -PyMongo supports CPython 3.9+ and PyPy3.9+. - -Required dependencies ..................... - -Support for mongodb+srv:// URIs requires `dnspython -`_. - -.. _optional-deps: - -Optional dependencies ..................... - -GSSAPI authentication requires `pykerberos -`_ on Unix or `WinKerberos -`_ on Windows. The correct -dependency can be installed automatically along with PyMongo:: - - $ python3 -m pip install "pymongo[gssapi]" - -:ref:`MONGODB-AWS` authentication requires `pymongo-auth-aws -`_:: - - $ python3 -m pip install "pymongo[aws]" - - - -:ref:`OCSP` requires `PyOpenSSL -`_, `requests -`_ and `service_identity -`_:: - - $ python3 -m pip install "pymongo[ocsp]" - -Wire protocol compression with snappy requires `python-snappy -`_:: - - $ python3 -m pip install "pymongo[snappy]" - -Wire protocol compression with zstandard requires `zstandard -`_:: - - $ python3 -m pip install "pymongo[zstd]" - -:ref:`Client-Side Field Level Encryption` requires `pymongocrypt -`_ and -`pymongo-auth-aws `_:: - - $ python3 -m pip install "pymongo[encryption]" - -You can install all dependencies automatically with the following -command:: - - $ python3 -m pip install "pymongo[gssapi,aws,ocsp,snappy,zstd,encryption]" - -Installing from source ---------------------- - -If you'd rather install directly from the source (i.e. to stay on the -bleeding edge), install the C extension dependencies, then check out the -latest source from GitHub and install the driver from the resulting tree:: - - $ git clone https://github.com/mongodb/mongo-python-driver.git pymongo - $ cd pymongo/ - $ pip install . - -Installing from source on Unix .............................. - -To build the optional C extensions on Linux or another non-macOS Unix, you must -have the GNU C compiler (gcc) installed.
Depending on your flavor of Unix -(or Linux distribution), you may also need a Python development package that -provides the necessary header files for your version of Python. The package -name may vary from distro to distro. - -Debian and Ubuntu users should issue the following command:: - - $ sudo apt-get install build-essential python-dev - -Users of Red Hat-based distributions (RHEL, CentOS, Amazon Linux, Oracle Linux, -Fedora, etc.) should issue the following command:: - - $ sudo yum install gcc python-devel - -Installing from source on macOS / OSX ..................................... - -If you want to install PyMongo with C extensions from source, you will need -the command-line developer tools. On modern versions of macOS, they can be -installed by running the following in Terminal (found in -/Applications/Utilities/):: - - xcode-select --install - -For older versions of OSX, you may need Xcode. See the notes below for various -OSX and Xcode versions. - -**Snow Leopard (10.6)** - Xcode 3 with 'UNIX Development Support'. - -**Snow Leopard Xcode 4**: The Python versions shipped with OSX 10.6.x -are universal binaries. They support i386, PPC, and x86_64. Xcode 4 removed -support for PPC, causing the distutils version shipped with Apple's builds of -Python to fail to build the C extensions if you have Xcode 4 installed. There -is a workaround:: - - # For some Python builds from python.org - $ env ARCHFLAGS='-arch i386 -arch x86_64' python -m pip install pymongo - -See `http://bugs.python.org/issue11623 `_ -for a more detailed explanation. - -**Lion (10.7) and newer** - PyMongo's C extensions can be built against -versions of Python 3.9+ downloaded from python.org. In all cases Xcode must be -installed with 'UNIX Development Support'. - -**Xcode 5.1**: Starting with version 5.1, the version of clang that ships with -Xcode throws an error when it encounters compiler flags it doesn't recognize. -This may cause C extension builds to fail with an error similar to:: - - clang: error: unknown argument: '-mno-fused-madd' [-Wunused-command-line-argument-hard-error-in-future] - -There are workarounds:: - - # Apple specified workaround for Xcode 5.1 - $ ARCHFLAGS=-Wno-error=unused-command-line-argument-hard-error-in-future pip install pymongo - - # Alternative workaround using CFLAGS - $ CFLAGS=-Qunused-arguments pip install pymongo - - -Installing from source on Windows ................................. - -If you want to install PyMongo with C extensions from source, the following -requirements apply to both CPython and ActiveState's ActivePython: - -Windows -~~~~~~~ - -Install Visual Studio 2015+. - -.. _install-no-c: - -Installing Without C Extensions ------------------------------- - -By default, the driver attempts to build and install optional C -extensions (used for increasing performance) when it is installed. If -any extension fails to build, the driver will be installed anyway, but a -warning will be printed. - -If you wish to install PyMongo without the C extensions, even if the -extensions build properly, it can be done using a command-line option to -*pip install*:: - - $ NO_EXT=1 python -m pip install . - -Installing a beta or release candidate -------------------------------------- - -MongoDB, Inc. may occasionally tag a beta or release candidate for testing by -the community before final release. These releases will not be uploaded to PyPI -but can be found on the -`GitHub tags page `_.
-They can be installed by passing the full URL for the tag to pip:: - - $ python3 -m pip install https://github.com/mongodb/mongo-python-driver/archive/4.4.0b0.tar.gz diff --git a/doc/make.bat b/doc/make.bat index 2119f51099..aa1adb91a6 100644 --- a/doc/make.bat +++ b/doc/make.bat @@ -21,7 +21,7 @@ if errorlevel 9009 ( echo.may add the Sphinx directory to PATH. echo. echo.If you don't have Sphinx installed, grab it from - echo.http://sphinx-doc.org/ + echo.https://sphinx-doc.org/ exit /b 1 ) diff --git a/doc/migrate-to-pymongo4.rst b/doc/migrate-to-pymongo4.rst index 3e992a8249..fda3e2e129 100644 --- a/doc/migrate-to-pymongo4.rst +++ b/doc/migrate-to-pymongo4.rst @@ -1,3 +1,5 @@ +:orphan: + .. _pymongo4-migration-guide: PyMongo 4 Migration Guide @@ -34,7 +36,7 @@ Python 3.6+ PyMongo 4.0 drops support for Python 2.7, 3.4, and 3.5. Users who wish to upgrade to 4.x must first upgrade to Python 3.6.2+. Users upgrading from -Python 2 should consult the :doc:`python3`. +Python 2 should consult `Python 3 `_. Enable Deprecation Warnings --------------------------- @@ -796,8 +798,7 @@ incoming documents after receiving them from PyMongo. Alternatively, if your application uses the ``SONManipulator`` API to convert custom types to BSON, the :class:`~bson.codec_options.TypeCodec` and :class:`~bson.codec_options.TypeRegistry` APIs may be a suitable alternative. -For more information, see the -:doc:`custom type example `. +For more information, see `Custom Types `_. ``SON().items()`` now returns ``dict_items`` object. ---------------------------------------------------- @@ -837,7 +838,6 @@ pymongo.GEOHAYSTACK is removed Removed :attr:`pymongo.GEOHAYSTACK`. Replace with "geoHaystack" or create a 2d index and use $geoNear or $geoWithin instead. -See https://dochub.mongodb.org/core/4.4-deprecate-geoHaystack. UUIDLegacy is removed --------------------- @@ -983,7 +983,7 @@ you will receive an error like this when attempting to encode a :class:`uuid.UUI ValueError: cannot encode native uuid.UUID with UuidRepresentation.UNSPECIFIED. UUIDs can be manually converted... -See :ref:`handling-uuid-data-example` for details. +See `Handling UUIDs `_ for details. Additional BSON classes implement ``__slots__`` ............................................... diff --git a/doc/python3.rst b/doc/python3.rst deleted file mode 100644 index 1ea43b3ccb..0000000000 --- a/doc/python3.rst +++ /dev/null @@ -1,114 +0,0 @@ -Python 3 FAQ -============ - -What Python 3 versions are supported? -------------------------------------- - -PyMongo supports CPython 3.9+ and PyPy3.9+. - -Are there any PyMongo behavior changes with Python 3? ------------------------------------------------------ - -Only one intentional change. Instances of :class:`bytes` -are encoded as BSON type 5 (Binary data) with subtype 0. -In Python 3 they are decoded back to :class:`bytes`. In -Python 2 they are decoded to :class:`~bson.binary.Binary` -with subtype 0. - -For example, let's insert a :class:`bytes` instance using Python 3 then -read it back. Notice the byte string is decoded back to :class:`bytes`:: - - Python 3.7.9 (v3.7.9:13c94747c7, Aug 15 2020, 01:31:08) - [Clang 6.0 (clang-600.0.57)] on darwin - Type "help", "copyright", "credits" or "license" for more information. 
- >>> import pymongo - >>> c = pymongo.MongoClient() - >>> c.test.bintest.insert_one({'binary': b'this is a byte string'}).inserted_id - ObjectId('4f9086b1fba5222021000000') - >>> c.test.bintest.find_one() - {'binary': b'this is a byte string', '_id': ObjectId('4f9086b1fba5222021000000')} - -Now retrieve the same document in Python 2. Notice the byte string is decoded -to :class:`~bson.binary.Binary`:: - - Python 2.7.6 (default, Feb 26 2014, 10:36:22) - [GCC 4.7.3] on linux2 - Type "help", "copyright", "credits" or "license" for more information. - >>> import pymongo - >>> c = pymongo.MongoClient() - >>> c.test.bintest.find_one() - {u'binary': Binary('this is a byte string', 0), u'_id': ObjectId('4f9086b1fba5222021000000')} - - -There is a similar change in behavior in parsing JSON binary with subtype 0. -In Python 3 they are decoded into :class:`bytes`. In Python 2 they are -decoded to :class:`~bson.binary.Binary` with subtype 0. - -For example, let's decode a JSON binary subtype 0 using Python 3. Notice the -byte string is decoded to :class:`bytes`:: - - Python 3.7.9 (v3.7.9:13c94747c7, Aug 15 2020, 01:31:08) - [Clang 6.0 (clang-600.0.57)] on darwin - Type "help", "copyright", "credits" or "license" for more information. - >>> from bson.json_util import loads - >>> loads('{"b": {"$binary": "dGhpcyBpcyBhIGJ5dGUgc3RyaW5n", "$type": "00"}}') - {'b': b'this is a byte string'} - -Now decode the same JSON in Python 2 . Notice the byte string is decoded -to :class:`~bson.binary.Binary`:: - - Python 2.7.10 (default, Feb 7 2017, 00:08:15) - [GCC 4.2.1 Compatible Apple LLVM 8.0.0 (clang-800.0.34)] on darwin - Type "help", "copyright", "credits" or "license" for more information. - >>> from bson.json_util import loads - >>> loads('{"b": {"$binary": "dGhpcyBpcyBhIGJ5dGUgc3RyaW5n", "$type": "00"}}') - {u'b': Binary('this is a byte string', 0)} - -Why can't I share pickled ObjectIds between some versions of Python 2 and 3? ----------------------------------------------------------------------------- - -Instances of :class:`~bson.objectid.ObjectId` pickled using Python 2 -can always be unpickled using Python 3. - -If you pickled an ObjectId using Python 2 and want to unpickle it using -Python 3 you must pass ``encoding='latin-1'`` to pickle.loads:: - - Python 2.7.6 (default, Feb 26 2014, 10:36:22) - [GCC 4.7.3] on linux2 - Type "help", "copyright", "credits" or "license" for more information. - >>> import pickle - >>> from bson.objectid import ObjectId - >>> oid = ObjectId() - >>> oid - ObjectId('4f919ba2fba5225b84000000') - >>> pickle.dumps(oid) - 'ccopy_reg\n_reconstructor\np0\n(cbson.objectid\...' - - Python 3.7.9 (v3.7.9:13c94747c7, Aug 15 2020, 01:31:08) - [Clang 6.0 (clang-600.0.57)] on darwin - Type "help", "copyright", "credits" or "license" for more information. - >>> import pickle - >>> pickle.loads(b'ccopy_reg\n_reconstructor\np0\n(cbson.objectid\...', encoding='latin-1') - ObjectId('4f919ba2fba5225b84000000') - - -If you need to pickle ObjectIds using Python 3 and unpickle them using Python 2 -you must use ``protocol <= 2``:: - - Python 3.7.9 (v3.7.9:13c94747c7, Aug 15 2020, 01:31:08) - [Clang 6.0 (clang-600.0.57)] on darwin - Type "help", "copyright", "credits" or "license" for more information. - >>> import pickle - >>> from bson.objectid import ObjectId - >>> oid = ObjectId() - >>> oid - ObjectId('4f96f20c430ee6bd06000000') - >>> pickle.dumps(oid, protocol=2) - b'\x80\x02cbson.objectid\nObjectId\nq\x00)\x81q\x01c_codecs\nencode\...' 
- - Python 2.7.15 (default, Jun 21 2018, 15:00:48) - [GCC 7.3.0] on linux2 - Type "help", "copyright", "credits" or "license" for more information. - >>> import pickle - >>> pickle.loads('\x80\x02cbson.objectid\nObjectId\nq\x00)\x81q\x01c_codecs\nencode\...') - ObjectId('4f96f20c430ee6bd06000000') diff --git a/doc/tools.rst b/doc/tools.rst deleted file mode 100644 index 6dd0df8a4d..0000000000 --- a/doc/tools.rst +++ /dev/null @@ -1,164 +0,0 @@ -Tools -===== -Many tools have been written for working with **PyMongo**. If you know -of or have created a tool for working with MongoDB from Python please -list it here. - -.. note:: We try to keep this list current. As such, projects that - have not been updated recently or appear to be unmaintained will - occasionally be removed from the list or moved to the back (to keep - the list from becoming too intimidating). - - If a project gets removed that is still being developed or is in active use - please let us know or add it back. - -ORM-like Layers ---------------- -Some people have found that they prefer to work with a layer that -has more features than PyMongo provides. Often, things like models and -validation are desired. To that end, several different ORM-like layers -have been written by various authors. - -It is our recommendation that new users begin by working directly with -PyMongo, as described in the rest of this documentation. Many people -have found that the features of PyMongo are enough for their -needs. Even if you eventually come to the decision to use one of these -layers, the time spent working directly with the driver will have -increased your understanding of how MongoDB actually works. - -MongoEngine - `MongoEngine `_ is another ORM-like - layer on top of PyMongo. It allows you to define schemas for - documents and query collections using syntax inspired by the Django - ORM. The code is available on `GitHub - `_; for more information, see - the `tutorial `_. - -MincePy - `MincePy `_ is an - object-document mapper (ODM) designed to make any Python object storable - and queryable in a MongoDB database. It is designed with machine learning - and big-data computational and experimental science applications in mind - but is entirely general and can be useful to anyone looking to organise, - share, or process large amounts data with as little change to their current - workflow as possible. - -Ming - `Ming `_ is a - library that allows you to enforce schemas on a MongoDB database in - your Python application. It was developed by `SourceForge - `_ in the course of their migration to - MongoDB. See the `introductory blog post - `_ - for more details. - -MotorEngine - `MotorEngine `_ is a port of - MongoEngine to Motor, for asynchronous access with Tornado. - It implements the same modeling APIs to be data-portable, meaning that a - model defined in MongoEngine can be read in MotorEngine. The source is - `available on GitHub `_. - -uMongo - `uMongo `_ is a Python MongoDB ODM. - Its inception comes from two needs: the lack of async ODM and the - difficulty to do document (un)serialization with existing ODMs. - Works with multiple drivers: PyMongo, TxMongo, motor_asyncio, and - mongomock. The source `is available on GitHub - `_ - -No longer maintained -"""""""""""""""""""" - -PyMODM - `PyMODM `_ is an ORM-like framework on top - of PyMongo. PyMODM is maintained by engineers at MongoDB, Inc. and is quick - to adopt new MongoDB features. 
PyMODM is a "core" ODM, meaning that it - provides simple, extensible functionality that can be leveraged by other - libraries to target platforms like Django. At the same time, PyMODM is - powerful enough to be used for developing applications on its own. Complete - documentation is available on `readthedocs - `_. - -MongoKit - The `MongoKit `_ framework - is an ORM-like layer on top of PyMongo. There is also a MongoKit - `google group `_. - -Minimongo - `minimongo `_ is a lightweight, - pythonic interface to MongoDB. It retains pymongo's query and update API, - and provides a number of additional features, including a simple - document-oriented interface, connection pooling, index management, and - collection & database naming helpers. The `source is on GitHub - `_. - -Manga - `Manga `_ aims to be a simpler ORM-like - layer on top of PyMongo. The syntax for defining schema is inspired by the - Django ORM, but Pymongo's query language is maintained. The source `is on - GitHub `_. - -Humongolus - `Humongolus `_ is a lightweight ORM - framework for Python and MongoDB. The name comes from the combination of - MongoDB and `Homunculus `_ (the - concept of a miniature though fully formed human body). Humongolus allows - you to create models/schemas with robust validation. It attempts to be as - pythonic as possible and exposes the pymongo cursor objects whenever - possible. The code is available for download - `at GitHub `_. Tutorials and usage - examples are also available at GitHub. - -Framework Tools ---------------- -This section lists tools and adapters that have been designed to work with -various Python frameworks and libraries. - -* `Djongo `_ is a connector for using - Django with MongoDB as the database backend. Use the Django Admin GUI to add and - modify documents in MongoDB. - The `Djongo Source Code `_ is hosted on GitHub - and the `Djongo package `_ is on pypi. -* `Django MongoDB Engine - `_ is a MongoDB - database backend for Django that completely integrates with its ORM. - For more information `see the tutorial - `_. -* `mango `_ provides MongoDB backends for - Django sessions and authentication (bypassing :mod:`django.db` entirely). -* `Django MongoEngine - `_ is a MongoDB backend for - Django, an `example: - `_. - For more information see ``_ -* `mongodb_beaker `_ is a - project to enable using MongoDB as a backend for `beakers `_ caching / session system. - `The source is on GitHub `_. -* `Log4Mongo `_ is a flexible - Python logging handler that can store logs in MongoDB using normal and capped - collections. -* `MongoLog `_ is a Python logging - handler that stores logs in MongoDB using a capped collection. -* `rod.recipe.mongodb `_ is a - ZC Buildout recipe for downloading and installing MongoDB. -* `mongobox `_ is a tool to run a sandboxed - MongoDB instance from within a python app. -* `Flask-MongoAlchemy `_ Add - Flask support for MongoDB using MongoAlchemy. -* `Flask-MongoKit `_ Flask extension - to better integrate MongoKit into Flask. -* `Flask-PyMongo `_ Flask-PyMongo - bridges Flask and PyMongo. - -Alternative Drivers -------------------- -These are alternatives to PyMongo. - -* `Motor `_ is a full-featured, non-blocking - MongoDB driver for Python Tornado applications. -* `TxMongo `_ is an asynchronous Twisted - Python driver for MongoDB. -* `MongoMock `_ is a small - library to help testing Python code that interacts with MongoDB via - Pymongo. 
diff --git a/doc/tutorial.rst b/doc/tutorial.rst deleted file mode 100644 index e33936363d..0000000000 --- a/doc/tutorial.rst +++ /dev/null @@ -1,413 +0,0 @@ -Tutorial -======== - -.. testsetup:: - - from pymongo import MongoClient - - client = MongoClient() - client.drop_database("test-database") - -This tutorial is intended as an introduction to working with -**MongoDB** and **PyMongo**. - -Prerequisites -------------- -Before we start, make sure that you have the **PyMongo** distribution -:doc:`installed `. In the Python shell, the following -should run without raising an exception: - -.. doctest:: - - >>> import pymongo - -This tutorial also assumes that a MongoDB instance is running on the -default host and port. Assuming you have `downloaded and installed -`_ MongoDB, you -can start it like so: - -.. code-block:: bash - - $ mongod - -Making a Connection with MongoClient ------------------------------------- -The first step when working with **PyMongo** is to create a -:class:`~pymongo.mongo_client.MongoClient` to the running **mongod** -instance. Doing so is easy: - -.. doctest:: - - >>> from pymongo import MongoClient - >>> client = MongoClient() - -The above code will connect on the default host and port. We can also -specify the host and port explicitly, as follows: - -.. doctest:: - - >>> client = MongoClient("localhost", 27017) - -Or use the MongoDB URI format: - -.. doctest:: - - >>> client = MongoClient("mongodb://localhost:27017/") - -Getting a Database ------------------- -A single instance of MongoDB can support multiple independent -`databases `_. When -working with PyMongo you access databases using attribute style access -on :class:`~pymongo.mongo_client.MongoClient` instances: - -.. doctest:: - - >>> db = client.test_database - -If your database name is such that using attribute style access won't -work (like ``test-database``), you can use dictionary style access -instead: - -.. doctest:: - - >>> db = client["test-database"] - -Getting a Collection --------------------- -A `collection `_ is a -group of documents stored in MongoDB, and can be thought of as roughly -the equivalent of a table in a relational database. Getting a -collection in PyMongo works the same as getting a database: - -.. doctest:: - - >>> collection = db.test_collection - -or (using dictionary style access): - -.. doctest:: - - >>> collection = db["test-collection"] - -An important note about collections (and databases) in MongoDB is that -they are created lazily - none of the above commands have actually -performed any operations on the MongoDB server. Collections and -databases are created when the first document is inserted into them. - -Documents ---------- -Data in MongoDB is represented (and stored) using JSON-style -documents. In PyMongo we use dictionaries to represent documents. As -an example, the following dictionary might be used to represent a blog -post: - -.. doctest:: - - >>> import datetime - >>> post = { - ... "author": "Mike", - ... "text": "My first blog post!", - ... "tags": ["mongodb", "python", "pymongo"], - ... "date": datetime.datetime.now(tz=datetime.timezone.utc), - ... } - -Note that documents can contain native Python types (like -:class:`datetime.datetime` instances) which will be automatically -converted to and from the appropriate `BSON -`_ types. - -.. todo:: link to table of Python <-> BSON types - -Inserting a Document --------------------- -To insert a document into a collection we can use the -:meth:`~pymongo.collection.Collection.insert_one` method: - -.. 
doctest:: - - >>> posts = db.posts - >>> post_id = posts.insert_one(post).inserted_id - >>> post_id - ObjectId('...') - -When a document is inserted a special key, ``"_id"``, is automatically -added if the document doesn't already contain an ``"_id"`` key. The value -of ``"_id"`` must be unique across the -collection. :meth:`~pymongo.collection.Collection.insert_one` returns an -instance of :class:`~pymongo.results.InsertOneResult`. For more information -on ``"_id"``, see the `documentation on _id -`_. - -After inserting the first document, the *posts* collection has -actually been created on the server. We can verify this by listing all -of the collections in our database: - -.. doctest:: - - >>> db.list_collection_names() - ['posts'] - -Getting a Single Document With :meth:`~pymongo.collection.Collection.find_one` ------------------------------------------------------------------------------- -The most basic type of query that can be performed in MongoDB is -:meth:`~pymongo.collection.Collection.find_one`. This method returns a -single document matching a query (or ``None`` if there are no -matches). It is useful when you know there is only one matching -document, or are only interested in the first match. Here we use -:meth:`~pymongo.collection.Collection.find_one` to get the first -document from the posts collection: - -.. doctest:: - - >>> import pprint - >>> pprint.pprint(posts.find_one()) - {'_id': ObjectId('...'), - 'author': 'Mike', - 'date': datetime.datetime(...), - 'tags': ['mongodb', 'python', 'pymongo'], - 'text': 'My first blog post!'} - -The result is a dictionary matching the one that we inserted previously. - -.. note:: The returned document contains an ``"_id"``, which was - automatically added on insert. - -:meth:`~pymongo.collection.Collection.find_one` also supports querying -on specific elements that the resulting document must match. To limit -our results to a document with author "Mike" we do: - -.. doctest:: - - >>> pprint.pprint(posts.find_one({"author": "Mike"})) - {'_id': ObjectId('...'), - 'author': 'Mike', - 'date': datetime.datetime(...), - 'tags': ['mongodb', 'python', 'pymongo'], - 'text': 'My first blog post!'} - -If we try with a different author, like "Eliot", we'll get no result: - -.. doctest:: - - >>> posts.find_one({"author": "Eliot"}) - >>> - -.. _querying-by-objectid: - -Querying By ObjectId --------------------- -We can also find a post by its ``_id``, which in our example is an ObjectId: - -.. doctest:: - - >>> post_id - ObjectId(...) - >>> pprint.pprint(posts.find_one({"_id": post_id})) - {'_id': ObjectId('...'), - 'author': 'Mike', - 'date': datetime.datetime(...), - 'tags': ['mongodb', 'python', 'pymongo'], - 'text': 'My first blog post!'} - -Note that an ObjectId is not the same as its string representation: - -.. doctest:: - - >>> post_id_as_str = str(post_id) - >>> posts.find_one({"_id": post_id_as_str}) # No result - >>> - -A common task in web applications is to get an ObjectId from the -request URL and find the matching document. It's necessary in this -case to **convert the ObjectId from a string** before passing it to -``find_one``:: - - from bson.objectid import ObjectId - - # The web framework gets post_id from the URL and passes it as a string - def get(post_id): - # Convert from string to ObjectId: - document = client.db.collection.find_one({'_id': ObjectId(post_id)}) - -.. 
seealso:: :ref:`web-application-querying-by-objectid` - -Bulk Inserts ------------- -In order to make querying a little more interesting, let's insert a -few more documents. In addition to inserting a single document, we can -also perform *bulk insert* operations, by passing a list as the -first argument to :meth:`~pymongo.collection.Collection.insert_many`. -This will insert each document in the list, sending only a single -command to the server: - -.. doctest:: - - >>> new_posts = [ - ... { - ... "author": "Mike", - ... "text": "Another post!", - ... "tags": ["bulk", "insert"], - ... "date": datetime.datetime(2009, 11, 12, 11, 14), - ... }, - ... { - ... "author": "Eliot", - ... "title": "MongoDB is fun", - ... "text": "and pretty easy too!", - ... "date": datetime.datetime(2009, 11, 10, 10, 45), - ... }, - ... ] - >>> result = posts.insert_many(new_posts) - >>> result.inserted_ids - [ObjectId('...'), ObjectId('...')] - -There are a couple of interesting things to note about this example: - - - The result from :meth:`~pymongo.collection.Collection.insert_many` now - returns two :class:`~bson.objectid.ObjectId` instances, one for - each inserted document. - - ``new_posts[1]`` has a different "shape" than the other posts - - there is no ``"tags"`` field and we've added a new field, - ``"title"``. This is what we mean when we say that MongoDB is - *schema-free*. - -Querying for More Than One Document ------------------------------------ -To get more than a single document as the result of a query we use the -:meth:`~pymongo.collection.Collection.find` -method. :meth:`~pymongo.collection.Collection.find` returns a -:class:`~pymongo.cursor.Cursor` instance, which allows us to iterate -over all matching documents. For example, we can iterate over every -document in the ``posts`` collection: - -.. doctest:: - - >>> for post in posts.find(): - ... pprint.pprint(post) - ... - {'_id': ObjectId('...'), - 'author': 'Mike', - 'date': datetime.datetime(...), - 'tags': ['mongodb', 'python', 'pymongo'], - 'text': 'My first blog post!'} - {'_id': ObjectId('...'), - 'author': 'Mike', - 'date': datetime.datetime(...), - 'tags': ['bulk', 'insert'], - 'text': 'Another post!'} - {'_id': ObjectId('...'), - 'author': 'Eliot', - 'date': datetime.datetime(...), - 'text': 'and pretty easy too!', - 'title': 'MongoDB is fun'} - -Just like we did with :meth:`~pymongo.collection.Collection.find_one`, -we can pass a document to :meth:`~pymongo.collection.Collection.find` -to limit the returned results. Here, we get only those documents whose -author is "Mike": - -.. doctest:: - - >>> for post in posts.find({"author": "Mike"}): - ... pprint.pprint(post) - ... - {'_id': ObjectId('...'), - 'author': 'Mike', - 'date': datetime.datetime(...), - 'tags': ['mongodb', 'python', 'pymongo'], - 'text': 'My first blog post!'} - {'_id': ObjectId('...'), - 'author': 'Mike', - 'date': datetime.datetime(...), - 'tags': ['bulk', 'insert'], - 'text': 'Another post!'} - -Counting --------- -If we just want to know how many documents match a query we can -perform a :meth:`~pymongo.collection.Collection.count_documents` operation -instead of a full query. We can get a count of all of the documents -in a collection: - -.. doctest:: - - >>> posts.count_documents({}) - 3 - -or just of those documents that match a specific query: - -.. doctest:: - - >>> posts.count_documents({"author": "Mike"}) - 2 - -Range Queries -------------- -MongoDB supports many different types of `advanced queries -`_. 
As an
-example, let's perform a query where we limit results to posts older
-than a certain date, but also sort the results by author:
-
-.. doctest::
-
-    >>> d = datetime.datetime(2009, 11, 12, 12)
-    >>> for post in posts.find({"date": {"$lt": d}}).sort("author"):
-    ...     pprint.pprint(post)
-    ...
-    {'_id': ObjectId('...'),
-     'author': 'Eliot',
-     'date': datetime.datetime(...),
-     'text': 'and pretty easy too!',
-     'title': 'MongoDB is fun'}
-    {'_id': ObjectId('...'),
-     'author': 'Mike',
-     'date': datetime.datetime(...),
-     'tags': ['bulk', 'insert'],
-     'text': 'Another post!'}
-
-Here we use the special ``"$lt"`` operator to do a range query, and
-also call :meth:`~pymongo.cursor.Cursor.sort` to sort the results
-by author.
-
-Indexing
---------
-
-Adding indexes can help accelerate certain queries and can also add additional
-functionality to querying and storing documents. In this example, we'll
-demonstrate how to create a `unique index
-`_ on a key that rejects
-documents whose value for that key already exists in the index.
-
-First, we'll need to create the index:
-
-.. doctest::
-
-    >>> result = db.profiles.create_index([("user_id", pymongo.ASCENDING)], unique=True)
-    >>> sorted(list(db.profiles.index_information()))
-    ['_id_', 'user_id_1']
-
-Notice that we have two indexes now: one is the index on ``_id`` that MongoDB
-creates automatically, and the other is the index on ``user_id`` we just
-created.
-
-Now let's set up some user profiles:
-
-.. doctest::
-
-    >>> user_profiles = [{"user_id": 211, "name": "Luke"}, {"user_id": 212, "name": "Ziltoid"}]
-    >>> result = db.profiles.insert_many(user_profiles)
-
-The index prevents us from inserting a document whose ``user_id`` is already in
-the collection:
-
-.. doctest::
-    :options: +IGNORE_EXCEPTION_DETAIL
-
-    >>> new_profile = {"user_id": 213, "name": "Drew"}
-    >>> duplicate_profile = {"user_id": 212, "name": "Tommy"}
-    >>> result = db.profiles.insert_one(new_profile)  # This is fine.
-    >>> result = db.profiles.insert_one(duplicate_profile)
-    Traceback (most recent call last):
-    DuplicateKeyError: E11000 duplicate key error index: test_database.profiles.$user_id_1 dup key: { : 212 }
-
-.. seealso:: The MongoDB documentation on `indexes `_
diff --git a/green_framework_test.py b/green_framework_test.py
deleted file mode 100644
index 037d0279c3..0000000000
--- a/green_framework_test.py
+++ /dev/null
@@ -1,117 +0,0 @@
-# Copyright 2015-present MongoDB, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Test PyMongo with a variety of greenlet-based monkey-patching frameworks."""
-from __future__ import annotations
-
-import getopt
-import sys
-
-import pytest
-
-
-def run_gevent():
-    """Prepare to run tests with Gevent. Can raise ImportError."""
-    from gevent import monkey
-
-    monkey.patch_all()
-
-
-def run_eventlet():
-    """Prepare to run tests with Eventlet.
Can raise ImportError.""" - import eventlet - - # https://github.com/eventlet/eventlet/issues/401 - eventlet.sleep() - eventlet.monkey_patch() - - -FRAMEWORKS = { - "gevent": run_gevent, - "eventlet": run_eventlet, -} - - -def list_frameworks(): - """Tell the user what framework names are valid.""" - sys.stdout.write( - """Testable frameworks: %s - -Note that membership in this list means the framework can be tested with -PyMongo, not necessarily that it is officially supported. -""" - % ", ".join(sorted(FRAMEWORKS)) - ) - - -def run(framework_name, *args): - """Run tests with monkey-patching enabled. Can raise ImportError.""" - # Monkey-patch. - FRAMEWORKS[framework_name]() - - arg_list = list(args) - - # Never run async tests with a framework - if len(arg_list) <= 1: - arg_list.extend(["-m", "not default_async and default"]) - else: - for i in range(len(arg_list) - 1): - if "-m" in arg_list[i]: - arg_list[i + 1] = f"not default_async and {arg_list[i + 1]}" - - # Run the tests. - sys.exit(pytest.main(arg_list)) - - -def main(): - """Parse options and run tests.""" - usage = f"""python {sys.argv[0]} FRAMEWORK_NAME - -Test PyMongo with a variety of greenlet-based monkey-patching frameworks. See -python {sys.argv[0]} --help-frameworks.""" - - try: - opts, args = getopt.getopt(sys.argv[1:], "h", ["help", "help-frameworks"]) - except getopt.GetoptError as err: - print(str(err)) - print(usage) - sys.exit(2) - - for option_name, _ in opts: - if option_name in ("-h", "--help"): - print(usage) - sys.exit() - elif option_name == "--help-frameworks": - list_frameworks() - sys.exit() - else: - raise AssertionError("unhandled option") - - if not args: - print(usage) - sys.exit(1) - - if args[0] not in FRAMEWORKS: - print("%r is not a testable framework.\n" % args[0]) - list_frameworks() - sys.exit(1) - - run( - args[0], - *args[1:], # Framework name. - ) # Command line args to pytest, like what test to run. - - -if __name__ == "__main__": - main() diff --git a/gridfs/asynchronous/__init__.py b/gridfs/asynchronous/__init__.py new file mode 100644 index 0000000000..0826145b11 --- /dev/null +++ b/gridfs/asynchronous/__init__.py @@ -0,0 +1,42 @@ +# Copyright 2009-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""GridFS is a specification for storing large objects in Mongo. + +The :mod:`gridfs` package is an implementation of GridFS on top of +:mod:`pymongo`, exposing a file-like interface. + +.. seealso:: The MongoDB documentation on `gridfs `_. 
+""" +from __future__ import annotations + +from gridfs.asynchronous.grid_file import ( + AsyncGridFS, + AsyncGridFSBucket, + AsyncGridIn, + AsyncGridOut, + AsyncGridOutCursor, +) +from gridfs.errors import NoFile +from gridfs.grid_file_shared import DEFAULT_CHUNK_SIZE + +__all__ = [ + "AsyncGridFS", + "AsyncGridFSBucket", + "NoFile", + "DEFAULT_CHUNK_SIZE", + "AsyncGridIn", + "AsyncGridOut", + "AsyncGridOutCursor", +] diff --git a/gridfs/asynchronous/grid_file.py b/gridfs/asynchronous/grid_file.py index a49d51d304..69a2200d3b 100644 --- a/gridfs/asynchronous/grid_file.py +++ b/gridfs/asynchronous/grid_file.py @@ -46,7 +46,6 @@ from pymongo.asynchronous.collection import AsyncCollection from pymongo.asynchronous.cursor import AsyncCursor from pymongo.asynchronous.database import AsyncDatabase -from pymongo.asynchronous.helpers import anext from pymongo.common import validate_string from pymongo.errors import ( BulkWriteError, @@ -70,7 +69,7 @@ def _disallow_transactions(session: Optional[AsyncClientSession]) -> None: class AsyncGridFS: """An instance of GridFS on top of a single Database.""" - def __init__(self, database: AsyncDatabase, collection: str = "fs"): + def __init__(self, database: AsyncDatabase[Any], collection: str = "fs"): """Create a new instance of :class:`GridFS`. Raises :class:`TypeError` if `database` is not an instance of @@ -100,7 +99,7 @@ def __init__(self, database: AsyncDatabase, collection: str = "fs"): .. seealso:: The MongoDB documentation on `gridfs `_. """ if not isinstance(database, AsyncDatabase): - raise TypeError("database must be an instance of Database") + raise TypeError(f"database must be an instance of Database, not {type(database)}") database = _clear_entity_type_registry(database) @@ -231,7 +230,7 @@ async def get_version( try: doc = await anext(cursor) return AsyncGridOut(self._collection, file_document=doc, session=session) - except StopIteration: + except StopAsyncIteration: raise NoFile("no version %d for filename %r" % (version, filename)) from None async def get_last_version( @@ -463,7 +462,7 @@ class AsyncGridFSBucket: def __init__( self, - db: AsyncDatabase, + db: AsyncDatabase[Any], bucket_name: str = "fs", chunk_size_bytes: int = DEFAULT_CHUNK_SIZE, write_concern: Optional[WriteConcern] = None, @@ -503,7 +502,7 @@ def __init__( .. seealso:: The MongoDB documentation on `gridfs `_. 
""" if not isinstance(db, AsyncDatabase): - raise TypeError("database must be an instance of AsyncDatabase") + raise TypeError(f"database must be an instance of AsyncDatabase, not {type(db)}") db = _clear_entity_type_registry(db) @@ -513,11 +512,11 @@ def __init__( self._bucket_name = bucket_name self._collection = db[bucket_name] - self._chunks: AsyncCollection = self._collection.chunks.with_options( + self._chunks: AsyncCollection[Any] = self._collection.chunks.with_options( write_concern=write_concern, read_preference=read_preference ) - self._files: AsyncCollection = self._collection.files.with_options( + self._files: AsyncCollection[Any] = self._collection.files.with_options( write_concern=write_concern, read_preference=read_preference ) @@ -834,6 +833,35 @@ async def delete(self, file_id: Any, session: Optional[AsyncClientSession] = Non if not res.deleted_count: raise NoFile("no file could be deleted because none matched %s" % file_id) + @_csot.apply + async def delete_by_name( + self, filename: str, session: Optional[AsyncClientSession] = None + ) -> None: + """Given a filename, delete this stored file's files collection document(s) + and associated chunks from a GridFS bucket. + + For example:: + + my_db = AsyncMongoClient().test + fs = AsyncGridFSBucket(my_db) + await fs.upload_from_stream("test_file", "data I want to store!") + await fs.delete_by_name("test_file") + + Raises :exc:`~gridfs.errors.NoFile` if no file with the given filename exists. + + :param filename: The name of the file to be deleted. + :param session: a :class:`~pymongo.client_session.AsyncClientSession` + + .. versionadded:: 4.12 + """ + _disallow_transactions(session) + files = self._files.find({"filename": filename}, {"_id": 1}, session=session) + file_ids = [file["_id"] async for file in files] + res = await self._files.delete_many({"_id": {"$in": file_ids}}, session=session) + await self._chunks.delete_many({"files_id": {"$in": file_ids}}, session=session) + if not res.deleted_count: + raise NoFile(f"no file could be deleted because none matched filename {filename!r}") + def find(self, *args: Any, **kwargs: Any) -> AsyncGridOutCursor: """Find and return the files collection documents that match ``filter`` @@ -1021,13 +1049,42 @@ async def rename( "matched file_id %i" % (new_filename, file_id) ) + async def rename_by_name( + self, filename: str, new_filename: str, session: Optional[AsyncClientSession] = None + ) -> None: + """Renames the stored file with the specified filename. + + For example:: + + my_db = AsyncMongoClient().test + fs = AsyncGridFSBucket(my_db) + await fs.upload_from_stream("test_file", "data I want to store!") + await fs.rename_by_name("test_file", "new_test_name") + + Raises :exc:`~gridfs.errors.NoFile` if no file with the given filename exists. + + :param filename: The filename of the file to be renamed. + :param new_filename: The new name of the file. + :param session: a :class:`~pymongo.client_session.AsyncClientSession` + + .. 
versionadded:: 4.12 + """ + _disallow_transactions(session) + result = await self._files.update_many( + {"filename": filename}, {"$set": {"filename": new_filename}}, session=session + ) + if not result.matched_count: + raise NoFile( + f"no files could be renamed {new_filename!r} because none matched filename {filename!r}" + ) + class AsyncGridIn: """Class to write data to GridFS.""" def __init__( self, - root_collection: AsyncCollection, + root_collection: AsyncCollection[Any], session: Optional[AsyncClientSession] = None, **kwargs: Any, ) -> None: @@ -1082,7 +1139,9 @@ def __init__( :attr:`~pymongo.collection.AsyncCollection.write_concern` """ if not isinstance(root_collection, AsyncCollection): - raise TypeError("root_collection must be an instance of AsyncCollection") + raise TypeError( + f"root_collection must be an instance of AsyncCollection, not {type(root_collection)}" + ) if not root_collection.write_concern.acknowledged: raise ConfigurationError("root_collection must use acknowledged write_concern") @@ -1112,7 +1171,7 @@ def __init__( object.__setattr__(self, "_buffered_docs_size", 0) async def _create_index( - self, collection: AsyncCollection, index_key: Any, unique: bool + self, collection: AsyncCollection[Any], index_key: Any, unique: bool ) -> None: doc = await collection.find_one(projection={"_id": 1}, session=self._session) if doc is None: @@ -1299,11 +1358,8 @@ async def write(self, data: Any) -> None: raise ValueError("cannot write to a closed file") try: - if isinstance(data, AsyncGridOut): - read = data.read - else: - # file-like - read = data.read + # file-like + read = data.read except AttributeError: # string if not isinstance(data, (str, bytes)): @@ -1315,7 +1371,7 @@ async def write(self, data: Any) -> None: raise TypeError( "must specify an encoding for file in order to write str" ) from None - read = io.BytesIO(data).read # type: ignore[assignment] + read = io.BytesIO(data).read if inspect.iscoroutinefunction(read): await self._write_async(read) @@ -1329,15 +1385,15 @@ async def write(self, data: Any) -> None: except BaseException: await self.abort() raise - self._buffer.write(to_write) # type: ignore - if len(to_write) < space: # type: ignore + self._buffer.write(to_write) + if len(to_write) < space: return # EOF or incomplete await self._flush_buffer() to_write = read(self.chunk_size) - while to_write and len(to_write) == self.chunk_size: # type: ignore + while to_write and len(to_write) == self.chunk_size: await self._flush_data(to_write) to_write = read(self.chunk_size) - self._buffer.write(to_write) # type: ignore + self._buffer.write(to_write) async def _write_async(self, read: Any) -> None: if self._buffer.tell() > 0: @@ -1399,7 +1455,7 @@ class AsyncGridOut(GRIDOUT_BASE_CLASS): # type: ignore def __init__( self, - root_collection: AsyncCollection, + root_collection: AsyncCollection[Any], file_id: Optional[int] = None, file_document: Optional[Any] = None, session: Optional[AsyncClientSession] = None, @@ -1436,7 +1492,9 @@ def __init__( from the server. Metadata is fetched when first needed. 
""" if not isinstance(root_collection, AsyncCollection): - raise TypeError("root_collection must be an instance of AsyncCollection") + raise TypeError( + f"root_collection must be an instance of AsyncCollection, not {type(root_collection)}" + ) _disallow_transactions(session) root_collection = _clear_entity_type_registry(root_collection) @@ -1770,7 +1828,7 @@ class _AsyncGridOutChunkIterator: def __init__( self, grid_out: AsyncGridOut, - chunks: AsyncCollection, + chunks: AsyncCollection[Any], session: Optional[AsyncClientSession], next_chunk: Any, ) -> None: @@ -1783,7 +1841,7 @@ def __init__( self._num_chunks = math.ceil(float(self._length) / self._chunk_size) self._cursor = None - _cursor: Optional[AsyncCursor] + _cursor: Optional[AsyncCursor[Any]] def expected_chunk_length(self, chunk_n: int) -> int: if chunk_n < self._num_chunks - 1: @@ -1862,7 +1920,7 @@ async def close(self) -> None: class AsyncGridOutIterator: def __init__( - self, grid_out: AsyncGridOut, chunks: AsyncCollection, session: AsyncClientSession + self, grid_out: AsyncGridOut, chunks: AsyncCollection[Any], session: AsyncClientSession ): self._chunk_iter = _AsyncGridOutChunkIterator(grid_out, chunks, session, 0) @@ -1876,14 +1934,14 @@ async def next(self) -> bytes: __anext__ = next -class AsyncGridOutCursor(AsyncCursor): +class AsyncGridOutCursor(AsyncCursor): # type: ignore[type-arg] """A cursor / iterator for returning GridOut objects as the result of an arbitrary query against the GridFS files collection. """ def __init__( self, - collection: AsyncCollection, + collection: AsyncCollection[Any], filter: Optional[Mapping[str, Any]] = None, skip: int = 0, limit: int = 0, diff --git a/gridfs/synchronous/__init__.py b/gridfs/synchronous/__init__.py new file mode 100644 index 0000000000..bc2704364b --- /dev/null +++ b/gridfs/synchronous/__init__.py @@ -0,0 +1,42 @@ +# Copyright 2009-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""GridFS is a specification for storing large objects in Mongo. + +The :mod:`gridfs` package is an implementation of GridFS on top of +:mod:`pymongo`, exposing a file-like interface. + +.. seealso:: The MongoDB documentation on `gridfs `_. 
+""" +from __future__ import annotations + +from gridfs.errors import NoFile +from gridfs.grid_file_shared import DEFAULT_CHUNK_SIZE +from gridfs.synchronous.grid_file import ( + GridFS, + GridFSBucket, + GridIn, + GridOut, + GridOutCursor, +) + +__all__ = [ + "GridFS", + "GridFSBucket", + "NoFile", + "DEFAULT_CHUNK_SIZE", + "GridIn", + "GridOut", + "GridOutCursor", +] diff --git a/gridfs/synchronous/grid_file.py b/gridfs/synchronous/grid_file.py index 655f05f57a..7364aedda3 100644 --- a/gridfs/synchronous/grid_file.py +++ b/gridfs/synchronous/grid_file.py @@ -57,7 +57,6 @@ from pymongo.synchronous.collection import Collection from pymongo.synchronous.cursor import Cursor from pymongo.synchronous.database import Database -from pymongo.synchronous.helpers import next _IS_SYNC = True @@ -70,7 +69,7 @@ def _disallow_transactions(session: Optional[ClientSession]) -> None: class GridFS: """An instance of GridFS on top of a single Database.""" - def __init__(self, database: Database, collection: str = "fs"): + def __init__(self, database: Database[Any], collection: str = "fs"): """Create a new instance of :class:`GridFS`. Raises :class:`TypeError` if `database` is not an instance of @@ -100,7 +99,7 @@ def __init__(self, database: Database, collection: str = "fs"): .. seealso:: The MongoDB documentation on `gridfs `_. """ if not isinstance(database, Database): - raise TypeError("database must be an instance of Database") + raise TypeError(f"database must be an instance of Database, not {type(database)}") database = _clear_entity_type_registry(database) @@ -461,7 +460,7 @@ class GridFSBucket: def __init__( self, - db: Database, + db: Database[Any], bucket_name: str = "fs", chunk_size_bytes: int = DEFAULT_CHUNK_SIZE, write_concern: Optional[WriteConcern] = None, @@ -501,7 +500,7 @@ def __init__( .. seealso:: The MongoDB documentation on `gridfs `_. """ if not isinstance(db, Database): - raise TypeError("database must be an instance of Database") + raise TypeError(f"database must be an instance of Database, not {type(db)}") db = _clear_entity_type_registry(db) @@ -511,11 +510,11 @@ def __init__( self._bucket_name = bucket_name self._collection = db[bucket_name] - self._chunks: Collection = self._collection.chunks.with_options( + self._chunks: Collection[Any] = self._collection.chunks.with_options( write_concern=write_concern, read_preference=read_preference ) - self._files: Collection = self._collection.files.with_options( + self._files: Collection[Any] = self._collection.files.with_options( write_concern=write_concern, read_preference=read_preference ) @@ -830,6 +829,33 @@ def delete(self, file_id: Any, session: Optional[ClientSession] = None) -> None: if not res.deleted_count: raise NoFile("no file could be deleted because none matched %s" % file_id) + @_csot.apply + def delete_by_name(self, filename: str, session: Optional[ClientSession] = None) -> None: + """Given a filename, delete this stored file's files collection document(s) + and associated chunks from a GridFS bucket. + + For example:: + + my_db = MongoClient().test + fs = GridFSBucket(my_db) + fs.upload_from_stream("test_file", "data I want to store!") + fs.delete_by_name("test_file") + + Raises :exc:`~gridfs.errors.NoFile` if no file with the given filename exists. + + :param filename: The name of the file to be deleted. + :param session: a :class:`~pymongo.client_session.ClientSession` + + .. 
versionadded:: 4.12 + """ + _disallow_transactions(session) + files = self._files.find({"filename": filename}, {"_id": 1}, session=session) + file_ids = [file["_id"] for file in files] + res = self._files.delete_many({"_id": {"$in": file_ids}}, session=session) + self._chunks.delete_many({"files_id": {"$in": file_ids}}, session=session) + if not res.deleted_count: + raise NoFile(f"no file could be deleted because none matched filename {filename!r}") + def find(self, *args: Any, **kwargs: Any) -> GridOutCursor: """Find and return the files collection documents that match ``filter`` @@ -1015,13 +1041,42 @@ def rename( "matched file_id %i" % (new_filename, file_id) ) + def rename_by_name( + self, filename: str, new_filename: str, session: Optional[ClientSession] = None + ) -> None: + """Renames the stored file with the specified filename. + + For example:: + + my_db = MongoClient().test + fs = GridFSBucket(my_db) + fs.upload_from_stream("test_file", "data I want to store!") + fs.rename_by_name("test_file", "new_test_name") + + Raises :exc:`~gridfs.errors.NoFile` if no file with the given filename exists. + + :param filename: The filename of the file to be renamed. + :param new_filename: The new name of the file. + :param session: a :class:`~pymongo.client_session.ClientSession` + + .. versionadded:: 4.12 + """ + _disallow_transactions(session) + result = self._files.update_many( + {"filename": filename}, {"$set": {"filename": new_filename}}, session=session + ) + if not result.matched_count: + raise NoFile( + f"no files could be renamed {new_filename!r} because none matched filename {filename!r}" + ) + class GridIn: """Class to write data to GridFS.""" def __init__( self, - root_collection: Collection, + root_collection: Collection[Any], session: Optional[ClientSession] = None, **kwargs: Any, ) -> None: @@ -1076,7 +1131,9 @@ def __init__( :attr:`~pymongo.collection.Collection.write_concern` """ if not isinstance(root_collection, Collection): - raise TypeError("root_collection must be an instance of Collection") + raise TypeError( + f"root_collection must be an instance of Collection, not {type(root_collection)}" + ) if not root_collection.write_concern.acknowledged: raise ConfigurationError("root_collection must use acknowledged write_concern") @@ -1105,7 +1162,7 @@ def __init__( object.__setattr__(self, "_buffered_docs", []) object.__setattr__(self, "_buffered_docs_size", 0) - def _create_index(self, collection: Collection, index_key: Any, unique: bool) -> None: + def _create_index(self, collection: Collection[Any], index_key: Any, unique: bool) -> None: doc = collection.find_one(projection={"_id": 1}, session=self._session) if doc is None: try: @@ -1289,11 +1346,8 @@ def write(self, data: Any) -> None: raise ValueError("cannot write to a closed file") try: - if isinstance(data, GridOut): - read = data.read - else: - # file-like - read = data.read + # file-like + read = data.read except AttributeError: # string if not isinstance(data, (str, bytes)): @@ -1305,7 +1359,7 @@ def write(self, data: Any) -> None: raise TypeError( "must specify an encoding for file in order to write str" ) from None - read = io.BytesIO(data).read # type: ignore[assignment] + read = io.BytesIO(data).read if inspect.iscoroutinefunction(read): self._write_async(read) @@ -1319,15 +1373,15 @@ def write(self, data: Any) -> None: except BaseException: self.abort() raise - self._buffer.write(to_write) # type: ignore - if len(to_write) < space: # type: ignore + self._buffer.write(to_write) + if len(to_write) < space: return 
# EOF or incomplete self._flush_buffer() to_write = read(self.chunk_size) - while to_write and len(to_write) == self.chunk_size: # type: ignore + while to_write and len(to_write) == self.chunk_size: self._flush_data(to_write) to_write = read(self.chunk_size) - self._buffer.write(to_write) # type: ignore + self._buffer.write(to_write) def _write_async(self, read: Any) -> None: if self._buffer.tell() > 0: @@ -1389,7 +1443,7 @@ class GridOut(GRIDOUT_BASE_CLASS): # type: ignore def __init__( self, - root_collection: Collection, + root_collection: Collection[Any], file_id: Optional[int] = None, file_document: Optional[Any] = None, session: Optional[ClientSession] = None, @@ -1426,7 +1480,9 @@ def __init__( from the server. Metadata is fetched when first needed. """ if not isinstance(root_collection, Collection): - raise TypeError("root_collection must be an instance of Collection") + raise TypeError( + f"root_collection must be an instance of Collection, not {type(root_collection)}" + ) _disallow_transactions(session) root_collection = _clear_entity_type_registry(root_collection) @@ -1760,7 +1816,7 @@ class GridOutChunkIterator: def __init__( self, grid_out: GridOut, - chunks: Collection, + chunks: Collection[Any], session: Optional[ClientSession], next_chunk: Any, ) -> None: @@ -1773,7 +1829,7 @@ def __init__( self._num_chunks = math.ceil(float(self._length) / self._chunk_size) self._cursor = None - _cursor: Optional[Cursor] + _cursor: Optional[Cursor[Any]] def expected_chunk_length(self, chunk_n: int) -> int: if chunk_n < self._num_chunks - 1: @@ -1851,7 +1907,7 @@ def close(self) -> None: class GridOutIterator: - def __init__(self, grid_out: GridOut, chunks: Collection, session: ClientSession): + def __init__(self, grid_out: GridOut, chunks: Collection[Any], session: ClientSession): self._chunk_iter = GridOutChunkIterator(grid_out, chunks, session, 0) def __iter__(self) -> GridOutIterator: @@ -1864,14 +1920,14 @@ def next(self) -> bytes: __next__ = next -class GridOutCursor(Cursor): +class GridOutCursor(Cursor): # type: ignore[type-arg] """A cursor / iterator for returning GridOut objects as the result of an arbitrary query against the GridFS files collection. 
""" def __init__( self, - collection: Collection, + collection: Collection[Any], filter: Optional[Mapping[str, Any]] = None, skip: int = 0, limit: int = 0, diff --git a/hatch.toml b/hatch.toml deleted file mode 100644 index 60bd0af014..0000000000 --- a/hatch.toml +++ /dev/null @@ -1,52 +0,0 @@ -# See https://hatch.pypa.io/dev/config/environment/overview/ - -[envs.doc] -features = ["docs"] -[envs.doc.scripts] -build = "sphinx-build -W -b html doc ./doc/_build/html" -serve = "sphinx-autobuild -W -b html doc --watch ./pymongo --watch ./bson --watch ./gridfs ./doc/_build/serve" -linkcheck = "sphinx-build -E -b linkcheck doc ./doc/_build/linkcheck" - -[envs.doctest] -features = ["docs","test"] -[envs.doctest.scripts] -test = "sphinx-build -E -b doctest doc ./doc/_build/doctest" - -[envs.typing] -pre-install-commands = [ - "pip install -q -r requirements/typing.txt", -] -[envs.typing.scripts] -check-mypy = [ - "mypy --install-types --non-interactive bson gridfs tools pymongo", - "mypy --install-types --non-interactive --config-file mypy_test.ini test", - "mypy --install-types --non-interactive test/test_typing.py test/test_typing_strict.py" -] -check-pyright = ["rm -f pyrightconfig.json", "pyright test/test_typing.py test/test_typing_strict.py"] -check-strict-pyright = [ - "echo '{{\"strict\": [\"tests/test_typing_strict.py\"]}}' > pyrightconfig.json", - "pyright test/test_typing_strict.py", - "rm -f pyrightconfig.json" -] -check = ["check-mypy", "check-pyright", "check-strict-pyright"] - -[envs.lint] -skip-install = true -dependencies = ["pre-commit"] -[envs.lint.scripts] -run = "pre-commit run --all-files" -run-manual = "pre-commit run --all-files --hook-stage manual" - -[envs.test] -features = ["test"] -[envs.test.scripts] -test = "pytest -v --durations=5 --maxfail=10 {args}" -test-eg = "bash ./.evergreen/run-tests.sh {args}" -test-async = "pytest -v --durations=5 --maxfail=10 -m default_async {args}" -test-mockupdb = ["pip install -U git+https://github.com/mongodb-labs/mongo-mockup-db@master", "test -m mockupdb"] - -[envs.encryption] -skip-install = true -[envs.encryption.scripts] -setup = "bash .evergreen/setup-encryption.sh" -teardown = "bash .evergreen/teardown-encryption.sh" diff --git a/hatch_build.py b/hatch_build.py index 91315eb09f..40271972dd 100644 --- a/hatch_build.py +++ b/hatch_build.py @@ -19,7 +19,7 @@ def initialize(self, version, build_data): here = Path(__file__).parent.resolve() sys.path.insert(0, str(here)) - subprocess.check_call([sys.executable, "_setup.py", "build_ext", "-i"]) + subprocess.run([sys.executable, "_setup.py", "build_ext", "-i"], check=True) # Ensure wheel is marked as binary and contains the binary files. build_data["infer_tag"] = True diff --git a/integration_tests/README.md b/integration_tests/README.md new file mode 100644 index 0000000000..fb64a9066f --- /dev/null +++ b/integration_tests/README.md @@ -0,0 +1,42 @@ +# Integration Tests + +A set of tests that verify the usage of PyMongo with downstream packages or frameworks. + +Each test uses [PEP 723 inline metadata](https://packaging.python.org/en/latest/specifications/inline-script-metadata/) and can be run using `pipx` or `uv`. + +The `run.sh` convenience script can be used to run all of the files using `uv`. 
+ +Here is an example header for the script with the inline dependencies: + +```python +# /// script +# dependencies = [ +# "uvloop>=0.18" +# ] +# requires-python = ">=3.10" +# /// +``` + +Here is an example of using the test helper function to create a configured client for the test: + + +```python +import asyncio +import sys +from pathlib import Path + +# Use pymongo from parent directory. +root = Path(__file__).parent.parent +sys.path.insert(0, str(root)) + +from test.asynchronous import async_simple_test_client # noqa: E402 + + +async def main(): + async with async_simple_test_client() as client: + result = await client.admin.command("ping") + assert result["ok"] + + +asyncio.run(main()) +``` diff --git a/integration_tests/run.sh b/integration_tests/run.sh new file mode 100755 index 0000000000..051e2b8a75 --- /dev/null +++ b/integration_tests/run.sh @@ -0,0 +1,11 @@ +#!/bin/bash +# Run all of the integration test files using `uv run`. +set -eu + +for file in integration_tests/test_*.py ; do + echo "-----------------" + echo "Running $file..." + uv run $file + echo "Running $file...done." + echo "-----------------" +done diff --git a/integration_tests/test_uv_loop.py b/integration_tests/test_uv_loop.py new file mode 100644 index 0000000000..88a3ad73ab --- /dev/null +++ b/integration_tests/test_uv_loop.py @@ -0,0 +1,27 @@ +# /// script +# dependencies = [ +# "uvloop>=0.18" +# ] +# requires-python = ">=3.10" +# /// +from __future__ import annotations + +import sys +from pathlib import Path + +import uvloop + +# Use pymongo from parent directory. +root = Path(__file__).parent.parent +sys.path.insert(0, str(root)) + +from test.asynchronous import async_simple_test_client # noqa: E402 + + +async def main(): + async with async_simple_test_client() as client: + result = await client.admin.command("ping") + assert result["ok"] + + +uvloop.run(main()) diff --git a/justfile b/justfile new file mode 100644 index 0000000000..17b95e87b7 --- /dev/null +++ b/justfile @@ -0,0 +1,85 @@ +# See https://just.systems/man/en/ for instructions +set shell := ["bash", "-c"] + +# Commonly used command segments. +typing_run := "uv run --group typing --extra aws --extra encryption --extra ocsp --extra snappy --extra test --extra zstd" +docs_run := "uv run --extra docs" +doc_build := "./doc/_build" +mypy_args := "--install-types --non-interactive" + +# Make the default recipe private so it doesn't show up in the list. 
+[private] +default: + @just --list + +[private] +resync: + @uv sync --quiet + +install: + bash .evergreen/scripts/setup-dev-env.sh + uvx pre-commit install + +[group('docs')] +docs: && resync + {{docs_run}} sphinx-build -W -b html doc {{doc_build}}/html + +[group('docs')] +docs-serve: && resync + {{docs_run}} sphinx-autobuild -W -b html doc --watch ./pymongo --watch ./bson --watch ./gridfs {{doc_build}}/serve + +[group('docs')] +docs-linkcheck: && resync + {{docs_run}} sphinx-build -E -b linkcheck doc {{doc_build}}/linkcheck + +[group('typing')] +typing: && resync + just typing-mypy + just typing-pyright + +[group('typing')] +typing-mypy: && resync + {{typing_run}} mypy {{mypy_args}} bson gridfs tools pymongo + {{typing_run}} mypy {{mypy_args}} --config-file mypy_test.ini test + {{typing_run}} mypy {{mypy_args}} test/test_typing.py test/test_typing_strict.py + +[group('typing')] +typing-pyright: && resync + {{typing_run}} pyright test/test_typing.py test/test_typing_strict.py + {{typing_run}} pyright -p strict_pyrightconfig.json test/test_typing_strict.py + +[group('lint')] +lint *args="": && resync + uvx pre-commit run --all-files {{args}} + +[group('lint')] +lint-manual *args="": && resync + uvx pre-commit run --all-files --hook-stage manual {{args}} + +[group('test')] +test *args="-v --durations=5 --maxfail=10": && resync + uv run --extra test pytest {{args}} + +[group('test')] +run-tests *args: && resync + bash ./.evergreen/run-tests.sh {{args}} + +[group('test')] +setup-tests *args="": + bash .evergreen/scripts/setup-tests.sh {{args}} + +[group('test')] +teardown-tests: + bash .evergreen/scripts/teardown-tests.sh + +[group('test')] +integration-tests: + bash integration_tests/run.sh + +[group('server')] +run-server *args="": + bash .evergreen/scripts/run-server.sh {{args}} + +[group('server')] +stop-server: + bash .evergreen/scripts/stop-server.sh diff --git a/pymongo/__init__.py b/pymongo/__init__.py index 58f6ff338b..ac540d94db 100644 --- a/pymongo/__init__.py +++ b/pymongo/__init__.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -55,7 +55,7 @@ GEO2D = "2d" """Index specifier for a 2-dimensional `geospatial index`_. -.. _geospatial index: http://mongodb.com/docs/manual/core/2d/ +.. _geospatial index: https://mongodb.com/docs/manual/core/2d/ """ GEOSPHERE = "2dsphere" @@ -63,7 +63,7 @@ .. versionadded:: 2.5 -.. _spherical geospatial index: http://mongodb.com/docs/manual/core/2dsphere/ +.. _spherical geospatial index: https://mongodb.com/docs/manual/core/2dsphere/ """ HASHED = "hashed" @@ -71,7 +71,7 @@ .. versionadded:: 2.5 -.. _hashed index: http://mongodb.com/docs/manual/core/index-hashed/ +.. _hashed index: https://mongodb.com/docs/manual/core/index-hashed/ """ TEXT = "text" @@ -83,7 +83,7 @@ .. versionadded:: 2.7.1 -.. _text index: http://mongodb.com/docs/manual/core/index-text/ +.. 
_text index: https://mongodb.com/docs/manual/core/index-text/ """ from pymongo import _csot @@ -105,6 +105,16 @@ from pymongo.synchronous.mongo_client import MongoClient from pymongo.write_concern import WriteConcern +# Public module compatibility imports +# isort: off +from pymongo import uri_parser # noqa: F401 +from pymongo import change_stream # noqa: F401 +from pymongo import client_session # noqa: F401 +from pymongo import collection # noqa: F401 +from pymongo import command_cursor # noqa: F401 +from pymongo import database # noqa: F401 +# isort: on + version = __version__ """Current version of PyMongo.""" @@ -155,12 +165,12 @@ def timeout(seconds: Optional[float]) -> ContextManager[None]: :raises: :py:class:`ValueError`: When `seconds` is negative. - See :ref:`timeout-example` for more examples. + See `Limit Server Execution Time `_ for more examples. .. versionadded:: 4.2 """ if not isinstance(seconds, (int, float, type(None))): - raise TypeError("timeout must be None, an int, or a float") + raise TypeError(f"timeout must be None, an int, or a float, not {type(seconds)}") if seconds and seconds < 0: raise ValueError("timeout cannot be negative") if seconds is not None: diff --git a/pymongo/_asyncio_lock.py b/pymongo/_asyncio_lock.py index 669b0f63a7..5ca09982fa 100644 --- a/pymongo/_asyncio_lock.py +++ b/pymongo/_asyncio_lock.py @@ -93,7 +93,7 @@ class Lock(_ContextManagerMixin, _LoopBoundMixin): """ def __init__(self) -> None: - self._waiters: Optional[collections.deque] = None + self._waiters: Optional[collections.deque[Any]] = None self._locked = False def __repr__(self) -> str: @@ -160,7 +160,7 @@ def release(self) -> None: self._locked = False self._wake_up_first() else: - raise RuntimeError("Lock is not acquired.") + raise RuntimeError("Lock is not acquired") def _wake_up_first(self) -> None: """Ensure that the first waiter will wake up.""" @@ -196,7 +196,7 @@ def __init__(self, lock: Optional[Lock] = None) -> None: self.acquire = lock.acquire self.release = lock.release - self._waiters: collections.deque = collections.deque() + self._waiters: collections.deque[Any] = collections.deque() def __repr__(self) -> str: res = super().__repr__() @@ -260,7 +260,7 @@ async def wait(self) -> bool: self._notify(1) raise - async def wait_for(self, predicate: Any) -> Coroutine: + async def wait_for(self, predicate: Any) -> Coroutine[Any, Any, Any]: """Wait until a predicate becomes true. The predicate should be a callable whose result will be diff --git a/pymongo/_asyncio_task.py b/pymongo/_asyncio_task.py index 8e457763d9..118471963a 100644 --- a/pymongo/_asyncio_task.py +++ b/pymongo/_asyncio_task.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -24,7 +24,7 @@ # TODO (https://jira.mongodb.org/browse/PYTHON-4981): Revisit once the underlying cause of the swallowed cancellations is uncovered -class _Task(asyncio.Task): +class _Task(asyncio.Task[Any]): def __init__(self, coro: Coroutine[Any, Any, Any], *, name: Optional[str] = None) -> None: super().__init__(coro, name=name) self._cancel_requests = 0 @@ -43,7 +43,7 @@ def cancelling(self) -> int: return self._cancel_requests -def create_task(coro: Coroutine[Any, Any, Any], *, name: Optional[str] = None) -> asyncio.Task: +def create_task(coro: Coroutine[Any, Any, Any], *, name: Optional[str] = None) -> asyncio.Task[Any]: if sys.version_info >= (3, 11): return asyncio.create_task(coro, name=name) return _Task(coro, name=name) diff --git a/pymongo/_azure_helpers.py b/pymongo/_azure_helpers.py index 704c561cd5..8a7af0b407 100644 --- a/pymongo/_azure_helpers.py +++ b/pymongo/_azure_helpers.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -46,7 +46,7 @@ def _get_azure_response( try: data = json.loads(body) except Exception: - raise ValueError("Azure IMDS response must be in JSON format.") from None + raise ValueError("Azure IMDS response must be in JSON format") from None for key in ["access_token", "expires_in"]: if not data.get(key): diff --git a/pymongo/_client_bulk_shared.py b/pymongo/_client_bulk_shared.py index 649f1c6aa0..5814025566 100644 --- a/pymongo/_client_bulk_shared.py +++ b/pymongo/_client_bulk_shared.py @@ -4,7 +4,7 @@ # may not use this file except in compliance with the License. You # may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/pymongo/_cmessagemodule.c b/pymongo/_cmessagemodule.c index eb457b341c..a506863737 100644 --- a/pymongo/_cmessagemodule.c +++ b/pymongo/_cmessagemodule.c @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/pymongo/_csot.py b/pymongo/_csot.py index 06c6b68ac9..ce72a66486 100644 --- a/pymongo/_csot.py +++ b/pymongo/_csot.py @@ -32,6 +32,12 @@ DEADLINE: ContextVar[float] = ContextVar("DEADLINE", default=float("inf")) +def reset_all() -> None: + TIMEOUT.set(None) + RTT.set(0.0) + DEADLINE.set(float("inf")) + + def get_timeout() -> Optional[float]: return TIMEOUT.get(None) @@ -62,7 +68,7 @@ def clamp_remaining(max_timeout: float) -> float: return min(timeout, max_timeout) -class _TimeoutContext(AbstractContextManager): +class _TimeoutContext(AbstractContextManager[Any]): """Internal timeout context manager. 
Use :func:`pymongo.timeout` instead:: diff --git a/pymongo/_gcp_helpers.py b/pymongo/_gcp_helpers.py index d90f3cc217..7979d1e807 100644 --- a/pymongo/_gcp_helpers.py +++ b/pymongo/_gcp_helpers.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/pymongo/_version.py b/pymongo/_version.py index 3de24a8e14..c6ba82ab13 100644 --- a/pymongo/_version.py +++ b/pymongo/_version.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -18,7 +18,7 @@ import re from typing import List, Tuple, Union -__version__ = "4.11.0.dev0" +__version__ = "4.16.0.dev0" def get_version_tuple(version: str) -> Tuple[Union[int, str], ...]: diff --git a/pymongo/asynchronous/aggregation.py b/pymongo/asynchronous/aggregation.py index 7684151897..6ca60ad9c3 100644 --- a/pymongo/asynchronous/aggregation.py +++ b/pymongo/asynchronous/aggregation.py @@ -4,7 +4,7 @@ # may not use this file except in compliance with the License. You # may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -46,11 +46,10 @@ class _AggregationCommand: def __init__( self, - target: Union[AsyncDatabase, AsyncCollection], - cursor_class: type[AsyncCommandCursor], + target: Union[AsyncDatabase[Any], AsyncCollection[Any]], + cursor_class: type[AsyncCommandCursor[Any]], pipeline: _Pipeline, options: MutableMapping[str, Any], - explicit_session: bool, let: Optional[Mapping[str, Any]] = None, user_fields: Optional[MutableMapping[str, Any]] = None, result_processor: Optional[Callable[[Mapping[str, Any], AsyncConnection], None]] = None, @@ -92,7 +91,6 @@ def __init__( self._options["cursor"]["batchSize"] = self._batch_size self._cursor_class = cursor_class - self._explicit_session = explicit_session self._user_fields = user_fields self._result_processor = result_processor @@ -111,12 +109,12 @@ def _cursor_namespace(self) -> str: """The namespace in which the aggregate command is run.""" raise NotImplementedError - def _cursor_collection(self, cursor_doc: Mapping[str, Any]) -> AsyncCollection: + def _cursor_collection(self, cursor_doc: Mapping[str, Any]) -> AsyncCollection[Any]: """The AsyncCollection used for the aggregate command cursor.""" raise NotImplementedError @property - def _database(self) -> AsyncDatabase: + def _database(self) -> AsyncDatabase[Any]: """The database against which the aggregation command is run.""" raise NotImplementedError @@ -197,7 +195,6 @@ async def get_cursor( batch_size=self._batch_size or 0, max_await_time_ms=self._max_await_time_ms, session=session, - explicit_session=self._explicit_session, comment=self._options.get("comment"), ) await cmd_cursor._maybe_pin_connection(conn) @@ -205,7 +202,7 @@ async def get_cursor( class _CollectionAggregationCommand(_AggregationCommand): - _target: 
AsyncCollection + _target: AsyncCollection[Any] @property def _aggregation_target(self) -> str: @@ -215,12 +212,12 @@ def _aggregation_target(self) -> str: def _cursor_namespace(self) -> str: return self._target.full_name - def _cursor_collection(self, cursor: Mapping[str, Any]) -> AsyncCollection: + def _cursor_collection(self, cursor: Mapping[str, Any]) -> AsyncCollection[Any]: """The AsyncCollection used for the aggregate command cursor.""" return self._target @property - def _database(self) -> AsyncDatabase: + def _database(self) -> AsyncDatabase[Any]: return self._target.database @@ -234,7 +231,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class _DatabaseAggregationCommand(_AggregationCommand): - _target: AsyncDatabase + _target: AsyncDatabase[Any] @property def _aggregation_target(self) -> int: @@ -245,10 +242,10 @@ def _cursor_namespace(self) -> str: return f"{self._target.name}.$cmd.aggregate" @property - def _database(self) -> AsyncDatabase: + def _database(self) -> AsyncDatabase[Any]: return self._target - def _cursor_collection(self, cursor: Mapping[str, Any]) -> AsyncCollection: + def _cursor_collection(self, cursor: Mapping[str, Any]) -> AsyncCollection[Any]: """The AsyncCollection used for the aggregate command cursor.""" # AsyncCollection level aggregate may not always return the "ns" field # according to our MockupDB tests. Let's handle that case for db level diff --git a/pymongo/asynchronous/auth.py b/pymongo/asynchronous/auth.py index 48ce4bbd39..c1321f1d90 100644 --- a/pymongo/asynchronous/auth.py +++ b/pymongo/asynchronous/auth.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -38,6 +38,7 @@ _authenticate_oidc, _get_authenticator, ) +from pymongo.asynchronous.helpers import _getaddrinfo from pymongo.auth_shared import ( MongoCredential, _authenticate_scram_start, @@ -160,7 +161,7 @@ def _password_digest(username: str, password: str) -> str: if len(password) == 0: raise ValueError("password can't be empty") if not isinstance(username, str): - raise TypeError("username must be an instance of str") + raise TypeError(f"username must be an instance of str, not {type(username)}") md5hash = hashlib.md5() # noqa: S324 data = f"{username}:mongo:{password}" @@ -177,15 +178,22 @@ def _auth_key(nonce: str, username: str, password: str) -> str: return md5hash.hexdigest() -def _canonicalize_hostname(hostname: str, option: str | bool) -> str: +async def _canonicalize_hostname(hostname: str, option: str | bool) -> str: """Canonicalize hostname following MIT-krb5 behavior.""" # https://github.com/krb5/krb5/blob/d406afa363554097ac48646a29249c04f498c88e/src/util/k5test.py#L505-L520 if option in [False, "none"]: return hostname - af, socktype, proto, canonname, sockaddr = socket.getaddrinfo( - hostname, None, 0, 0, socket.IPPROTO_TCP, socket.AI_CANONNAME - )[0] + af, socktype, proto, canonname, sockaddr = ( + await _getaddrinfo( + hostname, + None, + family=0, + type=0, + proto=socket.IPPROTO_TCP, + flags=socket.AI_CANONNAME, + ) + )[0] # type: ignore[index] # For forward just to resolve the cname as dns.lookup() will not return it. 
if option == "forward": @@ -213,7 +221,7 @@ async def _authenticate_gssapi(credentials: MongoCredential, conn: AsyncConnecti # Starting here and continuing through the while loop below - establish # the security context. See RFC 4752, Section 3.1, first paragraph. host = props.service_host or conn.address[0] - host = _canonicalize_hostname(host, props.canonicalize_host_name) + host = await _canonicalize_hostname(host, props.canonicalize_host_name) service = props.service_name + "@" + host if props.service_realm is not None: service = service + "@" + props.service_realm diff --git a/pymongo/asynchronous/auth_aws.py b/pymongo/asynchronous/auth_aws.py index 9dcc625d19..210d306046 100644 --- a/pymongo/asynchronous/auth_aws.py +++ b/pymongo/asynchronous/auth_aws.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/pymongo/asynchronous/auth_oidc.py b/pymongo/asynchronous/auth_oidc.py index f1c15045de..f8f046bd94 100644 --- a/pymongo/asynchronous/auth_oidc.py +++ b/pymongo/asynchronous/auth_oidc.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -15,6 +15,7 @@ """MONGODB-OIDC Authentication helpers.""" from __future__ import annotations +import asyncio import threading import time from dataclasses import dataclass, field @@ -36,6 +37,7 @@ ) from pymongo.errors import ConfigurationError, OperationFailure from pymongo.helpers_shared import _AUTHENTICATION_FAILURE_CODE +from pymongo.lock import Lock, _async_create_lock if TYPE_CHECKING: from pymongo.asynchronous.pool import AsyncConnection @@ -81,7 +83,11 @@ class _OIDCAuthenticator: access_token: Optional[str] = field(default=None) idp_info: Optional[OIDCIdPInfo] = field(default=None) token_gen_id: int = field(default=0) - lock: threading.Lock = field(default_factory=threading.Lock) + if not _IS_SYNC: + lock: Lock = field(default_factory=_async_create_lock) # type: ignore[assignment] + else: + lock: threading.Lock = field(default_factory=_async_create_lock) # type: ignore[assignment, no-redef] + last_call_time: float = field(default=0) async def reauthenticate(self, conn: AsyncConnection) -> Optional[Mapping[str, Any]]: @@ -164,7 +170,7 @@ async def _authenticate_human(self, conn: AsyncConnection) -> Optional[Mapping[s # Attempt to authenticate with a JwtStepRequest. return await self._sasl_continue_jwt(conn, start_resp) - def _get_access_token(self) -> Optional[str]: + async def _get_access_token(self) -> Optional[str]: properties = self.properties cb: Union[None, OIDCCallback] resp: OIDCCallbackResult @@ -186,7 +192,7 @@ def _get_access_token(self) -> Optional[str]: return None if not prev_token and cb is not None: - with self.lock: + async with self.lock: # type: ignore[attr-defined] # See if the token was changed while we were waiting for the # lock. new_token = self.access_token @@ -196,7 +202,7 @@ def _get_access_token(self) -> Optional[str]: # Ensure that we are waiting a min time between callback invocations. 
delta = time.time() - self.last_call_time if delta < TIME_BETWEEN_CALLS_SECONDS: - time.sleep(TIME_BETWEEN_CALLS_SECONDS - delta) + await asyncio.sleep(TIME_BETWEEN_CALLS_SECONDS - delta) self.last_call_time = time.time() if is_human: @@ -211,9 +217,14 @@ def _get_access_token(self) -> Optional[str]: idp_info=self.idp_info, username=self.properties.username, ) - resp = cb.fetch(context) + if not _IS_SYNC: + resp = await asyncio.get_running_loop().run_in_executor(None, cb.fetch, context) # type: ignore[assignment] + else: + resp = cb.fetch(context) if not isinstance(resp, OIDCCallbackResult): - raise ValueError("Callback result must be of type OIDCCallbackResult") + raise ValueError( + f"Callback result must be of type OIDCCallbackResult, not {type(resp)}" + ) self.refresh_token = resp.refresh_token self.access_token = resp.access_token self.token_gen_id += 1 @@ -248,16 +259,16 @@ async def _sasl_continue_jwt( ) -> Mapping[str, Any]: self.access_token = None self.refresh_token = None - start_payload: dict = bson.decode(start_resp["payload"]) + start_payload: dict[str, Any] = bson.decode(start_resp["payload"]) if "issuer" in start_payload: self.idp_info = OIDCIdPInfo(**start_payload) - access_token = self._get_access_token() + access_token = await self._get_access_token() conn.oidc_token_gen_id = self.token_gen_id cmd = self._get_continue_command({"jwt": access_token}, start_resp) return await self._run_command(conn, cmd) async def _sasl_start_jwt(self, conn: AsyncConnection) -> Mapping[str, Any]: - access_token = self._get_access_token() + access_token = await self._get_access_token() conn.oidc_token_gen_id = self.token_gen_id cmd = self._get_start_command({"jwt": access_token}) return await self._run_command(conn, cmd) diff --git a/pymongo/asynchronous/bulk.py b/pymongo/asynchronous/bulk.py index 6770d7b34e..4a54f9eb3f 100644 --- a/pymongo/asynchronous/bulk.py +++ b/pymongo/asynchronous/bulk.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -87,7 +87,7 @@ def __init__( self, collection: AsyncCollection[_DocumentType], ordered: bool, - bypass_document_validation: bool, + bypass_document_validation: Optional[bool], comment: Optional[str] = None, let: Optional[Any] = None, ) -> None: @@ -248,15 +248,15 @@ async def write_command( request_id: int, msg: bytes, docs: list[Mapping[str, Any]], - client: AsyncMongoClient, + client: AsyncMongoClient[Any], ) -> dict[str, Any]: """A proxy for SocketInfo.write_command that handles event publishing.""" cmd[bwc.field] = docs if _COMMAND_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _COMMAND_LOGGER, - clientId=client._topology_settings._topology_id, message=_CommandStatusMessage.STARTED, + clientId=client._topology_settings._topology_id, command=cmd, commandName=next(iter(cmd)), databaseName=bwc.db_name, @@ -276,8 +276,8 @@ async def write_command( if _COMMAND_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _COMMAND_LOGGER, - clientId=client._topology_settings._topology_id, message=_CommandStatusMessage.SUCCEEDED, + clientId=client._topology_settings._topology_id, durationMS=duration, reply=reply, commandName=next(iter(cmd)), @@ -302,8 +302,8 @@ async def write_command( if _COMMAND_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _COMMAND_LOGGER, - clientId=client._topology_settings._topology_id, message=_CommandStatusMessage.FAILED, + clientId=client._topology_settings._topology_id, durationMS=duration, failure=failure, commandName=next(iter(cmd)), @@ -334,14 +334,14 @@ async def unack_write( msg: bytes, max_doc_size: int, docs: list[Mapping[str, Any]], - client: AsyncMongoClient, + client: AsyncMongoClient[Any], ) -> Optional[Mapping[str, Any]]: """A proxy for AsyncConnection.unack_write that handles event publishing.""" if _COMMAND_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _COMMAND_LOGGER, - clientId=client._topology_settings._topology_id, message=_CommandStatusMessage.STARTED, + clientId=client._topology_settings._topology_id, command=cmd, commandName=next(iter(cmd)), databaseName=bwc.db_name, @@ -366,8 +366,8 @@ async def unack_write( if _COMMAND_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _COMMAND_LOGGER, - clientId=client._topology_settings._topology_id, message=_CommandStatusMessage.SUCCEEDED, + clientId=client._topology_settings._topology_id, durationMS=duration, reply=reply, commandName=next(iter(cmd)), @@ -393,8 +393,8 @@ async def unack_write( if _COMMAND_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _COMMAND_LOGGER, - clientId=client._topology_settings._topology_id, message=_CommandStatusMessage.FAILED, + clientId=client._topology_settings._topology_id, durationMS=duration, failure=failure, commandName=next(iter(cmd)), @@ -419,7 +419,7 @@ async def _execute_batch_unack( bwc: Union[_BulkWriteContext, _EncryptedBulkWriteContext], cmd: dict[str, Any], ops: list[Mapping[str, Any]], - client: AsyncMongoClient, + client: AsyncMongoClient[Any], ) -> list[Mapping[str, Any]]: if self.is_encrypted: _, batched_cmd, to_send = bwc.batch_command(cmd, ops) @@ -446,7 +446,7 @@ async def _execute_batch( bwc: Union[_BulkWriteContext, _EncryptedBulkWriteContext], cmd: dict[str, Any], ops: list[Mapping[str, Any]], - client: AsyncMongoClient, + client: AsyncMongoClient[Any], ) -> tuple[dict[str, Any], 
list[Mapping[str, Any]]]: if self.is_encrypted: _, batched_cmd, to_send = bwc.batch_command(cmd, ops) @@ -516,8 +516,8 @@ async def _execute_command( if self.comment: cmd["comment"] = self.comment _csot.apply_write_concern(cmd, write_concern) - if self.bypass_doc_val: - cmd["bypassDocumentValidation"] = True + if self.bypass_doc_val is not None: + cmd["bypassDocumentValidation"] = self.bypass_doc_val if self.let is not None and run.op_type in (_DELETE, _UPDATE): cmd["let"] = self.let if session: diff --git a/pymongo/asynchronous/change_stream.py b/pymongo/asynchronous/change_stream.py index 719020c409..b2b78b0660 100644 --- a/pymongo/asynchronous/change_stream.py +++ b/pymongo/asynchronous/change_stream.py @@ -4,7 +4,7 @@ # may not use this file except in compliance with the License. You # may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -164,7 +164,7 @@ def _aggregation_command_class(self) -> Type[_AggregationCommand]: raise NotImplementedError @property - def _client(self) -> AsyncMongoClient: + def _client(self) -> AsyncMongoClient: # type: ignore[type-arg] """The client against which the aggregation commands for this AsyncChangeStream will be run. """ @@ -206,7 +206,7 @@ def _command_options(self) -> dict[str, Any]: def _aggregation_pipeline(self) -> list[dict[str, Any]]: """Return the full aggregation pipeline for this AsyncChangeStream.""" options = self._change_stream_options() - full_pipeline: list = [{"$changeStream": options}] + full_pipeline: list[dict[str, Any]] = [{"$changeStream": options}] full_pipeline.extend(self._pipeline) return full_pipeline @@ -236,8 +236,8 @@ def _process_result(self, result: Mapping[str, Any], conn: AsyncConnection) -> N ) async def _run_aggregation_cmd( - self, session: Optional[AsyncClientSession], explicit_session: bool - ) -> AsyncCommandCursor: + self, session: Optional[AsyncClientSession] + ) -> AsyncCommandCursor: # type: ignore[type-arg] """Run the full aggregation pipeline for this AsyncChangeStream and return the corresponding AsyncCommandCursor. """ @@ -246,7 +246,6 @@ async def _run_aggregation_cmd( AsyncCommandCursor, self._aggregation_pipeline(), self._command_options(), - explicit_session, result_processor=self._process_result, comment=self._comment, ) @@ -257,11 +256,9 @@ async def _run_aggregation_cmd( operation=_Op.AGGREGATE, ) - async def _create_cursor(self) -> AsyncCommandCursor: - async with self._client._tmp_session(self._session, close=False) as s: - return await self._run_aggregation_cmd( - session=s, explicit_session=self._session is not None - ) + async def _create_cursor(self) -> AsyncCommandCursor: # type: ignore[type-arg] + async with self._client._tmp_session(self._session) as s: + return await self._run_aggregation_cmd(session=s) async def _resume(self) -> None: """Reestablish this change stream after a resumable error.""" @@ -391,7 +388,8 @@ async def try_next(self) -> Optional[_DocumentType]: if not _resumable(exc) and not exc.timeout: await self.close() raise - except Exception: + # Catch KeyboardInterrupt, CancelledError, etc. and cleanup. 
+ except BaseException: await self.close() raise diff --git a/pymongo/asynchronous/client_bulk.py b/pymongo/asynchronous/client_bulk.py index 45824256da..151942c8a8 100644 --- a/pymongo/asynchronous/client_bulk.py +++ b/pymongo/asynchronous/client_bulk.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -88,7 +88,7 @@ class _AsyncClientBulk: def __init__( self, - client: AsyncMongoClient, + client: AsyncMongoClient[Any], write_concern: WriteConcern, ordered: bool = True, bypass_document_validation: Optional[bool] = None, @@ -233,7 +233,7 @@ async def write_command( msg: Union[bytes, dict[str, Any]], op_docs: list[Mapping[str, Any]], ns_docs: list[Mapping[str, Any]], - client: AsyncMongoClient, + client: AsyncMongoClient[Any], ) -> dict[str, Any]: """A proxy for AsyncConnection.write_command that handles event publishing.""" cmd["ops"] = op_docs @@ -241,8 +241,8 @@ async def write_command( if _COMMAND_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _COMMAND_LOGGER, - clientId=client._topology_settings._topology_id, message=_CommandStatusMessage.STARTED, + clientId=client._topology_settings._topology_id, command=cmd, commandName=next(iter(cmd)), databaseName=bwc.db_name, @@ -262,8 +262,8 @@ async def write_command( if _COMMAND_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _COMMAND_LOGGER, - clientId=client._topology_settings._topology_id, message=_CommandStatusMessage.SUCCEEDED, + clientId=client._topology_settings._topology_id, durationMS=duration, reply=reply, commandName=next(iter(cmd)), @@ -289,8 +289,8 @@ async def write_command( if _COMMAND_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _COMMAND_LOGGER, - clientId=client._topology_settings._topology_id, message=_CommandStatusMessage.FAILED, + clientId=client._topology_settings._topology_id, durationMS=duration, failure=failure, commandName=next(iter(cmd)), @@ -324,14 +324,14 @@ async def unack_write( msg: bytes, op_docs: list[Mapping[str, Any]], ns_docs: list[Mapping[str, Any]], - client: AsyncMongoClient, + client: AsyncMongoClient[Any], ) -> Optional[Mapping[str, Any]]: """A proxy for AsyncConnection.unack_write that handles event publishing.""" if _COMMAND_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _COMMAND_LOGGER, - clientId=client._topology_settings._topology_id, message=_CommandStatusMessage.STARTED, + clientId=client._topology_settings._topology_id, command=cmd, commandName=next(iter(cmd)), databaseName=bwc.db_name, @@ -356,8 +356,8 @@ async def unack_write( if _COMMAND_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _COMMAND_LOGGER, - clientId=client._topology_settings._topology_id, message=_CommandStatusMessage.SUCCEEDED, + clientId=client._topology_settings._topology_id, durationMS=duration, reply=reply, commandName=next(iter(cmd)), @@ -383,8 +383,8 @@ async def unack_write( if _COMMAND_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _COMMAND_LOGGER, - clientId=client._topology_settings._topology_id, message=_CommandStatusMessage.FAILED, + clientId=client._topology_settings._topology_id, durationMS=duration, failure=failure, commandName=next(iter(cmd)), @@ -440,6 +440,8 @@ async def _process_results_cursor( ) -> None: """Internal helper for processing the server reply command cursor.""" if 
result.get("cursor"): + if session: + session._leave_alive = True coll = AsyncCollection( database=AsyncDatabase(self.client, "admin"), name="$cmd.bulkWrite", @@ -449,7 +451,6 @@ async def _process_results_cursor( result["cursor"], conn.address, session=session, - explicit_session=session is not None, comment=self.comment, ) await cmd_cursor._maybe_pin_connection(conn) diff --git a/pymongo/asynchronous/client_session.py b/pymongo/asynchronous/client_session.py index d80495d804..8674e98447 100644 --- a/pymongo/asynchronous/client_session.py +++ b/pymongo/asynchronous/client_session.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -21,7 +21,7 @@ .. code-block:: python - with client.start_session(causal_consistency=True) as session: + async with client.start_session(causal_consistency=True) as session: collection = client.db.collection await collection.update_one({"_id": 1}, {"$set": {"x": 10}}, session=session) secondary_c = collection.with_options(read_preference=ReadPreference.SECONDARY) @@ -53,8 +53,8 @@ orders = client.db.orders inventory = client.db.inventory - with client.start_session() as session: - async with session.start_transaction(): + async with client.start_session() as session: + async with await session.start_transaction(): await orders.insert_one({"sku": "abc123", "qty": 100}, session=session) await inventory.update_one( {"sku": "abc123", "qty": {"$gte": 100}}, @@ -62,7 +62,7 @@ session=session, ) -Upon normal completion of ``async with session.start_transaction()`` block, the +Upon normal completion of ``async with await session.start_transaction()`` block, the transaction automatically calls :meth:`AsyncClientSession.commit_transaction`. If the block exits with an exception, the transaction automatically calls :meth:`AsyncClientSession.abort_transaction`. @@ -113,7 +113,7 @@ .. code-block:: python # Each read using this session reads data from the same point in time. 
- with client.start_session(snapshot=True) as session: + async with client.start_session(snapshot=True) as session: order = await orders.find_one({"sku": "abc123"}, session=session) inventory = await inventory.find_one({"sku": "abc123"}, session=session) @@ -167,7 +167,6 @@ WTimeoutError, ) from pymongo.helpers_shared import _RETRYABLE_ERROR_CODES -from pymongo.operations import _Op from pymongo.read_concern import ReadConcern from pymongo.read_preferences import ReadPreference, _ServerMode from pymongo.server_type import SERVER_TYPE @@ -310,7 +309,9 @@ def __init__( ) if max_commit_time_ms is not None: if not isinstance(max_commit_time_ms, int): - raise TypeError("max_commit_time_ms must be an integer or None") + raise TypeError( + f"max_commit_time_ms must be an integer or None, not {type(max_commit_time_ms)}" + ) @property def read_concern(self) -> Optional[ReadConcern]: @@ -394,7 +395,7 @@ class _TxnState: class _Transaction: """Internal class to hold transaction information in a AsyncClientSession.""" - def __init__(self, opts: Optional[TransactionOptions], client: AsyncMongoClient): + def __init__(self, opts: Optional[TransactionOptions], client: AsyncMongoClient[Any]): self.opts = opts self.state = _TxnState.NONE self.sharded = False @@ -456,10 +457,10 @@ def _max_time_expired_error(exc: PyMongoError) -> bool: # From the transactions spec, all the retryable writes errors plus -# WriteConcernFailed. -_UNKNOWN_COMMIT_ERROR_CODES: frozenset = _RETRYABLE_ERROR_CODES | frozenset( +# WriteConcernTimeout. +_UNKNOWN_COMMIT_ERROR_CODES: frozenset = _RETRYABLE_ERROR_CODES | frozenset( # type: ignore[type-arg] [ - 64, # WriteConcernFailed + 64, # WriteConcernTimeout 50, # MaxTimeMSExpired ] ) @@ -497,13 +498,13 @@ class AsyncClientSession: def __init__( self, - client: AsyncMongoClient, + client: AsyncMongoClient[Any], server_session: Any, options: SessionOptions, implicit: bool, ) -> None: # An AsyncMongoClient, a _ServerSession, a SessionOptions, and a set. - self._client: AsyncMongoClient = client + self._client: AsyncMongoClient[Any] = client self._server_session = server_session self._options = options self._cluster_time: Optional[Mapping[str, Any]] = None @@ -512,6 +513,10 @@ def __init__( # Is this an implicitly created session? self._implicit = implicit self._transaction = _Transaction(None, client) + # Is this session attached to a cursor? + self._attached_to_cursor = False + # Should we leave the session alive when the cursor is closed? + self._leave_alive = False async def end_session(self) -> None: """Finish this session. If a transaction has started, abort it. @@ -534,7 +539,7 @@ async def _end_session(self, lock: bool) -> None: def _end_implicit_session(self) -> None: # Implicit sessions can't be part of transactions or pinned connections - if self._server_session is not None: + if not self._leave_alive and self._server_session is not None: self._client._return_server_session(self._server_session) self._server_session = None @@ -549,7 +554,7 @@ async def __aexit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None: await self._end_session(lock=True) @property - def client(self) -> AsyncMongoClient: + def client(self) -> AsyncMongoClient[Any]: """The :class:`~pymongo.asynchronous.mongo_client.AsyncMongoClient` this session was created from. 
""" @@ -617,7 +622,7 @@ async def callback(session): await inventory.update_one({"sku": "abc123", "qty": {"$gte": 100}}, {"$inc": {"qty": -100}}, session=session) - with client.start_session() as session: + async with client.start_session() as session: await session.with_transaction(callback) To pass arbitrary arguments to the ``callback``, wrap your callable @@ -626,7 +631,7 @@ async def callback(session): async def callback(session, custom_arg, custom_kwarg=None): # Transaction operations... - with client.start_session() as session: + async with client.start_session() as session: await session.with_transaction( lambda s: callback(s, "custom_arg", custom_kwarg=1)) @@ -658,6 +663,12 @@ async def callback(session, custom_arg, custom_kwarg=None): ``with_transaction`` starts a new transaction and re-executes the ``callback``. + The ``callback`` MUST NOT silently handle command errors + without allowing such errors to propagate. Command errors may abort the + transaction on the server, and an attempt to commit the transaction will + be rejected with a ``NoSuchTransaction`` error. For more information see + the `transactions specification`_. + When :meth:`~AsyncClientSession.commit_transaction` raises an exception with the ``"UnknownTransactionCommitResult"`` error label, ``with_transaction`` retries the commit until the result of the @@ -687,6 +698,9 @@ async def callback(session, custom_arg, custom_kwarg=None): :return: The return value of the ``callback``. .. versionadded:: 3.9 + + .. _transactions specification: + https://github.com/mongodb/specifications/blob/master/source/transactions-convenient-api/transactions-convenient-api.md#handling-errors-inside-the-callback """ start_time = time.monotonic() while True: @@ -695,7 +709,8 @@ async def callback(session, custom_arg, custom_kwarg=None): ) try: ret = await callback(self) - except Exception as exc: + # Catch KeyboardInterrupt, CancelledError, etc. and cleanup. + except BaseException as exc: if self.in_transaction: await self.abort_transaction() if ( @@ -739,7 +754,7 @@ async def start_transaction( write_concern: Optional[WriteConcern] = None, read_preference: Optional[_ServerMode] = None, max_commit_time_ms: Optional[int] = None, - ) -> AsyncContextManager: + ) -> AsyncContextManager[Any]: """Start a multi-statement transaction. Takes the same arguments as :class:`TransactionOptions`. @@ -856,7 +871,7 @@ async def func( return await self._finish_transaction(conn, command_name) return await self._client._retry_internal( - func, self, None, retryable=True, operation=_Op.ABORT + func, self, None, retryable=True, operation=command_name ) async def _finish_transaction(self, conn: AsyncConnection, command_name: str) -> dict[str, Any]: @@ -902,7 +917,9 @@ def advance_cluster_time(self, cluster_time: Mapping[str, Any]) -> None: another `AsyncClientSession` instance. """ if not isinstance(cluster_time, _Mapping): - raise TypeError("cluster_time must be a subclass of collections.Mapping") + raise TypeError( + f"cluster_time must be a subclass of collections.Mapping, not {type(cluster_time)}" + ) if not isinstance(cluster_time.get("clusterTime"), Timestamp): raise ValueError("Invalid cluster_time") self._advance_cluster_time(cluster_time) @@ -923,7 +940,9 @@ def advance_operation_time(self, operation_time: Timestamp) -> None: another `AsyncClientSession` instance. 
""" if not isinstance(operation_time, Timestamp): - raise TypeError("operation_time must be an instance of bson.timestamp.Timestamp") + raise TypeError( + f"operation_time must be an instance of bson.timestamp.Timestamp, not {type(operation_time)}" + ) self._advance_operation_time(operation_time) def _process_response(self, reply: Mapping[str, Any]) -> None: @@ -1107,7 +1126,7 @@ def inc_transaction_id(self) -> None: self._transaction_id += 1 -class _ServerSessionPool(collections.deque): +class _ServerSessionPool(collections.deque): # type: ignore[type-arg] """Pool of _ServerSession objects. This class is thread-safe. diff --git a/pymongo/asynchronous/collection.py b/pymongo/asynchronous/collection.py index 9b73423627..e7e2f58031 100644 --- a/pymongo/asynchronous/collection.py +++ b/pymongo/asynchronous/collection.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -228,7 +228,7 @@ def __init__( read_concern or database.read_concern, ) if not isinstance(name, str): - raise TypeError("name must be an instance of str") + raise TypeError(f"name must be an instance of str, not {type(name)}") from pymongo.asynchronous.database import AsyncDatabase if not isinstance(database, AsyncDatabase): @@ -581,7 +581,7 @@ async def _command( conn: AsyncConnection, command: MutableMapping[str, Any], read_preference: Optional[_ServerMode] = None, - codec_options: Optional[CodecOptions] = None, + codec_options: Optional[CodecOptions[Mapping[str, Any]]] = None, check: bool = True, allowable_errors: Optional[Sequence[Union[str, int]]] = None, read_concern: Optional[ReadConcern] = None, @@ -701,10 +701,10 @@ async def bulk_write( self, requests: Sequence[_WriteOp[_DocumentType]], ordered: bool = True, - bypass_document_validation: bool = False, + bypass_document_validation: Optional[bool] = None, session: Optional[AsyncClientSession] = None, comment: Optional[Any] = None, - let: Optional[Mapping] = None, + let: Optional[Mapping[str, Any]] = None, ) -> BulkWriteResult: """Send a batch of write operations to the server. @@ -762,7 +762,7 @@ async def bulk_write( :return: An instance of :class:`~pymongo.results.BulkWriteResult`. - .. seealso:: :ref:`writes-and-ids` + .. seealso:: `Writes and ids `_ .. 
note:: `bypass_document_validation` requires server version **>= 3.2** @@ -800,7 +800,7 @@ async def _insert_one( ordered: bool, write_concern: WriteConcern, op_id: Optional[int], - bypass_doc_val: bool, + bypass_doc_val: Optional[bool], session: Optional[AsyncClientSession], comment: Optional[Any] = None, ) -> Any: @@ -814,8 +814,8 @@ async def _insert_one( async def _insert_command( session: Optional[AsyncClientSession], conn: AsyncConnection, retryable_write: bool ) -> None: - if bypass_doc_val: - command["bypassDocumentValidation"] = True + if bypass_doc_val is not None: + command["bypassDocumentValidation"] = bypass_doc_val result = await conn.command( self._database.name, @@ -840,7 +840,7 @@ async def _insert_command( async def insert_one( self, document: Union[_DocumentType, RawBSONDocument], - bypass_document_validation: bool = False, + bypass_document_validation: Optional[bool] = None, session: Optional[AsyncClientSession] = None, comment: Optional[Any] = None, ) -> InsertOneResult: @@ -867,7 +867,7 @@ async def insert_one( :return: - An instance of :class:`~pymongo.results.InsertOneResult`. - .. seealso:: :ref:`writes-and-ids` + .. seealso:: `Writes and ids `_ .. note:: `bypass_document_validation` requires server version **>= 3.2** @@ -906,7 +906,7 @@ async def insert_many( self, documents: Iterable[Union[_DocumentType, RawBSONDocument]], ordered: bool = True, - bypass_document_validation: bool = False, + bypass_document_validation: Optional[bool] = None, session: Optional[AsyncClientSession] = None, comment: Optional[Any] = None, ) -> InsertManyResult: @@ -936,7 +936,7 @@ async def insert_many( :return: An instance of :class:`~pymongo.results.InsertManyResult`. - .. seealso:: :ref:`writes-and-ids` + .. seealso:: `Writes and ids `_ .. note:: `bypass_document_validation` requires server version **>= 3.2** @@ -986,7 +986,7 @@ async def _update( write_concern: Optional[WriteConcern] = None, op_id: Optional[int] = None, ordered: bool = True, - bypass_doc_val: Optional[bool] = False, + bypass_doc_val: Optional[bool] = None, collation: Optional[_CollationIn] = None, array_filters: Optional[Sequence[Mapping[str, Any]]] = None, hint: Optional[_IndexKeyHint] = None, @@ -1041,8 +1041,8 @@ async def _update( if comment is not None: command["comment"] = comment # Update command. - if bypass_doc_val: - command["bypassDocumentValidation"] = True + if bypass_doc_val is not None: + command["bypassDocumentValidation"] = bypass_doc_val # The command result has to be published for APM unmodified # so we make a shallow copy here before adding updatedExisting. 
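A minimal sketch of the tri-state option handling these hunks adopt (``build_insert_cmd`` is a hypothetical stand-in, not PyMongo's internals): ``None`` now means "unspecified", so the field is omitted and the server default applies, while an explicit ``False`` is sent to the server instead of being silently dropped by the old ``if bypass_doc_val:`` truthiness check::

    from typing import Any, Optional

    def build_insert_cmd(doc: dict[str, Any], bypass_doc_val: Optional[bool] = None) -> dict[str, Any]:
        cmd: dict[str, Any] = {"insert": "coll", "documents": [doc]}
        # None -> option not specified: omit the field entirely.
        # True or False -> forward the caller's explicit choice to the server.
        if bypass_doc_val is not None:
            cmd["bypassDocumentValidation"] = bypass_doc_val
        return cmd

    assert "bypassDocumentValidation" not in build_insert_cmd({"x": 1})
    assert build_insert_cmd({"x": 1}, False)["bypassDocumentValidation"] is False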
@@ -1082,7 +1082,7 @@ async def _update_retryable( write_concern: Optional[WriteConcern] = None, op_id: Optional[int] = None, ordered: bool = True, - bypass_doc_val: Optional[bool] = False, + bypass_doc_val: Optional[bool] = None, collation: Optional[_CollationIn] = None, array_filters: Optional[Sequence[Mapping[str, Any]]] = None, hint: Optional[_IndexKeyHint] = None, @@ -1128,7 +1128,7 @@ async def replace_one( filter: Mapping[str, Any], replacement: Mapping[str, Any], upsert: bool = False, - bypass_document_validation: bool = False, + bypass_document_validation: Optional[bool] = None, collation: Optional[_CollationIn] = None, hint: Optional[_IndexKeyHint] = None, session: Optional[AsyncClientSession] = None, @@ -1237,7 +1237,7 @@ async def update_one( filter: Mapping[str, Any], update: Union[Mapping[str, Any], _Pipeline], upsert: bool = False, - bypass_document_validation: bool = False, + bypass_document_validation: Optional[bool] = None, collation: Optional[_CollationIn] = None, array_filters: Optional[Sequence[Mapping[str, Any]]] = None, hint: Optional[_IndexKeyHint] = None, @@ -1776,6 +1776,15 @@ def find(self, *args: Any, **kwargs: Any) -> AsyncCursor[_DocumentType]: improper type. Returns an instance of :class:`~pymongo.asynchronous.cursor.AsyncCursor` corresponding to this query. + Cursors are closed automatically when they are exhausted (the last batch of data is retrieved from the database). + If a cursor is not exhausted, it will be closed automatically upon garbage collection, which leaves resources open but unused for a potentially long period of time. + To avoid this, best practice is to call :meth:`AsyncCursor.close` when the cursor is no longer needed, + or use the cursor in a with statement:: + + async with collection.find() as cursor: + async for doc in cursor: + print(doc) + The :meth:`find` method obeys the :attr:`read_preference` of this :class:`AsyncCollection`. @@ -2041,7 +2050,7 @@ async def estimated_document_count(self, comment: Optional[Any] = None, **kwargs .. versionchanged:: 4.2 This method now always uses the `count`_ command. Due to an oversight in versions 5.0.0-5.0.8 of MongoDB, the count command was not included in V1 of the - :ref:`versioned-api-ref`. Users of the Stable API with estimated_document_count are + `versioned API `_. Users of the Stable API with estimated_document_count are recommended to upgrade their server version to 5.0.9+ or set :attr:`pymongo.server_api.ServerApi.strict` to ``False`` to avoid encountering errors. 
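A hedged usage sketch contrasting the two counting APIs documented above, assuming an existing async collection ``coll``: :meth:`estimated_document_count` issues the metadata-based ``count`` command and takes no filter, while :meth:`count_documents` runs the ``$group`` aggregation shown in the next hunk::

    async def report_counts(coll) -> None:
        # Fast metadata-based estimate via the `count` command (no filter support).
        estimate = await coll.estimated_document_count()
        # Exact, filterable count implemented as an aggregate with $group.
        exact = await coll.count_documents({"qty": {"$gte": 100}})
        print(f"~{estimate} total, {exact} matching")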
@@ -2135,11 +2144,9 @@ async def count_documents( if comment is not None: kwargs["comment"] = comment pipeline.append({"$group": {"_id": 1, "n": {"$sum": 1}}}) - cmd = {"aggregate": self._name, "pipeline": pipeline, "cursor": {}} if "hint" in kwargs and not isinstance(kwargs["hint"], str): kwargs["hint"] = helpers_shared._index_document(kwargs["hint"]) collation = validate_collation_or_none(kwargs.pop("collation", None)) - cmd.update(kwargs) async def _cmd( session: Optional[AsyncClientSession], @@ -2147,6 +2154,8 @@ async def _cmd( conn: AsyncConnection, read_preference: Optional[_ServerMode], ) -> int: + cmd: dict[str, Any] = {"aggregate": self._name, "pipeline": pipeline, "cursor": {}} + cmd.update(kwargs) result = await self._aggregate_one_result( conn, read_preference, cmd, collation, session ) @@ -2475,7 +2484,7 @@ async def _drop_index( name = helpers_shared._gen_index_name(index_or_name) if not isinstance(name, str): - raise TypeError("index_or_name must be an instance of str or list") + raise TypeError(f"index_or_name must be an instance of str or list, not {type(name)}") cmd = {"dropIndexes": self._name, "index": name} cmd.update(kwargs) @@ -2503,6 +2512,15 @@ async def list_indexes( ... SON([('v', 2), ('key', SON([('_id', 1)])), ('name', '_id_')]) + Cursors are closed automatically when they are exhausted (the last batch of data is retrieved from the database). + If a cursor is not exhausted, it will be closed automatically upon garbage collection, which leaves resources open but unused for a potentially long period of time. + To avoid this, best practice is to call :meth:`AsyncCursor.close` when the cursor is no longer needed, + or use the cursor in a with statement:: + + async with await collection.list_indexes() as cursor: + async for index in cursor: + print(index) + :param session: a :class:`~pymongo.asynchronous.client_session.AsyncClientSession`. :param comment: A user-provided comment to attach to this @@ -2525,13 +2543,12 @@ async def _list_indexes( session: Optional[AsyncClientSession] = None, comment: Optional[Any] = None, ) -> AsyncCommandCursor[MutableMapping[str, Any]]: - codec_options: CodecOptions = CodecOptions(SON) + codec_options: CodecOptions[Mapping[str, Any]] = CodecOptions(SON) coll = cast( AsyncCollection[MutableMapping[str, Any]], self.with_options(codec_options=codec_options, read_preference=ReadPreference.PRIMARY), ) read_pref = (session and session._txn_read_preference()) or ReadPreference.PRIMARY - explicit_session = session is not None async def _cmd( session: Optional[AsyncClientSession], @@ -2558,13 +2575,12 @@ async def _cmd( cursor, conn.address, session=session, - explicit_session=explicit_session, comment=cmd.get("comment"), ) await cmd_cursor._maybe_pin_connection(conn) return cmd_cursor - async with self._database.client._tmp_session(session, False) as s: + async with self._database.client._tmp_session(session) as s: return await self._database.client._retryable_read( _cmd, read_pref, s, operation=_Op.LIST_INDEXES ) @@ -2620,6 +2636,15 @@ async def list_search_indexes( ) -> AsyncCommandCursor[Mapping[str, Any]]: """Return a cursor over search indexes for the current collection. + Cursors are closed automatically when they are exhausted (the last batch of data is retrieved from the database). + If a cursor is not exhausted, it will be closed automatically upon garbage collection, which leaves resources open but unused for a potentially long period of time. 
+ To avoid this, best practice is to call :meth:`AsyncCursor.close` when the cursor is no longer needed, + or use the cursor in a with statement:: + + async with await collection.list_search_indexes() as cursor: + async for index in cursor: + print(index) + :param name: If given, the name of the index to search for. Only indexes with matching index names will be returned. If not given, all search indexes for the current collection @@ -2651,7 +2676,6 @@ async def list_search_indexes( ) -> AsyncCommandCursor[Mapping[str, Any]]: AsyncCommandCursor, pipeline, kwargs, - explicit_session=session is not None, comment=comment, user_fields={"cursor": {"firstBatch": 1}}, ) @@ -2871,9 +2895,8 @@ async def _aggregate( self, aggregation_command: Type[_AggregationCommand], pipeline: _Pipeline, - cursor_class: Type[AsyncCommandCursor], + cursor_class: Type[AsyncCommandCursor], # type: ignore[type-arg] session: Optional[AsyncClientSession], - explicit_session: bool, let: Optional[Mapping[str, Any]] = None, comment: Optional[Any] = None, **kwargs: Any, @@ -2885,7 +2908,6 @@ async def _aggregate( cursor_class, pipeline, kwargs, - explicit_session, let, user_fields={"cursor": {"firstBatch": 1}}, ) @@ -2916,12 +2938,21 @@ async def aggregate( .. note:: This method does not support the 'explain' option. Please use `PyMongoExplain `_ - instead. An example is included in the :ref:`aggregate-examples` + instead. An example is included in the `aggregation example `_ documentation. .. note:: The :attr:`~pymongo.asynchronous.collection.AsyncCollection.write_concern` of this collection is automatically applied to this operation. + Cursors are closed automatically when they are exhausted (the last batch of data is retrieved from the database). + If a cursor is not exhausted, it will be closed automatically upon garbage collection, which leaves resources open but unused for a potentially long period of time. + To avoid this, best practice is to call :meth:`AsyncCursor.close` when the cursor is no longer needed, + or use the cursor in a with statement:: + + async with await collection.aggregate(pipeline) as cursor: + async for operation in cursor: + print(operation) + :param pipeline: a list of aggregation pipeline stages :param session: a :class:`~pymongo.asynchronous.client_session.AsyncClientSession`. @@ -2948,6 +2979,7 @@ async def aggregate( returning aggregate results using a cursor. - `collation` (optional): An instance of :class:`~pymongo.collation.Collation`. + - `bypassDocumentValidation` (bool): If ``True``, allows the write to opt out of document-level validation. :return: A :class:`~pymongo.asynchronous.command_cursor.AsyncCommandCursor` over the result @@ -2976,18 +3008,17 @@ async def aggregate( The :meth:`aggregate` method always returns an AsyncCommandCursor. The pipeline argument must be a list. - .. seealso:: :doc:`/examples/aggregation` + .. seealso:: `Aggregation `_ ..
_aggregate command: https://mongodb.com/docs/manual/reference/command/aggregate """ - async with self._database.client._tmp_session(session, close=False) as s: + async with self._database.client._tmp_session(session) as s: return await self._aggregate( _CollectionAggregationCommand, pipeline, AsyncCommandCursor, session=s, - explicit_session=session is not None, let=let, comment=comment, **kwargs, @@ -3028,7 +3059,7 @@ async def aggregate_raw_batches( raise InvalidOperation("aggregate_raw_batches does not support auto encryption") if comment is not None: kwargs["comment"] = comment - async with self._database.client._tmp_session(session, close=False) as s: + async with self._database.client._tmp_session(session) as s: return cast( AsyncRawBatchCursor[_DocumentType], await self._aggregate( @@ -3036,7 +3067,6 @@ async def aggregate_raw_batches( pipeline, AsyncRawBatchCommandCursor, session=s, - explicit_session=session is not None, **kwargs, ), ) @@ -3078,7 +3108,7 @@ async def rename( """ if not isinstance(new_name, str): - raise TypeError("new_name must be an instance of str") + raise TypeError(f"new_name must be an instance of str, not {type(new_name)}") if not new_name or ".." in new_name: raise InvalidName("collection names cannot be empty") @@ -3111,8 +3141,9 @@ async def distinct( filter: Optional[Mapping[str, Any]] = None, session: Optional[AsyncClientSession] = None, comment: Optional[Any] = None, + hint: Optional[_IndexKeyHint] = None, **kwargs: Any, - ) -> list: + ) -> list[Any]: """Get a list of distinct values for `key` among all documents in this collection. @@ -3138,8 +3169,15 @@ async def distinct( :class:`~pymongo.asynchronous.client_session.AsyncClientSession`. :param comment: A user-provided comment to attach to this command. + :param hint: An index to use to support the query + predicate specified either by its string name, or in the same + format as passed to :meth:`~pymongo.asynchronous.collection.AsyncCollection.create_index` + (e.g. ``[('field', ASCENDING)]``). :param kwargs: See list of options above. + .. versionchanged:: 4.12 + Added ``hint`` parameter. + .. versionchanged:: 3.6 Added ``session`` parameter. 
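A usage sketch for the ``hint`` parameter added to :meth:`distinct` above; the collection, field, and index here are illustrative assumptions. A hint may be given as an index name string or as a key document in the same form accepted by :meth:`create_index`::

    from pymongo import ASCENDING

    async def distinct_skus(coll) -> list:
        # Equivalent string form, assuming that index exists: hint="sku_1"
        return await coll.distinct("sku", {"qty": {"$gte": 100}}, hint=[("sku", ASCENDING)])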
@@ -3148,23 +3186,28 @@ async def distinct( """ if not isinstance(key, str): - raise TypeError("key must be an instance of str") - cmd = {"distinct": self._name, "key": key} + raise TypeError(f"key must be an instance of str, not {type(key)}") if filter is not None: if "query" in kwargs: raise ConfigurationError("can't pass both filter and query") kwargs["query"] = filter collation = validate_collation_or_none(kwargs.pop("collation", None)) - cmd.update(kwargs) - if comment is not None: - cmd["comment"] = comment + if hint is not None: + if not isinstance(hint, str): + hint = helpers_shared._index_document(hint) async def _cmd( session: Optional[AsyncClientSession], _server: Server, conn: AsyncConnection, read_preference: Optional[_ServerMode], - ) -> list: + ) -> list: # type: ignore[type-arg] + cmd = {"distinct": self._name, "key": key} + cmd.update(kwargs) + if comment is not None: + cmd["comment"] = comment + if hint is not None: + cmd["hint"] = hint # type: ignore[assignment] return ( await self._command( conn, @@ -3189,37 +3232,36 @@ async def _find_and_modify( array_filters: Optional[Sequence[Mapping[str, Any]]] = None, hint: Optional[_IndexKeyHint] = None, session: Optional[AsyncClientSession] = None, - let: Optional[Mapping] = None, + let: Optional[Mapping[str, Any]] = None, **kwargs: Any, ) -> Any: """Internal findAndModify helper.""" common.validate_is_mapping("filter", filter) if not isinstance(return_document, bool): raise ValueError( - "return_document must be ReturnDocument.BEFORE or ReturnDocument.AFTER" + f"return_document must be ReturnDocument.BEFORE or ReturnDocument.AFTER, not {type(return_document)}" ) collation = validate_collation_or_none(kwargs.pop("collation", None)) - cmd = {"findAndModify": self._name, "query": filter, "new": return_document} - if let is not None: - common.validate_is_mapping("let", let) - cmd["let"] = let - cmd.update(kwargs) - if projection is not None: - cmd["fields"] = helpers_shared._fields_list_to_dict(projection, "projection") - if sort is not None: - cmd["sort"] = helpers_shared._index_document(sort) - if upsert is not None: - validate_boolean("upsert", upsert) - cmd["upsert"] = upsert if hint is not None: if not isinstance(hint, str): hint = helpers_shared._index_document(hint) - - write_concern = self._write_concern_for_cmd(cmd, session) + write_concern = self._write_concern_for_cmd(kwargs, session) async def _find_and_modify_helper( session: Optional[AsyncClientSession], conn: AsyncConnection, retryable_write: bool ) -> Any: + cmd = {"findAndModify": self._name, "query": filter, "new": return_document} + if let is not None: + common.validate_is_mapping("let", let) + cmd["let"] = let + cmd.update(kwargs) + if projection is not None: + cmd["fields"] = helpers_shared._fields_list_to_dict(projection, "projection") + if sort is not None: + cmd["sort"] = helpers_shared._index_document(sort) + if upsert is not None: + validate_boolean("upsert", upsert) + cmd["upsert"] = upsert acknowledged = write_concern.acknowledged if array_filters is not None: if not acknowledged: diff --git a/pymongo/asynchronous/command_cursor.py b/pymongo/asynchronous/command_cursor.py index 5a4559bd77..e18b3a330e 100644 --- a/pymongo/asynchronous/command_cursor.py +++ b/pymongo/asynchronous/command_cursor.py @@ -64,7 +64,6 @@ def __init__( batch_size: int = 0, max_await_time_ms: Optional[int] = None, session: Optional[AsyncClientSession] = None, - explicit_session: bool = False, comment: Any = None, ) -> None: """Create a new command cursor.""" @@ -80,7 +79,8 @@ def 
__init__( self._max_await_time_ms = max_await_time_ms self._timeout = self._collection.database.client.options.timeout self._session = session - self._explicit_session = explicit_session + if self._session is not None: + self._session._attached_to_cursor = True self._killed = self._id == 0 self._comment = comment if self._killed: @@ -94,7 +94,9 @@ def __init__( self.batch_size(batch_size) if not isinstance(max_await_time_ms, int) and max_await_time_ms is not None: - raise TypeError("max_await_time_ms must be an integer or None") + raise TypeError( + f"max_await_time_ms must be an integer or None, not {type(max_await_time_ms)}" + ) def __del__(self) -> None: self._die_no_lock() @@ -115,7 +117,7 @@ def batch_size(self, batch_size: int) -> AsyncCommandCursor[_DocumentType]: :param batch_size: The size of each batch of results requested. """ if not isinstance(batch_size, int): - raise TypeError("batch_size must be an integer") + raise TypeError(f"batch_size must be an integer, not {type(batch_size)}") if batch_size < 0: raise ValueError("batch_size must be >= 0") @@ -195,7 +197,7 @@ def session(self) -> Optional[AsyncClientSession]: .. versionadded:: 3.6 """ - if self._explicit_session: + if self._session and not self._session._implicit: return self._session return None @@ -216,9 +218,10 @@ def _die_no_lock(self) -> None: """Closes this cursor without acquiring a lock.""" cursor_id, address = self._prepare_to_die() self._collection.database.client._cleanup_cursor_no_lock( - cursor_id, address, self._sock_mgr, self._session, self._explicit_session + cursor_id, address, self._sock_mgr, self._session ) - if not self._explicit_session: + if self._session and self._session._implicit: + self._session._attached_to_cursor = False self._session = None self._sock_mgr = None @@ -230,14 +233,15 @@ async def _die_lock(self) -> None: address, self._sock_mgr, self._session, - self._explicit_session, ) - if not self._explicit_session: + if self._session and self._session._implicit: + self._session._attached_to_cursor = False self._session = None self._sock_mgr = None def _end_session(self) -> None: - if self._session and not self._explicit_session: + if self._session and self._session._implicit: + self._session._attached_to_cursor = False self._session._end_implicit_session() self._session = None @@ -348,7 +352,7 @@ async def _try_next(self, get_more_allowed: bool) -> Optional[_DocumentType]: else: return None - async def _next_batch(self, result: list, total: Optional[int] = None) -> bool: + async def _next_batch(self, result: list, total: Optional[int] = None) -> bool: # type: ignore[type-arg] """Get all or some available documents from the cursor.""" if not len(self._data) and not self._killed: await self._refresh() @@ -428,7 +432,6 @@ def __init__( batch_size: int = 0, max_await_time_ms: Optional[int] = None, session: Optional[AsyncClientSession] = None, - explicit_session: bool = False, comment: Any = None, ) -> None: """Create a new cursor / iterator over raw batches of BSON data. 
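A small sketch of the session-visibility rule the cursor classes now derive from ``session._implicit`` (replacing the removed ``explicit_session`` flag): :attr:`session` exposes only a session the caller supplied explicitly, while an implicit session stays internal and is released when the cursor dies::

    async def show_cursor_sessions(client, coll) -> None:
        async with client.start_session() as s:
            explicit = coll.find({}, session=s)
            assert explicit.session is s   # explicitly supplied -> exposed
        implicit = coll.find({})
        assert implicit.session is None    # implicit session -> kept internal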
@@ -447,7 +450,6 @@ def __init__( batch_size, max_await_time_ms, session, - explicit_session, comment, ) @@ -455,7 +457,7 @@ def _unpack_response( # type: ignore[override] self, response: Union[_OpReply, _OpMsg], cursor_id: Optional[int], - codec_options: CodecOptions, + codec_options: CodecOptions[dict[str, Any]], user_fields: Optional[Mapping[str, Any]] = None, legacy_response: bool = False, ) -> list[Mapping[str, Any]]: diff --git a/pymongo/asynchronous/cursor.py b/pymongo/asynchronous/cursor.py index 8193e53282..f19d3f6cee 100644 --- a/pymongo/asynchronous/cursor.py +++ b/pymongo/asynchronous/cursor.py @@ -37,7 +37,6 @@ from bson.code import Code from bson.son import SON from pymongo import _csot, helpers_shared -from pymongo.asynchronous.helpers import anext from pymongo.collation import validate_collation_or_none from pymongo.common import ( validate_is_document_type, @@ -138,19 +137,18 @@ def __init__( if session: self._session = session - self._explicit_session = True + self._session._attached_to_cursor = True else: self._session = None - self._explicit_session = False spec: Mapping[str, Any] = filter or {} validate_is_mapping("filter", spec) if not isinstance(skip, int): - raise TypeError("skip must be an instance of int") + raise TypeError(f"skip must be an instance of int, not {type(skip)}") if not isinstance(limit, int): - raise TypeError("limit must be an instance of int") + raise TypeError(f"limit must be an instance of int, not {type(limit)}") validate_boolean("no_cursor_timeout", no_cursor_timeout) - if no_cursor_timeout and not self._explicit_session: + if no_cursor_timeout and self._session and self._session._implicit: warnings.warn( "use an explicit session with no_cursor_timeout=True " "otherwise the cursor may still timeout after " @@ -171,7 +169,7 @@ def __init__( validate_boolean("allow_partial_results", allow_partial_results) validate_boolean("oplog_replay", oplog_replay) if not isinstance(batch_size, int): - raise TypeError("batch_size must be an integer") + raise TypeError(f"batch_size must be an integer, not {type(batch_size)}") if batch_size < 0: raise ValueError("batch_size must be >= 0") # Only set if allow_disk_use is provided by the user, else None. @@ -216,7 +214,7 @@ def __init__( # it anytime we change __limit. 
self._empty = False - self._data: deque = deque() + self._data: deque = deque() # type: ignore[type-arg] self._address: Optional[_Address] = None self._retrieved = 0 @@ -280,10 +278,10 @@ def clone(self) -> AsyncCursor[_DocumentType]: """ return self._clone(True) - def _clone(self, deepcopy: bool = True, base: Optional[AsyncCursor] = None) -> AsyncCursor: + def _clone(self, deepcopy: bool = True, base: Optional[AsyncCursor] = None) -> AsyncCursor: # type: ignore[type-arg] """Internal clone helper.""" if not base: - if self._explicit_session: + if self._session and not self._session._implicit: base = self._clone_base(self._session) else: base = self._clone_base(None) @@ -322,7 +320,7 @@ def _clone(self, deepcopy: bool = True, base: Optional[AsyncCursor] = None) -> A base.__dict__.update(data) return base - def _clone_base(self, session: Optional[AsyncClientSession]) -> AsyncCursor: + def _clone_base(self, session: Optional[AsyncClientSession]) -> AsyncCursor: # type: ignore[type-arg] """Creates an empty AsyncCursor object for information to be copied into.""" return self.__class__(self._collection, session=session) @@ -388,7 +386,7 @@ async def add_option(self, mask: int) -> AsyncCursor[_DocumentType]: cursor.add_option(2) """ if not isinstance(mask, int): - raise TypeError("mask must be an int") + raise TypeError(f"mask must be an int, not {type(mask)}") self._check_okay_to_chain() if mask & _QUERY_OPTIONS["exhaust"]: @@ -408,7 +406,7 @@ def remove_option(self, mask: int) -> AsyncCursor[_DocumentType]: cursor.remove_option(2) """ if not isinstance(mask, int): - raise TypeError("mask must be an int") + raise TypeError(f"mask must be an int, not {type(mask)}") self._check_okay_to_chain() if mask & _QUERY_OPTIONS["exhaust"]: @@ -432,7 +430,7 @@ def allow_disk_use(self, allow_disk_use: bool) -> AsyncCursor[_DocumentType]: .. versionadded:: 3.11 """ if not isinstance(allow_disk_use, bool): - raise TypeError("allow_disk_use must be a bool") + raise TypeError(f"allow_disk_use must be a bool, not {type(allow_disk_use)}") self._check_okay_to_chain() self._allow_disk_use = allow_disk_use @@ -451,7 +449,7 @@ def limit(self, limit: int) -> AsyncCursor[_DocumentType]: .. seealso:: The MongoDB documentation on `limit `_. """ if not isinstance(limit, int): - raise TypeError("limit must be an integer") + raise TypeError(f"limit must be an integer, not {type(limit)}") if self._exhaust: raise InvalidOperation("Can't use limit and exhaust together.") self._check_okay_to_chain() @@ -479,7 +477,7 @@ def batch_size(self, batch_size: int) -> AsyncCursor[_DocumentType]: :param batch_size: The size of each batch of results requested. 
""" if not isinstance(batch_size, int): - raise TypeError("batch_size must be an integer") + raise TypeError(f"batch_size must be an integer, not {type(batch_size)}") if batch_size < 0: raise ValueError("batch_size must be >= 0") self._check_okay_to_chain() @@ -499,7 +497,7 @@ def skip(self, skip: int) -> AsyncCursor[_DocumentType]: :param skip: the number of results to skip """ if not isinstance(skip, int): - raise TypeError("skip must be an integer") + raise TypeError(f"skip must be an integer, not {type(skip)}") if skip < 0: raise ValueError("skip must be >= 0") self._check_okay_to_chain() @@ -520,7 +518,7 @@ def max_time_ms(self, max_time_ms: Optional[int]) -> AsyncCursor[_DocumentType]: :param max_time_ms: the time limit after which the operation is aborted """ if not isinstance(max_time_ms, int) and max_time_ms is not None: - raise TypeError("max_time_ms must be an integer or None") + raise TypeError(f"max_time_ms must be an integer or None, not {type(max_time_ms)}") self._check_okay_to_chain() self._max_time_ms = max_time_ms @@ -543,7 +541,9 @@ def max_await_time_ms(self, max_await_time_ms: Optional[int]) -> AsyncCursor[_Do .. versionadded:: 3.2 """ if not isinstance(max_await_time_ms, int) and max_await_time_ms is not None: - raise TypeError("max_await_time_ms must be an integer or None") + raise TypeError( + f"max_await_time_ms must be an integer or None, not {type(max_await_time_ms)}" + ) self._check_okay_to_chain() # Ignore max_await_time_ms if not tailable or await_data is False. @@ -679,7 +679,7 @@ def max(self, spec: _Sort) -> AsyncCursor[_DocumentType]: .. versionadded:: 2.7 """ if not isinstance(spec, (list, tuple)): - raise TypeError("spec must be an instance of list or tuple") + raise TypeError(f"spec must be an instance of list or tuple, not {type(spec)}") self._check_okay_to_chain() self._max = dict(spec) @@ -701,7 +701,7 @@ def min(self, spec: _Sort) -> AsyncCursor[_DocumentType]: .. versionadded:: 2.7 """ if not isinstance(spec, (list, tuple)): - raise TypeError("spec must be an instance of list or tuple") + raise TypeError(f"spec must be an instance of list or tuple, not {type(spec)}") self._check_okay_to_chain() self._min = dict(spec) @@ -765,6 +765,8 @@ async def explain(self) -> _DocumentType: :meth:`~pymongo.asynchronous.database.AsyncDatabase.command` to run the explain command directly. + .. note:: The timeout of this method can be set using :func:`pymongo.timeout`. + .. seealso:: The MongoDB documentation on `explain `_. """ c = self.clone() @@ -860,7 +862,7 @@ def where(self, code: Union[str, Code]) -> AsyncCursor[_DocumentType]: if self._has_filter: spec = dict(self._spec) else: - spec = cast(dict, self._spec) + spec = cast(dict, self._spec) # type: ignore[type-arg] spec["$where"] = code self._spec = spec return self @@ -884,7 +886,7 @@ def _unpack_response( self, response: Union[_OpReply, _OpMsg], cursor_id: Optional[int], - codec_options: CodecOptions, + codec_options: CodecOptions, # type: ignore[type-arg] user_fields: Optional[Mapping[str, Any]] = None, legacy_response: bool = False, ) -> Sequence[_DocumentOut]: @@ -941,7 +943,7 @@ def session(self) -> Optional[AsyncClientSession]: .. 
versionadded:: 3.6 """ - if self._explicit_session: + if self._session and not self._session._implicit: return self._session return None @@ -960,29 +962,33 @@ def __deepcopy__(self, memo: Any) -> Any: return self._clone(deepcopy=True) @overload - def _deepcopy(self, x: Iterable, memo: Optional[dict[int, Union[list, dict]]] = None) -> list: + def _deepcopy(self, x: Iterable, memo: Optional[dict[int, Union[list, dict]]] = None) -> list: # type: ignore[type-arg] ... @overload def _deepcopy( - self, x: SupportsItems, memo: Optional[dict[int, Union[list, dict]]] = None - ) -> dict: + self, + x: SupportsItems, # type: ignore[type-arg] + memo: Optional[dict[int, Union[list, dict]]] = None, # type: ignore[type-arg] + ) -> dict: # type: ignore[type-arg] ... def _deepcopy( - self, x: Union[Iterable, SupportsItems], memo: Optional[dict[int, Union[list, dict]]] = None - ) -> Union[list, dict]: + self, + x: Union[Iterable, SupportsItems], # type: ignore[type-arg] + memo: Optional[dict[int, Union[list, dict]]] = None, # type: ignore[type-arg] + ) -> Union[list[Any], dict[str, Any]]: """Deepcopy helper for the data dictionary or list. Regular expressions cannot be deep copied but as they are immutable we don't have to copy them when cloning. """ - y: Union[list, dict] + y: Union[list[Any], dict[str, Any]] iterator: Iterable[tuple[Any, Any]] if not hasattr(x, "items"): y, is_list, iterator = [], True, enumerate(x) else: - y, is_list, iterator = {}, False, cast("SupportsItems", x).items() + y, is_list, iterator = {}, False, cast("SupportsItems", x).items() # type: ignore[type-arg] if memo is None: memo = {} val_id = id(x) @@ -1001,7 +1007,7 @@ def _deepcopy( else: if not isinstance(key, RE_TYPE): key = copy.deepcopy(key, memo) # noqa: PLW2901 - y[key] = value + y[key] = value # type:ignore[index] return y def _prepare_to_die(self, already_killed: bool) -> tuple[int, Optional[_CursorAddress]]: @@ -1026,9 +1032,10 @@ def _die_no_lock(self) -> None: cursor_id, address = self._prepare_to_die(already_killed) self._collection.database.client._cleanup_cursor_no_lock( - cursor_id, address, self._sock_mgr, self._session, self._explicit_session + cursor_id, address, self._sock_mgr, self._session ) - if not self._explicit_session: + if self._session and self._session._implicit: + self._session._attached_to_cursor = False self._session = None self._sock_mgr = None @@ -1046,9 +1053,9 @@ async def _die_lock(self) -> None: address, self._sock_mgr, self._session, - self._explicit_session, ) - if not self._explicit_session: + if self._session and self._session._implicit: + self._session._attached_to_cursor = False self._session = None self._sock_mgr = None @@ -1056,7 +1063,7 @@ async def close(self) -> None: """Explicitly close / kill this cursor.""" await self._die_lock() - async def distinct(self, key: str) -> list: + async def distinct(self, key: str) -> list[Any]: """Get a list of distinct values for `key` among all documents in the result set of this query. @@ -1124,10 +1131,10 @@ async def _send_message(self, operation: Union[_Query, _GetMore]) -> None: self._killed = True await self.close() raise - except Exception: + # Catch KeyboardInterrupt, CancelledError, etc. and cleanup. 
+ except BaseException: await self.close() raise - self._address = response.address if isinstance(response, PinnedResponse): if not self._sock_mgr: @@ -1261,7 +1268,7 @@ async def next(self) -> _DocumentType: else: raise StopAsyncIteration - async def _next_batch(self, result: list, total: Optional[int] = None) -> bool: + async def _next_batch(self, result: list, total: Optional[int] = None) -> bool: # type: ignore[type-arg] """Get all or some documents from the cursor.""" if not self._exhaust_checked: self._exhaust_checked = True @@ -1321,7 +1328,7 @@ async def to_list(self, length: Optional[int] = None) -> list[_DocumentType]: return res -class AsyncRawBatchCursor(AsyncCursor, Generic[_DocumentType]): +class AsyncRawBatchCursor(AsyncCursor, Generic[_DocumentType]): # type: ignore[type-arg] """An asynchronous cursor / iterator over raw batches of BSON data from a query result.""" _query_class = _RawBatchQuery diff --git a/pymongo/asynchronous/database.py b/pymongo/asynchronous/database.py index 98a0a6ff3b..8e0afc9dc9 100644 --- a/pymongo/asynchronous/database.py +++ b/pymongo/asynchronous/database.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -122,7 +122,7 @@ def __init__( from pymongo.asynchronous.mongo_client import AsyncMongoClient if not isinstance(name, str): - raise TypeError("name must be an instance of str") + raise TypeError(f"name must be an instance of str, not {type(name)}") if not isinstance(client, AsyncMongoClient): # This is for compatibility with mocked and subclassed types, such as in Motor. @@ -611,6 +611,8 @@ async def create_collection( common.validate_is_mapping("clusteredIndex", clustered_index) async with self._client._tmp_session(session) as s: + if s and not s.in_transaction: + s._leave_alive = True # Skip this check in a transaction where listCollections is not # supported. if ( @@ -619,6 +621,8 @@ async def create_collection( and name in await self._list_collection_names(filter={"name": name}, session=s) ): raise CollectionInvalid("collection %s already exists" % name) + if s: + s._leave_alive = False coll = AsyncCollection( self, name, @@ -643,8 +647,8 @@ async def aggregate( .. code-block:: python # Lists all operations currently running on the server. - with client.admin.aggregate([{"$currentOp": {}}]) as cursor: - for operation in cursor: + async with await client.admin.aggregate([{"$currentOp": {}}]) as cursor: + async for operation in cursor: print(operation) The :meth:`aggregate` method obeys the :attr:`read_preference` of this @@ -652,6 +656,11 @@ async def aggregate( which case :attr:`~pymongo.read_preferences.ReadPreference.PRIMARY` is used. + Cursors are closed automatically when they are exhausted (the last batch of data is retrieved from the database). + If a cursor is not exhausted, it will be closed automatically upon garbage collection, which leaves resources open but unused for a potentially long period of time. + To avoid this, best practice is to call :meth:`AsyncCommandCursor.close` when the cursor is no longer needed, + or use the cursor in an ``async with`` statement. + .. note:: This method does not support the 'explain' option. Please use :meth:`~pymongo.asynchronous.database.AsyncDatabase.command` instead.
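A short sketch of the explicit-close guidance added above, reusing the ``$currentOp`` pipeline from the docstring example (a reachable server is assumed; everything else is illustrative)::

    import asyncio

    from pymongo import AsyncMongoClient

    async def main() -> None:
        client = AsyncMongoClient()
        cursor = await client.admin.aggregate([{"$currentOp": {}}])
        try:
            async for operation in cursor:
                print(operation)
        finally:
            # Close explicitly instead of waiting for garbage collection.
            await cursor.close()
        await client.close()

    asyncio.run(main())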
@@ -694,13 +703,12 @@ async def aggregate( .. _aggregate command: https://mongodb.com/docs/manual/reference/command/aggregate """ - async with self.client._tmp_session(session, close=False) as s: + async with self.client._tmp_session(session) as s: cmd = _DatabaseAggregationCommand( self, AsyncCommandCursor, pipeline, kwargs, - session is not None, user_fields={"cursor": {"firstBatch": 1}}, ) return await self.client._retryable_read( @@ -771,7 +779,7 @@ async def _command( self._name, command, read_preference, - codec_options, + codec_options, # type: ignore[arg-type] check, allowable_errors, write_concern=write_concern, @@ -893,7 +901,7 @@ async def command( when decoding the command response. .. note:: If this client has been configured to use MongoDB Stable - API (see :ref:`versioned-api-ref`), then :meth:`command` will + API (see `versioned API `_), then :meth:`command` will automatically add API versioning options to the given command. Explicitly adding API versioning options in the command and declaring an API version on the client is not supported. @@ -994,7 +1002,7 @@ async def cursor_command( when decoding the command response. .. note:: If this client has been configured to use MongoDB Stable - API (see :ref:`versioned-api-ref`), then :meth:`command` will + API (see `versioned API `_), then :meth:`command` will automatically add API versioning options to the given command. Explicitly adding API versioning options in the command and declaring an API version on the client is not supported. @@ -1006,7 +1014,7 @@ async def cursor_command( else: command_name = next(iter(command)) - async with self._client._tmp_session(session, close=False) as tmp_session: + async with self._client._tmp_session(session) as tmp_session: opts = codec_options or DEFAULT_CODEC_OPTIONS if read_preference is None: @@ -1038,7 +1046,6 @@ async def cursor_command( conn.address, max_await_time_ms=max_await_time_ms, session=tmp_session, - explicit_session=session is not None, comment=comment, ) await cmd_cursor._maybe_pin_connection(conn) @@ -1084,7 +1091,7 @@ async def _list_collections( ) cmd = {"listCollections": 1, "cursor": {}} cmd.update(kwargs) - async with self._client._tmp_session(session, close=False) as tmp_session: + async with self._client._tmp_session(session) as tmp_session: cursor = ( await self._command(conn, cmd, read_preference=read_preference, session=tmp_session) )["cursor"] @@ -1093,7 +1100,6 @@ async def _list_collections( cursor, conn.address, session=tmp_session, - explicit_session=session is not None, comment=cmd.get("comment"), ) await cmd_cursor._maybe_pin_connection(conn) @@ -1154,6 +1160,15 @@ async def list_collections( ) -> AsyncCommandCursor[MutableMapping[str, Any]]: """Get a cursor over the collections of this database. + Cursors are closed automatically when they are exhausted (the last batch of data is retrieved from the database). + If a cursor is not exhausted, it will be closed automatically upon garbage collection, which leaves resources open but unused for a potentially long period of time. + To avoid this, best practice is to call :meth:`AsyncCommandCursor.close` when the cursor is no longer needed, + or use the cursor in an ``async with`` statement:: + + async with await database.list_collections() as cursor: + async for collection in cursor: + print(collection) + :param session: a :class:`~pymongo.asynchronous.client_session.AsyncClientSession`.
:param filter: A query document to filter the list of @@ -1310,7 +1325,7 @@ async def drop_collection( name = name.name if not isinstance(name, str): - raise TypeError("name_or_collection must be an instance of str") + raise TypeError(f"name_or_collection must be an instance of str, not {type(name)}") encrypted_fields = await self._get_encrypted_fields( {"encryptedFields": encrypted_fields}, name, @@ -1374,7 +1389,9 @@ async def validate_collection( name = name.name if not isinstance(name, str): - raise TypeError("name_or_collection must be an instance of str or AsyncCollection") + raise TypeError( + f"name_or_collection must be an instance of str or AsyncCollection, not {type(name)}" + ) cmd = {"validate": name, "scandata": scandata, "full": full} if comment is not None: cmd["comment"] = comment diff --git a/pymongo/asynchronous/encryption.py b/pymongo/asynchronous/encryption.py index 1cf165e6a2..4dfd36aa49 100644 --- a/pymongo/asynchronous/encryption.py +++ b/pymongo/asynchronous/encryption.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -64,14 +64,14 @@ from pymongo.asynchronous.cursor import AsyncCursor from pymongo.asynchronous.database import AsyncDatabase from pymongo.asynchronous.mongo_client import AsyncMongoClient -from pymongo.asynchronous.pool import ( - _configured_socket, - _get_timeout_details, - _raise_connection_failure, -) from pymongo.common import CONNECT_TIMEOUT from pymongo.daemon import _spawn_daemon -from pymongo.encryption_options import AutoEncryptionOpts, RangeOpts +from pymongo.encryption_options import ( + AutoEncryptionOpts, + RangeOpts, + TextOpts, + check_min_pymongocrypt, +) from pymongo.errors import ( ConfigurationError, EncryptedCollectionError, @@ -80,14 +80,19 @@ NetworkTimeout, ServerSelectionTimeoutError, ) -from pymongo.network_layer import BLOCKING_IO_ERRORS, async_sendall +from pymongo.helpers_shared import _get_timeout_details +from pymongo.network_layer import async_socket_sendall from pymongo.operations import UpdateOne from pymongo.pool_options import PoolOptions +from pymongo.pool_shared import ( + _async_configured_socket, + _raise_connection_failure, +) from pymongo.read_concern import ReadConcern from pymongo.results import BulkWriteResult, DeleteResult -from pymongo.ssl_support import get_ssl_context +from pymongo.ssl_support import BLOCKING_IO_ERRORS, get_ssl_context from pymongo.typings import _DocumentType, _DocumentTypeArg -from pymongo.uri_parser import parse_host +from pymongo.uri_parser_shared import _parse_kms_tls_options, parse_host from pymongo.write_concern import WriteConcern if TYPE_CHECKING: @@ -113,7 +118,7 @@ async def _connect_kms(address: _Address, opts: PoolOptions) -> Union[socket.socket, _sslConn]: try: - return await _configured_socket(address, opts) + return await _async_configured_socket(address, opts) except Exception as exc: _raise_connection_failure(address, exc, timeout_details=_get_timeout_details(opts)) @@ -127,8 +132,6 @@ def _wrap_encryption_errors() -> Iterator[None]: # BSON encoding/decoding errors are unrelated to encryption so # we should propagate them unchanged. 
raise - except asyncio.CancelledError: - raise except Exception as exc: raise EncryptionError(exc) from exc @@ -159,6 +162,7 @@ def __init__( self.mongocryptd_client = mongocryptd_client self.opts = opts self._spawned = False + self._kms_ssl_contexts = opts._kms_ssl_contexts(_IS_SYNC) async def kms_request(self, kms_context: MongoCryptKmsContext) -> None: """Complete a KMS request. @@ -170,7 +174,7 @@ async def kms_request(self, kms_context: MongoCryptKmsContext) -> None: endpoint = kms_context.endpoint message = kms_context.message provider = kms_context.kms_provider - ctx = self.opts._kms_ssl_contexts.get(provider) + ctx = self._kms_ssl_contexts.get(provider) if ctx is None: # Enable strict certificate verification, OCSP, match hostname, and # SNI using the system default CA certificates. @@ -182,6 +186,7 @@ async def kms_request(self, kms_context: MongoCryptKmsContext) -> None: False, # allow_invalid_certificates False, # allow_invalid_hostnames False, # disable_ocsp_endpoint_check + _IS_SYNC, ) # CSOT: set timeout for socket creation. connect_timeout = max(_csot.clamp_remaining(_KMS_CONNECT_TIMEOUT), 0.001) @@ -198,10 +203,11 @@ async def kms_request(self, kms_context: MongoCryptKmsContext) -> None: try: conn = await _connect_kms(address, opts) try: - await async_sendall(conn, message) + await async_socket_sendall(conn, message) while kms_context.bytes_needed > 0: # CSOT: update timeout. conn.settimeout(max(_csot.clamp_remaining(_KMS_CONNECT_TIMEOUT), 0)) + data: memoryview | bytes if _IS_SYNC: data = conn.recv(kms_context.bytes_needed) else: @@ -219,7 +225,14 @@ async def kms_request(self, kms_context: MongoCryptKmsContext) -> None: # Wrap I/O errors in PyMongo exceptions. if isinstance(exc, BLOCKING_IO_ERRORS): exc = socket.timeout("timed out") - _raise_connection_failure(address, exc, timeout_details=_get_timeout_details(opts)) + # Async raises an OSError instead of returning empty bytes. + if isinstance(exc, OSError): + msg_prefix = "KMS connection closed" + else: + msg_prefix = None + _raise_connection_failure( + address, exc, msg_prefix=msg_prefix, timeout_details=_get_timeout_details(opts) + ) finally: conn.close() except MongoCryptError: @@ -237,7 +250,7 @@ async def kms_request(self, kms_context: MongoCryptKmsContext) -> None: ) raise exc from final_err - async def collection_info(self, database: str, filter: bytes) -> Optional[bytes]: + async def collection_info(self, database: str, filter: bytes) -> Optional[list[bytes]]: """Get the collection info for a namespace. The returned collection info is passed to libmongocrypt which reads @@ -246,14 +259,12 @@ async def collection_info(self, database: str, filter: bytes) -> Optional[bytes] :param database: The database on which to run listCollections. :param filter: The filter to pass to listCollections. - :return: The first document from the listCollections command response as BSON. + :return: All documents from the listCollections command response as BSON. """ async with await self.client_ref()[database].list_collections( filter=RawBSONDocument(filter) ) as cursor: - async for doc in cursor: - return _dict_to_bson(doc, False, _DATA_KEY_OPTS) - return None + return [_dict_to_bson(doc, False, _DATA_KEY_OPTS) async for doc in cursor] def spawn(self) -> None: """Spawn mongocryptd. 
@@ -266,7 +277,7 @@ def spawn(self) -> None: args.extend(self.opts._mongocryptd_spawn_args) _spawn_daemon(args) - async def mark_command(self, database: str, cmd: bytes) -> bytes: + async def mark_command(self, database: str, cmd: bytes) -> bytes | memoryview: """Mark a command for encryption. :param database: The database on which to run this command. @@ -293,7 +304,7 @@ async def mark_command(self, database: str, cmd: bytes) -> bytes: ) return res.raw - async def fetch_keys(self, filter: bytes) -> AsyncGenerator[bytes, None]: + async def fetch_keys(self, filter: bytes) -> AsyncGenerator[bytes | memoryview, None]: """Yields one or more keys from the key vault. :param filter: The filter to pass to find. @@ -315,7 +326,9 @@ async def insert_data_key(self, data_key: bytes) -> Binary: raw_doc = RawBSONDocument(data_key, _KEY_VAULT_OPTS) data_key_id = raw_doc.get("_id") if not isinstance(data_key_id, Binary) or data_key_id.subtype != UUID_SUBTYPE: - raise TypeError("data_key _id must be Binary with a UUID subtype") + raise TypeError( + f"data_key _id must be Binary with a UUID subtype, not {type(data_key_id)}" + ) assert self.key_vault_coll is not None await self.key_vault_coll.insert_one(raw_doc) @@ -391,6 +404,8 @@ def __init__(self, client: AsyncMongoClient[_DocumentTypeArg], opts: AutoEncrypt encrypted_fields_map = _dict_to_bson(opts._encrypted_fields_map, False, _DATA_KEY_OPTS) self._bypass_auto_encryption = opts._bypass_auto_encryption self._internal_client = None + # parsing kms_ssl_contexts here so that parsing errors will be raised before internal clients are created + opts._kms_ssl_contexts(_IS_SYNC) def _get_internal_client( encrypter: _Encrypter, mongo_client: AsyncMongoClient[_DocumentTypeArg] @@ -438,6 +453,7 @@ def _get_internal_client( bypass_encryption=opts._bypass_auto_encryption, encrypted_fields_map=encrypted_fields_map, bypass_query_analysis=opts._bypass_query_analysis, + key_expiration_ms=opts._key_expiration_ms, ), ) self._closed = False @@ -460,7 +476,7 @@ async def encrypt( # TODO: PYTHON-1922 avoid decoding the encrypted_cmd. return _inflate_bson(encrypted_cmd, DEFAULT_RAW_BSON_OPTIONS) - async def decrypt(self, response: bytes) -> Optional[bytes]: + async def decrypt(self, response: bytes | memoryview) -> Optional[bytes]: """Decrypt a MongoDB command response. :param response: A MongoDB command response as BSON. @@ -513,6 +529,11 @@ class Algorithm(str, enum.Enum): .. versionadded:: 4.4 """ + TEXTPREVIEW = "TextPreview" + """**BETA** - TextPreview. + + .. versionadded:: 4.15 + """ class QueryType(str, enum.Enum): @@ -538,13 +559,30 @@ class QueryType(str, enum.Enum): .. versionadded:: 4.4 """ + PREFIXPREVIEW = "prefixPreview" + """**BETA** - Used to encrypt a value for a prefixPreview query. + + .. versionadded:: 4.15 + """ + + SUFFIXPREVIEW = "suffixPreview" + """**BETA** - Used to encrypt a value for a suffixPreview query. + + .. versionadded:: 4.15 + """ + + SUBSTRINGPREVIEW = "substringPreview" + """**BETA** - Used to encrypt a value for a substringPreview query. + + .. versionadded:: 4.15 + """ + def _create_mongocrypt_options(**kwargs: Any) -> MongoCryptOptions: - opts = MongoCryptOptions(**kwargs) - # Opt into range V2 encryption. - if hasattr(opts, "enable_range_v2"): - opts.enable_range_v2 = True - return opts + # For compat with pymongocrypt <1.13, avoid setting the default key_expiration_ms. 
+ if kwargs.get("key_expiration_ms") is None: + kwargs.pop("key_expiration_ms", None) + return MongoCryptOptions(**kwargs, enable_multiple_collinfo=True) class AsyncClientEncryption(Generic[_DocumentType]): @@ -557,6 +595,7 @@ def __init__( self, key_vault_client: AsyncMongoClient[_DocumentTypeArg], codec_options: CodecOptions[_DocumentTypeArg], kms_tls_options: Optional[Mapping[str, Any]] = None, + key_expiration_ms: Optional[int] = None, ) -> None: """Explicit client-side field level encryption. @@ -569,7 +608,7 @@ def __init__( creating data keys. It does not provide an API to query keys from the key vault collection, as this can be done directly on the AsyncMongoClient. - See :ref:`explicit-client-side-encryption` for an example. + See `explicit client-side encryption `_ for an example. :param kms_providers: Map of KMS provider options. The `kms_providers` map values differ by provider: @@ -598,7 +637,7 @@ def __init__( KMS providers may be specified with an optional name suffix separated by a colon, for example "kmip:name" or "aws:name". - Named KMS providers do not support :ref:`CSFLE on-demand credentials`. + Named KMS providers do not support `CSFLE on-demand credentials `_. :param key_vault_namespace: The namespace for the key vault collection. The key vault collection contains all data keys used for encryption and decryption. Data keys are stored as documents in this MongoDB @@ -623,7 +662,12 @@ def __init__( Or to supply a client certificate:: kms_tls_options={'kmip': {'tlsCertificateKeyFile': 'client.pem'}} + :param key_expiration_ms: The cache expiration time for data encryption keys. + Defaults to ``None``, which defers to libmongocrypt's default (currently 60000 milliseconds). + Set to 0 to disable key expiration. + .. versionchanged:: 4.12 + Added the `key_expiration_ms` parameter. .. versionchanged:: 4.0 Added the `kms_tls_options` parameter and the "kmip" KMS provider. @@ -636,8 +680,12 @@ def __init__( "python -m pip install --upgrade 'pymongo[encryption]'" ) + check_min_pymongocrypt() + if not isinstance(codec_options, CodecOptions): - raise TypeError("codec_options must be an instance of bson.codec_options.CodecOptions") + raise TypeError( + f"codec_options must be an instance of bson.codec_options.CodecOptions, not {type(codec_options)}" + ) if not isinstance(key_vault_client, AsyncMongoClient): # This is for compatibility with mocked and subclassed types, such as in Motor. @@ -657,14 +705,20 @@ def __init__( key_vault_coll = key_vault_client[db][coll] opts = AutoEncryptionOpts( - kms_providers, key_vault_namespace, kms_tls_options=kms_tls_options + kms_providers, + key_vault_namespace, + kms_tls_options=kms_tls_options, + key_expiration_ms=key_expiration_ms, ) + self._kms_ssl_contexts = _parse_kms_tls_options(opts._kms_tls_options, _IS_SYNC) self._io_callbacks: Optional[_EncryptionIO] = _EncryptionIO( None, key_vault_coll, None, opts ) self._encryption = AsyncExplicitEncrypter( self._io_callbacks, - _create_mongocrypt_options(kms_providers=kms_providers, schema_map=None), + _create_mongocrypt_options( + kms_providers=kms_providers, schema_map=None, key_expiration_ms=key_expiration_ms + ), ) # Use the same key vault collection as the callback. assert self._io_callbacks.key_vault_coll is not None @@ -691,6 +745,7 @@ async def create_encrypted_collection( creation. :class:`~pymongo.errors.EncryptedCollectionError` will be raised if the collection already exists.
+ :param database: the database in which to create the collection :param name: the name of the collection to create :param encrypted_fields: Document that describes the encrypted fields for Queryable Encryption. The "keyId" may be set to ``None`` to auto-generate the data keys. For example: @@ -755,8 +810,6 @@ async def create_encrypted_collection( await database.create_collection(name=name, **kwargs), encrypted_fields, ) - except asyncio.CancelledError: - raise except Exception as exc: raise EncryptedCollectionError(exc, encrypted_fields) from exc @@ -861,6 +914,7 @@ async def _encrypt_helper( contention_factor: Optional[int] = None, range_opts: Optional[RangeOpts] = None, is_expression: bool = False, + text_opts: Optional[TextOpts] = None, ) -> Any: self._check_closed() if isinstance(key_id, uuid.UUID): @@ -880,6 +934,12 @@ async def _encrypt_helper( range_opts.document, codec_options=self._codec_options, ) + text_opts_bytes = None + if text_opts: + text_opts_bytes = encode( + text_opts.document, + codec_options=self._codec_options, + ) with _wrap_encryption_errors(): encrypted_doc = await self._encryption.encrypt( value=doc, @@ -890,6 +950,8 @@ async def _encrypt_helper( contention_factor=contention_factor, range_opts=range_opts_bytes, is_expression=is_expression, + # For compatibility with pymongocrypt < 1.16: + **{"text_opts": text_opts_bytes} if text_opts_bytes else {}, ) return decode(encrypted_doc)["v"] @@ -902,6 +964,7 @@ async def encrypt( query_type: Optional[str] = None, contention_factor: Optional[int] = None, range_opts: Optional[RangeOpts] = None, + text_opts: Optional[TextOpts] = None, ) -> Binary: """Encrypt a BSON value with a given key and algorithm. @@ -922,9 +985,14 @@ async def encrypt( used. :param range_opts: Index options for `range` queries. See :class:`RangeOpts` for some valid options. + :param text_opts: Index options for `textPreview` queries. See + :class:`TextOpts` for some valid options. :return: The encrypted value, a :class:`~bson.binary.Binary` with subtype 6. + .. versionchanged:: 4.15 + Added the `text_opts` parameter. + .. versionchanged:: 4.9 Added the `range_opts` parameter. @@ -945,6 +1013,7 @@ async def encrypt( contention_factor=contention_factor, range_opts=range_opts, is_expression=False, + text_opts=text_opts, ), ) diff --git a/pymongo/asynchronous/helpers.py b/pymongo/asynchronous/helpers.py index 1ac8b6630f..4a8c918133 100644 --- a/pymongo/asynchronous/helpers.py +++ b/pymongo/asynchronous/helpers.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License.
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -15,8 +15,8 @@ """Miscellaneous pieces that need to be synchronized.""" from __future__ import annotations -import builtins -import sys +import asyncio +import socket from typing import ( Any, Callable, @@ -62,21 +62,25 @@ async def inner(*args: Any, **kwargs: Any) -> Any: await conn.authenticate(reauthenticate=True) else: raise - return func(*args, **kwargs) + return await func(*args, **kwargs) raise return cast(F, inner) -if sys.version_info >= (3, 10): - anext = builtins.anext - aiter = builtins.aiter -else: - - async def anext(cls: Any) -> Any: - """Compatibility function until we drop 3.9 support: https://docs.python.org/3/library/functions.html#anext.""" - return await cls.__anext__() - - def aiter(cls: Any) -> Any: - """Compatibility function until we drop 3.9 support: https://docs.python.org/3/library/functions.html#anext.""" - return cls.__aiter__() +async def _getaddrinfo( + host: Any, port: Any, **kwargs: Any +) -> list[ + tuple[ + socket.AddressFamily, + socket.SocketKind, + int, + str, + tuple[str, int] | tuple[str, int, int, int] | tuple[int, bytes], + ] +]: + if not _IS_SYNC: + loop = asyncio.get_running_loop() + return await loop.getaddrinfo(host, port, **kwargs) # type: ignore[return-value] + else: + return socket.getaddrinfo(host, port, **kwargs) diff --git a/pymongo/asynchronous/mongo_client.py b/pymongo/asynchronous/mongo_client.py index 1600e50628..d9bf808d55 100644 --- a/pymongo/asynchronous/mongo_client.py +++ b/pymongo/asynchronous/mongo_client.py @@ -4,7 +4,7 @@ # may not use this file except in compliance with the License. You # may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -14,7 +14,7 @@ """Tools for connecting to MongoDB. -.. seealso:: :doc:`/examples/high_availability` for examples of connecting +.. seealso:: `Read and Write Settings `_ for examples of connecting to replica sets or sets of mongos servers. 
To get a :class:`~pymongo.asynchronous.database.AsyncDatabase` instance from a @@ -44,6 +44,7 @@ AsyncContextManager, AsyncGenerator, Callable, + Collection, Coroutine, FrozenSet, Generic, @@ -60,8 +61,8 @@ from bson.codec_options import DEFAULT_CODEC_OPTIONS, CodecOptions, TypeRegistry from bson.timestamp import Timestamp -from pymongo import _csot, common, helpers_shared, periodic_executor, uri_parser -from pymongo.asynchronous import client_session, database +from pymongo import _csot, common, helpers_shared, periodic_executor +from pymongo.asynchronous import client_session, database, uri_parser from pymongo.asynchronous.change_stream import AsyncChangeStream, AsyncClusterChangeStream from pymongo.asynchronous.client_bulk import _AsyncClientBulk from pymongo.asynchronous.client_session import _EmptyServerSession @@ -69,6 +70,7 @@ from pymongo.asynchronous.settings import TopologySettings from pymongo.asynchronous.topology import Topology, _ErrorContext from pymongo.client_options import ClientOptions +from pymongo.driver_info import DriverInfo from pymongo.errors import ( AutoReconnect, BulkWriteError, @@ -88,9 +90,15 @@ _async_create_lock, _release_locks, ) -from pymongo.logger import _CLIENT_LOGGER, _log_or_warn +from pymongo.logger import ( + _CLIENT_LOGGER, + _COMMAND_LOGGER, + _debug_log, + _log_client_error, + _log_or_warn, +) from pymongo.message import _CursorAddress, _GetMore, _Query -from pymongo.monitoring import ConnectionClosedReason +from pymongo.monitoring import ConnectionClosedReason, _EventListeners from pymongo.operations import ( DeleteMany, DeleteOne, @@ -102,6 +110,7 @@ ) from pymongo.read_preferences import ReadPreference, _ServerMode from pymongo.results import ClientBulkWriteResult +from pymongo.server_description import ServerDescription from pymongo.server_selectors import writable_server_selector from pymongo.server_type import SERVER_TYPE from pymongo.topology_description import TOPOLOGY_TYPE, TopologyDescription @@ -113,11 +122,14 @@ _DocumentTypeArg, _Pipeline, ) -from pymongo.uri_parser import ( +from pymongo.uri_parser_shared import ( + SRV_SCHEME, _check_options, _handle_option_deprecations, _handle_security_options, _normalize_options, + _validate_uri, + split_hosts, ) from pymongo.write_concern import DEFAULT_WRITE_CONCERN, WriteConcern @@ -128,6 +140,7 @@ from pymongo.asynchronous.bulk import _AsyncBulk from pymongo.asynchronous.client_session import AsyncClientSession, _ServerSession from pymongo.asynchronous.cursor import _ConnectionManager + from pymongo.asynchronous.encryption import _Encrypter from pymongo.asynchronous.pool import AsyncConnection from pymongo.asynchronous.server import Server from pymongo.read_concern import ReadConcern @@ -148,10 +161,10 @@ _IS_SYNC = False _WriteOp = Union[ - InsertOne, + InsertOne, # type: ignore[type-arg] DeleteOne, DeleteMany, - ReplaceOne, + ReplaceOne, # type: ignore[type-arg] UpdateOne, UpdateMany, ] @@ -163,7 +176,7 @@ class AsyncMongoClient(common.BaseObject, Generic[_DocumentType]): # Define order to retrieve options from ClientOptions for __repr__. # No host/port; these are retrieved from TopologySettings. 
_constructor_args = ("document_class", "tz_aware", "connect") - _clients: weakref.WeakValueDictionary = weakref.WeakValueDictionary() + _clients: weakref.WeakValueDictionary = weakref.WeakValueDictionary() # type: ignore[type-arg] def __init__( self, @@ -182,8 +195,6 @@ def __init__( For more details, see the relevant section of the PyMongo 4.x migration guide: :ref:`pymongo4-migration-direct-connection`. - .. warning:: This API is currently in beta, meaning the classes, methods, and behaviors described within may change before the full release. - The client object is thread-safe and has connection-pooling built in. If an operation fails because of a network error, :class:`~pymongo.errors.ConnectionFailure` is raised and the client reconnects in the background. Application code should handle this exception (recognizing that the operation failed) and then continue to execute. + Best practice is to call :meth:`AsyncMongoClient.close` when the client is no longer needed, + or use the client in an ``async with`` statement:: + + async with AsyncMongoClient(url) as client: + # Use client here. + The `host` parameter can be a full `mongodb URI `_, in addition to a simple hostname. It can also be a list of hostnames, but no more than one URI. Any port specified in the host string(s) will override the `port` parameter. For username and @@ -252,7 +269,7 @@ def __init__( print("Server not available") .. warning:: When using PyMongo in a multiprocessing context, please - read :ref:`multiprocessing` first. + read `PyMongo multiprocessing `_ first. .. note:: Many of the following options can be passed using a MongoDB URI or keyword parameters. If the same option is passed in a URI and @@ -276,17 +293,16 @@ def __init__( :param type_registry: instance of :class:`~bson.codec_options.TypeRegistry` to enable encoding and decoding of custom types. - :param datetime_conversion: Specifies how UTC datetimes should be decoded + :param kwargs: **Additional optional parameters available as keyword arguments:** + + - `datetime_conversion` (optional): Specifies how UTC datetimes should be decoded within BSON. Valid options include 'datetime_ms' to return as a DatetimeMS, 'datetime' to return as a datetime.datetime and raising a ValueError for out-of-range values, 'datetime_auto' to return DatetimeMS objects when the underlying datetime is out-of-range and 'datetime_clamp' to clamp to the minimum and maximum possible datetimes. Defaults to 'datetime'. See - :ref:`handling-out-of-range-datetimes` for details. - - | **Other optional parameters can be passed as keyword arguments:** - + `handling out of range datetimes `_ for details. - `directConnection` (optional): if ``True``, forces this client to connect directly to the specified MongoDB host as a standalone. If ``False``, the client connects to the entire replica set of @@ -411,7 +427,7 @@ def __init__( package. By default no compression is used. Compression support must also be enabled on the server. MongoDB 3.6+ supports snappy and zlib compression. MongoDB 4.2+ adds support for zstd. - See :ref:`network-compression-example` for details. + See `compress network traffic `_ for details. - `zlibCompressionLevel`: (int) The zlib compression level to use when zlib is used as the wire protocol compressor. Supported values are -1 through 9. -1 tells the zlib library to use its default @@ -422,7 +438,7 @@ def __init__( values are the strings: "standard", "pythonLegacy", "javaLegacy", "csharpLegacy", and "unspecified" (the default).
New applications should consider setting this to "standard" for cross language - compatibility. See :ref:`handling-uuid-data-example` for details. + compatibility. See `handling UUID data `_ for details. - `unicode_decode_error_handler`: The error handler to apply when a Unicode-related error occurs during BSON decoding that would otherwise raise :exc:`UnicodeDecodeError`. Valid options include @@ -486,7 +502,7 @@ def __init__( is set, it must be a positive integer greater than or equal to 90 seconds. - .. seealso:: :doc:`/examples/server_selection` + .. seealso:: `Customize Server Selection `_ | **Authentication:** @@ -512,7 +528,7 @@ def __init__( To specify the session token for MONGODB-AWS authentication pass ``authMechanismProperties='AWS_SESSION_TOKEN:'``. - .. seealso:: :doc:`/examples/authentication` + .. seealso:: `Authentication `_ | **TLS/SSL configuration:** @@ -575,7 +591,7 @@ def __init__( :class:`~pymongo.encryption_options.AutoEncryptionOpts` which configures this client to automatically encrypt collection commands and automatically decrypt results. See - :ref:`automatic-client-side-encryption` for an example. + `client-side field level encryption `_ for an example. If a :class:`AsyncMongoClient` is configured with ``auto_encryption_opts`` and a non-None ``maxPoolSize``, a separate internal ``AsyncMongoClient`` is created if any of the @@ -591,7 +607,7 @@ def __init__( - `server_api`: A :class:`~pymongo.server_api.ServerApi` which configures this - client to use Stable API. See :ref:`versioned-api-ref` for + client to use Stable API. See `versioned API `_ for details. .. seealso:: The MongoDB documentation on `connections `_. @@ -702,15 +718,15 @@ def __init__( reconnect to one of them. In PyMongo 3, the client monitors its network latency to all the mongoses continuously, and distributes operations evenly among those with the lowest latency. See - :ref:`mongos-load-balancing` for more information. + `load balancing `_ for more information. The ``connect`` option is added. The ``start_request``, ``in_request``, and ``end_request`` methods are removed, as well as the ``auto_start_request`` option. - The ``copy_database`` method is removed, see the - :doc:`copy_database examples ` for alternatives. + The ``copy_database`` method is removed, see + `Copy and Clone Databases `_ for alternatives. The :meth:`AsyncMongoClient.disconnect` method is removed; it was a synonym for :meth:`~pymongo.asynchronous.AsyncMongoClient.close`. @@ -750,7 +766,13 @@ def __init__( if port is None: port = self.PORT if not isinstance(port, int): - raise TypeError("port must be an instance of int") + raise TypeError(f"port must be an instance of int, not {type(port)}") + self._host = host + self._port = port + self._topology: Topology = None # type: ignore[assignment] + self._timeout: float | None = None + self._topology_settings: TopologySettings = None # type: ignore[assignment] + self._event_listeners: _EventListeners | None = None # _pool_class, _monitor_class, and _condition_class are for deep # customization of PyMongo, e.g. Motor. @@ -761,8 +783,10 @@ def __init__( # Parse options passed as kwargs. 
keyword_opts = common._CaseInsensitiveDictionary(kwargs) keyword_opts["document_class"] = doc_class + self._resolve_srv_info: dict[str, Any] = {"keyword_opts": keyword_opts} - seeds = set() + self._seeds = set() + is_srv = False username = None password = None dbase = None @@ -770,41 +794,34 @@ def __init__( fqdn = None srv_service_name = keyword_opts.get("srvservicename") srv_max_hosts = keyword_opts.get("srvmaxhosts") - if len([h for h in host if "/" in h]) > 1: + if len([h for h in self._host if "/" in h]) > 1: raise ConfigurationError("host must not contain multiple MongoDB URIs") - for entity in host: + for entity in self._host: # A hostname can only include a-z, 0-9, '-' and '.'. If we find a '/' # it must be a URI, # https://en.wikipedia.org/wiki/Hostname#Restrictions_on_valid_host_names if "/" in entity: - # Determine connection timeout from kwargs. - timeout = keyword_opts.get("connecttimeoutms") - if timeout is not None: - timeout = common.validate_timeout_or_none_or_zero( - keyword_opts.cased_key("connecttimeoutms"), timeout - ) - res = uri_parser.parse_uri( + res = _validate_uri( entity, port, validate=True, warn=True, normalize=False, - connect_timeout=timeout, - srv_service_name=srv_service_name, srv_max_hosts=srv_max_hosts, ) - seeds.update(res["nodelist"]) + is_srv = entity.startswith(SRV_SCHEME) + self._seeds.update(res["nodelist"]) username = res["username"] or username password = res["password"] or password dbase = res["database"] or dbase opts = res["options"] fqdn = res["fqdn"] else: - seeds.update(uri_parser.split_hosts(entity, port)) - if not seeds: + self._seeds.update(split_hosts(entity, self._port)) + if not self._seeds: raise ConfigurationError("need to specify at least one host") - for hostname in [node[0] for node in seeds]: + for hostname in [node[0] for node in self._seeds]: if _detect_external_db(hostname): break @@ -821,80 +838,180 @@ def __init__( keyword_opts["tz_aware"] = tz_aware keyword_opts["connect"] = connect - # Handle deprecated options in kwarg options. - keyword_opts = _handle_option_deprecations(keyword_opts) - # Validate kwarg options. - keyword_opts = common._CaseInsensitiveDictionary( - dict(common.validate(keyword_opts.cased_key(k), v) for k, v in keyword_opts.items()) - ) - - # Override connection string options with kwarg options. - opts.update(keyword_opts) + opts = self._validate_kwargs_and_update_opts(keyword_opts, opts) if srv_service_name is None: srv_service_name = opts.get("srvServiceName", common.SRV_SERVICE_NAME) srv_max_hosts = srv_max_hosts or opts.get("srvmaxhosts") - # Handle security-option conflicts in combined options. - opts = _handle_security_options(opts) - # Normalize combined options. - opts = _normalize_options(opts) - _check_options(seeds, opts) + opts = self._normalize_and_validate_options(opts, self._seeds) # Username and password passed as kwargs override user info in URI. 
username = opts.get("username", username) password = opts.get("password", password) - self._options = options = ClientOptions(username, password, dbase, opts, _IS_SYNC) + self._options = ClientOptions(username, password, dbase, opts, _IS_SYNC) self._default_database_name = dbase self._lock = _async_create_lock() - self._kill_cursors_queue: list = [] + self._kill_cursors_queue: list = [] # type: ignore[type-arg] + + self._encrypter: Optional[_Encrypter] = None + + self._resolve_srv_info.update( + { + "is_srv": is_srv, + "username": username, + "password": password, + "dbase": dbase, + "seeds": self._seeds, + "fqdn": fqdn, + "srv_service_name": srv_service_name, + "pool_class": pool_class, + "monitor_class": monitor_class, + "condition_class": condition_class, + } + ) - self._event_listeners = options.pool_options._event_listeners super().__init__( - options.codec_options, - options.read_preference, - options.write_concern, - options.read_concern, + self._options.codec_options, + self._options.read_preference, + self._options.write_concern, + self._options.read_concern, ) - self._topology_settings = TopologySettings( - seeds=seeds, - replica_set_name=options.replica_set_name, - pool_class=pool_class, - pool_options=options.pool_options, - monitor_class=monitor_class, - condition_class=condition_class, - local_threshold_ms=options.local_threshold_ms, - server_selection_timeout=options.server_selection_timeout, - server_selector=options.server_selector, - heartbeat_frequency=options.heartbeat_frequency, - fqdn=fqdn, - direct_connection=options.direct_connection, - load_balanced=options.load_balanced, - srv_service_name=srv_service_name, - srv_max_hosts=srv_max_hosts, - server_monitoring_mode=options.server_monitoring_mode, - ) + self._init_based_on_options(self._seeds, srv_max_hosts, srv_service_name) self._opened = False self._closed = False - self._init_background() + self._loop: Optional[asyncio.AbstractEventLoop] = None + if not is_srv: + self._init_background() if _IS_SYNC and connect: self._get_topology() # type: ignore[unused-coroutine] - self._encrypter = None + async def _resolve_srv(self) -> None: + keyword_opts = self._resolve_srv_info["keyword_opts"] + seeds = set() + opts = common._CaseInsensitiveDictionary() + srv_service_name = keyword_opts.get("srvservicename") + srv_max_hosts = keyword_opts.get("srvmaxhosts") + for entity in self._host: + # A hostname can only include a-z, 0-9, '-' and '.'. If we find a '/' + # it must be a URI, + # https://en.wikipedia.org/wiki/Hostname#Restrictions_on_valid_host_names + if "/" in entity: + # Determine connection timeout from kwargs. + timeout = keyword_opts.get("connecttimeoutms") + if timeout is not None: + timeout = common.validate_timeout_or_none_or_zero( + keyword_opts.cased_key("connecttimeoutms"), timeout + ) + res = await uri_parser._parse_srv( + entity, + self._port, + validate=True, + warn=True, + normalize=False, + connect_timeout=timeout, + srv_service_name=srv_service_name, + srv_max_hosts=srv_max_hosts, + ) + seeds.update(res["nodelist"]) + opts = res["options"] + else: + seeds.update(split_hosts(entity, self._port)) + + if not seeds: + raise ConfigurationError("need to specify at least one host") + + for hostname in [node[0] for node in seeds]: + if _detect_external_db(hostname): + break + + # Add options with named keyword arguments to the parsed kwarg options. 
+ tz_aware = keyword_opts["tz_aware"] + connect = keyword_opts["connect"] + if tz_aware is None: + tz_aware = opts.get("tz_aware", False) + if connect is None: + # Default to connect=True unless on a FaaS system, which might use fork. + from pymongo.pool_options import _is_faas + + connect = opts.get("connect", not _is_faas()) + keyword_opts["tz_aware"] = tz_aware + keyword_opts["connect"] = connect + + opts = self._validate_kwargs_and_update_opts(keyword_opts, opts) + + if srv_service_name is None: + srv_service_name = opts.get("srvServiceName", common.SRV_SERVICE_NAME) + + srv_max_hosts = srv_max_hosts or opts.get("srvmaxhosts") + opts = self._normalize_and_validate_options(opts, seeds) + + # Username and password passed as kwargs override user info in URI. + username = opts.get("username", self._resolve_srv_info["username"]) + password = opts.get("password", self._resolve_srv_info["password"]) + self._options = ClientOptions( + username, password, self._resolve_srv_info["dbase"], opts, _IS_SYNC + ) + + self._init_based_on_options(seeds, srv_max_hosts, srv_service_name) + + def _init_based_on_options( + self, seeds: Collection[tuple[str, int]], srv_max_hosts: Any, srv_service_name: Any + ) -> None: + self._event_listeners = self._options.pool_options._event_listeners + self._topology_settings = TopologySettings( + seeds=seeds, + replica_set_name=self._options.replica_set_name, + pool_class=self._resolve_srv_info["pool_class"], + pool_options=self._options.pool_options, + monitor_class=self._resolve_srv_info["monitor_class"], + condition_class=self._resolve_srv_info["condition_class"], + local_threshold_ms=self._options.local_threshold_ms, + server_selection_timeout=self._options.server_selection_timeout, + server_selector=self._options.server_selector, + heartbeat_frequency=self._options.heartbeat_frequency, + fqdn=self._resolve_srv_info["fqdn"], + direct_connection=self._options.direct_connection, + load_balanced=self._options.load_balanced, + srv_service_name=srv_service_name, + srv_max_hosts=srv_max_hosts, + server_monitoring_mode=self._options.server_monitoring_mode, + topology_id=self._topology_settings._topology_id if self._topology_settings else None, + ) if self._options.auto_encryption_opts: from pymongo.asynchronous.encryption import _Encrypter self._encrypter = _Encrypter(self, self._options.auto_encryption_opts) self._timeout = self._options.timeout - if _HAS_REGISTER_AT_FORK: - # Add this client to the list of weakly referenced items. - # This will be used later if we fork. - AsyncMongoClient._clients[self._topology._topology_id] = self + def _normalize_and_validate_options( + self, opts: common._CaseInsensitiveDictionary, seeds: set[tuple[str, int | None]] + ) -> common._CaseInsensitiveDictionary: + # Handle security-option conflicts in combined options. + opts = _handle_security_options(opts) + # Normalize combined options. + opts = _normalize_options(opts) + _check_options(seeds, opts) + return opts + + def _validate_kwargs_and_update_opts( + self, + keyword_opts: common._CaseInsensitiveDictionary, + opts: common._CaseInsensitiveDictionary, + ) -> common._CaseInsensitiveDictionary: + # Handle deprecated options in kwarg options. + keyword_opts = _handle_option_deprecations(keyword_opts) + # Validate kwarg options. + keyword_opts = common._CaseInsensitiveDictionary( + dict(common.validate(keyword_opts.cased_key(k), v) for k, v in keyword_opts.items()) + ) + # Override connection string options with kwarg options. 
+ opts.update(keyword_opts) + return opts async def aconnect(self) -> None: """Explicitly connect to MongoDB asynchronously instead of on the first operation.""" @@ -902,6 +1019,10 @@ async def aconnect(self) -> None: def _init_background(self, old_pid: Optional[int] = None) -> None: self._topology = Topology(self._topology_settings) + if _HAS_REGISTER_AT_FORK: + # Add this client to the list of weakly referenced items. + # This will be used later if we fork. + AsyncMongoClient._clients[self._topology._topology_id] = self # Seed the topology with the old one's pid so we can detect clients # that are opened before a fork and used after. self._topology._pid = old_pid @@ -926,6 +1047,20 @@ async def target() -> bool: self._kill_cursors_executor = executor self._opened = False + def append_metadata(self, driver_info: DriverInfo) -> None: + """Appends the given metadata to existing driver metadata. + + :param driver_info: a :class:`~pymongo.driver_info.DriverInfo` + + .. versionadded:: 4.14 + """ + + if not isinstance(driver_info, DriverInfo): + raise TypeError( + f"driver_info must be an instance of DriverInfo, not {type(driver_info)}" + ) + self._options.pool_options._update_metadata(driver_info) + def _should_pin_cursor(self, session: Optional[AsyncClientSession]) -> Optional[bool]: return self._options.load_balanced and not (session and session.in_transaction) @@ -935,7 +1070,7 @@ def _after_fork(self) -> None: # Reset the session pool to avoid duplicate sessions in the child process. self._topology._session_pool.reset() - def _duplicate(self, **kwargs: Any) -> AsyncMongoClient: + def _duplicate(self, **kwargs: Any) -> AsyncMongoClient: # type: ignore[type-arg] args = self._init_kwargs.copy() args.update(kwargs) return AsyncMongoClient(**args) @@ -1090,6 +1225,16 @@ def topology_description(self) -> TopologyDescription: .. versionadded:: 4.0 """ + if self._topology is None: + servers = {(host, port): ServerDescription((host, port)) for host, port in self._seeds} + return TopologyDescription( + TOPOLOGY_TYPE.Unknown, + servers, + None, + None, + None, + self._topology_settings, + ) return self._topology.description @property @@ -1103,6 +1248,8 @@ def nodes(self) -> FrozenSet[_Address]: to any servers, or a network partition causes it to lose connection to all servers. """ + if self._topology is None: + return frozenset() description = self._topology.description return frozenset(s.address for s in description.known_servers) @@ -1116,16 +1263,24 @@ def options(self) -> ClientOptions: """ return self._options + def eq_props(self) -> tuple[tuple[_Address, ...], Optional[str], Optional[str], str]: + return ( + tuple(sorted(self._resolve_srv_info["seeds"])), + self._options.replica_set_name, + self._resolve_srv_info["fqdn"], + self._resolve_srv_info["srv_service_name"], + ) + def __eq__(self, other: Any) -> bool: if isinstance(other, self.__class__): - return self._topology == other._topology + return self.eq_props() == other.eq_props() return NotImplemented def __ne__(self, other: Any) -> bool: return not self == other def __hash__(self) -> int: - return hash(self._topology) + return hash(self.eq_props()) def _repr_helper(self) -> str: def option_repr(option: str, value: Any) -> str: @@ -1141,13 +1296,16 @@ def option_repr(option: str, value: Any) -> str: return f"{option}={value!r}" # Host first... 
+
     def _should_pin_cursor(self, session: Optional[AsyncClientSession]) -> Optional[bool]:
         return self._options.load_balanced and not (session and session.in_transaction)
@@ -935,7 +1070,7 @@ def _after_fork(self) -> None:
         # Reset the session pool to avoid duplicate sessions in the child process.
         self._topology._session_pool.reset()

-    def _duplicate(self, **kwargs: Any) -> AsyncMongoClient:
+    def _duplicate(self, **kwargs: Any) -> AsyncMongoClient:  # type: ignore[type-arg]
         args = self._init_kwargs.copy()
         args.update(kwargs)
         return AsyncMongoClient(**args)
@@ -1090,6 +1225,16 @@ def topology_description(self) -> TopologyDescription:

         .. versionadded:: 4.0
         """
+        if self._topology is None:
+            servers = {(host, port): ServerDescription((host, port)) for host, port in self._seeds}
+            return TopologyDescription(
+                TOPOLOGY_TYPE.Unknown,
+                servers,
+                None,
+                None,
+                None,
+                self._topology_settings,
+            )
         return self._topology.description

     @property
@@ -1103,6 +1248,8 @@ def nodes(self) -> FrozenSet[_Address]:
         to any servers, or a network partition causes it to lose connection
         to all servers.
         """
+        if self._topology is None:
+            return frozenset()
         description = self._topology.description
         return frozenset(s.address for s in description.known_servers)
@@ -1116,16 +1263,24 @@ def options(self) -> ClientOptions:
         """
         return self._options

+    def eq_props(self) -> tuple[tuple[_Address, ...], Optional[str], Optional[str], str]:
+        return (
+            tuple(sorted(self._resolve_srv_info["seeds"])),
+            self._options.replica_set_name,
+            self._resolve_srv_info["fqdn"],
+            self._resolve_srv_info["srv_service_name"],
+        )
+
     def __eq__(self, other: Any) -> bool:
         if isinstance(other, self.__class__):
-            return self._topology == other._topology
+            return self.eq_props() == other.eq_props()
         return NotImplemented

     def __ne__(self, other: Any) -> bool:
         return not self == other

     def __hash__(self) -> int:
-        return hash(self._topology)
+        return hash(self.eq_props())

     def _repr_helper(self) -> str:
         def option_repr(option: str, value: Any) -> str:
@@ -1141,13 +1296,16 @@ def option_repr(option: str, value: Any) -> str:
             return f"{option}={value!r}"

         # Host first...
-        options = [
-            "host=%r"
-            % [
-                "%s:%d" % (host, port) if port is not None else host
-                for host, port in self._topology_settings.seeds
+        if self._topology is None:
+            options = [f"host='mongodb+srv://{self._resolve_srv_info['fqdn']}'"]
+        else:
+            options = [
+                "host=%r"
+                % [
+                    "%s:%d" % (host, port) if port is not None else host
+                    for host, port in self._topology_settings.seeds
+                ]
             ]
-        ]
         # ... then everything in self._constructor_args...
         options.extend(
             option_repr(key, self._options._options[key]) for key in self._constructor_args
@@ -1396,7 +1554,7 @@ def get_database(
             self, name, codec_options, read_preference, write_concern, read_concern
         )

-    def _database_default_options(self, name: str) -> database.AsyncDatabase:
+    def _database_default_options(self, name: str) -> database.AsyncDatabase:  # type: ignore[type-arg]
         """Get an AsyncDatabase instance with the default settings."""
         return self.get_database(
             name,
@@ -1450,6 +1608,8 @@ async def address(self) -> Optional[tuple[str, int]]:

         .. versionadded:: 3.0
         """
+        if self._topology is None:
+            await self._get_topology()
         topology_type = self._topology._description.topology_type
         if (
             topology_type == TOPOLOGY_TYPE.Sharded
@@ -1472,6 +1632,8 @@ async def primary(self) -> Optional[tuple[str, int]]:

         .. versionadded:: 3.0
            AsyncMongoClient gained this property in version 3.0.
         """
+        if self._topology is None:
+            await self._get_topology()
         return await self._topology.get_primary()  # type: ignore[return-value]

     @property
@@ -1485,6 +1647,8 @@ async def secondaries(self) -> set[_Address]:

         .. versionadded:: 3.0
            AsyncMongoClient gained this property in version 3.0.
         """
+        if self._topology is None:
+            await self._get_topology()
         return await self._topology.get_secondaries()

     @property
@@ -1495,6 +1659,8 @@ async def arbiters(self) -> set[_Address]:
         connected to a replica set, there are no arbiters, or this client was
         created without the `replicaSet` option.
         """
+        if self._topology is None:
+            await self._get_topology()
         return await self._topology.get_arbiters()

     @property
@@ -1553,6 +1719,8 @@ async def close(self) -> None:
         .. versionchanged:: 3.6
            End all server sessions created by this client.
         """
+        if self._topology is None:
+            return
         session_ids = self._topology.pop_all_sessions()
         if session_ids:
             await self._end_sessions(session_ids)
@@ -1565,6 +1733,12 @@ async def close(self) -> None:
             # TODO: PYTHON-1921 Encrypted MongoClients cannot be re-opened.
             await self._encrypter.close()
         self._closed = True
+        if not _IS_SYNC:
+            await asyncio.gather(
+                self._topology.cleanup_monitors(),  # type: ignore[func-returns-value]
+                self._kill_cursors_executor.join(),  # type: ignore[func-returns-value]
+                return_exceptions=True,
+            )

     if not _IS_SYNC:
         # Add support for contextlib.aclosing.
@@ -1576,7 +1750,17 @@ async def _get_topology(self) -> Topology:
         If this client was created with "connect=False", calling _get_topology
         launches the connection process in the background.
         """
+        if not _IS_SYNC:
+            if self._loop is None:
+                self._loop = asyncio.get_running_loop()
+            elif self._loop != asyncio.get_running_loop():
+                raise RuntimeError(
+                    "Cannot use AsyncMongoClient in different event loop. AsyncMongoClient uses low-level asyncio APIs that bind it to the event loop it was created on."
+                )
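+                # e.g. creating the client while one event loop is running and
+                # then awaiting its APIs from another loop fails fast here
+                # instead of hanging on loop-bound primitives.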
         if not self._opened:
+            if self._resolve_srv_info["is_srv"]:
+                await self._resolve_srv()
+                self._init_background()
             await self._topology.open()
             async with self._lock:
                 self._kill_cursors_executor.open()
@@ -1709,7 +1893,7 @@ async def _conn_for_reads(

     async def _run_operation(
         self,
         operation: Union[_Query, _GetMore],
-        unpack_res: Callable,
+        unpack_res: Callable,  # type: ignore[type-arg]
         address: Optional[_Address] = None,
     ) -> Response:
         """Run a _Query/_GetMore operation and return a Response.
@@ -1864,17 +2048,18 @@ async def _retryable_read(
         retryable = bool(
             retryable and self.options.retry_reads and not (session and session.in_transaction)
         )
-        return await self._retry_internal(
-            func,
-            session,
-            None,
-            operation,
-            is_read=True,
-            address=address,
-            read_pref=read_pref,
-            retryable=retryable,
-            operation_id=operation_id,
-        )
+        async with self._tmp_session(session) as s:
+            return await self._retry_internal(
+                func,
+                s,
+                None,
+                operation,
+                is_read=True,
+                address=address,
+                read_pref=read_pref,
+                retryable=retryable,
+                operation_id=operation_id,
+            )

     async def _retryable_write(
         self,
@@ -1907,7 +2092,6 @@ def _cleanup_cursor_no_lock(
         address: Optional[_CursorAddress],
         conn_mgr: _ConnectionManager,
         session: Optional[AsyncClientSession],
-        explicit_session: bool,
     ) -> None:
         """Cleanup a cursor from __del__ without locking.
@@ -1922,7 +2106,7 @@ def _cleanup_cursor_no_lock(
         # The cursor will be closed later in a different session.
         if cursor_id or conn_mgr:
             self._close_cursor_soon(cursor_id, address, conn_mgr)
-        if session and not explicit_session:
+        if session and session._implicit and not session._leave_alive:
             session._end_implicit_session()
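+        # Only implicit (driver-created) sessions are ended here; a session
+        # the caller passed in explicitly stays open for reuse.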
""" if not isinstance(cursor_id, int): - raise TypeError("cursor_id must be an instance of int") + raise TypeError(f"cursor_id must be an instance of int, not {type(cursor_id)}") try: if conn_mgr: @@ -2037,16 +2219,14 @@ async def _process_kill_cursors(self) -> None: for address, cursor_id, conn_mgr in pinned_cursors: try: - await self._cleanup_cursor_lock(cursor_id, address, conn_mgr, None, False) - except asyncio.CancelledError: - raise + await self._cleanup_cursor_lock(cursor_id, address, conn_mgr, None) except Exception as exc: if isinstance(exc, InvalidOperation) and self._topology._closed: # Raise the exception when client is closed so that it # can be caught in _process_periodic_tasks raise else: - helpers_shared._handle_exception() + _log_client_error() # Don't re-open topology if it's closed and there's no pending cursors. if address_to_cursor_ids: @@ -2054,13 +2234,11 @@ async def _process_kill_cursors(self) -> None: for address, cursor_ids in address_to_cursor_ids.items(): try: await self._kill_cursors(cursor_ids, address, topology, session=None) - except asyncio.CancelledError: - raise except Exception as exc: if isinstance(exc, InvalidOperation) and self._topology._closed: raise else: - helpers_shared._handle_exception() + _log_client_error() # This method is run periodically by a background thread. async def _process_periodic_tasks(self) -> None: @@ -2070,13 +2248,11 @@ async def _process_periodic_tasks(self) -> None: try: await self._process_kill_cursors() await self._topology.update_pool() - except asyncio.CancelledError: - raise except Exception as exc: if isinstance(exc, InvalidOperation) and self._topology._closed: return else: - helpers_shared._handle_exception() + _log_client_error() def _return_server_session( self, server_session: Union[_ServerSession, _EmptyServerSession] @@ -2088,12 +2264,14 @@ def _return_server_session( @contextlib.asynccontextmanager async def _tmp_session( - self, session: Optional[client_session.AsyncClientSession], close: bool = True - ) -> AsyncGenerator[Optional[client_session.AsyncClientSession], None, None]: + self, session: Optional[client_session.AsyncClientSession] + ) -> AsyncGenerator[Optional[client_session.AsyncClientSession], None]: """If provided session is None, lend a temporary session.""" if session is not None: if not isinstance(session, client_session.AsyncClientSession): - raise ValueError("'session' argument must be an AsyncClientSession or None.") + raise ValueError( + f"'session' argument must be an AsyncClientSession or None, not {type(session)}" + ) # Don't call end_session. yield session return @@ -2111,7 +2289,7 @@ async def _tmp_session( raise finally: # Call end_session when we exit this scope. - if close: + if not s._attached_to_cursor: await s.end_session() else: yield None @@ -2134,8 +2312,8 @@ async def server_info( .. versionchanged:: 3.6 Added ``session`` parameter. """ - return cast( - dict, + return cast( # type: ignore[redundant-cast] + dict[str, Any], await self.admin.command( "buildinfo", read_preference=ReadPreference.PRIMARY, session=session ), @@ -2171,6 +2349,15 @@ async def list_databases( ) -> AsyncCommandCursor[dict[str, Any]]: """Get a cursor over the databases of the connected server. + Cursors are closed automatically when they are exhausted (the last batch of data is retrieved from the database). + If a cursor is not exhausted, it will be closed automatically upon garbage collection, which leaves resources open but unused for a potentially long period of time. 
+ To avoid this, best practice is to call :meth:`AsyncCursor.close` when the cursor is no longer needed, + or use the cursor in a with statement:: + + async with await client.list_databases() as cursor: + async for database in cursor: + print(database) + :param session: a :class:`~pymongo.asynchronous.client_session.AsyncClientSession`. :param comment: A user-provided comment to attach to this @@ -2247,7 +2434,9 @@ async def drop_database( name = name.name if not isinstance(name, str): - raise TypeError("name_or_database must be an instance of str or a AsyncDatabase") + raise TypeError( + f"name_or_database must be an instance of str or a AsyncDatabase, not {type(name)}" + ) async with await self._conn_for_writes(session, operation=_Op.DROP_DATABASE) as conn: await self[name]._command( @@ -2262,13 +2451,13 @@ async def drop_database( @_csot.apply async def bulk_write( self, - models: Sequence[_WriteOp[_DocumentType]], + models: Sequence[_WriteOp], session: Optional[AsyncClientSession] = None, ordered: bool = True, verbose_results: bool = False, bypass_document_validation: Optional[bool] = None, comment: Optional[Any] = None, - let: Optional[Mapping] = None, + let: Optional[Mapping[str, Any]] = None, write_concern: Optional[WriteConcern] = None, ) -> ClientBulkWriteResult: """Send a batch of write operations, potentially across multiple namespaces, to the server. @@ -2343,9 +2532,9 @@ async def bulk_write( :return: An instance of :class:`~pymongo.results.ClientBulkWriteResult`. - .. seealso:: For more info, see :doc:`/examples/client_bulk`. + .. seealso:: For more info, see `Client Bulk Write `_. - .. seealso:: :ref:`writes-and-ids` + .. seealso:: `Writes and ids `_ .. note:: requires MongoDB server version 8.0+. @@ -2455,7 +2644,10 @@ class _MongoClientErrorHandler: ) def __init__( - self, client: AsyncMongoClient, server: Server, session: Optional[AsyncClientSession] + self, + client: AsyncMongoClient, # type: ignore[type-arg] + server: Server, + session: Optional[AsyncClientSession], ): if not isinstance(client, AsyncMongoClient): # This is for compatibility with mocked and subclassed types, such as in Motor. @@ -2508,6 +2700,7 @@ async def handle( self.completed_handshake, self.service_id, ) + assert self.client._topology is not None await self.client._topology.handle_error(self.server_address, err_ctx) async def __aenter__(self) -> _MongoClientErrorHandler: @@ -2527,7 +2720,7 @@ class _ClientConnectionRetryable(Generic[T]): def __init__( self, - mongo_client: AsyncMongoClient, + mongo_client: AsyncMongoClient, # type: ignore[type-arg] func: _WriteCall[T] | _ReadCall[T], bulk: Optional[Union[_AsyncBulk, _AsyncClientBulk]], operation: str, @@ -2557,6 +2750,7 @@ def __init__( self._deprioritized_servers: list[Server] = [] self._operation = operation self._operation_id = operation_id + self._attempt_number = 0 async def run(self) -> T: """Runs the supplied func() and attempts a retry @@ -2599,6 +2793,7 @@ async def run(self) -> T: raise self._retrying = True self._last_error = exc + self._attempt_number += 1 else: raise @@ -2620,6 +2815,7 @@ async def run(self) -> T: raise self._last_error from exc else: raise + self._attempt_number += 1 if self._bulk: self._bulk.retrying = True else: @@ -2698,6 +2894,14 @@ async def _write(self) -> T: # not support sessions raise the last error. 
self._check_last_error() self._retryable = False + if self._retrying: + _debug_log( + _COMMAND_LOGGER, + message=f"Retrying write attempt number {self._attempt_number}", + clientId=self._client._topology_settings._topology_id, + commandName=self._operation, + operationId=self._operation_id, + ) return await self._func(self._session, conn, self._retryable) # type: ignore except PyMongoError as exc: if not self._retryable: @@ -2719,6 +2923,14 @@ async def _read(self) -> T: ): if self._retrying and not self._retryable: self._check_last_error() + if self._retrying: + _debug_log( + _COMMAND_LOGGER, + message=f"Retrying read attempt number {self._attempt_number}", + clientId=self._client._topology_settings._topology_id, + commandName=self._operation, + operationId=self._operation_id, + ) return await self._func(self._session, self._server, conn, read_pref) # type: ignore diff --git a/pymongo/asynchronous/monitor.py b/pymongo/asynchronous/monitor.py index ad1bc70aba..45c12b219f 100644 --- a/pymongo/asynchronous/monitor.py +++ b/pymongo/asynchronous/monitor.py @@ -4,7 +4,7 @@ # may not use this file except in compliance with the License. You # may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -21,11 +21,12 @@ import logging import time import weakref -from typing import TYPE_CHECKING, Any, Mapping, Optional, cast +from typing import TYPE_CHECKING, Any, Optional from pymongo import common, periodic_executor from pymongo._csot import MovingMinimum -from pymongo.errors import NetworkTimeout, NotPrimaryError, OperationFailure, _OperationCancelled +from pymongo.asynchronous.srv_resolver import _SrvResolver +from pymongo.errors import NetworkTimeout, _OperationCancelled from pymongo.hello import Hello from pymongo.lock import _async_create_lock from pymongo.logger import _SDAM_LOGGER, _debug_log, _SDAMStatusMessage @@ -33,10 +34,13 @@ from pymongo.pool_options import _is_faas from pymongo.read_preferences import MovingAverage from pymongo.server_description import ServerDescription -from pymongo.srv_resolver import _SrvResolver if TYPE_CHECKING: - from pymongo.asynchronous.pool import AsyncConnection, Pool, _CancellationContext + from pymongo.asynchronous.pool import ( # type: ignore[attr-defined] + AsyncConnection, + Pool, + _CancellationContext, + ) from pymongo.asynchronous.settings import TopologySettings from pymongo.asynchronous.topology import Topology @@ -112,9 +116,9 @@ async def close(self) -> None: """ self.gc_safe_close() - async def join(self, timeout: Optional[int] = None) -> None: + async def join(self) -> None: """Wait for the monitor to stop.""" - await self._executor.join(timeout) + await self._executor.join() def request_check(self) -> None: """If the monitor is sleeping, wake it soon.""" @@ -189,6 +193,11 @@ def gc_safe_close(self) -> None: self._rtt_monitor.gc_safe_close() self.cancel_check() + async def join(self) -> None: + await asyncio.gather( + self._executor.join(), self._rtt_monitor.join(), return_exceptions=True + ) # type: ignore[func-returns-value] + async def close(self) -> None: self.gc_safe_close() await self._rtt_monitor.close() @@ -250,15 +259,7 @@ async def _check_server(self) -> ServerDescription: self._conn_id = None start = time.monotonic() try: - try: - return await self._check_once() - except (OperationFailure, NotPrimaryError) as exc: - # 
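+            # e.g. the first retry emits a DEBUG message reading
+            # "Retrying write attempt number 1" before re-running the write.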
            return await self._func(self._session, conn, self._retryable)  # type: ignore
        except PyMongoError as exc:
            if not self._retryable:
@@ -2719,6 +2923,14 @@ async def _read(self) -> T:
        ):
            if self._retrying and not self._retryable:
                self._check_last_error()
+            if self._retrying:
+                _debug_log(
+                    _COMMAND_LOGGER,
+                    message=f"Retrying read attempt number {self._attempt_number}",
+                    clientId=self._client._topology_settings._topology_id,
+                    commandName=self._operation,
+                    operationId=self._operation_id,
+                )
            return await self._func(self._session, self._server, conn, read_pref)  # type: ignore
diff --git a/pymongo/asynchronous/monitor.py b/pymongo/asynchronous/monitor.py
index ad1bc70aba..45c12b219f 100644
--- a/pymongo/asynchronous/monitor.py
+++ b/pymongo/asynchronous/monitor.py
@@ -4,7 +4,7 @@
 # may not use this file except in compliance with the License. You
 # may obtain a copy of the License at
 #
-# http://www.apache.org/licenses/LICENSE-2.0
+# https://www.apache.org/licenses/LICENSE-2.0
 #
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
@@ -21,11 +21,12 @@
 import logging
 import time
 import weakref
-from typing import TYPE_CHECKING, Any, Mapping, Optional, cast
+from typing import TYPE_CHECKING, Any, Optional

 from pymongo import common, periodic_executor
 from pymongo._csot import MovingMinimum
-from pymongo.errors import NetworkTimeout, NotPrimaryError, OperationFailure, _OperationCancelled
+from pymongo.asynchronous.srv_resolver import _SrvResolver
+from pymongo.errors import NetworkTimeout, _OperationCancelled
 from pymongo.hello import Hello
 from pymongo.lock import _async_create_lock
 from pymongo.logger import _SDAM_LOGGER, _debug_log, _SDAMStatusMessage
@@ -33,10 +34,13 @@
 from pymongo.pool_options import _is_faas
 from pymongo.read_preferences import MovingAverage
 from pymongo.server_description import ServerDescription
-from pymongo.srv_resolver import _SrvResolver

 if TYPE_CHECKING:
-    from pymongo.asynchronous.pool import AsyncConnection, Pool, _CancellationContext
+    from pymongo.asynchronous.pool import (  # type: ignore[attr-defined]
+        AsyncConnection,
+        Pool,
+        _CancellationContext,
+    )
     from pymongo.asynchronous.settings import TopologySettings
     from pymongo.asynchronous.topology import Topology
@@ -112,9 +116,9 @@ async def close(self) -> None:
         """
         self.gc_safe_close()

-    async def join(self, timeout: Optional[int] = None) -> None:
+    async def join(self) -> None:
         """Wait for the monitor to stop."""
-        await self._executor.join(timeout)
+        await self._executor.join()

     def request_check(self) -> None:
         """If the monitor is sleeping, wake it soon."""
@@ -189,6 +193,11 @@ def gc_safe_close(self) -> None:
         self._rtt_monitor.gc_safe_close()
         self.cancel_check()

+    async def join(self) -> None:
+        await asyncio.gather(
+            self._executor.join(), self._rtt_monitor.join(), return_exceptions=True
+        )  # type: ignore[func-returns-value]
+
     async def close(self) -> None:
         self.gc_safe_close()
         await self._rtt_monitor.close()
@@ -250,15 +259,7 @@ async def _check_server(self) -> ServerDescription:
         self._conn_id = None
         start = time.monotonic()
         try:
-            try:
-                return await self._check_once()
-            except (OperationFailure, NotPrimaryError) as exc:
-                # Update max cluster time even when hello fails.
-                details = cast(Mapping[str, Any], exc.details)
-                await self._topology.receive_cluster_time(details.get("$clusterTime"))
-                raise
-            except asyncio.CancelledError:
-                raise
+            return await self._check_once()
         except ReferenceError:
             raise
         except Exception as error:
@@ -273,6 +274,7 @@ async def _check_server(self) -> ServerDescription:
             if _SDAM_LOGGER.isEnabledFor(logging.DEBUG):
                 _debug_log(
                     _SDAM_LOGGER,
+                    message=_SDAMStatusMessage.HEARTBEAT_FAIL,
                     topologyId=self._topology._topology_id,
                     serverHost=address[0],
                     serverPort=address[1],
@@ -280,7 +282,6 @@ async def _check_server(self) -> ServerDescription:
                     durationMS=duration * 1000,
                     failure=error,
                     driverConnectionId=self._conn_id,
-                    message=_SDAMStatusMessage.HEARTBEAT_FAIL,
                 )
             await self._reset_connection()
             if isinstance(error, _OperationCancelled):
@@ -312,13 +313,13 @@ async def _check_once(self) -> ServerDescription:
         if _SDAM_LOGGER.isEnabledFor(logging.DEBUG):
             _debug_log(
                 _SDAM_LOGGER,
+                message=_SDAMStatusMessage.HEARTBEAT_START,
                 topologyId=self._topology._topology_id,
                 driverConnectionId=conn.id,
                 serverConnectionId=conn.server_connection_id,
                 serverHost=address[0],
                 serverPort=address[1],
                 awaited=awaited,
-                message=_SDAMStatusMessage.HEARTBEAT_START,
             )
         self._cancel_context = conn.cancel_context
@@ -338,6 +339,7 @@ async def _check_once(self) -> ServerDescription:
         if _SDAM_LOGGER.isEnabledFor(logging.DEBUG):
             _debug_log(
                 _SDAM_LOGGER,
+                message=_SDAMStatusMessage.HEARTBEAT_SUCCESS,
                 topologyId=self._topology._topology_id,
                 driverConnectionId=conn.id,
                 serverConnectionId=conn.server_connection_id,
@@ -346,16 +348,14 @@ async def _check_once(self) -> ServerDescription:
                 awaited=awaited,
                 durationMS=round_trip_time * 1000,
                 reply=response.document,
-                message=_SDAMStatusMessage.HEARTBEAT_SUCCESS,
             )
         return sd

-    async def _check_with_socket(self, conn: AsyncConnection) -> tuple[Hello, float]:
+    async def _check_with_socket(self, conn: AsyncConnection) -> tuple[Hello, float]:  # type: ignore[type-arg]
         """Return (Hello, round_trip_time).

         Can raise ConnectionFailure or OperationFailure.
         """
-        cluster_time = self._topology.max_cluster_time()
         start = time.monotonic()
         if conn.more_to_come:
             # Read the next streaming hello (MongoDB 4.4+).
@@ -365,13 +365,12 @@ async def _check_with_socket(self, conn: AsyncConnection) -> tuple[Hello, float]
         ):
             # Initiate streaming hello (MongoDB 4.4+).
             response = await conn._hello(
-                cluster_time,
                 self._server_description.topology_version,
                 self._settings.heartbeat_frequency,
             )
         else:
             # New connection handshake or polling hello (MongoDB <4.4).
-            response = await conn._hello(cluster_time, None, None)
+            response = await conn._hello(None, None)
         duration = _monotonic_duration(start)
         return response, duration
@@ -400,7 +399,7 @@ async def _run(self) -> None:
             # Don't poll right after creation, wait 60 seconds first
             if time.monotonic() < self._startup_time + common.MIN_SRV_RESCAN_INTERVAL:
                 return
-            seedlist = self._get_seedlist()
+            seedlist = await self._get_seedlist()
             if seedlist:
                 self._seedlist = seedlist
                 try:
@@ -409,7 +408,7 @@ async def _run(self) -> None:
                 # Topology was garbage-collected.
                 await self.close()

-    def _get_seedlist(self) -> Optional[list[tuple[str, Any]]]:
+    async def _get_seedlist(self) -> Optional[list[tuple[str, Any]]]:
         """Poll SRV records for a seedlist.

         Returns a list of ServerDescriptions.
@@ -420,18 +419,17 @@ def _get_seedlist(self) -> Optional[list[tuple[str, Any]]]:
                 self._settings.pool_options.connect_timeout,
                 self._settings.srv_service_name,
             )
-            seedlist, ttl = resolver.get_hosts_and_min_ttl()
+            seedlist, ttl = await resolver.get_hosts_and_min_ttl()
            if len(seedlist) == 0:
                # As per the spec: this should be treated as a failure.
                raise Exception
-        except asyncio.CancelledError:
-            raise
-        except Exception:
+        except Exception as exc:
            # As per the spec, upon encountering an error:
            # - An error must not be raised
            # - SRV records must be rescanned every heartbeatFrequencyMS
            # - Topology must be left unchanged
            self.request_check()
+            _debug_log(_SDAM_LOGGER, message="SRV monitor check failed", failure=repr(exc))
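+            # e.g. a transient DNS outage is only logged here; the topology is
+            # left as-is and the SRV records are rescanned on the next interval.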
            return None
        else:
            self._executor.update_interval(max(ttl, common.MIN_SRV_RESCAN_INTERVAL))
@@ -489,8 +487,6 @@ async def _run(self) -> None:
            except ReferenceError:
                # Topology was garbage-collected.
                await self.close()
-            except asyncio.CancelledError:
-                raise
            except Exception:
                await self._pool.reset()
diff --git a/pymongo/asynchronous/network.py b/pymongo/asynchronous/network.py
index d17aead120..5a5dc7fa2c 100644
--- a/pymongo/asynchronous/network.py
+++ b/pymongo/asynchronous/network.py
@@ -4,7 +4,7 @@
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
 #
-# http://www.apache.org/licenses/LICENSE-2.0
+# https://www.apache.org/licenses/LICENSE-2.0
 #
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
@@ -17,7 +17,6 @@

 import datetime
 import logging
-import time
 from typing import (
     TYPE_CHECKING,
     Any,
@@ -31,20 +30,16 @@
 from bson import _decode_all_selective
 from pymongo import _csot, helpers_shared, message
-from pymongo.common import MAX_MESSAGE_SIZE
-from pymongo.compression_support import _NO_COMPRESSION, decompress
+from pymongo.compression_support import _NO_COMPRESSION
 from pymongo.errors import (
     NotPrimaryError,
     OperationFailure,
-    ProtocolError,
 )
 from pymongo.logger import _COMMAND_LOGGER, _CommandStatusMessage, _debug_log
-from pymongo.message import _UNPACK_REPLY, _OpMsg, _OpReply
+from pymongo.message import _OpMsg
 from pymongo.monitoring import _is_speculative_authenticate
 from pymongo.network_layer import (
-    _UNPACK_COMPRESSION_HEADER,
-    _UNPACK_HEADER,
-    async_receive_data,
+    async_receive_message,
     async_sendall,
 )
@@ -71,7 +66,7 @@ async def command(
     read_preference: Optional[_ServerMode],
     codec_options: CodecOptions[_DocumentType],
     session: Optional[AsyncClientSession],
-    client: Optional[AsyncMongoClient],
+    client: Optional[AsyncMongoClient[Any]],
     check: bool = True,
     allowable_errors: Optional[Sequence[Union[str, int]]] = None,
     address: Optional[_Address] = None,
@@ -168,8 +163,8 @@ async def command(
         if _COMMAND_LOGGER.isEnabledFor(logging.DEBUG):
             _debug_log(
                 _COMMAND_LOGGER,
-                clientId=client._topology_settings._topology_id,
                 message=_CommandStatusMessage.STARTED,
+                clientId=client._topology_settings._topology_id,
                 command=spec,
                 commandName=next(iter(spec)),
                 databaseName=dbname,
@@ -194,19 +189,23 @@ async def command(
     )
     try:
-        await async_sendall(conn.conn, msg)
+        await async_sendall(conn.conn.get_conn, msg)
         if use_op_msg and unacknowledged:
             # Unacknowledged, fake a successful command response.
             reply = None
             response_doc: _DocumentOut = {"ok": 1}
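+            # (w=0 writes receive no server reply, so the synthetic {"ok": 1}
+            # above keeps command monitoring and logging uniform downstream.)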
        else:
-            reply = await receive_message(conn, request_id)
+            reply = await async_receive_message(conn, request_id)
            conn.more_to_come = reply.more_to_come
            unpacked_docs = reply.unpack_response(
                codec_options=codec_options, user_fields=user_fields
            )

            response_doc = unpacked_docs[0]
+            if not conn.ready:
+                cluster_time = response_doc.get("$clusterTime")
+                if cluster_time:
+                    conn._cluster_time = cluster_time
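+            # Stash the handshake's $clusterTime on the connection; the pool
+            # gossips it to the client's topology once the connection is
+            # fully established.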
        if client:
            await client._process_response(response_doc, session)
        if check:
@@ -226,8 +225,8 @@ async def command(
        if _COMMAND_LOGGER.isEnabledFor(logging.DEBUG):
            _debug_log(
                _COMMAND_LOGGER,
-                clientId=client._topology_settings._topology_id,
                message=_CommandStatusMessage.FAILED,
+                clientId=client._topology_settings._topology_id,
                durationMS=duration,
                failure=failure,
                commandName=next(iter(spec)),
@@ -260,8 +259,8 @@ async def command(
        if _COMMAND_LOGGER.isEnabledFor(logging.DEBUG):
            _debug_log(
                _COMMAND_LOGGER,
-                clientId=client._topology_settings._topology_id,
                message=_CommandStatusMessage.SUCCEEDED,
+                clientId=client._topology_settings._topology_id,
                durationMS=duration,
                reply=response_doc,
                commandName=next(iter(spec)),
@@ -297,47 +296,3 @@ async def command(
    )

    return response_doc  # type: ignore[return-value]
-
-
-async def receive_message(
-    conn: AsyncConnection, request_id: Optional[int], max_message_size: int = MAX_MESSAGE_SIZE
-) -> Union[_OpReply, _OpMsg]:
-    """Receive a raw BSON message or raise socket.error."""
-    if _csot.get_timeout():
-        deadline = _csot.get_deadline()
-    else:
-        timeout = conn.conn.gettimeout()
-        if timeout:
-            deadline = time.monotonic() + timeout
-        else:
-            deadline = None
-    # Ignore the response's request id.
-    length, _, response_to, op_code = _UNPACK_HEADER(await async_receive_data(conn, 16, deadline))
-    # No request_id for exhaust cursor "getMore".
-    if request_id is not None:
-        if request_id != response_to:
-            raise ProtocolError(f"Got response id {response_to!r} but expected {request_id!r}")
-    if length <= 16:
-        raise ProtocolError(
-            f"Message length ({length!r}) not longer than standard message header size (16)"
-        )
-    if length > max_message_size:
-        raise ProtocolError(
-            f"Message length ({length!r}) is larger than server max "
-            f"message size ({max_message_size!r})"
-        )
-    if op_code == 2012:
-        op_code, _, compressor_id = _UNPACK_COMPRESSION_HEADER(
-            await async_receive_data(conn, 9, deadline)
-        )
-        data = decompress(await async_receive_data(conn, length - 25, deadline), compressor_id)
-    else:
-        data = await async_receive_data(conn, length - 16, deadline)
-
-    try:
-        unpack_reply = _UNPACK_REPLY[op_code]
-    except KeyError:
-        raise ProtocolError(
-            f"Got opcode {op_code!r} but expected {_UNPACK_REPLY.keys()!r}"
-        ) from None
-    return unpack_reply(data)
diff --git a/pymongo/asynchronous/pool.py b/pymongo/asynchronous/pool.py
index 5dc5675a0a..f521091e3c 100644
--- a/pymongo/asynchronous/pool.py
+++ b/pymongo/asynchronous/pool.py
@@ -4,7 +4,7 @@
 # may not use this file except in compliance with the License. You
 # may obtain a copy of the License at
 #
-# http://www.apache.org/licenses/LICENSE-2.0
+# https://www.apache.org/licenses/LICENSE-2.0
 #
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
@@ -17,11 +17,8 @@

 import asyncio
 import collections
 import contextlib
-import functools
 import logging
 import os
-import socket
-import ssl
 import sys
 import time
 import weakref
@@ -41,7 +38,7 @@
 from pymongo import _csot, helpers_shared
 from pymongo.asynchronous.client_session import _validate_session_write_concern
 from pymongo.asynchronous.helpers import _handle_reauth
-from pymongo.asynchronous.network import command, receive_message
+from pymongo.asynchronous.network import command
 from pymongo.common import (
     MAX_BSON_SIZE,
     MAX_MESSAGE_SIZE,
@@ -52,18 +49,16 @@
 from pymongo.errors import (  # type:ignore[attr-defined]
     AutoReconnect,
     ConfigurationError,
-    ConnectionFailure,
     DocumentTooLarge,
     ExecutionTimeout,
     InvalidOperation,
-    NetworkTimeout,
     NotPrimaryError,
     OperationFailure,
     PyMongoError,
     WaitQueueTimeoutError,
-    _CertificateError,
 )
 from pymongo.hello import Hello, HelloCompat
+from pymongo.helpers_shared import _get_timeout_details, format_timeout_details
 from pymongo.lock import (
     _async_cond_wait,
     _async_create_condition,
@@ -79,13 +74,18 @@
     ConnectionCheckOutFailedReason,
     ConnectionClosedReason,
 )
-from pymongo.network_layer import async_sendall
+from pymongo.network_layer import AsyncNetworkingInterface, async_receive_message, async_sendall
 from pymongo.pool_options import PoolOptions
+from pymongo.pool_shared import (
+    SSLErrors,
+    _CancellationContext,
+    _configured_protocol_interface,
+    _raise_connection_failure,
+)
 from pymongo.read_preferences import ReadPreference
 from pymongo.server_api import _add_to_command
 from pymongo.server_type import SERVER_TYPE
 from pymongo.socket_checker import SocketChecker
-from pymongo.ssl_support import HAS_SNI, SSLError

 if TYPE_CHECKING:
     from bson import CodecOptions
@@ -99,10 +99,9 @@
         ZstdContext,
     )
     from pymongo.message import _OpMsg, _OpReply
-    from pymongo.pyopenssl_context import _sslConn
     from pymongo.read_concern import ReadConcern
     from pymongo.read_preferences import _ServerMode
-    from pymongo.typings import ClusterTime, _Address, _CollationIn
+    from pymongo.typings import _Address, _CollationIn
     from pymongo.write_concern import WriteConcern

 try:
@@ -123,133 +122,6 @@ def _set_non_inheritable_non_atomic(fd: int) -> None:  # noqa: ARG001

 _IS_SYNC = False

-_MAX_TCP_KEEPIDLE = 120
-_MAX_TCP_KEEPINTVL = 10
-_MAX_TCP_KEEPCNT = 9
-
-if sys.platform == "win32":
-    try:
-        import _winreg as winreg
-    except ImportError:
-        import winreg
-
-    def _query(key, name, default):
-        try:
-            value, _ = winreg.QueryValueEx(key, name)
-            # Ensure the value is a number or raise ValueError.
-            return int(value)
-        except (OSError, ValueError):
-            # QueryValueEx raises OSError when the key does not exist (i.e.
-            # the system is using the Windows default value).
-            return default
-
-    try:
-        with winreg.OpenKey(
-            winreg.HKEY_LOCAL_MACHINE, r"SYSTEM\CurrentControlSet\Services\Tcpip\Parameters"
-        ) as key:
-            _WINDOWS_TCP_IDLE_MS = _query(key, "KeepAliveTime", 7200000)
-            _WINDOWS_TCP_INTERVAL_MS = _query(key, "KeepAliveInterval", 1000)
-    except OSError:
-        # We could not check the default values because winreg.OpenKey failed.
-        # Assume the system is using the default values.
-        _WINDOWS_TCP_IDLE_MS = 7200000
-        _WINDOWS_TCP_INTERVAL_MS = 1000
-
-    def _set_keepalive_times(sock):
-        idle_ms = min(_WINDOWS_TCP_IDLE_MS, _MAX_TCP_KEEPIDLE * 1000)
-        interval_ms = min(_WINDOWS_TCP_INTERVAL_MS, _MAX_TCP_KEEPINTVL * 1000)
-        if idle_ms < _WINDOWS_TCP_IDLE_MS or interval_ms < _WINDOWS_TCP_INTERVAL_MS:
-            sock.ioctl(socket.SIO_KEEPALIVE_VALS, (1, idle_ms, interval_ms))
-
-else:
-
-    def _set_tcp_option(sock: socket.socket, tcp_option: str, max_value: int) -> None:
-        if hasattr(socket, tcp_option):
-            sockopt = getattr(socket, tcp_option)
-            try:
-                # PYTHON-1350 - NetBSD doesn't implement getsockopt for
-                # TCP_KEEPIDLE and friends. Don't attempt to set the
-                # values there.
-                default = sock.getsockopt(socket.IPPROTO_TCP, sockopt)
-                if default > max_value:
-                    sock.setsockopt(socket.IPPROTO_TCP, sockopt, max_value)
-            except OSError:
-                pass
-
-    def _set_keepalive_times(sock: socket.socket) -> None:
-        _set_tcp_option(sock, "TCP_KEEPIDLE", _MAX_TCP_KEEPIDLE)
-        _set_tcp_option(sock, "TCP_KEEPINTVL", _MAX_TCP_KEEPINTVL)
-        _set_tcp_option(sock, "TCP_KEEPCNT", _MAX_TCP_KEEPCNT)
-
-
-def _raise_connection_failure(
-    address: Any,
-    error: Exception,
-    msg_prefix: Optional[str] = None,
-    timeout_details: Optional[dict[str, float]] = None,
-) -> NoReturn:
-    """Convert a socket.error to ConnectionFailure and raise it."""
-    host, port = address
-    # If connecting to a Unix socket, port will be None.
-    if port is not None:
-        msg = "%s:%d: %s" % (host, port, error)
-    else:
-        msg = f"{host}: {error}"
-    if msg_prefix:
-        msg = msg_prefix + msg
-    if "configured timeouts" not in msg:
-        msg += format_timeout_details(timeout_details)
-    if isinstance(error, socket.timeout):
-        raise NetworkTimeout(msg) from error
-    elif isinstance(error, SSLError) and "timed out" in str(error):
-        # Eventlet does not distinguish TLS network timeouts from other
-        # SSLErrors (https://github.com/eventlet/eventlet/issues/692).
-        # Luckily, we can work around this limitation because the phrase
-        # 'timed out' appears in all the timeout related SSLErrors raised.
-        raise NetworkTimeout(msg) from error
-    else:
-        raise AutoReconnect(msg) from error
-
-
-def _get_timeout_details(options: PoolOptions) -> dict[str, float]:
-    details = {}
-    timeout = _csot.get_timeout()
-    socket_timeout = options.socket_timeout
-    connect_timeout = options.connect_timeout
-    if timeout:
-        details["timeoutMS"] = timeout * 1000
-    if socket_timeout and not timeout:
-        details["socketTimeoutMS"] = socket_timeout * 1000
-    if connect_timeout:
-        details["connectTimeoutMS"] = connect_timeout * 1000
-    return details
-
-
-def format_timeout_details(details: Optional[dict[str, float]]) -> str:
-    result = ""
-    if details:
-        result += " (configured timeouts:"
-        for timeout in ["socketTimeoutMS", "timeoutMS", "connectTimeoutMS"]:
-            if timeout in details:
-                result += f" {timeout}: {details[timeout]}ms,"
-        result = result[:-1]
-        result += ")"
-    return result
-
-
-class _CancellationContext:
-    def __init__(self) -> None:
-        self._cancelled = False
-
-    def cancel(self) -> None:
-        """Cancel this context."""
-        self._cancelled = True
-
-    @property
-    def cancelled(self) -> bool:
-        """Was cancel called?"""
-        return self._cancelled
-

 class AsyncConnection:
     """Store a connection with some metadata.
@@ -258,15 +130,22 @@ class AsyncConnection:
     :param pool: a Pool instance
     :param address: the server's (host, port)
     :param id: the id of this socket in its pool
+    :param is_sdam: SDAM connections do not call hello on creation
     """

     def __init__(
-        self, conn: Union[socket.socket, _sslConn], pool: Pool, address: tuple[str, int], id: int
+        self,
+        conn: AsyncNetworkingInterface,
+        pool: Pool,
+        address: tuple[str, int],
+        id: int,
+        is_sdam: bool,
     ):
         self.pool_ref = weakref.ref(pool)
         self.conn = conn
         self.address = address
         self.id = id
+        self.is_sdam = is_sdam
         self.closed = False
         self.last_checkin_time = time.monotonic()
         self.performed_handshake = False
@@ -310,16 +189,18 @@ def __init__(
         self.connect_rtt = 0.0
         self._client_id = pool._client_id
         self.creation_time = time.monotonic()
+        # For gossiping $clusterTime from the connection handshake to the client.
+        self._cluster_time = None

     def set_conn_timeout(self, timeout: Optional[float]) -> None:
         """Cache last timeout to avoid duplicate calls to conn.settimeout."""
         if timeout == self.last_timeout:
             return
         self.last_timeout = timeout
-        self.conn.settimeout(timeout)
+        self.conn.get_conn.settimeout(timeout)

     def apply_timeout(
-        self, client: AsyncMongoClient, cmd: Optional[MutableMapping[str, Any]]
+        self, client: AsyncMongoClient[Any], cmd: Optional[MutableMapping[str, Any]]
     ) -> Optional[float]:
         # CSOT: use remaining timeout when set.
         timeout = _csot.remaining()
@@ -362,7 +243,7 @@ async def unpin(self) -> None:
         if pool:
             await pool.checkin(self)
         else:
-            self.close_conn(ConnectionClosedReason.STALE)
+            await self.close_conn(ConnectionClosedReason.STALE)

     def hello_cmd(self) -> dict[str, Any]:
         # Handshake spec requires us to use OP_MSG+hello command for the
@@ -373,12 +254,11 @@ def hello_cmd(self) -> dict[str, Any]:
         else:
             return {HelloCompat.LEGACY_CMD: 1, "helloOk": True}

-    async def hello(self) -> Hello:
-        return await self._hello(None, None, None)
+    async def hello(self) -> Hello[dict[str, Any]]:
+        return await self._hello(None, None)

     async def _hello(
         self,
-        cluster_time: Optional[ClusterTime],
         topology_version: Optional[Any],
         heartbeat_frequency: Optional[int],
     ) -> Hello[dict[str, Any]]:
@@ -401,9 +281,6 @@ async def _hello(
         if self.opts.connect_timeout:
             self.set_conn_timeout(self.opts.connect_timeout + heartbeat_frequency)

-        if not performing_handshake and cluster_time is not None:
-            cmd["$clusterTime"] = cluster_time
-
         creds = self.opts._credentials
         if creds:
             if creds.mechanism == "DEFAULT" and creds.username:
@@ -479,7 +356,7 @@ async def command(
         dbname: str,
         spec: MutableMapping[str, Any],
         read_preference: _ServerMode = ReadPreference.PRIMARY,
-        codec_options: CodecOptions = DEFAULT_CODEC_OPTIONS,
+        codec_options: CodecOptions[Mapping[str, Any]] = DEFAULT_CODEC_OPTIONS,  # type: ignore[assignment]
         check: bool = True,
         allowable_errors: Optional[Sequence[Union[str, int]]] = None,
         read_concern: Optional[ReadConcern] = None,
@@ -487,7 +364,7 @@ async def command(
         parse_write_concern_error: bool = False,
         collation: Optional[_CollationIn] = None,
         session: Optional[AsyncClientSession] = None,
-        client: Optional[AsyncMongoClient] = None,
+        client: Optional[AsyncMongoClient[Any]] = None,
         retryable_write: bool = False,
         publish_events: bool = True,
         user_fields: Optional[Mapping[str, Any]] = None,
@@ -539,7 +416,7 @@ async def command(
                 spec,
                 self.is_mongos,
                 read_preference,
-                codec_options,
+                codec_options,  # type: ignore[arg-type]
                 session,
                 client,
                 check,
@@ -559,9 +436,9 @@ async def command(
             )
         except (OperationFailure, NotPrimaryError):
             raise
-        # Catch socket.error, KeyboardInterrupt, etc. and close ourselves.
+        # Catch socket.error, KeyboardInterrupt, CancelledError, etc. and close ourselves.
         except BaseException as error:
-            self._raise_connection_failure(error)
+            await self._raise_connection_failure(error)

     async def send_message(self, message: bytes, max_doc_size: int) -> None:
         """Send a raw BSON message or raise ConnectionFailure.
@@ -575,9 +452,10 @@ async def send_message(self, message: bytes, max_doc_size: int) -> None:
             )

         try:
-            await async_sendall(self.conn, message)
+            await async_sendall(self.conn.get_conn, message)
+        # Catch KeyboardInterrupt, CancelledError, etc. and cleanup.
         except BaseException as error:
-            self._raise_connection_failure(error)
+            await self._raise_connection_failure(error)

     async def receive_message(self, request_id: Optional[int]) -> Union[_OpReply, _OpMsg]:
         """Receive a raw BSON message or raise ConnectionFailure.
@@ -585,9 +463,10 @@ async def receive_message(self, request_id: Optional[int]) -> Union[_OpReply, _O
         If any exception is raised, the socket is closed.
         """
         try:
-            return await receive_message(self, request_id, self.max_message_size)
+            return await async_receive_message(self, request_id, self.max_message_size)
+        # Catch KeyboardInterrupt, CancelledError, etc. and cleanup.
         except BaseException as error:
-            self._raise_connection_failure(error)
+            await self._raise_connection_failure(error)

     def _raise_if_not_writable(self, unacknowledged: bool) -> None:
         """Raise NotPrimaryError on unacknowledged write if this socket is not
@@ -609,7 +488,7 @@ async def unack_write(self, msg: bytes, max_doc_size: int) -> None:
         await self.send_message(msg, max_doc_size)

     async def write_command(
-        self, request_id: int, msg: bytes, codec_options: CodecOptions
+        self, request_id: int, msg: bytes, codec_options: CodecOptions[Mapping[str, Any]]
     ) -> dict[str, Any]:
         """Send "insert" etc. command, returning response as a dict.
@@ -652,8 +531,8 @@ async def authenticate(self, reauthenticate: bool = False) -> None:
             if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG):
                 _debug_log(
                     _CONNECTION_LOGGER,
-                    clientId=self._client_id,
                     message=_ConnectionStatusMessage.CONN_READY,
+                    clientId=self._client_id,
                     serverHost=self.address[0],
                     serverPort=self.address[1],
                     driverConnectionId=self.id,
@@ -661,7 +540,7 @@ async def authenticate(self, reauthenticate: bool = False) -> None:
             )

     def validate_session(
-        self, client: Optional[AsyncMongoClient], session: Optional[AsyncClientSession]
+        self, client: Optional[AsyncMongoClient[Any]], session: Optional[AsyncClientSession]
     ) -> None:
         """Validate this session before use with client.
@@ -673,11 +552,11 @@ def validate_session(
                     "Can only use session with the AsyncMongoClient that started it"
                 )

-    def close_conn(self, reason: Optional[str]) -> None:
+    async def close_conn(self, reason: Optional[str]) -> None:
         """Close this connection with a reason."""
         if self.closed:
             return
-        self._close_conn()
+        await self._close_conn()
         if reason:
             if self.enabled_for_cmap:
                 assert self.listeners is not None
@@ -685,8 +564,8 @@ def close_conn(self, reason: Optional[str]) -> None:
             if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG):
                 _debug_log(
                     _CONNECTION_LOGGER,
-                    clientId=self._client_id,
                     message=_ConnectionStatusMessage.CONN_CLOSED,
+                    clientId=self._client_id,
                     serverHost=self.address[0],
                     serverPort=self.address[1],
                     driverConnectionId=self.id,
@@ -694,7 +573,7 @@ def close_conn(self, reason: Optional[str]) -> None:
                     error=reason,
                 )

-    def _close_conn(self) -> None:
+    async def _close_conn(self) -> None:
         """Close this connection."""
         if self.closed:
             return
@@ -703,21 +582,22 @@ def _close_conn(self) -> None:
         # Note: We catch exceptions to avoid spurious errors on interpreter
         # shutdown.
         try:
-            self.conn.close()
-        except asyncio.CancelledError:
-            raise
+            await self.conn.close()
         except Exception:  # noqa: S110
             pass

     def conn_closed(self) -> bool:
         """Return True if we know socket has been closed, False otherwise."""
-        return self.socket_checker.socket_closed(self.conn)
+        if _IS_SYNC:
+            return self.socket_checker.socket_closed(self.conn.get_conn)
+        else:
+            return self.conn.is_closing()
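+        # In async mode the networking interface tracks its own lifecycle, so
+        # the select()-based socket checker is only needed on the sync path.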

     def send_cluster_time(
         self,
         command: MutableMapping[str, Any],
         session: Optional[AsyncClientSession],
-        client: Optional[AsyncMongoClient],
+        client: Optional[AsyncMongoClient[Any]],
     ) -> None:
         """Add $clusterTime."""
         if client:
@@ -738,7 +618,7 @@ def idle_time_seconds(self) -> float:
         """Seconds since this socket was last checked into its pool."""
         return time.monotonic() - self.last_checkin_time

-    def _raise_connection_failure(self, error: BaseException) -> NoReturn:
+    async def _raise_connection_failure(self, error: BaseException) -> NoReturn:
         # Catch *all* exceptions from socket methods and close the socket. In
         # regular Python, socket operations only raise socket.error, even if
         # the underlying cause was a Ctrl-C: a signal raised during socket.recv
@@ -748,7 +628,7 @@ def _raise_connection_failure(self, error: BaseException) -> NoReturn:
         # signals and throws KeyboardInterrupt into the current frame on the
         # main thread.
         #
-        # But in Gevent and Eventlet, the polling mechanism (epoll, kqueue,
+        # But in Gevent, the polling mechanism (epoll, kqueue,
         # ..) is called in Python code, which experiences the signal as a
         # KeyboardInterrupt from the start, rather than as an initial
         # socket.error, so we catch that, close the socket, and reraise it.
@@ -758,9 +638,9 @@ def _raise_connection_failure(self, error: BaseException) -> NoReturn:
             reason = None
         else:
             reason = ConnectionClosedReason.ERROR
-        self.close_conn(reason)
+        await self.close_conn(reason)
         # SSLError from PyOpenSSL inherits directly from Exception.
-        if isinstance(error, (IOError, OSError, SSLError)):
+        if isinstance(error, (IOError, OSError, *SSLErrors)):
             details = _get_timeout_details(self.opts)
             _raise_connection_failure(self.address, error, timeout_details=details)
         else:
@@ -783,145 +663,6 @@ def __repr__(self) -> str:
         )

-def _create_connection(address: _Address, options: PoolOptions) -> socket.socket:
-    """Given (host, port) and PoolOptions, connect and return a socket object.
-
-    Can raise socket.error.
-
-    This is a modified version of create_connection from CPython >= 2.7.
-    """
-    host, port = address
-
-    # Check if dealing with a unix domain socket
-    if host.endswith(".sock"):
-        if not hasattr(socket, "AF_UNIX"):
-            raise ConnectionFailure("UNIX-sockets are not supported on this system")
-        sock = socket.socket(socket.AF_UNIX)
-        # SOCK_CLOEXEC not supported for Unix sockets.
-        _set_non_inheritable_non_atomic(sock.fileno())
-        try:
-            sock.connect(host)
-            return sock
-        except OSError:
-            sock.close()
-            raise
-
-    # Don't try IPv6 if we don't support it. Also skip it if host
-    # is 'localhost' (::1 is fine). Avoids slow connect issues
-    # like PYTHON-356.
-    family = socket.AF_INET
-    if socket.has_ipv6 and host != "localhost":
-        family = socket.AF_UNSPEC
-
-    err = None
-    for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM):
-        af, socktype, proto, dummy, sa = res
-        # SOCK_CLOEXEC was new in CPython 3.2, and only available on a limited
-        # number of platforms (newer Linux and *BSD). Starting with CPython 3.4
-        # all file descriptors are created non-inheritable. See PEP 446.
-        try:
-            sock = socket.socket(af, socktype | getattr(socket, "SOCK_CLOEXEC", 0), proto)
-        except OSError:
-            # Can SOCK_CLOEXEC be defined even if the kernel doesn't support
-            # it?
-            sock = socket.socket(af, socktype, proto)
-        # Fallback when SOCK_CLOEXEC isn't available.
-        _set_non_inheritable_non_atomic(sock.fileno())
-        try:
-            sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
-            # CSOT: apply timeout to socket connect.
-            timeout = _csot.remaining()
-            if timeout is None:
-                timeout = options.connect_timeout
-            elif timeout <= 0:
-                raise socket.timeout("timed out")
-            sock.settimeout(timeout)
-            sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, True)
-            _set_keepalive_times(sock)
-            sock.connect(sa)
-            return sock
-        except OSError as e:
-            err = e
-            sock.close()
-
-    if err is not None:
-        raise err
-    else:
-        # This likely means we tried to connect to an IPv6 only
-        # host with an OS/kernel or Python interpreter that doesn't
-        # support IPv6. The test case is Jython2.5.1 which doesn't
-        # support IPv6 at all.
-        raise OSError("getaddrinfo failed")
-
-
-async def _configured_socket(
-    address: _Address, options: PoolOptions
-) -> Union[socket.socket, _sslConn]:
-    """Given (host, port) and PoolOptions, return a configured socket.
-
-    Can raise socket.error, ConnectionFailure, or _CertificateError.
-
-    Sets socket's SSL and timeout options.
-    """
-    sock = _create_connection(address, options)
-    ssl_context = options._ssl_context
-
-    if ssl_context is None:
-        sock.settimeout(options.socket_timeout)
-        return sock
-
-    host = address[0]
-    try:
-        # We have to pass hostname / ip address to wrap_socket
-        # to use SSLContext.check_hostname.
-        if HAS_SNI:
-            if _IS_SYNC:
-                ssl_sock = ssl_context.wrap_socket(sock, server_hostname=host)
-            else:
-                if hasattr(ssl_context, "a_wrap_socket"):
-                    ssl_sock = await ssl_context.a_wrap_socket(sock, server_hostname=host)  # type: ignore[assignment, misc]
-                else:
-                    loop = asyncio.get_running_loop()
-                    ssl_sock = await loop.run_in_executor(
-                        None,
-                        functools.partial(ssl_context.wrap_socket, sock, server_hostname=host),  # type: ignore[assignment, misc]
-                    )
-        else:
-            if _IS_SYNC:
-                ssl_sock = ssl_context.wrap_socket(sock)
-            else:
-                if hasattr(ssl_context, "a_wrap_socket"):
-                    ssl_sock = await ssl_context.a_wrap_socket(sock)  # type: ignore[assignment, misc]
-                else:
-                    loop = asyncio.get_running_loop()
-                    ssl_sock = await loop.run_in_executor(None, ssl_context.wrap_socket, sock)  # type: ignore[assignment, misc]
-    except _CertificateError:
-        sock.close()
-        # Raise _CertificateError directly like we do after match_hostname
-        # below.
-        raise
-    except (OSError, SSLError) as exc:
-        sock.close()
-        # We raise AutoReconnect for transient and permanent SSL handshake
-        # failures alike. Permanent handshake failures, like protocol
-        # mismatch, will be turned into ServerSelectionTimeoutErrors later.
-        details = _get_timeout_details(options)
-        _raise_connection_failure(address, exc, "SSL handshake failed: ", timeout_details=details)
-    if (
-        ssl_context.verify_mode
-        and not ssl_context.check_hostname
-        and not options.tls_allow_invalid_hostnames
-    ):
-        try:
-            ssl.match_hostname(ssl_sock.getpeercert(), hostname=host)  # type:ignore[attr-defined]
-        except _CertificateError:
-            ssl_sock.close()
-            raise
-
-    ssl_sock.settimeout(options.socket_timeout)
-    return ssl_sock
-
-
 class _PoolClosedError(PyMongoError):
     """Internal error raised when a thread tries to get a connection from a
     closed pool.
@@ -966,19 +707,19 @@ class PoolState:

 # Do *not* explicitly inherit from object or Jython won't call __del__
-# http://bugs.jython.org/issue1057
+# https://bugs.jython.org/issue1057
 class Pool:
     def __init__(
         self,
         address: _Address,
         options: PoolOptions,
-        handshake: bool = True,
+        is_sdam: bool = False,
         client_id: Optional[ObjectId] = None,
     ):
         """
         :param address: a (hostname, port) tuple
         :param options: a PoolOptions instance
-        :param handshake: whether to call hello for each new AsyncConnection
+        :param is_sdam: whether this pool serves an SDAM monitor; monitor connections skip the hello call on creation
         """
         if options.pause_enabled:
             self.state = PoolState.PAUSED
@@ -990,7 +731,7 @@ def __init__(
         # LIFO pool. Sockets are ordered on idle time. Sockets claimed
         # and returned to pool from the left side. Stale sockets removed
         # from the right side.
-        self.conns: collections.deque = collections.deque()
+        self.conns: collections.deque[AsyncConnection] = collections.deque()
         self.active_contexts: set[_CancellationContext] = set()
         self.lock = _async_create_lock()
         self._max_connecting_cond = _async_create_condition(self.lock)
@@ -1007,14 +748,14 @@ def __init__(
         self.pid = os.getpid()
         self.address = address
         self.opts = options
-        self.handshake = handshake
+        self.is_sdam = is_sdam
         # Don't publish events or logs in Monitor pools.
         self.enabled_for_cmap = (
-            self.handshake
+            not self.is_sdam
             and self.opts._event_listeners is not None
             and self.opts._event_listeners.enabled_for_cmap
         )
-        self.enabled_for_logging = self.handshake
+        self.enabled_for_logging = not self.is_sdam
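+        # Monitor (SDAM) pools are deliberately quiet: their connections skip
+        # the hello handshake and emit no CMAP events or connection logs.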
# Enforces: maxPoolSize @@ -1039,8 +780,8 @@ def __init__( if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _CONNECTION_LOGGER, - clientId=self._client_id, message=_ConnectionStatusMessage.POOL_CREATED, + clientId=self._client_id, serverHost=self.address[0], serverPort=self.address[1], **self.opts.non_default_options, @@ -1065,8 +806,8 @@ async def ready(self) -> None: if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _CONNECTION_LOGGER, - clientId=self._client_id, message=_ConnectionStatusMessage.POOL_READY, + clientId=self._client_id, serverHost=self.address[0], serverPort=self.address[1], ) @@ -1097,8 +838,8 @@ async def _reset( if service_id is None: sockets, self.conns = self.conns, collections.deque() else: - discard: collections.deque = collections.deque() - keep: collections.deque = collections.deque() + discard: collections.deque = collections.deque() # type: ignore[type-arg] + keep: collections.deque = collections.deque() # type: ignore[type-arg] for conn in self.conns: if conn.service_id == service_id: discard.append(conn) @@ -1122,16 +863,22 @@ async def _reset( # PoolClosedEvent but that reset() SHOULD close sockets *after* # publishing the PoolClearedEvent. if close: - for conn in sockets: - conn.close_conn(ConnectionClosedReason.POOL_CLOSED) + if not _IS_SYNC: + await asyncio.gather( + *[conn.close_conn(ConnectionClosedReason.POOL_CLOSED) for conn in sockets], # type: ignore[func-returns-value] + return_exceptions=True, + ) + else: + for conn in sockets: + await conn.close_conn(ConnectionClosedReason.POOL_CLOSED) if self.enabled_for_cmap: assert listeners is not None listeners.publish_pool_closed(self.address) if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _CONNECTION_LOGGER, - clientId=self._client_id, message=_ConnectionStatusMessage.POOL_CLOSED, + clientId=self._client_id, serverHost=self.address[0], serverPort=self.address[1], ) @@ -1147,14 +894,20 @@ async def _reset( if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _CONNECTION_LOGGER, - clientId=self._client_id, message=_ConnectionStatusMessage.POOL_CLEARED, + clientId=self._client_id, serverHost=self.address[0], serverPort=self.address[1], serviceId=service_id, ) - for conn in sockets: - conn.close_conn(ConnectionClosedReason.STALE) + if not _IS_SYNC: + await asyncio.gather( + *[conn.close_conn(ConnectionClosedReason.STALE) for conn in sockets], # type: ignore[func-returns-value] + return_exceptions=True, + ) + else: + for conn in sockets: + await conn.close_conn(ConnectionClosedReason.STALE) async def update_is_writable(self, is_writable: Optional[bool]) -> None: """Updates the is_writable attribute on all sockets currently in the @@ -1163,7 +916,7 @@ async def update_is_writable(self, is_writable: Optional[bool]) -> None: self.is_writable = is_writable async with self.lock: for _socket in self.conns: - _socket.update_is_writable(self.is_writable) + _socket.update_is_writable(self.is_writable) # type: ignore[arg-type] async def reset( self, service_id: Optional[ObjectId] = None, interrupt_connections: bool = False @@ -1193,13 +946,21 @@ async def remove_stale_sockets(self, reference_generation: int) -> None: return if self.opts.max_idle_time_seconds is not None: + close_conns = [] async with self.lock: while ( self.conns and self.conns[-1].idle_time_seconds() > self.opts.max_idle_time_seconds ): - conn = self.conns.pop() - 
conn.close_conn(ConnectionClosedReason.IDLE) + close_conns.append(self.conns.pop()) + if not _IS_SYNC: + await asyncio.gather( + *[conn.close_conn(ConnectionClosedReason.IDLE) for conn in close_conns], # type: ignore[func-returns-value] + return_exceptions=True, + ) + else: + for conn in close_conns: + await conn.close_conn(ConnectionClosedReason.IDLE) while True: async with self.size_cond: @@ -1219,14 +980,18 @@ async def remove_stale_sockets(self, reference_generation: int) -> None: self._pending += 1 incremented = True conn = await self.connect() + close_conn = False async with self.lock: # Close connection and return if the pool was reset during # socket creation or while acquiring the pool lock. if self.gen.get_overall() != reference_generation: - conn.close_conn(ConnectionClosedReason.STALE) - return - self.conns.appendleft(conn) - self.active_contexts.discard(conn.cancel_context) + close_conn = True + if not close_conn: + self.conns.appendleft(conn) + self.active_contexts.discard(conn.cancel_context) + if close_conn: + await conn.close_conn(ConnectionClosedReason.STALE) + return finally: if incremented: # Notify after adding the socket to the pool. @@ -1260,15 +1025,16 @@ async def connect(self, handler: Optional[_MongoClientErrorHandler] = None) -> A if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _CONNECTION_LOGGER, - clientId=self._client_id, message=_ConnectionStatusMessage.CONN_CREATED, + clientId=self._client_id, serverHost=self.address[0], serverPort=self.address[1], driverConnectionId=conn_id, ) try: - sock = await _configured_socket(self.address, self.opts) + networking_interface = await _configured_protocol_interface(self.address, self.opts) + # Catch KeyboardInterrupt, CancelledError, etc. and cleanup. except BaseException as error: async with self.lock: self.active_contexts.discard(tmp_context) @@ -1280,40 +1046,44 @@ async def connect(self, handler: Optional[_MongoClientErrorHandler] = None) -> A if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _CONNECTION_LOGGER, - clientId=self._client_id, message=_ConnectionStatusMessage.CONN_CLOSED, + clientId=self._client_id, serverHost=self.address[0], serverPort=self.address[1], driverConnectionId=conn_id, reason=_verbose_connection_error_reason(ConnectionClosedReason.ERROR), error=ConnectionClosedReason.ERROR, ) - if isinstance(error, (IOError, OSError, SSLError)): + if isinstance(error, (IOError, OSError, *SSLErrors)): details = _get_timeout_details(self.opts) _raise_connection_failure(self.address, error, timeout_details=details) raise - conn = AsyncConnection(sock, self, self.address, conn_id) # type: ignore[arg-type] + conn = AsyncConnection(networking_interface, self, self.address, conn_id, self.is_sdam) # type: ignore[arg-type] async with self.lock: self.active_contexts.add(conn.cancel_context) self.active_contexts.discard(tmp_context) if tmp_context.cancelled: conn.cancel_context.cancel() try: - if self.handshake: + if not self.is_sdam: await conn.hello() self.is_writable = conn.is_writable if handler: handler.contribute_socket(conn, completed_handshake=False) await conn.authenticate() + # Catch KeyboardInterrupt, CancelledError, etc. and cleanup. 
except BaseException: async with self.lock: self.active_contexts.discard(conn.cancel_context) - conn.close_conn(ConnectionClosedReason.ERROR) + await conn.close_conn(ConnectionClosedReason.ERROR) raise + if handler: + await handler.client._topology.receive_cluster_time(conn._cluster_time) + return conn @contextlib.asynccontextmanager @@ -1343,8 +1113,8 @@ async def checkout( if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _CONNECTION_LOGGER, - clientId=self._client_id, message=_ConnectionStatusMessage.CHECKOUT_STARTED, + clientId=self._client_id, serverHost=self.address[0], serverPort=self.address[1], ) @@ -1358,8 +1128,8 @@ async def checkout( if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _CONNECTION_LOGGER, - clientId=self._client_id, message=_ConnectionStatusMessage.CHECKOUT_SUCCEEDED, + clientId=self._client_id, serverHost=self.address[0], serverPort=self.address[1], driverConnectionId=conn.id, @@ -1369,6 +1139,7 @@ async def checkout( async with self.lock: self.active_contexts.add(conn.cancel_context) yield conn + # Catch KeyboardInterrupt, CancelledError, etc. and cleanup. except BaseException: # Exception in caller. Ensure the connection gets returned. # Note that when pinned is True, the session owns the @@ -1406,8 +1177,8 @@ def _raise_if_not_ready(self, checkout_started_time: float, emit_event: bool) -> if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _CONNECTION_LOGGER, - clientId=self._client_id, message=_ConnectionStatusMessage.CHECKOUT_FAILED, + clientId=self._client_id, serverHost=self.address[0], serverPort=self.address[1], reason="An error occurred while trying to establish a new connection", @@ -1440,8 +1211,8 @@ async def _get_conn( if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _CONNECTION_LOGGER, - clientId=self._client_id, message=_ConnectionStatusMessage.CHECKOUT_FAILED, + clientId=self._client_id, serverHost=self.address[0], serverPort=self.address[1], reason="Connection pool was closed", @@ -1505,7 +1276,7 @@ async def _get_conn( except IndexError: self._pending += 1 if conn: # We got a socket from the pool - if self._perished(conn): + if await self._perished(conn): conn = None continue else: # We need to create a new connection @@ -1515,10 +1286,11 @@ async def _get_conn( async with self._max_connecting_cond: self._pending -= 1 self._max_connecting_cond.notify() + # Catch KeyboardInterrupt, CancelledError, etc. and cleanup. except BaseException: if conn: # We checked out a socket but authentication failed. 
- conn.close_conn(ConnectionClosedReason.ERROR) + await conn.close_conn(ConnectionClosedReason.ERROR) async with self.size_cond: self.requests -= 1 if incremented: @@ -1535,8 +1307,8 @@ async def _get_conn( if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _CONNECTION_LOGGER, - clientId=self._client_id, message=_ConnectionStatusMessage.CHECKOUT_FAILED, + clientId=self._client_id, serverHost=self.address[0], serverPort=self.address[1], reason="An error occurred while trying to establish a new connection", @@ -1568,8 +1340,8 @@ async def checkin(self, conn: AsyncConnection) -> None: if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _CONNECTION_LOGGER, - clientId=self._client_id, message=_ConnectionStatusMessage.CHECKEDIN, + clientId=self._client_id, serverHost=self.address[0], serverPort=self.address[1], driverConnectionId=conn.id, @@ -1578,7 +1350,7 @@ async def checkin(self, conn: AsyncConnection) -> None: await self.reset_without_pause() else: if self.closed: - conn.close_conn(ConnectionClosedReason.POOL_CLOSED) + await conn.close_conn(ConnectionClosedReason.POOL_CLOSED) elif conn.closed: # CMAP requires the closed event be emitted after the check in. if self.enabled_for_cmap: @@ -1589,8 +1361,8 @@ async def checkin(self, conn: AsyncConnection) -> None: if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _CONNECTION_LOGGER, - clientId=self._client_id, message=_ConnectionStatusMessage.CONN_CLOSED, + clientId=self._client_id, serverHost=self.address[0], serverPort=self.address[1], driverConnectionId=conn.id, @@ -1598,17 +1370,20 @@ async def checkin(self, conn: AsyncConnection) -> None: error=ConnectionClosedReason.ERROR, ) else: + close_conn = False async with self.lock: # Hold the lock to ensure this section does not race with # Pool.reset(). if self.stale_generation(conn.generation, conn.service_id): - conn.close_conn(ConnectionClosedReason.STALE) + close_conn = True else: conn.update_last_checkin_time() conn.update_is_writable(bool(self.is_writable)) self.conns.appendleft(conn) # Notify any threads waiting to create a connection. self._max_connecting_cond.notify() + if close_conn: + await conn.close_conn(ConnectionClosedReason.STALE) async with self.size_cond: if txn: @@ -1620,7 +1395,7 @@ async def checkin(self, conn: AsyncConnection) -> None: self.operation_count -= 1 self.size_cond.notify() - def _perished(self, conn: AsyncConnection) -> bool: + async def _perished(self, conn: AsyncConnection) -> bool: """Return True and close the connection if it is "perished". 
This side-effecty function checks if this socket has been idle for @@ -1640,18 +1415,18 @@ def _perished(self, conn: AsyncConnection) -> bool: self.opts.max_idle_time_seconds is not None and idle_time_seconds > self.opts.max_idle_time_seconds ): - conn.close_conn(ConnectionClosedReason.IDLE) + await conn.close_conn(ConnectionClosedReason.IDLE) return True if self._check_interval_seconds is not None and ( self._check_interval_seconds == 0 or idle_time_seconds > self._check_interval_seconds ): if conn.conn_closed(): - conn.close_conn(ConnectionClosedReason.ERROR) + await conn.close_conn(ConnectionClosedReason.ERROR) return True if self.stale_generation(conn.generation, conn.service_id): - conn.close_conn(ConnectionClosedReason.STALE) + await conn.close_conn(ConnectionClosedReason.STALE) return True return False @@ -1667,8 +1442,8 @@ def _raise_wait_queue_timeout(self, checkout_started_time: float) -> NoReturn: if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _CONNECTION_LOGGER, - clientId=self._client_id, message=_ConnectionStatusMessage.CHECKOUT_FAILED, + clientId=self._client_id, serverHost=self.address[0], serverPort=self.address[1], reason="Wait queue timeout elapsed without a connection becoming available", @@ -1699,5 +1474,6 @@ def __del__(self) -> None: # Avoid ResourceWarnings in Python 3 # Close all sockets without calling reset() or close() because it is # not safe to acquire a lock in __del__. - for conn in self.conns: - conn.close_conn(None) + if _IS_SYNC: + for conn in self.conns: + conn.close_conn(None) # type: ignore[unused-coroutine] diff --git a/pymongo/asynchronous/server.py b/pymongo/asynchronous/server.py index 72f22584e2..f212306174 100644 --- a/pymongo/asynchronous/server.py +++ b/pymongo/asynchronous/server.py @@ -4,7 +4,7 @@ # may not use this file except in compliance with the License. You # may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -66,7 +66,7 @@ def __init__( monitor: Monitor, topology_id: Optional[ObjectId] = None, listeners: Optional[_EventListeners] = None, - events: Optional[ReferenceType[Queue]] = None, + events: Optional[ReferenceType[Queue[Any]]] = None, ) -> None: """Represent one MongoDB server.""" self._description = server_description @@ -108,10 +108,10 @@ async def close(self) -> None: if _SDAM_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _SDAM_LOGGER, + message=_SDAMStatusMessage.STOP_SERVER, topologyId=self._topology_id, serverHost=self._description.address[0], serverPort=self._description.address[1], - message=_SDAMStatusMessage.STOP_SERVER, ) await self._monitor.close() @@ -142,7 +142,7 @@ async def run_operation( read_preference: _ServerMode, listeners: Optional[_EventListeners], unpack_res: Callable[..., list[_DocumentOut]], - client: AsyncMongoClient, + client: AsyncMongoClient[Any], ) -> Response: """Run a _Query or _GetMore operation and return a Response object. 
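
The remove_stale_sockets() and checkin() hunks above share one pattern: staleness is decided while the pool lock is held, but the now-awaitable close_conn() only runs after the lock is released, so no task ever blocks on network I/O while waiting for the pool. A minimal runnable sketch of that pattern; SketchPool and SketchConn are stand-ins, not the real pymongo classes:

    import asyncio
    from collections import deque


    class SketchConn:
        def __init__(self, generation: int) -> None:
            self.generation = generation

        async def close_conn(self, reason: str) -> None:
            # Stand-in for closing the underlying transport (real network I/O).
            await asyncio.sleep(0)


    class SketchPool:
        def __init__(self) -> None:
            self.lock = asyncio.Lock()
            self.conns: deque = deque()
            self.generation = 1

        async def checkin(self, conn: SketchConn) -> None:
            close_conn = False
            async with self.lock:
                # Only bookkeeping happens under the lock.
                if conn.generation < self.generation:
                    close_conn = True  # decide here, but never await while locked
                else:
                    self.conns.appendleft(conn)
            if close_conn:
                # The awaitable close runs after the lock is released, so other
                # tasks waiting on the pool are not blocked behind network I/O.
                await conn.close_conn("stale")


    async def main() -> None:
        await SketchPool().checkin(SketchConn(generation=0))


    asyncio.run(main())
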
@@ -173,8 +173,8 @@ async def run_operation( if _COMMAND_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _COMMAND_LOGGER, - clientId=client._topology_settings._topology_id, message=_CommandStatusMessage.STARTED, + clientId=client._topology_settings._topology_id, command=cmd, commandName=next(iter(cmd)), databaseName=dbn, @@ -224,7 +224,7 @@ async def run_operation( if use_cmd: first = docs[0] await operation.client._process_response(first, operation.session) # type: ignore[misc, arg-type] - _check_command_response(first, conn.max_wire_version) + _check_command_response(first, conn.max_wire_version, pool_opts=conn.opts) # type:ignore[has-type] except Exception as exc: duration = datetime.now() - start if isinstance(exc, (NotPrimaryError, OperationFailure)): @@ -234,8 +234,8 @@ async def run_operation( if _COMMAND_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _COMMAND_LOGGER, - clientId=client._topology_settings._topology_id, message=_CommandStatusMessage.FAILED, + clientId=client._topology_settings._topology_id, durationMS=duration, failure=failure, commandName=next(iter(cmd)), @@ -278,8 +278,8 @@ async def run_operation( if _COMMAND_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _COMMAND_LOGGER, - clientId=client._topology_settings._topology_id, message=_CommandStatusMessage.SUCCEEDED, + clientId=client._topology_settings._topology_id, durationMS=duration, reply=res, commandName=next(iter(cmd)), diff --git a/pymongo/asynchronous/settings.py b/pymongo/asynchronous/settings.py index 1103e1bd18..9c2331971a 100644 --- a/pymongo/asynchronous/settings.py +++ b/pymongo/asynchronous/settings.py @@ -4,7 +4,7 @@ # may not use this file except in compliance with the License. You # may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -51,6 +51,7 @@ def __init__( srv_service_name: str = common.SRV_SERVICE_NAME, srv_max_hosts: int = 0, server_monitoring_mode: str = common.SERVER_MONITORING_MODE, + topology_id: Optional[ObjectId] = None, ): """Represent MongoClient's configuration. @@ -78,8 +79,10 @@ def __init__( self._srv_service_name = srv_service_name self._srv_max_hosts = srv_max_hosts or 0 self._server_monitoring_mode = server_monitoring_mode - - self._topology_id = ObjectId() + if topology_id is not None: + self._topology_id = topology_id + else: + self._topology_id = ObjectId() # Store the allocation traceback to catch unclosed clients in the # test suite. self._stack = "".join(traceback.format_stack()[:-2]) diff --git a/pymongo/asynchronous/srv_resolver.py b/pymongo/asynchronous/srv_resolver.py new file mode 100644 index 0000000000..9c4d9a9d57 --- /dev/null +++ b/pymongo/asynchronous/srv_resolver.py @@ -0,0 +1,155 @@ +# Copyright 2019-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you +# may not use this file except in compliance with the License. You +# may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. See the License for the specific language governing +# permissions and limitations under the License. 
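
The TopologySettings hunk above makes the topology id injectable rather than always minting a fresh ObjectId, so a caller that rebuilds its settings can keep a stable id in logs and SDAM events. A hedged sketch of the idea; SketchSettings is a stand-in, not the real class:

    from typing import Optional

    from bson import ObjectId


    class SketchSettings:
        def __init__(self, topology_id: Optional[ObjectId] = None) -> None:
            # Reuse the caller-supplied id when given; otherwise mint a new one.
            if topology_id is not None:
                self._topology_id = topology_id
            else:
                self._topology_id = ObjectId()


    first = SketchSettings()
    rebuilt = SketchSettings(topology_id=first._topology_id)
    assert rebuilt._topology_id == first._topology_id
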
+ +"""Support for resolving hosts and options from mongodb+srv:// URIs.""" +from __future__ import annotations + +import ipaddress +import random +from typing import TYPE_CHECKING, Any, Optional, Union + +from pymongo.common import CONNECT_TIMEOUT +from pymongo.errors import ConfigurationError + +if TYPE_CHECKING: + from dns import resolver + +_IS_SYNC = False + + +def _have_dnspython() -> bool: + try: + import dns # noqa: F401 + + return True + except ImportError: + return False + + +# dnspython can return bytes or str from various parts +# of its API depending on version. We always want str. +def maybe_decode(text: Union[str, bytes]) -> str: + if isinstance(text, bytes): + return text.decode() + return text + + +# PYTHON-2667 Lazily call dns.resolver methods for compatibility with eventlet. +async def _resolve(*args: Any, **kwargs: Any) -> resolver.Answer: + if _IS_SYNC: + from dns import resolver + + return resolver.resolve(*args, **kwargs) + else: + from dns import asyncresolver + + return await asyncresolver.resolve(*args, **kwargs) # type:ignore[return-value] + + +_INVALID_HOST_MSG = ( + "Invalid URI host: %s is not a valid hostname for 'mongodb+srv://'. " + "Did you mean to use 'mongodb://'?" +) + + +class _SrvResolver: + def __init__( + self, + fqdn: str, + connect_timeout: Optional[float], + srv_service_name: str, + srv_max_hosts: int = 0, + ): + self.__fqdn = fqdn + self.__srv = srv_service_name + self.__connect_timeout = connect_timeout or CONNECT_TIMEOUT + self.__srv_max_hosts = srv_max_hosts or 0 + # Validate the fully qualified domain name. + try: + ipaddress.ip_address(fqdn) + raise ConfigurationError(_INVALID_HOST_MSG % ("an IP address",)) + except ValueError: + pass + try: + split_fqdn = self.__fqdn.split(".") + self.__plist = split_fqdn[1:] if len(split_fqdn) > 2 else split_fqdn + except Exception: + raise ConfigurationError(_INVALID_HOST_MSG % (fqdn,)) from None + self.__slen = len(self.__plist) + self.nparts = len(split_fqdn) + + async def get_options(self) -> Optional[str]: + from dns import resolver + + try: + results = await _resolve(self.__fqdn, "TXT", lifetime=self.__connect_timeout) + except (resolver.NoAnswer, resolver.NXDOMAIN): + # No TXT records + return None + except Exception as exc: + raise ConfigurationError(str(exc)) from exc + if len(results) > 1: + raise ConfigurationError("Only one TXT record is supported") + return (b"&".join([b"".join(res.strings) for res in results])).decode("utf-8") # type: ignore[attr-defined] + + async def _resolve_uri(self, encapsulate_errors: bool) -> resolver.Answer: + try: + results = await _resolve( + "_" + self.__srv + "._tcp." + self.__fqdn, "SRV", lifetime=self.__connect_timeout + ) + except Exception as exc: + if not encapsulate_errors: + # Raise the original error. + raise + # Else, raise all errors as ConfigurationError. 
+ raise ConfigurationError(str(exc)) from exc + return results + + async def _get_srv_response_and_hosts( + self, encapsulate_errors: bool + ) -> tuple[resolver.Answer, list[tuple[str, Any]]]: + results = await self._resolve_uri(encapsulate_errors) + + # Construct address tuples + nodes = [ + (maybe_decode(res.target.to_text(omit_final_dot=True)), res.port) # type: ignore[attr-defined] + for res in results + ] + + # Validate hosts + for node in nodes: + srv_host = node[0].lower() + if self.__fqdn == srv_host and self.nparts < 3: + raise ConfigurationError( + "Invalid SRV host: return address is identical to SRV hostname" + ) + try: + nlist = srv_host.split(".")[1:][-self.__slen :] + except Exception as exc: + raise ConfigurationError(f"Invalid SRV host: {node[0]}") from exc + if self.__plist != nlist: + raise ConfigurationError(f"Invalid SRV host: {node[0]}") + if self.__srv_max_hosts: + nodes = random.sample(nodes, min(self.__srv_max_hosts, len(nodes))) + return results, nodes + + async def get_hosts(self) -> list[tuple[str, Any]]: + _, nodes = await self._get_srv_response_and_hosts(True) + return nodes + + async def get_hosts_and_min_ttl(self) -> tuple[list[tuple[str, Any]], int]: + results, nodes = await self._get_srv_response_and_hosts(False) + rrset = results.rrset + ttl = rrset.ttl if rrset else 0 + return nodes, ttl diff --git a/pymongo/asynchronous/topology.py b/pymongo/asynchronous/topology.py index 6d67710a7e..283aabc690 100644 --- a/pymongo/asynchronous/topology.py +++ b/pymongo/asynchronous/topology.py @@ -4,7 +4,7 @@ # may not use this file except in compliance with the License. You # may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -16,6 +16,7 @@ from __future__ import annotations +import asyncio import logging import os import queue @@ -29,7 +30,7 @@ from pymongo import _csot, common, helpers_shared, periodic_executor from pymongo.asynchronous.client_session import _ServerSession, _ServerSessionPool -from pymongo.asynchronous.monitor import SrvMonitor +from pymongo.asynchronous.monitor import MonitorBase, SrvMonitor from pymongo.asynchronous.pool import Pool from pymongo.asynchronous.server import Server from pymongo.errors import ( @@ -40,6 +41,7 @@ OperationFailure, PyMongoError, ServerSelectionTimeoutError, + WaitQueueTimeoutError, WriteError, ) from pymongo.hello import Hello @@ -82,7 +84,7 @@ _pymongo_dir = str(Path(__file__).parent) -def process_events_queue(queue_ref: weakref.ReferenceType[queue.Queue]) -> bool: +def process_events_queue(queue_ref: weakref.ReferenceType[queue.Queue]) -> bool: # type: ignore[type-arg] q = queue_ref() if not q: return False # Cancel PeriodicExecutor. 
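
In process_events_queue() above, the periodic executor holds only a weak reference to the listener event queue: once the owning client is garbage collected, dereferencing returns None and the callback returns False, which cancels the executor. A runnable sketch of that contract (dispatch is reduced to a print; prompt collection after del assumes CPython's reference counting):

    import queue
    import weakref


    def process_events_queue(queue_ref: "weakref.ReferenceType[queue.Queue]") -> bool:
        q = queue_ref()
        if not q:
            return False  # Queue was garbage collected: cancel the executor.
        while True:
            try:
                event = q.get_nowait()
            except queue.Empty:
                break
            print("dispatching", event)  # Stand-in for listener dispatch.
        return True  # Keep the periodic executor running.


    events: "queue.Queue[str]" = queue.Queue()
    weak = weakref.ref(events)
    events.put("server_opened")
    assert process_events_queue(weak) is True
    del events  # CPython refcounting collects the queue immediately.
    assert process_events_queue(weak) is False
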
@@ -118,8 +120,8 @@ def __init__(self, topology_settings: TopologySettings): if _SDAM_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _SDAM_LOGGER, - topologyId=self._topology_id, message=_SDAMStatusMessage.START_TOPOLOGY, + topologyId=self._topology_id, ) if self._publish_tp: @@ -150,10 +152,10 @@ def __init__(self, topology_settings: TopologySettings): if _SDAM_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _SDAM_LOGGER, - topologyId=self._topology_id, - previousDescription=initial_td, - newDescription=self._description, message=_SDAMStatusMessage.TOPOLOGY_CHANGE, + topologyId=self._topology_id, + previousDescription=repr(initial_td), + newDescription=repr(self._description), ) for seed in topology_settings.seeds: @@ -163,10 +165,10 @@ def __init__(self, topology_settings: TopologySettings): if _SDAM_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _SDAM_LOGGER, + message=_SDAMStatusMessage.START_SERVER, topologyId=self._topology_id, serverHost=seed[0], serverPort=seed[1], - message=_SDAMStatusMessage.START_SERVER, ) # Store the seed list to help diagnose errors in _error_message(). @@ -184,7 +186,7 @@ def __init__(self, topology_settings: TopologySettings): if self._publish_server or self._publish_tp: assert self._events is not None - weak: weakref.ReferenceType[queue.Queue] + weak: weakref.ReferenceType[queue.Queue[Any]] async def target() -> bool: return process_events_queue(weak) @@ -207,6 +209,9 @@ async def target() -> bool: if self._settings.fqdn is not None and not self._settings.load_balanced: self._srv_monitor = SrvMonitor(self, self._settings) + # Stores all monitor tasks that need to be joined on close or server selection + self._monitor_tasks: list[MonitorBase] = [] + async def open(self) -> None: """Start monitoring, or restart after a fork. @@ -232,9 +237,7 @@ async def open(self) -> None: warnings.warn( # type: ignore[call-overload] # noqa: B028 "AsyncMongoClient opened before fork. May not be entirely fork-safe, " "proceed with caution. 
See PyMongo's documentation for details: " - "https://www.mongodb.com/docs/languages/" - "python/pymongo-driver/current/faq/" - "#is-pymongo-fork-safe-", + "https://dochub.mongodb.org/core/pymongo-fork-deadlock", **kwargs, ) async with self._lock: @@ -283,6 +286,10 @@ async def select_servers( else: server_timeout = server_selection_timeout + # Cleanup any completed monitor tasks safely + if not _IS_SYNC and self._monitor_tasks: + await self.cleanup_monitors() + async with self._lock: server_descriptions = await self._select_servers_loop( selector, server_timeout, operation, operation_id, address @@ -347,7 +354,7 @@ async def _select_servers_loop( operationId=operation_id, topologyDescription=self.description, clientId=self.description._topology_settings._topology_id, - remainingTimeMS=int(end_time - time.monotonic()), + remainingTimeMS=int(1000 * (end_time - time.monotonic())), ) logged_waiting = True @@ -493,7 +500,6 @@ async def _process_change( self._description = new_td await self._update_servers() - self._receive_cluster_time_no_lock(server_description.cluster_time) if self._publish_tp and not suppress_event: assert self._events is not None @@ -506,10 +512,10 @@ async def _process_change( if _SDAM_LOGGER.isEnabledFor(logging.DEBUG) and not suppress_event: _debug_log( _SDAM_LOGGER, - topologyId=self._topology_id, - previousDescription=td_old, - newDescription=self._description, message=_SDAMStatusMessage.TOPOLOGY_CHANGE, + topologyId=self._topology_id, + previousDescription=repr(td_old), + newDescription=repr(self._description), ) # Shutdown SRV polling for unsupported cluster types. @@ -520,12 +526,8 @@ async def _process_change( and self._description.topology_type not in SRV_POLLING_TOPOLOGIES ): await self._srv_monitor.close() - - # Clear the pool from a failed heartbeat. - if reset_pool: - server = self._servers.get(server_description.address) - if server: - await server.pool.reset(interrupt_connections=interrupt_connections) + if not _IS_SYNC: + self._monitor_tasks.append(self._srv_monitor) # Wake anything waiting in select_servers(). self._condition.notify_all() @@ -549,6 +551,11 @@ async def on_change( # that didn't include this server. if self._opened and self._description.has_server(server_description.address): await self._process_change(server_description, reset_pool, interrupt_connections) + # Clear the pool from a failed heartbeat, done outside the lock to avoid blocking on connection close. + if reset_pool: + server = self._servers.get(server_description.address) + if server: + await server.pool.reset(interrupt_connections=interrupt_connections) async def _process_srv_update(self, seedlist: list[tuple[str, Any]]) -> None: """Process a new seedlist on an opened topology. @@ -572,10 +579,10 @@ async def _process_srv_update(self, seedlist: list[tuple[str, Any]]) -> None: if _SDAM_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _SDAM_LOGGER, - topologyId=self._topology_id, - previousDescription=td_old, - newDescription=self._description, message=_SDAMStatusMessage.TOPOLOGY_CHANGE, + topologyId=self._topology_id, + previousDescription=repr(td_old), + newDescription=repr(self._description), ) async def on_srv_update(self, seedlist: list[tuple[str, Any]]) -> None: @@ -695,6 +702,8 @@ async def close(self) -> None: old_td = self._description for server in self._servers.values(): await server.close() + if not _IS_SYNC: + self._monitor_tasks.append(server._monitor) # Mark all servers Unknown. 
self._description = self._description.reset() @@ -705,6 +714,8 @@ async def close(self) -> None: # Stop SRV polling thread. if self._srv_monitor: await self._srv_monitor.close() + if not _IS_SYNC: + self._monitor_tasks.append(self._srv_monitor) self._opened = False self._closed = True @@ -734,13 +745,13 @@ async def close(self) -> None: if _SDAM_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _SDAM_LOGGER, - topologyId=self._topology_id, - previousDescription=old_td, - newDescription=self._description, message=_SDAMStatusMessage.TOPOLOGY_CHANGE, + topologyId=self._topology_id, + previousDescription=repr(old_td), + newDescription=repr(self._description), ) _debug_log( - _SDAM_LOGGER, topologyId=self._topology_id, message=_SDAMStatusMessage.STOP_TOPOLOGY + _SDAM_LOGGER, message=_SDAMStatusMessage.STOP_TOPOLOGY, topologyId=self._topology_id ) if self._publish_server or self._publish_tp: @@ -879,6 +890,8 @@ async def _handle_error(self, address: _Address, err_ctx: _ErrorContext) -> None # Clear the pool. await server.reset(service_id) elif isinstance(error, ConnectionFailure): + if isinstance(error, WaitQueueTimeoutError): + return # "Client MUST replace the server's description with type Unknown # ... MUST NOT request an immediate check of the server." if not self._settings.load_balanced: @@ -944,6 +957,8 @@ async def _update_servers(self) -> None: for address, server in list(self._servers.items()): if not self._description.has_server(address): await server.close() + if not _IS_SYNC: + self._monitor_tasks.append(server._monitor) self._servers.pop(address) def _create_pool_for_server(self, address: _Address) -> Pool: @@ -970,7 +985,7 @@ def _create_pool_for_monitor(self, address: _Address) -> Pool: ) return self._settings.pool_class( - address, monitor_pool_options, handshake=False, client_id=self._topology_id + address, monitor_pool_options, is_sdam=True, client_id=self._topology_id ) def _error_message(self, selector: Callable[[Selection], Selection]) -> str: @@ -1031,6 +1046,15 @@ def _error_message(self, selector: Callable[[Selection], Selection]) -> str: else: return ",".join(str(server.error) for server in servers if server.error) + async def cleanup_monitors(self) -> None: + tasks = [] + try: + while self._monitor_tasks: + tasks.append(self._monitor_tasks.pop()) + except IndexError: + pass + await asyncio.gather(*[t.join() for t in tasks], return_exceptions=True) # type: ignore[func-returns-value] + def __repr__(self) -> str: msg = "" if not self._opened: diff --git a/pymongo/asynchronous/uri_parser.py b/pymongo/asynchronous/uri_parser.py new file mode 100644 index 0000000000..055b04d75a --- /dev/null +++ b/pymongo/asynchronous/uri_parser.py @@ -0,0 +1,193 @@ +# Copyright 2011-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you +# may not use this file except in compliance with the License. You +# may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. See the License for the specific language governing +# permissions and limitations under the License. 
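
cleanup_monitors() above first drains the shared list, then joins every closed monitor in a single asyncio.gather(..., return_exceptions=True) call, so one failing join() cannot abort the rest of the cleanup. A runnable sketch under those assumptions; _SketchMonitor stands in for the real monitor classes:

    import asyncio


    class _SketchMonitor:
        def __init__(self, name: str) -> None:
            self.name = name

        async def join(self) -> None:
            # Stand-in for waiting on the monitor's background task.
            await asyncio.sleep(0)


    async def cleanup_monitors(monitor_tasks: "list[_SketchMonitor]") -> None:
        tasks = []
        while monitor_tasks:
            # Drain the shared list first so nothing is awaited while popping.
            tasks.append(monitor_tasks.pop())
        # return_exceptions=True keeps cleanup going even if a join() raises.
        await asyncio.gather(*[t.join() for t in tasks], return_exceptions=True)


    asyncio.run(cleanup_monitors([_SketchMonitor("srv"), _SketchMonitor("localhost:27017")]))
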
+
+
+"""Tools to parse and validate a MongoDB URI."""
+from __future__ import annotations
+
+from typing import Any, Optional
+from urllib.parse import unquote_plus
+
+from pymongo.asynchronous.srv_resolver import _SrvResolver
+from pymongo.common import SRV_SERVICE_NAME, _CaseInsensitiveDictionary
+from pymongo.errors import ConfigurationError, InvalidURI
+from pymongo.uri_parser_shared import (
+    _ALLOWED_TXT_OPTS,
+    DEFAULT_PORT,
+    SCHEME,
+    SCHEME_LEN,
+    SRV_SCHEME_LEN,
+    _check_options,
+    _make_options_case_sensitive,
+    _validate_uri,
+    split_hosts,
+    split_options,
+)
+
+_IS_SYNC = False
+
+
+async def parse_uri(
+    uri: str,
+    default_port: Optional[int] = DEFAULT_PORT,
+    validate: bool = True,
+    warn: bool = False,
+    normalize: bool = True,
+    connect_timeout: Optional[float] = None,
+    srv_service_name: Optional[str] = None,
+    srv_max_hosts: Optional[int] = None,
+) -> dict[str, Any]:
+    """Parse and validate a MongoDB URI.
+
+    Returns a dict of the form::
+
+        {
+            'nodelist': <list of (host, port) tuples>,
+            'username': <username> or None,
+            'password': <password> or None,
+            'database': <database name> or None,
+            'collection': <collection name> or None,
+            'options': <dict of MongoDB URI options>,
+            'fqdn': <fqdn of the MongoDB+SRV URI> or None
+        }
+
+    If the URI scheme is "mongodb+srv://" DNS SRV and TXT lookups will be done
+    to build nodelist and options.
+
+    :param uri: The MongoDB URI to parse.
+    :param default_port: The port number to use when one wasn't specified
+        for a host in the URI.
+    :param validate: If ``True`` (the default), validate and
+        normalize all options. Default: ``True``.
+    :param warn: When validating, if ``True`` then will warn
+        the user and then ignore any invalid options or values. If ``False``,
+        validation will error when options are unsupported or values are
+        invalid. Default: ``False``.
+    :param normalize: If ``True``, convert names of URI options
+        to their internally-used names. Default: ``True``.
+    :param connect_timeout: The maximum time in milliseconds to
+        wait for a response from the DNS server.
+    :param srv_service_name: A custom SRV service name.
+
+    .. versionchanged:: 4.14
+        ``options`` is now type ``dict`` as opposed to a ``_CaseInsensitiveDictionary``.
+
+    .. versionchanged:: 4.6
+        The delimiting slash (``/``) between hosts and connection options is now optional.
+        For example, "mongodb://example.com?tls=true" is now a valid URI.
+
+    .. versionchanged:: 4.0
+        To better follow RFC 3986, unquoted percent signs ("%") are no longer
+        supported.
+
+    .. versionchanged:: 3.9
+        Added the ``normalize`` parameter.
+
+    .. versionchanged:: 3.6
+        Added support for mongodb+srv:// URIs.
+
+    .. versionchanged:: 3.5
+        Return the original value of the ``readPreference`` MongoDB URI option
+        instead of the validated read preference mode.
+
+    .. versionchanged:: 3.1
+        ``warn`` added so invalid options can be ignored.
+ """ + result = _validate_uri(uri, default_port, validate, warn, normalize, srv_max_hosts) + result.update( + await _parse_srv( + uri, + default_port, + validate, + warn, + normalize, + connect_timeout, + srv_service_name, + srv_max_hosts, + ) + ) + result["options"] = _make_options_case_sensitive(result["options"]) + return result + + +async def _parse_srv( + uri: str, + default_port: Optional[int] = DEFAULT_PORT, + validate: bool = True, + warn: bool = False, + normalize: bool = True, + connect_timeout: Optional[float] = None, + srv_service_name: Optional[str] = None, + srv_max_hosts: Optional[int] = None, +) -> dict[str, Any]: + if uri.startswith(SCHEME): + is_srv = False + scheme_free = uri[SCHEME_LEN:] + else: + is_srv = True + scheme_free = uri[SRV_SCHEME_LEN:] + + options = _CaseInsensitiveDictionary() + + host_plus_db_part, _, opts = scheme_free.partition("?") + if "/" in host_plus_db_part: + host_part, _, _ = host_plus_db_part.partition("/") + else: + host_part = host_plus_db_part + + if opts: + options.update(split_options(opts, validate, warn, normalize)) + if srv_service_name is None: + srv_service_name = options.get("srvServiceName", SRV_SERVICE_NAME) + if "@" in host_part: + _, _, hosts = host_part.rpartition("@") + else: + hosts = host_part + + hosts = unquote_plus(hosts) + srv_max_hosts = srv_max_hosts or options.get("srvMaxHosts") + if is_srv: + nodes = split_hosts(hosts, default_port=None) + fqdn, port = nodes[0] + + # Use the connection timeout. connectTimeoutMS passed as a keyword + # argument overrides the same option passed in the connection string. + connect_timeout = connect_timeout or options.get("connectTimeoutMS") + dns_resolver = _SrvResolver(fqdn, connect_timeout, srv_service_name, srv_max_hosts) + nodes = await dns_resolver.get_hosts() + dns_options = await dns_resolver.get_options() + if dns_options: + parsed_dns_options = split_options(dns_options, validate, warn, normalize) + if set(parsed_dns_options) - _ALLOWED_TXT_OPTS: + raise ConfigurationError( + "Only authSource, replicaSet, and loadBalanced are supported from DNS" + ) + for opt, val in parsed_dns_options.items(): + if opt not in options: + options[opt] = val + if options.get("loadBalanced") and srv_max_hosts: + raise InvalidURI("You cannot specify loadBalanced with srvMaxHosts") + if options.get("replicaSet") and srv_max_hosts: + raise InvalidURI("You cannot specify replicaSet with srvMaxHosts") + if "tls" not in options and "ssl" not in options: + options["tls"] = True if validate else "true" + else: + nodes = split_hosts(hosts, default_port=default_port) + + _check_options(nodes, options) + + return { + "nodelist": nodes, + "options": options, + } diff --git a/pymongo/auth.py b/pymongo/auth.py index a65113841d..a36f3f4233 100644 --- a/pymongo/auth.py +++ b/pymongo/auth.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/pymongo/auth_oidc.py b/pymongo/auth_oidc.py index 4ac266de5f..61764b8111 100644 --- a/pymongo/auth_oidc.py +++ b/pymongo/auth_oidc.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/pymongo/auth_oidc_shared.py b/pymongo/auth_oidc_shared.py index 9e0acaf6c8..d33397f52d 100644 --- a/pymongo/auth_oidc_shared.py +++ b/pymongo/auth_oidc_shared.py @@ -4,7 +4,7 @@ # may not use this file except in compliance with the License. You # may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/pymongo/auth_shared.py b/pymongo/auth_shared.py index 9534bd74ad..5a9a2b6732 100644 --- a/pymongo/auth_shared.py +++ b/pymongo/auth_shared.py @@ -4,7 +4,7 @@ # may not use this file except in compliance with the License. You # may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -107,7 +107,7 @@ def _build_credentials_tuple( ) -> MongoCredential: """Build and return a mechanism specific credentials tuple.""" if mech not in ("MONGODB-X509", "MONGODB-AWS", "MONGODB-OIDC") and user is None: - raise ConfigurationError(f"{mech} requires a username.") + raise ConfigurationError(f"{mech} requires a username") if mech == "GSSAPI": if source is not None and source != "$external": raise ValueError("authentication source must be $external or None for GSSAPI") @@ -219,7 +219,7 @@ def _build_credentials_tuple( else: source_database = source or database or "admin" if passwd is None: - raise ConfigurationError("A password is required.") + raise ConfigurationError("A password is required") return MongoCredential(mech, source_database, user, passwd, None, _Cache()) diff --git a/pymongo/bulk_shared.py b/pymongo/bulk_shared.py index 7aa6340d55..9276419d8a 100644 --- a/pymongo/bulk_shared.py +++ b/pymongo/bulk_shared.py @@ -4,7 +4,7 @@ # may not use this file except in compliance with the License. You # may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/pymongo/change_stream.py b/pymongo/change_stream.py index b96a1750cf..f9abddec44 100644 --- a/pymongo/change_stream.py +++ b/pymongo/change_stream.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/pymongo/client_options.py b/pymongo/client_options.py index 9b9b88a736..8b4eea7e65 100644 --- a/pymongo/client_options.py +++ b/pymongo/client_options.py @@ -4,7 +4,7 @@ # may not use this file except in compliance with the License. 
You # may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -84,7 +84,9 @@ def _parse_read_concern(options: Mapping[str, Any]) -> ReadConcern: return ReadConcern(concern) -def _parse_ssl_options(options: Mapping[str, Any]) -> tuple[Optional[SSLContext], bool]: +def _parse_ssl_options( + options: Mapping[str, Any], is_sync: bool +) -> tuple[Optional[SSLContext], bool]: """Parse ssl options.""" use_tls = options.get("tls") if use_tls is not None: @@ -138,6 +140,7 @@ def _parse_ssl_options(options: Mapping[str, Any]) -> tuple[Optional[SSLContext] allow_invalid_certificates, allow_invalid_hostnames, disable_ocsp_endpoint_check, + is_sync, ) return ctx, allow_invalid_hostnames return None, allow_invalid_hostnames @@ -167,7 +170,7 @@ def _parse_pool_options( compression_settings = CompressionSettings( options.get("compressors", []), options.get("zlibcompressionlevel", -1) ) - ssl_context, tls_allow_invalid_hostnames = _parse_ssl_options(options) + ssl_context, tls_allow_invalid_hostnames = _parse_ssl_options(options, is_sync) load_balanced = options.get("loadbalanced") max_connecting = options.get("maxconnecting", common.MAX_CONNECTING) return PoolOptions( @@ -244,7 +247,7 @@ def connect(self) -> Optional[bool]: return self.__connect @property - def codec_options(self) -> CodecOptions: + def codec_options(self) -> CodecOptions[Any]: """A :class:`~bson.codec_options.CodecOptions` instance.""" return self.__codec_options diff --git a/pymongo/client_session.py b/pymongo/client_session.py index 1a3af44e12..db72b0b2e1 100644 --- a/pymongo/client_session.py +++ b/pymongo/client_session.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/pymongo/collation.py b/pymongo/collation.py index 9adcb2e408..8a1eca7aff 100644 --- a/pymongo/collation.py +++ b/pymongo/collation.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -223,4 +223,4 @@ def validate_collation_or_none( return value.document if isinstance(value, dict): return value - raise TypeError("collation must be a dict, an instance of collation.Collation, or None.") + raise TypeError("collation must be a dict, an instance of collation.Collation, or None") diff --git a/pymongo/collection.py b/pymongo/collection.py index f726ed0376..16063425a7 100644 --- a/pymongo/collection.py +++ b/pymongo/collection.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/pymongo/common.py b/pymongo/common.py index 5661de011c..e23adac426 100644 --- a/pymongo/common.py +++ b/pymongo/common.py @@ -4,7 +4,7 @@ # may not use this file except in compliance with the License. You # may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -20,6 +20,7 @@ import warnings from collections import OrderedDict, abc from difflib import get_close_matches +from importlib.metadata import requires, version from typing import ( TYPE_CHECKING, Any, @@ -56,18 +57,18 @@ from pymongo.typings import _AgnosticClientSession -ORDERED_TYPES: Sequence[Type] = (SON, OrderedDict) +ORDERED_TYPES: Sequence[Type[Any]] = (SON, OrderedDict) # Defaults until we connect to a server and get updated limits. MAX_BSON_SIZE = 16 * (1024**2) -MAX_MESSAGE_SIZE: int = 2 * MAX_BSON_SIZE +MAX_MESSAGE_SIZE = 48 * 1000 * 1000 MIN_WIRE_VERSION = 0 MAX_WIRE_VERSION = 0 -MAX_WRITE_BATCH_SIZE = 1000 +MAX_WRITE_BATCH_SIZE = 100000 # What this version of PyMongo supports. -MIN_SUPPORTED_SERVER_VERSION = "4.0" -MIN_SUPPORTED_WIRE_VERSION = 7 +MIN_SUPPORTED_SERVER_VERSION = "4.2" +MIN_SUPPORTED_WIRE_VERSION = 8 # MongoDB 8.0 MAX_SUPPORTED_WIRE_VERSION = 25 @@ -160,13 +161,13 @@ def clean_node(node: str) -> tuple[str, int]: host, port = partition_node(node) # Normalize hostname to lowercase, since DNS is case-insensitive: - # http://tools.ietf.org/html/rfc4343 + # https://tools.ietf.org/html/rfc4343 # This prevents useless rediscovery if "foo.com" is in the seed list but # "FOO.com" is in the hello response. return host.lower(), port -def raise_config_error(key: str, suggestions: Optional[list] = None) -> NoReturn: +def raise_config_error(key: str, suggestions: Optional[list[str]] = None) -> NoReturn: """Raise ConfigurationError with the given key name.""" msg = f"Unknown option: {key}." 
     if suggestions:
@@ -202,7 +203,7 @@ def validate_integer(option: str, value: Any) -> int:
             return int(value)
         except ValueError:
             raise ValueError(f"The value of {option} must be an integer") from None
-    raise TypeError(f"Wrong type for {option}, value must be an integer")
+    raise TypeError(f"Wrong type for {option}, value must be an integer, not {type(value)}")
 
 
 def validate_positive_integer(option: str, value: Any) -> int:
@@ -250,7 +251,7 @@ def validate_string(option: str, value: Any) -> str:
     """Validates that 'value' is an instance of `str`."""
     if isinstance(value, str):
         return value
-    raise TypeError(f"Wrong type for {option}, value must be an instance of str")
+    raise TypeError(f"Wrong type for {option}, value must be an instance of str, not {type(value)}")
 
 
 def validate_string_or_none(option: str, value: Any) -> Optional[str]:
@@ -269,7 +270,9 @@ def validate_int_or_basestring(option: str, value: Any) -> Union[int, str]:
             return int(value)
         except ValueError:
             return value
-    raise TypeError(f"Wrong type for {option}, value must be an integer or a string")
+    raise TypeError(
+        f"Wrong type for {option}, value must be an integer or a string, not {type(value)}"
+    )
 
 
 def validate_non_negative_int_or_basestring(option: Any, value: Any) -> Union[int, str]:
@@ -282,7 +285,9 @@ def validate_non_negative_int_or_basestring(option: Any, value: Any) -> Union[in
             except ValueError:
                 return value
         return validate_non_negative_integer(option, val)
-    raise TypeError(f"Wrong type for {option}, value must be an non negative integer or a string")
+    raise TypeError(
+        f"Wrong type for {option}, value must be a non-negative integer or a string, not {type(value)}"
+    )
 
 
 def validate_positive_float(option: str, value: Any) -> float:
@@ -365,7 +370,7 @@ def validate_max_staleness(option: str, value: Any) -> int:
 def validate_read_preference(dummy: Any, value: Any) -> _ServerMode:
     """Validate a read preference."""
     if not isinstance(value, _ServerMode):
-        raise TypeError(f"{value!r} is not a read preference.")
+        raise TypeError(f"{value!r} is not a read preference")
     return value
 
 
@@ -407,7 +412,7 @@ def validate_read_preference_tags(name: str, value: Any) -> list[dict[str, str]]
     if not isinstance(value, list):
         value = [value]
 
-    tag_sets: list = []
+    tag_sets: list[dict[str, Any]] = []
    for tag_set in value:
         if tag_set == "":
             tag_sets.append({})
@@ -441,7 +446,9 @@ def validate_auth_mechanism_properties(option: str, value: Any) -> dict[str, Uni
     props: dict[str, Any] = {}
     if not isinstance(value, str):
         if not isinstance(value, dict):
-            raise ValueError("Auth mechanism properties must be given as a string or a dictionary")
+            raise ValueError(
+                f"Auth mechanism properties must be given as a string or a dictionary, not {type(value)}"
+            )
         for key, value in value.items():  # noqa: B020
             if isinstance(value, str):
                 props[key] = value
@@ -453,7 +460,7 @@ def validate_auth_mechanism_properties(option: str, value: Any) -> dict[str, Uni
             from pymongo.auth_oidc_shared import OIDCCallback
 
             if not isinstance(value, OIDCCallback):
-                raise ValueError("callback must be an OIDCCallback object")
+                raise ValueError(f"callback must be an OIDCCallback object, not {type(value)}")
             props[key] = value
         else:
             raise ValueError(f"Invalid type for auth mechanism property {key}, {type(value)}")
@@ -476,7 +483,7 @@ def validate_auth_mechanism_properties(option: str, value: Any) -> dict[str, Uni
             raise ValueError(
                 f"{key} is not a supported auth "
                 "mechanism property. Must be one of "
-                f"{tuple(_MECHANISM_PROPS)}."
+ f"{tuple(_MECHANISM_PROPS)}" ) if key == "CANONICALIZE_HOST_NAME": @@ -491,7 +498,7 @@ def validate_auth_mechanism_properties(option: str, value: Any) -> dict[str, Uni def validate_document_class( option: str, value: Any -) -> Union[Type[MutableMapping], Type[RawBSONDocument]]: +) -> Union[Type[MutableMapping[str, Any]], Type[RawBSONDocument]]: """Validate the document_class option.""" # issubclass can raise TypeError for generic aliases like SON[str, Any]. # In that case we can use the base class for the comparison. @@ -517,14 +524,14 @@ def validate_type_registry(option: Any, value: Any) -> Optional[TypeRegistry]: return value -def validate_list(option: str, value: Any) -> list: +def validate_list(option: str, value: Any) -> list[Any]: """Validates that 'value' is a list.""" if not isinstance(value, list): - raise TypeError(f"{option} must be a list") + raise TypeError(f"{option} must be a list, not {type(value)}") return value -def validate_list_or_none(option: Any, value: Any) -> Optional[list]: +def validate_list_or_none(option: Any, value: Any) -> Optional[list[Any]]: """Validates that 'value' is a list or None.""" if value is None: return value @@ -587,16 +594,16 @@ def validate_server_api_or_none(option: Any, value: Any) -> Optional[ServerApi]: if value is None: return value if not isinstance(value, ServerApi): - raise TypeError(f"{option} must be an instance of ServerApi") + raise TypeError(f"{option} must be an instance of ServerApi, not {type(value)}") return value -def validate_is_callable_or_none(option: Any, value: Any) -> Optional[Callable]: +def validate_is_callable_or_none(option: Any, value: Any) -> Optional[Callable[..., Any]]: """Validates that 'value' is a callable.""" if value is None: return value if not callable(value): - raise ValueError(f"{option} must be a callable") + raise ValueError(f"{option} must be a callable, not {type(value)}") return value @@ -651,7 +658,7 @@ def validate_auto_encryption_opts_or_none(option: Any, value: Any) -> Optional[A from pymongo.encryption_options import AutoEncryptionOpts if not isinstance(value, AutoEncryptionOpts): - raise TypeError(f"{option} must be an instance of AutoEncryptionOpts") + raise TypeError(f"{option} must be an instance of AutoEncryptionOpts, not {type(value)}") return value @@ -668,7 +675,9 @@ def validate_datetime_conversion(option: Any, value: Any) -> Optional[DatetimeCo elif isinstance(value, int): return DatetimeConversion(value) - raise TypeError(f"{option} must be a str or int representing DatetimeConversion") + raise TypeError( + f"{option} must be a str or int representing DatetimeConversion, not {type(value)}" + ) def validate_server_monitoring_mode(option: str, value: str) -> str: @@ -821,7 +830,7 @@ def validate_auth_option(option: str, value: Any) -> tuple[str, Any]: def _get_validator( key: str, validators: dict[str, Callable[[Any, Any], Any]], normed_key: Optional[str] = None -) -> Callable: +) -> Callable[[Any, Any], Any]: normed_key = normed_key or key try: return validators[normed_key] @@ -909,7 +918,7 @@ class BaseObject: def __init__( self, - codec_options: CodecOptions, + codec_options: CodecOptions[Any], read_preference: _ServerMode, write_concern: WriteConcern, read_concern: ReadConcern, @@ -928,16 +937,18 @@ def __init__( if not isinstance(write_concern, WriteConcern): raise TypeError( - "write_concern must be an instance of pymongo.write_concern.WriteConcern" + f"write_concern must be an instance of pymongo.write_concern.WriteConcern, not {type(write_concern)}" ) self._write_concern = 
write_concern if not isinstance(read_concern, ReadConcern): - raise TypeError("read_concern must be an instance of pymongo.read_concern.ReadConcern") + raise TypeError( + f"read_concern must be an instance of pymongo.read_concern.ReadConcern, not {type(read_concern)}" + ) self._read_concern = read_concern @property - def codec_options(self) -> CodecOptions: + def codec_options(self) -> CodecOptions[Any]: """Read only access to the :class:`~bson.codec_options.CodecOptions` of this instance. """ @@ -1082,3 +1093,91 @@ def has_c() -> bool: return True except ImportError: return False + + +class Version(tuple[int, ...]): + """A class that can be used to compare version strings.""" + + def __new__(cls, *version: int) -> Version: + padded_version = cls._padded(version, 4) + return super().__new__(cls, tuple(padded_version)) + + @classmethod + def _padded(cls, iter: Any, length: int, padding: int = 0) -> list[int]: + as_list = list(iter) + if len(as_list) < length: + for _ in range(length - len(as_list)): + as_list.append(padding) + return as_list + + @classmethod + def from_string(cls, version_string: str) -> Version: + mod = 0 + bump_patch_level = False + if version_string.endswith("+"): + version_string = version_string[0:-1] + mod = 1 + elif version_string.endswith("-pre-"): + version_string = version_string[0:-5] + mod = -1 + elif version_string.endswith("-"): + version_string = version_string[0:-1] + mod = -1 + # Deal with .devX substrings + if ".dev" in version_string: + version_string = version_string[0 : version_string.find(".dev")] + mod = -1 + # Deal with '-rcX' substrings + if "-rc" in version_string: + version_string = version_string[0 : version_string.find("-rc")] + mod = -1 + # Deal with git describe generated substrings + elif "-" in version_string: + version_string = version_string[0 : version_string.find("-")] + mod = -1 + bump_patch_level = True + + version = [int(part) for part in version_string.split(".")] + version = cls._padded(version, 3) + # Make from_string and from_version_array agree. 
For example: + # MongoDB Enterprise > db.runCommand('buildInfo').versionArray + # [ 3, 2, 1, -100 ] + # MongoDB Enterprise > db.runCommand('buildInfo').version + # 3.2.0-97-g1ef94fe + if bump_patch_level: + version[-1] += 1 + version.append(mod) + + return Version(*version) + + @classmethod + def from_version_array(cls, version_array: Any) -> Version: + version = list(version_array) + if version[-1] < 0: + version[-1] = -1 + version = cls._padded(version, 3) + return Version(*version) + + def at_least(self, *other_version: Any) -> bool: + return self >= Version(*other_version) + + def __str__(self) -> str: + return ".".join(map(str, self)) + + +def check_for_min_version(package_name: str) -> tuple[str, str, bool]: + """Test whether an installed package is of the desired version.""" + package_version_str = version(package_name) + package_version = Version.from_string(package_version_str) + # Dependency is expected to be in one of the forms: + # "pymongocrypt<2.0.0,>=1.13.0; extra == 'encryption'" + # 'dnspython<3.0.0,>=1.16.0' + # + requirements = requires("pymongo") + assert requirements is not None + requirement = [i for i in requirements if i.startswith(package_name)][0] # noqa: RUF015 + if ";" in requirement: + requirement = requirement.split(";")[0] + required_version = requirement[requirement.find(">=") + 2 :] + is_valid = package_version >= Version.from_string(required_version) + return package_version_str, required_version, is_valid diff --git a/pymongo/compression_support.py b/pymongo/compression_support.py index c71e4bddcf..64ffe052ec 100644 --- a/pymongo/compression_support.py +++ b/pymongo/compression_support.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -26,7 +26,7 @@ def _have_snappy() -> bool: try: - import snappy # type:ignore[import-not-found] # noqa: F401 + import snappy # type:ignore[import-untyped] # noqa: F401 return True except ImportError: @@ -91,7 +91,7 @@ def validate_zlib_compression_level(option: str, value: Any) -> int: try: level = int(value) except Exception: - raise TypeError(f"{option} must be an integer, not {value!r}.") from None + raise TypeError(f"{option} must be an integer, not {value!r}") from None if level < -1 or level > 9: raise ValueError("%s must be between -1 and 9, not %d." % (option, level)) return level @@ -152,7 +152,7 @@ def compress(data: bytes) -> bytes: return zstandard.ZstdCompressor().compress(data) -def decompress(data: bytes, compressor_id: int) -> bytes: +def decompress(data: bytes | memoryview, compressor_id: int) -> bytes: if compressor_id == SnappyContext.compressor_id: # python-snappy doesn't support the buffer interface. # https://github.com/andrix/python-snappy/issues/65 diff --git a/pymongo/daemon.py b/pymongo/daemon.py index b40384df13..c0a01db16d 100644 --- a/pymongo/daemon.py +++ b/pymongo/daemon.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -16,7 +16,7 @@ PyMongo only attempts to spawn the mongocryptd daemon process when automatic client-side field level encryption is enabled. See -:ref:`automatic-client-side-encryption` for more info. +`Client-side Field Level Encryption `_ for more info. """ from __future__ import annotations diff --git a/pymongo/database.py b/pymongo/database.py index bbd05702dc..f85b312f91 100644 --- a/pymongo/database.py +++ b/pymongo/database.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/pymongo/database_shared.py b/pymongo/database_shared.py index 2d4e37feef..d6563a4b3d 100644 --- a/pymongo/database_shared.py +++ b/pymongo/database_shared.py @@ -4,7 +4,7 @@ # may not use this file except in compliance with the License. You # may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/pymongo/driver_info.py b/pymongo/driver_info.py index 5ca3f952cd..f24321d973 100644 --- a/pymongo/driver_info.py +++ b/pymongo/driver_info.py @@ -4,7 +4,7 @@ # may not use this file except in compliance with the License. You # may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -39,7 +39,7 @@ def __new__( for key, value in self._asdict().items(): if value is not None and not isinstance(value, str): raise TypeError( - f"Wrong type for DriverInfo {key} option, value must be an instance of str" + f"Wrong type for DriverInfo {key} option, value must be an instance of str, not {type(value)}" ) return self diff --git a/pymongo/encryption.py b/pymongo/encryption.py index 5bc2a75909..71c1d4b723 100644 --- a/pymongo/encryption.py +++ b/pymongo/encryption.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/pymongo/encryption_options.py b/pymongo/encryption_options.py index ee749e7ac1..b2037617b0 100644 --- a/pymongo/encryption_options.py +++ b/pymongo/encryption_options.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -18,10 +18,12 @@ """ from __future__ import annotations -from typing import TYPE_CHECKING, Any, Mapping, Optional +from typing import TYPE_CHECKING, Any, Mapping, Optional, TypedDict + +from pymongo.uri_parser_shared import _parse_kms_tls_options try: - import pymongocrypt # type:ignore[import-untyped] # noqa: F401 + import pymongocrypt # type:ignore[import-untyped] # noqa: F401 # Check for pymongocrypt>=1.10. from pymongocrypt import synchronous as _ # noqa: F401 @@ -30,12 +32,24 @@ except ImportError: _HAVE_PYMONGOCRYPT = False from bson import int64 -from pymongo.common import validate_is_mapping +from pymongo.common import check_for_min_version, validate_is_mapping from pymongo.errors import ConfigurationError -from pymongo.uri_parser import _parse_kms_tls_options if TYPE_CHECKING: - from pymongo.typings import _AgnosticMongoClient, _DocumentTypeArg + from pymongo.pyopenssl_context import SSLContext + from pymongo.typings import _AgnosticMongoClient + + +def check_min_pymongocrypt() -> None: + """Raise an appropriate error if the min pymongocrypt is not installed.""" + pymongocrypt_version, required_version, is_valid = check_for_min_version("pymongocrypt") + if not is_valid: + raise ConfigurationError( + f"client side encryption requires pymongocrypt>={required_version}, " + f"found version {pymongocrypt_version}. " + "Install a compatible version with: " + "python -m pip install 'pymongo[encryption]'" + ) class AutoEncryptionOpts: @@ -45,7 +59,7 @@ def __init__( self, kms_providers: Mapping[str, Any], key_vault_namespace: str, - key_vault_client: Optional[_AgnosticMongoClient[_DocumentTypeArg]] = None, + key_vault_client: Optional[_AgnosticMongoClient] = None, schema_map: Optional[Mapping[str, Any]] = None, bypass_auto_encryption: bool = False, mongocryptd_uri: str = "mongodb://localhost:27020", @@ -57,6 +71,7 @@ def __init__( crypt_shared_lib_required: bool = False, bypass_query_analysis: bool = False, encrypted_fields_map: Optional[Mapping[str, Any]] = None, + key_expiration_ms: Optional[int] = None, ) -> None: """Options to configure automatic client-side field level encryption. @@ -72,7 +87,7 @@ def __init__( encryption and explicit decryption is also supported for all users with the :class:`~pymongo.asynchronous.encryption.AsyncClientEncryption` and :class:`~pymongo.encryption.ClientEncryption` classes. - See :ref:`automatic-client-side-encryption` for an example. + See `client-side field level encryption `_ for an example. :param kms_providers: Map of KMS provider options. The `kms_providers` map values differ by provider: @@ -101,7 +116,7 @@ def __init__( KMS providers may be specified with an optional name suffix separated by a colon, for example "kmip:name" or "aws:name". - Named KMS providers do not support :ref:`CSFLE on-demand credentials`. + Named KMS providers do not support `CSFLE on-demand credentials `_. Named KMS providers enables more than one of each KMS provider type to be configured. For example, to configure multiple local KMS providers:: @@ -191,9 +206,14 @@ def __init__( ] } } + :param key_expiration_ms: The cache expiration time for data encryption keys. + Defaults to ``None`` which defers to libmongocrypt's default which is currently 60000. + Set to 0 to disable key expiration. + .. 
versionchanged:: 4.12 + Added the `key_expiration_ms` parameter. .. versionchanged:: 4.2 - Added `encrypted_fields_map` `crypt_shared_lib_path`, `crypt_shared_lib_required`, + Added the `encrypted_fields_map`, `crypt_shared_lib_path`, `crypt_shared_lib_required`, and `bypass_query_analysis` parameters. .. versionchanged:: 4.0 @@ -207,10 +227,10 @@ def __init__( "install a compatible version with: " "python -m pip install 'pymongo[encryption]'" ) + check_min_pymongocrypt() if encrypted_fields_map: validate_is_mapping("encrypted_fields_map", encrypted_fields_map) self._encrypted_fields_map = encrypted_fields_map - self._bypass_query_analysis = bypass_query_analysis self._crypt_shared_lib_path = crypt_shared_lib_path self._crypt_shared_lib_required = crypt_shared_lib_required self._kms_providers = kms_providers @@ -225,12 +245,27 @@ def __init__( mongocryptd_spawn_args = ["--idleShutdownTimeoutSecs=60"] self._mongocryptd_spawn_args = mongocryptd_spawn_args if not isinstance(self._mongocryptd_spawn_args, list): - raise TypeError("mongocryptd_spawn_args must be a list") + raise TypeError( + f"mongocryptd_spawn_args must be a list, not {type(self._mongocryptd_spawn_args)}" + ) if not any("idleShutdownTimeoutSecs" in s for s in self._mongocryptd_spawn_args): self._mongocryptd_spawn_args.append("--idleShutdownTimeoutSecs=60") # Maps KMS provider name to a SSLContext. - self._kms_ssl_contexts = _parse_kms_tls_options(kms_tls_options) + self._kms_tls_options = kms_tls_options + self._sync_kms_ssl_contexts: Optional[dict[str, SSLContext]] = None + self._async_kms_ssl_contexts: Optional[dict[str, SSLContext]] = None self._bypass_query_analysis = bypass_query_analysis + self._key_expiration_ms = key_expiration_ms + + def _kms_ssl_contexts(self, is_sync: bool) -> dict[str, SSLContext]: + if is_sync: + if self._sync_kms_ssl_contexts is None: + self._sync_kms_ssl_contexts = _parse_kms_tls_options(self._kms_tls_options, True) + return self._sync_kms_ssl_contexts + else: + if self._async_kms_ssl_contexts is None: + self._async_kms_ssl_contexts = _parse_kms_tls_options(self._kms_tls_options, False) + return self._async_kms_ssl_contexts class RangeOpts: @@ -273,3 +308,85 @@ def document(self) -> dict[str, Any]: if v is not None: doc[k] = v return doc + + +class TextOpts: + """**BETA** Options to configure encrypted queries using the text algorithm. + + TextOpts is currently unstable API and subject to backwards breaking changes.""" + + def __init__( + self, + substring: Optional[SubstringOpts] = None, + prefix: Optional[PrefixOpts] = None, + suffix: Optional[SuffixOpts] = None, + case_sensitive: Optional[bool] = None, + diacritic_sensitive: Optional[bool] = None, + ) -> None: + """Options to configure encrypted queries using the text algorithm. + + :param substring: Further options to support substring queries. + :param prefix: Further options to support prefix queries. + :param suffix: Further options to support suffix queries. + :param case_sensitive: Whether text indexes for this field are case sensitive. + :param diacritic_sensitive: Whether text indexes for this field are diacritic sensitive. + + .. 
versionadded:: 4.15 + """ + self.substring = substring + self.prefix = prefix + self.suffix = suffix + self.case_sensitive = case_sensitive + self.diacritic_sensitive = diacritic_sensitive + + @property + def document(self) -> dict[str, Any]: + doc = {} + for k, v in [ + ("substring", self.substring), + ("prefix", self.prefix), + ("suffix", self.suffix), + ("caseSensitive", self.case_sensitive), + ("diacriticSensitive", self.diacritic_sensitive), + ]: + if v is not None: + doc[k] = v + return doc + + +class SubstringOpts(TypedDict): + """**BETA** Options for substring text queries. + + SubstringOpts is currently unstable API and subject to backwards breaking changes. + """ + + # strMaxLength is the maximum allowed length to insert. Inserting longer strings will error. + strMaxLength: int + # strMinQueryLength is the minimum allowed query length. Querying with a shorter string will error. + strMinQueryLength: int + # strMaxQueryLength is the maximum allowed query length. Querying with a longer string will error. + strMaxQueryLength: int + + +class PrefixOpts(TypedDict): + """**BETA** Options for prefix text queries. + + PrefixOpts is currently unstable API and subject to backwards breaking changes. + """ + + # strMinQueryLength is the minimum allowed query length. Querying with a shorter string will error. + strMinQueryLength: int + # strMaxQueryLength is the maximum allowed query length. Querying with a longer string will error. + strMaxQueryLength: int + + +class SuffixOpts(TypedDict): + """**BETA** Options for suffix text queries. + + SuffixOpts is currently unstable API and subject to backwards breaking changes. + """ + + # strMinQueryLength is the minimum allowed query length. Querying with a shorter string will error. + strMinQueryLength: int + # strMaxQueryLength is the maximum allowed query length. Querying with a longer string will error. + strMaxQueryLength: int diff --git a/pymongo/errors.py b/pymongo/errors.py index 2cd1081e3b..794b5a9398 100644 --- a/pymongo/errors.py +++ b/pymongo/errors.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/pymongo/event_loggers.py b/pymongo/event_loggers.py index 86b53c6376..80acaa10c0 100644 --- a/pymongo/event_loggers.py +++ b/pymongo/event_loggers.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/pymongo/hello.py b/pymongo/hello.py index 62bb799805..1eb40ed929 100644 --- a/pymongo/hello.py +++ b/pymongo/hello.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -133,7 +133,7 @@ def max_bson_size(self) -> int: @property def max_message_size(self) -> int: - return self._doc.get("maxMessageSizeBytes", 2 * self.max_bson_size) + return self._doc.get("maxMessageSizeBytes", common.MAX_MESSAGE_SIZE) @property def max_write_batch_size(self) -> int: diff --git a/pymongo/helpers_shared.py b/pymongo/helpers_shared.py index 83ea2ddf78..c3611df7c8 100644 --- a/pymongo/helpers_shared.py +++ b/pymongo/helpers_shared.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -47,12 +47,13 @@ if TYPE_CHECKING: from pymongo.cursor_shared import _Hint from pymongo.operations import _IndexList + from pymongo.pool_options import PoolOptions from pymongo.typings import _DocumentOut # From the SDAM spec, the "node is shutting down" codes. -_SHUTDOWN_CODES: frozenset = frozenset( +_SHUTDOWN_CODES: frozenset[int] = frozenset( [ 11600, # InterruptedAtShutdown 91, # ShutdownInProgress @@ -61,7 +62,7 @@ # From the SDAM spec, the "not primary" error codes are combined with the # "node is recovering" error codes (of which the "node is shutting down" # errors are a subset). -_NOT_PRIMARY_CODES: frozenset = ( +_NOT_PRIMARY_CODES: frozenset[int] = ( frozenset( [ 10058, # LegacyNotPrimary <=3.2 "not primary" error code @@ -75,7 +76,7 @@ | _SHUTDOWN_CODES ) # From the retryable writes spec. -_RETRYABLE_ERROR_CODES: frozenset = _NOT_PRIMARY_CODES | frozenset( +_RETRYABLE_ERROR_CODES: frozenset[int] = _NOT_PRIMARY_CODES | frozenset( [ 7, # HostNotFound 6, # HostUnreachable @@ -95,7 +96,7 @@ # Note - to avoid bugs from forgetting which of these is all lowercase and # which are camelCase, and at the same time avoid having to add a test for # every command, use all lowercase here and test against command_name.lower().
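# For example, an incoming "saslStart" command is matched against the "saslstart" entry below after lower-casing.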
-_SENSITIVE_COMMANDS: set = { +_SENSITIVE_COMMANDS: set[str] = { "authenticate", "saslstart", "saslcontinue", @@ -108,6 +109,34 @@ } + +def _get_timeout_details(options: PoolOptions) -> dict[str, float]: + from pymongo import _csot + + details = {} + timeout = _csot.get_timeout() + socket_timeout = options.socket_timeout + connect_timeout = options.connect_timeout + if timeout: + details["timeoutMS"] = timeout * 1000 + if socket_timeout and not timeout: + details["socketTimeoutMS"] = socket_timeout * 1000 + if connect_timeout: + details["connectTimeoutMS"] = connect_timeout * 1000 + return details + + +def format_timeout_details(details: Optional[dict[str, float]]) -> str: + result = "" + if details: + result += " (configured timeouts:" + for timeout in ["socketTimeoutMS", "timeoutMS", "connectTimeoutMS"]: + if timeout in details: + result += f" {timeout}: {details[timeout]}ms," + result = result[:-1] + result += ")" + return result + + def _gen_index_name(keys: _IndexList) -> str: """Generate an index name from the set of fields it is over.""" return "_".join(["{}_{}".format(*item) for item in keys]) @@ -122,7 +151,7 @@ def _index_list( """ if direction is not None: if not isinstance(key_or_list, str): - raise TypeError("Expected a string and a direction") + raise TypeError(f"Expected a string and a direction, not {type(key_or_list)}") return [(key_or_list, direction)] else: if isinstance(key_or_list, str): @@ -132,7 +161,9 @@ def _index_list( elif isinstance(key_or_list, abc.Mapping): return list(key_or_list.items()) elif not isinstance(key_or_list, (list, tuple)): - raise TypeError("if no direction is specified, key_or_list must be an instance of list") + raise TypeError( + f"if no direction is specified, key_or_list must be an instance of list, not {type(key_or_list)}" + ) values: list[tuple[str, int]] = [] for item in key_or_list: if isinstance(item, str): @@ -172,11 +203,12 @@ def _index_document(index_list: _IndexList) -> dict[str, Any]: def _validate_index_key_pair(key: Any, value: Any) -> None: if not isinstance(key, str): - raise TypeError("first item in each key pair must be an instance of str") + raise TypeError(f"first item in each key pair must be an instance of str, not {type(key)}") if not isinstance(value, (str, int, abc.Mapping)): raise TypeError( "second item in each key pair must be 1, -1, " - "'2d', or another valid MongoDB index specifier." + "'2d', or another valid MongoDB index specifier" + f", not {type(value)}" ) @@ -185,6 +217,7 @@ def _check_command_response( max_wire_version: Optional[int], allowable_errors: Optional[Container[Union[int, str]]] = None, parse_write_concern_error: bool = False, + pool_opts: Optional[PoolOptions] = None, ) -> None: """Check the response to a command for errors.""" if "ok" not in response: @@ -240,6 +273,10 @@ if code in (11000, 11001, 12582): raise DuplicateKeyError(errmsg, code, response, max_wire_version) elif code == 50: + # Append timeout details to MaxTimeMSExpired responses. + if pool_opts: + timeout_details = _get_timeout_details(pool_opts) + errmsg += format_timeout_details(timeout_details) raise ExecutionTimeout(errmsg, code, response, max_wire_version) elif code == 43: raise CursorNotFound(errmsg, code, response, max_wire_version) diff --git a/pymongo/lock.py b/pymongo/lock.py index 6bf7138017..ad990fce3f 100644 --- a/pymongo/lock.py +++ b/pymongo/lock.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License.
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/pymongo/logger.py b/pymongo/logger.py index 2ff35328b4..1b3fe43b86 100644 --- a/pymongo/logger.py +++ b/pymongo/logger.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -96,6 +96,14 @@ class _SDAMStatusMessage(str, enum.Enum): } +def _log_client_error() -> None: + # This is called from a daemon thread so check for None to account for interpreter shutdown. + logger = _CLIENT_LOGGER + if logger: + # logger.exception includes the full traceback. + logger.exception("MongoClient background task encountered an error:") + + def _debug_log(logger: logging.Logger, **fields: Any) -> None: logger.debug(LogMessage(**fields)) diff --git a/pymongo/max_staleness_selectors.py b/pymongo/max_staleness_selectors.py index 89bfa65281..5f1e404720 100644 --- a/pymongo/max_staleness_selectors.py +++ b/pymongo/max_staleness_selectors.py @@ -4,7 +4,7 @@ # may not use this file except in compliance with the License. You # may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/pymongo/message.py b/pymongo/message.py index b6c00f06cb..0f3aaaba77 100644 --- a/pymongo/message.py +++ b/pymongo/message.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -24,7 +24,6 @@ import datetime import random import struct -from collections import ChainMap from io import BytesIO as _BytesIO from typing import ( TYPE_CHECKING, @@ -106,7 +105,7 @@ "insert": "documents", "update": "updates", "delete": "deletes", - "bulkWrite": "bulkWrite", + "bulkWrite": "ops", } _UNICODE_REPLACE_CODEC_OPTIONS: CodecOptions[Mapping[str, Any]] = CodecOptions( @@ -334,7 +333,7 @@ def _op_msg_no_header( command: Mapping[str, Any], identifier: str, docs: Optional[list[Mapping[str, Any]]], - opts: CodecOptions, + opts: CodecOptions[Any], ) -> tuple[bytes, int, int]: """Get a OP_MSG message. 
@@ -366,7 +365,7 @@ def _op_msg_compressed( command: Mapping[str, Any], identifier: str, docs: Optional[list[Mapping[str, Any]]], - opts: CodecOptions, + opts: CodecOptions[Any], ctx: Union[SnappyContext, ZlibContext, ZstdContext], ) -> tuple[int, bytes, int, int]: """Internal OP_MSG message helper.""" @@ -380,7 +379,7 @@ def _op_msg_uncompressed( command: Mapping[str, Any], identifier: str, docs: Optional[list[Mapping[str, Any]]], - opts: CodecOptions, + opts: CodecOptions[Any], ) -> tuple[int, bytes, int, int]: """Internal compressed OP_MSG message helper.""" data, total_size, max_bson_size = _op_msg_no_header(flags, command, identifier, docs, opts) @@ -397,7 +396,7 @@ def _op_msg( command: MutableMapping[str, Any], dbname: str, read_preference: Optional[_ServerMode], - opts: CodecOptions, + opts: CodecOptions[Any], ctx: Union[SnappyContext, ZlibContext, ZstdContext, None] = None, ) -> tuple[int, bytes, int, int]: """Get a OP_MSG message.""" @@ -431,7 +430,7 @@ def _query_impl( num_to_return: int, query: Mapping[str, Any], field_selector: Optional[Mapping[str, Any]], - opts: CodecOptions, + opts: CodecOptions[Any], ) -> tuple[bytes, int]: """Get an OP_QUERY message.""" encoded = _dict_to_bson(query, False, opts) @@ -462,7 +461,7 @@ def _query_compressed( num_to_return: int, query: Mapping[str, Any], field_selector: Optional[Mapping[str, Any]], - opts: CodecOptions, + opts: CodecOptions[Any], ctx: Union[SnappyContext, ZlibContext, ZstdContext], ) -> tuple[int, bytes, int]: """Internal compressed query message helper.""" @@ -480,7 +479,7 @@ def _query_uncompressed( num_to_return: int, query: Mapping[str, Any], field_selector: Optional[Mapping[str, Any]], - opts: CodecOptions, + opts: CodecOptions[Any], ) -> tuple[int, bytes, int]: """Internal query message helper.""" op_query, max_bson_size = _query_impl( @@ -501,7 +500,7 @@ def _query( num_to_return: int, query: Mapping[str, Any], field_selector: Optional[Mapping[str, Any]], - opts: CodecOptions, + opts: CodecOptions[Any], ctx: Union[SnappyContext, ZlibContext, ZstdContext, None] = None, ) -> tuple[int, bytes, int]: """Get a **query** message.""" @@ -599,7 +598,7 @@ def __init__( listeners: _EventListeners, session: Optional[_AgnosticClientSession], op_type: int, - codec: CodecOptions, + codec: CodecOptions[Any], ): self.db_name = database_name self.conn = conn @@ -680,7 +679,7 @@ def __init__( listeners: _EventListeners, session: Optional[_AgnosticClientSession], op_type: int, - codec: CodecOptions, + codec: CodecOptions[Any], ): super().__init__( database_name, @@ -772,7 +771,7 @@ def _batched_op_msg_impl( command: Mapping[str, Any], docs: list[Mapping[str, Any]], ack: bool, - opts: CodecOptions, + opts: CodecOptions[Any], ctx: _BulkWriteContext, buf: _BytesIO, ) -> tuple[list[Mapping[str, Any]], int]: @@ -840,7 +839,7 @@ def _encode_batched_op_msg( command: Mapping[str, Any], docs: list[Mapping[str, Any]], ack: bool, - opts: CodecOptions, + opts: CodecOptions[Any], ctx: _BulkWriteContext, ) -> tuple[bytes, list[Mapping[str, Any]]]: """Encode the next batched insert, update, or delete operation @@ -861,7 +860,7 @@ def _batched_op_msg_compressed( command: Mapping[str, Any], docs: list[Mapping[str, Any]], ack: bool, - opts: CodecOptions, + opts: CodecOptions[Any], ctx: _BulkWriteContext, ) -> tuple[int, bytes, list[Mapping[str, Any]]]: """Create the next batched insert, update, or delete operation @@ -879,7 +878,7 @@ def _batched_op_msg( command: Mapping[str, Any], docs: list[Mapping[str, Any]], ack: bool, - opts: CodecOptions, + opts: 
CodecOptions[Any], ctx: _BulkWriteContext, ) -> tuple[int, bytes, list[Mapping[str, Any]]]: """OP_MSG implementation entry point.""" @@ -911,7 +910,7 @@ def _do_batched_op_msg( operation: int, command: MutableMapping[str, Any], docs: list[Mapping[str, Any]], - opts: CodecOptions, + opts: CodecOptions[Any], ctx: _BulkWriteContext, ) -> tuple[int, bytes, list[Mapping[str, Any]]]: """Create the next batched insert, update, or delete operation @@ -940,7 +939,7 @@ def __init__( operation_id: int, listeners: _EventListeners, session: Optional[_AgnosticClientSession], - codec: CodecOptions, + codec: CodecOptions[Any], ): super().__init__( database_name, @@ -1044,7 +1043,7 @@ def _client_batched_op_msg_impl( operations: list[tuple[str, Mapping[str, Any]]], namespaces: list[str], ack: bool, - opts: CodecOptions, + opts: CodecOptions[Any], ctx: _ClientBulkWriteContext, buf: _BytesIO, ) -> tuple[list[Mapping[str, Any]], list[Mapping[str, Any]], int]: @@ -1116,18 +1115,8 @@ def _check_doc_size_limits( # key and the index of its namespace within ns_info as its value. op_doc[op_type] = ns_info[namespace] # type: ignore[index] - # Since the data document itself is nested within the insert document - # it won't be automatically re-ordered by the BSON conversion. - # We use ChainMap here to make the _id field the first field instead. - doc_to_encode = op_doc - if real_op_type == "insert": - doc = op_doc["document"] - if not isinstance(doc, RawBSONDocument): - doc_to_encode = op_doc.copy() # type: ignore[attr-defined] # Shallow copy - doc_to_encode["document"] = ChainMap(doc, {"_id": doc["_id"]}) # type: ignore[index] - # Encode current operation doc and, if newly added, namespace doc. - op_doc_encoded = _dict_to_bson(doc_to_encode, False, opts) + op_doc_encoded = _dict_to_bson(op_doc, False, opts) op_length = len(op_doc_encoded) if ns_doc: ns_doc_encoded = _dict_to_bson(ns_doc, False, opts) @@ -1172,7 +1161,7 @@ def _client_encode_batched_op_msg( operations: list[tuple[str, Mapping[str, Any]]], namespaces: list[str], ack: bool, - opts: CodecOptions, + opts: CodecOptions[Any], ctx: _ClientBulkWriteContext, ) -> tuple[bytes, list[Mapping[str, Any]], list[Mapping[str, Any]]]: """Encode the next batched client-level bulkWrite @@ -1191,7 +1180,7 @@ def _client_batched_op_msg_compressed( operations: list[tuple[str, Mapping[str, Any]]], namespaces: list[str], ack: bool, - opts: CodecOptions, + opts: CodecOptions[Any], ctx: _ClientBulkWriteContext, ) -> tuple[int, bytes, list[Mapping[str, Any]], list[Mapping[str, Any]]]: """Create the next batched client-level bulkWrite operation @@ -1211,7 +1200,7 @@ def _client_batched_op_msg( operations: list[tuple[str, Mapping[str, Any]]], namespaces: list[str], ack: bool, - opts: CodecOptions, + opts: CodecOptions[Any], ctx: _ClientBulkWriteContext, ) -> tuple[int, bytes, list[Mapping[str, Any]], list[Mapping[str, Any]]]: """OP_MSG implementation entry point for client-level bulkWrite.""" @@ -1240,7 +1229,7 @@ def _client_do_batched_op_msg( command: MutableMapping[str, Any], operations: list[tuple[str, Mapping[str, Any]]], namespaces: list[str], - opts: CodecOptions, + opts: CodecOptions[Any], ctx: _ClientBulkWriteContext, ) -> tuple[int, bytes, list[Mapping[str, Any]], list[Mapping[str, Any]]]: """Create the next batched client-level bulkWrite @@ -1264,7 +1253,7 @@ def _encode_batched_write_command( operation: int, command: MutableMapping[str, Any], docs: list[Mapping[str, Any]], - opts: CodecOptions, + opts: CodecOptions[Any], ctx: _BulkWriteContext, ) -> tuple[bytes, 
list[Mapping[str, Any]]]: """Encode the next batched insert, update, or delete command.""" @@ -1283,7 +1272,7 @@ def _batched_write_command_impl( operation: int, command: MutableMapping[str, Any], docs: list[Mapping[str, Any]], - opts: CodecOptions, + opts: CodecOptions[Any], ctx: _BulkWriteContext, buf: _BytesIO, ) -> tuple[list[Mapping[str, Any]], int]: @@ -1363,7 +1352,9 @@ class _OpReply: UNPACK_FROM = struct.Struct(" list[bytes]: + ) -> list[bytes | memoryview]: """Check the response header from the database, without decoding BSON. Check the response for errors and unpack. @@ -1394,7 +1385,7 @@ def raw_response( errobj = {"ok": 0, "errmsg": msg, "code": 43} raise CursorNotFound(msg, 43, errobj) elif self.flags & 2: - error_object: dict = bson.BSON(self.documents).decode() + error_object: dict[str, Any] = bson.BSON(self.documents).decode() # Fake the ok field if it doesn't exist. error_object.setdefault("ok", 0) if error_object["$err"].startswith(HelloCompat.LEGACY_ERROR): @@ -1416,7 +1407,7 @@ def raw_response( def unpack_response( self, cursor_id: Optional[int] = None, - codec_options: CodecOptions = _UNICODE_REPLACE_CODEC_OPTIONS, + codec_options: CodecOptions[Any] = _UNICODE_REPLACE_CODEC_OPTIONS, user_fields: Optional[Mapping[str, Any]] = None, legacy_response: bool = False, ) -> list[dict[str, Any]]: @@ -1442,7 +1433,7 @@ def unpack_response( return bson.decode_all(self.documents, codec_options) return bson._decode_all_selective(self.documents, codec_options, user_fields) - def command_response(self, codec_options: CodecOptions) -> dict[str, Any]: + def command_response(self, codec_options: CodecOptions[Any]) -> dict[str, Any]: """Unpack a command response.""" docs = self.unpack_response(codec_options=codec_options) assert self.number_returned == 1 @@ -1459,7 +1450,7 @@ def more_to_come(self) -> bool: return False @classmethod - def unpack(cls, msg: bytes) -> _OpReply: + def unpack(cls, msg: bytes | memoryview) -> _OpReply: """Construct an _OpReply from raw bytes.""" # PYTHON-945: ignore starting_from field. flags, cursor_id, _, number_returned = cls.UNPACK_FROM(msg) @@ -1481,7 +1472,7 @@ class _OpMsg: MORE_TO_COME = 1 << 1 EXHAUST_ALLOWED = 1 << 16 # Only present on requests. 
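As background for the `bytes | memoryview` widening in the hunks below (a minimal illustration, not part of the patch): replies can be parsed straight out of a shared receive buffer because slicing a memoryview is zero-copy, while slicing bytes allocates a new object.

    buf = memoryview(bytearray(1024))    # reusable receive buffer
    body = buf[16:]                      # zero-copy view past a 16-byte header
    assert isinstance(body, memoryview)  # bytes(body) would copy instead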
- def __init__(self, flags: int, payload_document: bytes): + def __init__(self, flags: int, payload_document: bytes | memoryview): self.flags = flags self.payload_document = payload_document @@ -1502,7 +1493,7 @@ def raw_response( def unpack_response( self, cursor_id: Optional[int] = None, - codec_options: CodecOptions = _UNICODE_REPLACE_CODEC_OPTIONS, + codec_options: CodecOptions[Any] = _UNICODE_REPLACE_CODEC_OPTIONS, user_fields: Optional[Mapping[str, Any]] = None, legacy_response: bool = False, ) -> list[dict[str, Any]]: @@ -1519,11 +1510,11 @@ def unpack_response( assert not legacy_response return bson._decode_all_selective(self.payload_document, codec_options, user_fields) - def command_response(self, codec_options: CodecOptions) -> dict[str, Any]: + def command_response(self, codec_options: CodecOptions[Any]) -> dict[str, Any]: """Unpack a command response.""" return self.unpack_response(codec_options=codec_options)[0] - def raw_command_response(self) -> bytes: + def raw_command_response(self) -> bytes | memoryview: """Return the bytes of the command response.""" return self.payload_document @@ -1533,7 +1524,7 @@ def more_to_come(self) -> bool: return bool(self.flags & self.MORE_TO_COME) @classmethod - def unpack(cls, msg: bytes) -> _OpMsg: + def unpack(cls, msg: bytes | memoryview) -> _OpMsg: """Construct an _OpMsg from raw bytes.""" flags, first_payload_type, first_payload_size = cls.UNPACK_FROM(msg) if flags != 0: @@ -1552,7 +1543,7 @@ def unpack(cls, msg: bytes) -> _OpMsg: return cls(flags, payload_document) -_UNPACK_REPLY: dict[int, Callable[[bytes], Union[_OpReply, _OpMsg]]] = { +_UNPACK_REPLY: dict[int, Callable[[bytes | memoryview], Union[_OpReply, _OpMsg]]] = { _OpReply.OP_CODE: _OpReply.unpack, _OpMsg.OP_CODE: _OpMsg.unpack, } @@ -1594,7 +1585,7 @@ def __init__( ntoskip: int, spec: Mapping[str, Any], fields: Optional[Mapping[str, Any]], - codec_options: CodecOptions, + codec_options: CodecOptions[Any], read_preference: _ServerMode, limit: int, batch_size: int, @@ -1768,7 +1759,7 @@ def __init__( coll: str, ntoreturn: int, cursor_id: int, - codec_options: CodecOptions, + codec_options: CodecOptions[Any], read_preference: _ServerMode, session: Optional[_AgnosticClientSession], client: _AgnosticMongoClient, @@ -1882,7 +1873,7 @@ def use_command(self, conn: _AgnosticConnection) -> bool: return False -class _CursorAddress(tuple): +class _CursorAddress(tuple[Any, ...]): """The server address (host, port) of a cursor, with namespace property.""" __namespace: Any diff --git a/pymongo/mongo_client.py b/pymongo/mongo_client.py index a815cbc8a9..778abe27ef 100644 --- a/pymongo/mongo_client.py +++ b/pymongo/mongo_client.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/pymongo/monitoring.py b/pymongo/monitoring.py index 96f88597d2..46a78aea0b 100644 --- a/pymongo/monitoring.py +++ b/pymongo/monitoring.py @@ -4,7 +4,7 @@ # may not use this file except in compliance with the License. 
You # may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -472,14 +472,15 @@ def _validate_event_listeners( ) -> Sequence[_EventListeners]: """Validate event listeners""" if not isinstance(listeners, abc.Sequence): - raise TypeError(f"{option} must be a list or tuple") + raise TypeError(f"{option} must be a list or tuple, not {type(listeners)}") for listener in listeners: if not isinstance(listener, _EventListener): raise TypeError( f"Listeners for {option} must be either a " "CommandListener, ServerHeartbeatListener, " "ServerListener, TopologyListener, or " - "ConnectionPoolListener." + "ConnectionPoolListener," + f"not {type(listener)}" ) return listeners @@ -496,7 +497,8 @@ def register(listener: _EventListener) -> None: f"Listeners for {listener} must be either a " "CommandListener, ServerHeartbeatListener, " "ServerListener, TopologyListener, or " - "ConnectionPoolListener." + "ConnectionPoolListener," + f"not {type(listener)}" ) if isinstance(listener, CommandListener): _LISTENERS.command_listeners.append(listener) @@ -1345,7 +1347,11 @@ class ServerHeartbeatSucceededEvent(_ServerHeartbeatEvent): __slots__ = ("__duration", "__reply") def __init__( - self, duration: float, reply: Hello, connection_id: _Address, awaited: bool = False + self, + duration: float, + reply: Hello[dict[str, Any]], + connection_id: _Address, + awaited: bool = False, ) -> None: super().__init__(connection_id, awaited) self.__duration = duration @@ -1357,7 +1363,7 @@ def duration(self) -> float: return self.__duration @property - def reply(self) -> Hello: + def reply(self) -> Hello[dict[str, Any]]: """An instance of :class:`~pymongo.hello.Hello`.""" return self.__reply @@ -1645,7 +1651,7 @@ def publish_server_heartbeat_started(self, connection_id: _Address, awaited: boo _handle_exception() def publish_server_heartbeat_succeeded( - self, connection_id: _Address, duration: float, reply: Hello, awaited: bool + self, connection_id: _Address, duration: float, reply: Hello[dict[str, Any]], awaited: bool ) -> None: """Publish a ServerHeartbeatSucceededEvent to all server heartbeat listeners. diff --git a/pymongo/network_layer.py b/pymongo/network_layer.py index beffba6d18..7c62a251f8 100644 --- a/pymongo/network_layer.py +++ b/pymongo/network_layer.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -16,21 +16,26 @@ from __future__ import annotations import asyncio +import collections import errno import socket import struct import sys import time -from asyncio import AbstractEventLoop, Future +from asyncio import AbstractEventLoop, BaseTransport, BufferedProtocol, Future, Transport from typing import ( TYPE_CHECKING, + Any, Optional, Union, ) -from pymongo import ssl_support +from pymongo import _csot, ssl_support from pymongo._asyncio_task import create_task -from pymongo.errors import _OperationCancelled +from pymongo.common import MAX_MESSAGE_SIZE +from pymongo.compression_support import decompress +from pymongo.errors import ProtocolError, _OperationCancelled +from pymongo.message import _UNPACK_REPLY, _OpMsg, _OpReply from pymongo.socket_checker import _errno_from_exception try: @@ -41,22 +46,18 @@ _HAVE_SSL = False try: - from pymongo.pyopenssl_context import ( - BLOCKING_IO_LOOKUP_ERROR, - BLOCKING_IO_READ_ERROR, - BLOCKING_IO_WRITE_ERROR, - _sslConn, - ) + from pymongo.pyopenssl_context import _sslConn _HAVE_PYOPENSSL = True except ImportError: _HAVE_PYOPENSSL = False - _sslConn = SSLSocket # type: ignore - from pymongo.ssl_support import ( # type: ignore[assignment] - BLOCKING_IO_LOOKUP_ERROR, - BLOCKING_IO_READ_ERROR, - BLOCKING_IO_WRITE_ERROR, - ) + _sslConn = SSLSocket # type: ignore[assignment, misc] + +from pymongo.ssl_support import ( + BLOCKING_IO_LOOKUP_ERROR, + BLOCKING_IO_READ_ERROR, + BLOCKING_IO_WRITE_ERROR, +) if TYPE_CHECKING: from pymongo.asynchronous.pool import AsyncConnection @@ -66,16 +67,18 @@ _UNPACK_COMPRESSION_HEADER = struct.Struct(" None: +# These socket-based I/O methods are for KMS requests and any other network operations that do not use +# the MongoDB wire protocol +async def async_socket_sendall(sock: Union[socket.socket, _sslConn], buf: bytes) -> None: timeout = sock.gettimeout() sock.settimeout(0.0) - loop = asyncio.get_event_loop() + loop = asyncio.get_running_loop() try: if _HAVE_SSL and isinstance(sock, (SSLSocket, _sslConn)): - await asyncio.wait_for(_async_sendall_ssl(sock, buf, loop), timeout=timeout) + await asyncio.wait_for(_async_socket_sendall_ssl(sock, buf, loop), timeout=timeout) else: await asyncio.wait_for(loop.sock_sendall(sock, buf), timeout=timeout) # type: ignore[arg-type] except asyncio.TimeoutError as exc: @@ -87,13 +90,13 @@ async def async_sendall(sock: Union[socket.socket, _sslConn], buf: bytes) -> Non if sys.platform != "win32": - async def _async_sendall_ssl( + async def _async_socket_sendall_ssl( sock: Union[socket.socket, _sslConn], buf: bytes, loop: AbstractEventLoop ) -> None: view = memoryview(buf) sent = 0 - def _is_ready(fut: Future) -> None: + def _is_ready(fut: Future[Any]) -> None: if fut.done(): return fut.set_result(None) @@ -130,13 +133,13 @@ def _is_ready(fut: Future) -> None: loop.remove_reader(fd) loop.remove_writer(fd) - async def _async_receive_ssl( + async def _async_socket_receive_ssl( conn: _sslConn, length: int, loop: AbstractEventLoop, once: Optional[bool] = False ) -> memoryview: mv = memoryview(bytearray(length)) total_read = 0 - def _is_ready(fut: Future) -> None: + def _is_ready(fut: Future[Any]) -> None: if fut.done(): return fut.set_result(None) @@ -184,7 +187,7 @@ def _is_ready(fut: Future) -> None: # The default 
Windows asyncio event loop does not support loop.add_reader/add_writer: # https://docs.python.org/3/library/asyncio-platforms.html#asyncio-platform-support # Note: In PYTHON-4493 we plan to replace this code with asyncio streams. - async def _async_sendall_ssl( + async def _async_socket_sendall_ssl( sock: Union[socket.socket, _sslConn], buf: bytes, dummy: AbstractEventLoop ) -> None: view = memoryview(buf) @@ -205,7 +208,7 @@ async def _async_sendall_ssl( backoff = min(backoff * 2, 0.512) total_sent += sent - async def _async_receive_ssl( + async def _async_socket_receive_ssl( conn: _sslConn, length: int, dummy: AbstractEventLoop, once: Optional[bool] = False ) -> memoryview: mv = memoryview(bytearray(length)) @@ -244,45 +247,6 @@ async def _poll_cancellation(conn: AsyncConnection) -> None: await asyncio.sleep(_POLL_TIMEOUT) -async def async_receive_data( - conn: AsyncConnection, length: int, deadline: Optional[float] -) -> memoryview: - sock = conn.conn - sock_timeout = sock.gettimeout() - timeout: Optional[Union[float, int]] - if deadline: - # When the timeout has expired perform one final check to - # see if the socket is readable. This helps avoid spurious - # timeouts on AWS Lambda and other FaaS environments. - timeout = max(deadline - time.monotonic(), 0) - else: - timeout = sock_timeout - - sock.settimeout(0.0) - loop = asyncio.get_event_loop() - cancellation_task = create_task(_poll_cancellation(conn)) - try: - if _HAVE_SSL and isinstance(sock, (SSLSocket, _sslConn)): - read_task = create_task(_async_receive_ssl(sock, length, loop)) # type: ignore[arg-type] - else: - read_task = create_task(_async_receive(sock, length, loop)) # type: ignore[arg-type] - tasks = [read_task, cancellation_task] - done, pending = await asyncio.wait( - tasks, timeout=timeout, return_when=asyncio.FIRST_COMPLETED - ) - for task in pending: - task.cancel() - if pending: - await asyncio.wait(pending) - if len(done) == 0: - raise socket.timeout("timed out") - if read_task in done: - return read_task.result() - raise _OperationCancelled("operation cancelled") - finally: - sock.settimeout(sock_timeout) - - async def async_receive_data_socket( sock: Union[socket.socket, _sslConn], length: int ) -> memoryview: @@ -290,22 +254,27 @@ async def async_receive_data_socket( timeout = sock_timeout sock.settimeout(0.0) - loop = asyncio.get_event_loop() + loop = asyncio.get_running_loop() try: if _HAVE_SSL and isinstance(sock, (SSLSocket, _sslConn)): return await asyncio.wait_for( - _async_receive_ssl(sock, length, loop, once=True), # type: ignore[arg-type] + _async_socket_receive_ssl(sock, length, loop, once=True), # type: ignore[arg-type] timeout=timeout, ) else: - return await asyncio.wait_for(_async_receive(sock, length, loop), timeout=timeout) # type: ignore[arg-type] + return await asyncio.wait_for( + _async_socket_receive(sock, length, loop), # type: ignore[arg-type] + timeout=timeout, + ) except asyncio.TimeoutError as err: raise socket.timeout("timed out") from err finally: sock.settimeout(sock_timeout) -async def _async_receive(conn: socket.socket, length: int, loop: AbstractEventLoop) -> memoryview: +async def _async_socket_receive( + conn: socket.socket, length: int, loop: AbstractEventLoop +) -> memoryview: mv = memoryview(bytearray(length)) bytes_read = 0 while bytes_read < length: @@ -316,6 +285,43 @@ async def _async_receive(conn: socket.socket, length: int, loop: AbstractEventLo return mv +_PYPY = "PyPy" in sys.version +_WINDOWS = sys.platform == "win32" + + +def wait_for_read(conn: Connection, deadline: 
Optional[float]) -> None: + """Block until at least one byte is read, or a timeout, or a cancel.""" + sock = conn.conn.sock + timed_out = False + # Check if the connection's socket has been manually closed + if sock.fileno() == -1: + return + while True: + # SSLSocket can have buffered data which won't be caught by select. + if hasattr(sock, "pending") and sock.pending() > 0: + readable = True + else: + # Wait up to 500ms for the socket to become readable and then + # check for cancellation. + if deadline: + remaining = deadline - time.monotonic() + # When the timeout has expired perform one final check to + # see if the socket is readable. This helps avoid spurious + # timeouts on AWS Lambda and other FaaS environments. + if remaining <= 0: + timed_out = True + timeout = max(min(remaining, _POLL_TIMEOUT), 0) + else: + timeout = _POLL_TIMEOUT + readable = conn.socket_checker.select(sock, read=True, timeout=timeout) + if conn.cancel_context.cancelled: + raise _OperationCancelled("operation cancelled") + if readable: + return + if timed_out: + raise socket.timeout("timed out") + + def receive_data(conn: Connection, length: int, deadline: Optional[float]) -> memoryview: buf = bytearray(length) mv = memoryview(buf) @@ -324,18 +330,26 @@ def receive_data(conn: Connection, length: int, deadline: Optional[float]) -> me # check for the cancellation signal after each timeout. Alternatively we # could close the socket but that does not reliably cancel recv() calls # on all OSes. + # When the timeout has expired we perform one final non-blocking recv. + # This helps avoid spurious timeouts when the response is actually already + # buffered on the client. orig_timeout = conn.conn.gettimeout() try: while bytes_read < length: - if deadline is not None: - # CSOT: Update timeout. When the timeout has expired perform one - # final non-blocking recv. This helps avoid spurious timeouts when - # the response is actually already buffered on the client. - short_timeout = min(max(deadline - time.monotonic(), 0), _POLL_TIMEOUT) - else: - short_timeout = _POLL_TIMEOUT - conn.set_conn_timeout(short_timeout) try: + # Use the legacy wait_for_read cancellation approach on PyPy due to PYTHON-5011. + # also use it on Windows due to PYTHON-5405 + if _PYPY or _WINDOWS: + wait_for_read(conn, deadline) + if _csot.get_timeout() and deadline is not None: + conn.set_conn_timeout(max(deadline - time.monotonic(), 0)) + else: + if deadline is not None: + short_timeout = min(max(deadline - time.monotonic(), 0), _POLL_TIMEOUT) + else: + short_timeout = _POLL_TIMEOUT + conn.set_conn_timeout(short_timeout) + chunk_length = conn.conn.recv_into(mv[bytes_read:]) except BLOCKING_IO_ERRORS: if conn.cancel_context.cancelled: @@ -345,6 +359,15 @@ def receive_data(conn: Connection, length: int, deadline: Optional[float]) -> me except socket.timeout: if conn.cancel_context.cancelled: raise _OperationCancelled("operation cancelled") from None + if ( + _PYPY + or _WINDOWS + or not conn.is_sdam + and deadline is not None + and deadline - time.monotonic() < 0 + ): + # We reached the true deadline. 
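+                        # Re-raising the socket.timeout lets the caller surface it as a network timeout.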
+ raise continue except OSError as exc: if conn.cancel_context.cancelled: raise _OperationCancelled("operation cancelled") from exc if _errno_from_exception(exc) == errno.EINTR: continue raise @@ -360,3 +383,404 @@ def receive_data(conn: Connection, length: int, deadline: Optional[float]) -> me conn.set_conn_timeout(orig_timeout) return mv + + +class NetworkingInterfaceBase: + def __init__(self, conn: Any): + self.conn = conn + + @property + def gettimeout(self) -> Any: + raise NotImplementedError + + def settimeout(self, timeout: float | None) -> None: + raise NotImplementedError + + def close(self) -> Any: + raise NotImplementedError + + def is_closing(self) -> bool: + raise NotImplementedError + + @property + def get_conn(self) -> Any: + raise NotImplementedError + + @property + def sock(self) -> Any: + raise NotImplementedError + + +class AsyncNetworkingInterface(NetworkingInterfaceBase): + def __init__(self, conn: tuple[Transport, PyMongoProtocol]): + super().__init__(conn) + + @property + def gettimeout(self) -> float | None: + return self.conn[1].gettimeout + + def settimeout(self, timeout: float | None) -> None: + self.conn[1].settimeout(timeout) + + async def close(self) -> None: + self.conn[1].close() + await self.conn[1].wait_closed() + + def is_closing(self) -> bool: + return self.conn[0].is_closing() + + @property + def get_conn(self) -> PyMongoProtocol: + return self.conn[1] + + @property + def sock(self) -> socket.socket: + return self.conn[0].get_extra_info("socket") + + +class NetworkingInterface(NetworkingInterfaceBase): + def __init__(self, conn: Union[socket.socket, _sslConn]): + super().__init__(conn) + + def gettimeout(self) -> float | None: + return self.conn.gettimeout() + + def settimeout(self, timeout: float | None) -> None: + self.conn.settimeout(timeout) + + def close(self) -> None: + self.conn.close() + + def is_closing(self) -> bool: + return self.conn.is_closing() + + @property + def get_conn(self) -> Union[socket.socket, _sslConn]: + return self.conn + + @property + def sock(self) -> Union[socket.socket, _sslConn]: + return self.conn + + def fileno(self) -> int: + return self.conn.fileno() + + def recv_into(self, buffer: bytes | memoryview) -> int: + return self.conn.recv_into(buffer) + + +class PyMongoProtocol(BufferedProtocol): + def __init__(self, timeout: Optional[float] = None): + self.transport: Transport = None # type: ignore[assignment] + # Each message is read in 2-3 parts: header, compression header, and message body. + # The message buffer is allocated after the header is read. + self._header = memoryview(bytearray(16)) + self._header_index = 0 + self._compression_header = memoryview(bytearray(9)) + self._compression_index = 0 + self._message: Optional[memoryview] = None + self._message_index = 0 + # State. TODO: replace booleans with an enum?
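+        # Reading proceeds through the fixed 16-byte header, an optional 9-byte compression header, and then the message body; buffer_updated() resets these flags once a complete message has been delivered.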
+ self._expecting_header = True + self._expecting_compression = False + self._message_size = 0 + self._op_code = 0 + self._connection_lost = False + self._read_waiter: Optional[Future[Any]] = None + self._timeout = timeout + self._is_compressed = False + self._compressor_id: Optional[int] = None + self._max_message_size = MAX_MESSAGE_SIZE + self._response_to: Optional[int] = None + self._closed = asyncio.get_running_loop().create_future() + self._pending_messages: collections.deque[Future[Any]] = collections.deque() + self._done_messages: collections.deque[Future[Any]] = collections.deque() + + def settimeout(self, timeout: float | None) -> None: + self._timeout = timeout + + @property + def gettimeout(self) -> float | None: + """The configured timeout for the socket that underlies our protocol pair.""" + return self._timeout + + def connection_made(self, transport: BaseTransport) -> None: + """Called exactly once when a connection is made. + The transport argument is the transport representing the write side of the connection. + """ + self.transport = transport # type: ignore[assignment] + self.transport.set_write_buffer_limits(MAX_MESSAGE_SIZE, MAX_MESSAGE_SIZE) + + async def write(self, message: bytes) -> None: + """Write a message to this connection's transport.""" + if self.transport.is_closing(): + raise OSError("Connection is closed") + self.transport.write(message) + self.transport.resume_reading() + + async def read(self, request_id: Optional[int], max_message_size: int) -> tuple[bytes, int]: + """Read a single MongoDB Wire Protocol message from this connection.""" + if self.transport: + try: + self.transport.resume_reading() + # Known bug in SSL Protocols, fixed in Python 3.11: https://github.com/python/cpython/issues/89322 + except AttributeError: + raise OSError("connection is already closed") from None + self._max_message_size = max_message_size + if self._done_messages: + message = await self._done_messages.popleft() + else: + if self.transport and self.transport.is_closing(): + raise OSError("connection is already closed") + read_waiter = asyncio.get_running_loop().create_future() + self._pending_messages.append(read_waiter) + try: + message = await read_waiter + finally: + if read_waiter in self._done_messages: + self._done_messages.remove(read_waiter) + if message: + op_code, compressor_id, response_to, data = message + # No request_id for exhaust cursor "getMore". + if request_id is not None: + if request_id != response_to: + raise ProtocolError( + f"Got response id {response_to!r} but expected {request_id!r}" + ) + if compressor_id is not None: + data = decompress(data, compressor_id) + return data, op_code + raise OSError("connection closed") + + def get_buffer(self, sizehint: int) -> memoryview: + """Called to allocate a new receive buffer. + The asyncio loop calls this method expecting to receive a non-empty buffer to fill with data. + If any data does not fit into the returned buffer, this method will be called again until + either no data remains or an empty buffer is returned. + """ + # Due to a bug, Python <=3.11 will call get_buffer() even after we raise + # ProtocolError in buffer_updated() and call connection_lost(). We allocate + # a temp buffer to drain the waiting data. + if self._connection_lost: + if not self._message: + self._message = memoryview(bytearray(2**14)) + return self._message + # TODO: optimize this by caching pointers to the buffers. 
+ # return self._buffer[self._index:] + if self._expecting_header: + return self._header[self._header_index :] + if self._expecting_compression: + return self._compression_header[self._compression_index :] + return self._message[self._message_index :] # type: ignore[index] + + def buffer_updated(self, nbytes: int) -> None: + """Called when the buffer was updated with the received data""" + # Wrote 0 bytes into a non-empty buffer, signal connection closed + if nbytes == 0: + self.close(OSError("connection closed")) + return + if self._connection_lost: + return + if self._expecting_header: + self._header_index += nbytes + if self._header_index >= 16: + self._expecting_header = False + try: + ( + self._message_size, + self._op_code, + self._response_to, + self._expecting_compression, + ) = self.process_header() + except ProtocolError as exc: + self.close(exc) + return + self._message = memoryview(bytearray(self._message_size)) + return + if self._expecting_compression: + self._compression_index += nbytes + if self._compression_index >= 9: + self._expecting_compression = False + self._op_code, self._compressor_id = self.process_compression_header() + return + + self._message_index += nbytes + if self._message_index >= self._message_size: + self._expecting_header = True + # Pause reading to avoid storing an arbitrary number of messages in memory. + self.transport.pause_reading() + if self._pending_messages: + result = self._pending_messages.popleft() + else: + result = asyncio.get_running_loop().create_future() + # Future has been cancelled, close this connection + if result.done(): + self.close(None) + return + # Necessary values to reconstruct and verify message + result.set_result( + (self._op_code, self._compressor_id, self._response_to, self._message) + ) + self._done_messages.append(result) + # Reset internal state to expect a new message + self._header_index = 0 + self._compression_index = 0 + self._message_index = 0 + self._message_size = 0 + self._message = None + self._op_code = 0 + self._compressor_id = None + self._response_to = None + + def process_header(self) -> tuple[int, int, int, bool]: + """Unpack a MongoDB Wire Protocol header.""" + length, _, response_to, op_code = _UNPACK_HEADER(self._header) + expecting_compression = False + if op_code == 2012: # OP_COMPRESSED + if length <= 25: + raise ProtocolError( + f"Message length ({length!r}) not longer than standard OP_COMPRESSED message header size (25)" + ) + expecting_compression = True + length -= 9 + if length <= 16: + raise ProtocolError( + f"Message length ({length!r}) not longer than standard message header size (16)" + ) + if length > self._max_message_size: + raise ProtocolError( + f"Message length ({length!r}) is larger than server max " + f"message size ({self._max_message_size!r})" + ) + + return length - 16, op_code, response_to, expecting_compression + + def process_compression_header(self) -> tuple[int, int]: + """Unpack a MongoDB Wire Protocol compression header.""" + op_code, _, compressor_id = _UNPACK_COMPRESSION_HEADER(self._compression_header) + return op_code, compressor_id + + def _resolve_pending_messages(self, exc: Optional[Exception] = None) -> None: + pending = list(self._pending_messages) + for msg in pending: + if not msg.done(): + if exc is None: + msg.set_result(None) + else: + msg.set_exception(exc) + self._done_messages.append(msg) + + def close(self, exc: Optional[Exception] = None) -> None: + self.transport.abort() + self._resolve_pending_messages(exc) + self._connection_lost = True + + def 
connection_lost(self, exc: Optional[Exception] = None) -> None: + self._resolve_pending_messages(exc) + if not self._closed.done(): + self._closed.set_result(None) + + async def wait_closed(self) -> None: + await self._closed + + +async def async_sendall(conn: PyMongoProtocol, buf: bytes) -> None: + try: + await asyncio.wait_for(conn.write(buf), timeout=conn.gettimeout) + except asyncio.TimeoutError as exc: + # Convert the asyncio.wait_for timeout error to socket.timeout which pool.py understands. + raise socket.timeout("timed out") from exc + + +async def async_receive_message( + conn: AsyncConnection, + request_id: Optional[int], + max_message_size: int = MAX_MESSAGE_SIZE, +) -> Union[_OpReply, _OpMsg]: + """Receive a raw BSON message or raise socket.error.""" + timeout: Optional[Union[float, int]] + timeout = conn.conn.gettimeout + if _csot.get_timeout(): + deadline = _csot.get_deadline() + else: + if timeout: + deadline = time.monotonic() + timeout + else: + deadline = None + if deadline: + # When the timeout has expired perform one final check to + # see if the socket is readable. This helps avoid spurious + # timeouts on AWS Lambda and other FaaS environments. + timeout = max(deadline - time.monotonic(), 0) + + cancellation_task = create_task(_poll_cancellation(conn)) + read_task = create_task(conn.conn.get_conn.read(request_id, max_message_size)) + tasks = [read_task, cancellation_task] + try: + done, pending = await asyncio.wait( + tasks, timeout=timeout, return_when=asyncio.FIRST_COMPLETED + ) + for task in pending: + task.cancel() + if pending: + await asyncio.wait(pending) + if len(done) == 0: + raise socket.timeout("timed out") + if read_task in done: + data, op_code = read_task.result() + try: + unpack_reply = _UNPACK_REPLY[op_code] + except KeyError: + raise ProtocolError( + f"Got opcode {op_code!r} but expected {_UNPACK_REPLY.keys()!r}" + ) from None + return unpack_reply(data) + raise _OperationCancelled("operation cancelled") + except asyncio.CancelledError: + for task in tasks: + task.cancel() + await asyncio.wait(tasks) + raise + + +def receive_message( + conn: Connection, request_id: Optional[int], max_message_size: int = MAX_MESSAGE_SIZE +) -> Union[_OpReply, _OpMsg]: + """Receive a raw BSON message or raise socket.error.""" + if _csot.get_timeout(): + deadline = _csot.get_deadline() + else: + timeout = conn.conn.gettimeout() + if timeout: + deadline = time.monotonic() + timeout + else: + deadline = None + # Ignore the response's request id. + length, _, response_to, op_code = _UNPACK_HEADER(receive_data(conn, 16, deadline)) + # No request_id for exhaust cursor "getMore". 
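+    # A mismatched response_to means this connection's replies are out of sync with its requests, hence the ProtocolError below.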
+ if request_id is not None: + if request_id != response_to: + raise ProtocolError(f"Got response id {response_to!r} but expected {request_id!r}") + if length <= 16: + raise ProtocolError( + f"Message length ({length!r}) not longer than standard message header size (16)" + ) + if length > max_message_size: + raise ProtocolError( + f"Message length ({length!r}) is larger than server max " + f"message size ({max_message_size!r})" + ) + data: memoryview | bytes + if op_code == 2012: + op_code, _, compressor_id = _UNPACK_COMPRESSION_HEADER(receive_data(conn, 9, deadline)) + data = decompress(receive_data(conn, length - 25, deadline), compressor_id) + else: + data = receive_data(conn, length - 16, deadline) + + try: + unpack_reply = _UNPACK_REPLY[op_code] + except KeyError: + raise ProtocolError( + f"Got opcode {op_code!r} but expected {_UNPACK_REPLY.keys()!r}" + ) from None + return unpack_reply(data) diff --git a/pymongo/ocsp_cache.py b/pymongo/ocsp_cache.py index 3facefe350..2df232848f 100644 --- a/pymongo/ocsp_cache.py +++ b/pymongo/ocsp_cache.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/pymongo/ocsp_support.py b/pymongo/ocsp_support.py index ee359b71c2..8322f821fb 100644 --- a/pymongo/ocsp_support.py +++ b/pymongo/ocsp_support.py @@ -4,7 +4,7 @@ # may not use this file except in compliance with the License. You # may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/pymongo/operations.py b/pymongo/operations.py index 482ab68003..73fb8b5f36 100644 --- a/pymongo/operations.py +++ b/pymongo/operations.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -781,7 +781,7 @@ def __init__(self, keys: _IndexKeyHint, **kwargs: Any) -> None: Added the ``partialFilterExpression`` option to support partial indexes. - .. _wildcard index: https://mongodb.com/docs/master/core/index-wildcard/ + .. _wildcard index: https://dochub.mongodb.org/core/index-wildcard/ """ keys = _index_list(keys) if kwargs.get("name") is None: diff --git a/pymongo/periodic_executor.py b/pymongo/periodic_executor.py index 2f89b91deb..82f506f039 100644 --- a/pymongo/periodic_executor.py +++ b/pymongo/periodic_executor.py @@ -4,7 +4,7 @@ # may not use this file except in compliance with the License. 
You # may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -23,6 +23,7 @@ import weakref from typing import Any, Optional +from pymongo import _csot from pymongo._asyncio_task import create_task from pymongo.lock import _create_lock @@ -52,7 +53,7 @@ def __init__( self._min_interval = min_interval self._target = target self._stopped = False - self._task: Optional[asyncio.Task] = None + self._task: Optional[asyncio.Task[Any]] = None self._name = name self._skip_sleep = False @@ -75,17 +76,12 @@ def close(self, dummy: Any = None) -> None: callback; see monitor.py. """ self._stopped = True + if self._task is not None: + self._task.cancel() async def join(self, timeout: Optional[int] = None) -> None: if self._task is not None: - try: - await asyncio.wait_for(self._task, timeout=timeout) # type-ignore: [arg-type] - except asyncio.TimeoutError: - # Task timed out - pass - except asyncio.exceptions.CancelledError: - # Task was already finished, or not yet started. - raise + await asyncio.wait([self._task], timeout=timeout) # type-ignore: [arg-type] def wake(self) -> None: """Execute the target function soon.""" @@ -98,6 +94,8 @@ def skip_sleep(self) -> None: self._skip_sleep = True async def _run(self) -> None: + # The CSOT contextvars must be cleared inside the executor task before execution begins + _csot.reset_all() while not self._stopped: if self._task and self._task.cancelling(): # type: ignore[unused-ignore, attr-defined] raise asyncio.CancelledError @@ -105,6 +103,7 @@ async def _run(self) -> None: if not await self._target(): self._stopped = True break + # Catch KeyboardInterrupt, CancelledError, etc. and cleanup. except BaseException: self._stopped = True raise @@ -237,6 +236,7 @@ def _run(self) -> None: if not self._target(): self._stopped = True break + # Catch KeyboardInterrupt, etc. and cleanup. except BaseException: with self._lock: self._stopped = True diff --git a/pymongo/pool.py b/pymongo/pool.py index fbbb70fc68..456ff3df0a 100644 --- a/pymongo/pool.py +++ b/pymongo/pool.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/pymongo/pool_options.py b/pymongo/pool_options.py index 038dbb3b5d..a5d76007b0 100644 --- a/pymongo/pool_options.py +++ b/pymongo/pool_options.py @@ -4,7 +4,7 @@ # may not use this file except in compliance with the License. 
You # may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -376,18 +376,7 @@ def __init__( "async", ) if driver: - if driver.name: - self.__metadata["driver"]["name"] = "{}|{}".format( - self.__metadata["driver"]["name"], - driver.name, - ) - if driver.version: - self.__metadata["driver"]["version"] = "{}|{}".format( - _METADATA["driver"]["version"], - driver.version, - ) - if driver.platform: - self.__metadata["platform"] = "{}|{}".format(_METADATA["platform"], driver.platform) + self._update_metadata(driver) env = _metadata_env() if env: @@ -395,6 +384,30 @@ def __init__( _truncate_metadata(self.__metadata) + def _update_metadata(self, driver: DriverInfo) -> None: + """Updates the client's metadata""" + if driver.name and driver.name.lower() in self.__metadata["driver"]["name"].lower().split( + "|" + ): + return + + metadata = copy.deepcopy(self.__metadata) + + if driver.name: + metadata["driver"]["name"] = "{}|{}".format( + metadata["driver"]["name"], + driver.name, + ) + if driver.version: + metadata["driver"]["version"] = "{}|{}".format( + metadata["driver"]["version"], + driver.version, + ) + if driver.platform: + metadata["platform"] = "{}|{}".format(metadata["platform"], driver.platform) + + self.__metadata = metadata + @property def _credentials(self) -> Optional[MongoCredential]: """A :class:`~pymongo.auth.MongoCredentials` instance or None.""" diff --git a/pymongo/pool_shared.py b/pymongo/pool_shared.py new file mode 100644 index 0000000000..8db26ccead --- /dev/null +++ b/pymongo/pool_shared.py @@ -0,0 +1,521 @@ +# Copyright 2025-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
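The ``_update_metadata`` helper above now skips a ``DriverInfo`` whose name is already present, avoiding duplicated ``name|name`` entries in the handshake metadata; at the public API level the path is exercised like this (illustrative name/version values):

    from pymongo import MongoClient
    from pymongo.driver_info import DriverInfo

    # A wrapping library appends its identity once; re-registering the
    # same name is now a no-op instead of growing the metadata string.
    client = MongoClient(driver=DriverInfo(name="MyODM", version="1.0"))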
+ +"""Pool utilities and shared helper methods.""" +from __future__ import annotations + +import asyncio +import functools +import socket +import ssl +import sys +from typing import ( + TYPE_CHECKING, + Any, + NoReturn, + Optional, + Union, +) + +from pymongo import _csot +from pymongo.asynchronous.helpers import _getaddrinfo +from pymongo.errors import ( # type:ignore[attr-defined] + AutoReconnect, + ConnectionFailure, + NetworkTimeout, + _CertificateError, +) +from pymongo.helpers_shared import _get_timeout_details, format_timeout_details +from pymongo.network_layer import AsyncNetworkingInterface, NetworkingInterface, PyMongoProtocol +from pymongo.pool_options import PoolOptions +from pymongo.ssl_support import PYSSLError, SSLError, _has_sni + +SSLErrors = (PYSSLError, SSLError) +if TYPE_CHECKING: + from pymongo.pyopenssl_context import _sslConn + from pymongo.typings import _Address + +try: + from fcntl import F_GETFD, F_SETFD, FD_CLOEXEC, fcntl + + def _set_non_inheritable_non_atomic(fd: int) -> None: + """Set the close-on-exec flag on the given file descriptor.""" + flags = fcntl(fd, F_GETFD) + fcntl(fd, F_SETFD, flags | FD_CLOEXEC) + +except ImportError: + # Windows, various platforms we don't claim to support + # (Jython, IronPython, ..), systems that don't provide + # everything we need from fcntl, etc. + def _set_non_inheritable_non_atomic(fd: int) -> None: # noqa: ARG001 + """Dummy function for platforms that don't provide fcntl.""" + + +_MAX_TCP_KEEPIDLE = 120 +_MAX_TCP_KEEPINTVL = 10 +_MAX_TCP_KEEPCNT = 9 + +if sys.platform == "win32": + try: + import _winreg as winreg + except ImportError: + import winreg + + def _query(key, name, default): + try: + value, _ = winreg.QueryValueEx(key, name) + # Ensure the value is a number or raise ValueError. + return int(value) + except (OSError, ValueError): + # QueryValueEx raises OSError when the key does not exist (i.e. + # the system is using the Windows default value). + return default + + try: + with winreg.OpenKey( + winreg.HKEY_LOCAL_MACHINE, r"SYSTEM\CurrentControlSet\Services\Tcpip\Parameters" + ) as key: + _WINDOWS_TCP_IDLE_MS = _query(key, "KeepAliveTime", 7200000) + _WINDOWS_TCP_INTERVAL_MS = _query(key, "KeepAliveInterval", 1000) + except OSError: + # We could not check the default values because winreg.OpenKey failed. + # Assume the system is using the default values. + _WINDOWS_TCP_IDLE_MS = 7200000 + _WINDOWS_TCP_INTERVAL_MS = 1000 + + def _set_keepalive_times(sock): + idle_ms = min(_WINDOWS_TCP_IDLE_MS, _MAX_TCP_KEEPIDLE * 1000) + interval_ms = min(_WINDOWS_TCP_INTERVAL_MS, _MAX_TCP_KEEPINTVL * 1000) + if idle_ms < _WINDOWS_TCP_IDLE_MS or interval_ms < _WINDOWS_TCP_INTERVAL_MS: + sock.ioctl(socket.SIO_KEEPALIVE_VALS, (1, idle_ms, interval_ms)) + +else: + + def _set_tcp_option(sock: socket.socket, tcp_option: str, max_value: int) -> None: + if hasattr(socket, tcp_option): + sockopt = getattr(socket, tcp_option) + try: + # PYTHON-1350 - NetBSD doesn't implement getsockopt for + # TCP_KEEPIDLE and friends. Don't attempt to set the + # values there. 
+ default = sock.getsockopt(socket.IPPROTO_TCP, sockopt) + if default > max_value: + sock.setsockopt(socket.IPPROTO_TCP, sockopt, max_value) + except OSError: + pass + + def _set_keepalive_times(sock: socket.socket) -> None: + _set_tcp_option(sock, "TCP_KEEPIDLE", _MAX_TCP_KEEPIDLE) + _set_tcp_option(sock, "TCP_KEEPINTVL", _MAX_TCP_KEEPINTVL) + _set_tcp_option(sock, "TCP_KEEPCNT", _MAX_TCP_KEEPCNT) + + +def _raise_connection_failure( + address: Any, + error: Exception, + msg_prefix: Optional[str] = None, + timeout_details: Optional[dict[str, float]] = None, +) -> NoReturn: + """Convert a socket.error to ConnectionFailure and raise it.""" + host, port = address + # If connecting to a Unix socket, port will be None. + if port is not None: + msg = "%s:%d: %s" % (host, port, error) + else: + msg = f"{host}: {error}" + if msg_prefix: + msg = msg_prefix + msg + if "configured timeouts" not in msg: + msg += format_timeout_details(timeout_details) + if ( + isinstance(error, socket.timeout) + or isinstance(error, SSLErrors) + and "timed out" in str(error) + ): + raise NetworkTimeout(msg) from error + else: + raise AutoReconnect(msg) from error + + +class _CancellationContext: + def __init__(self) -> None: + self._cancelled = False + + def cancel(self) -> None: + """Cancel this context.""" + self._cancelled = True + + @property + def cancelled(self) -> bool: + """Was cancel called?""" + return self._cancelled + + +async def _async_create_connection(address: _Address, options: PoolOptions) -> socket.socket: + """Given (host, port) and PoolOptions, connect and return a raw socket object. + + Can raise socket.error. + + This is a modified version of create_connection from CPython >= 2.7. + """ + host, port = address + + # Check if dealing with a unix domain socket + if host.endswith(".sock"): + if not hasattr(socket, "AF_UNIX"): + raise ConnectionFailure("UNIX-sockets are not supported on this system") + sock = socket.socket(socket.AF_UNIX) + # SOCK_CLOEXEC not supported for Unix sockets. + _set_non_inheritable_non_atomic(sock.fileno()) + try: + sock.setblocking(False) + await asyncio.get_running_loop().sock_connect(sock, host) + return sock + except OSError: + sock.close() + raise + + # Don't try IPv6 if we don't support it. Also skip it if host + # is 'localhost' (::1 is fine). Avoids slow connect issues + # like PYTHON-356. + family = socket.AF_INET + if socket.has_ipv6 and host != "localhost": + family = socket.AF_UNSPEC + + err = None + for res in await _getaddrinfo(host, port, family=family, type=socket.SOCK_STREAM): + af, socktype, proto, dummy, sa = res + # SOCK_CLOEXEC was new in CPython 3.2, and only available on a limited + # number of platforms (newer Linux and *BSD). Starting with CPython 3.4 + # all file descriptors are created non-inheritable. See PEP 446. + try: + sock = socket.socket(af, socktype | getattr(socket, "SOCK_CLOEXEC", 0), proto) + except OSError: + # Can SOCK_CLOEXEC be defined even if the kernel doesn't support + # it? + sock = socket.socket(af, socktype, proto) + # Fallback when SOCK_CLOEXEC isn't available. + _set_non_inheritable_non_atomic(sock.fileno()) + try: + sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + # CSOT: apply timeout to socket connect. 
+ timeout = _csot.remaining() + if timeout is None: + timeout = options.connect_timeout + elif timeout <= 0: + raise socket.timeout("timed out") + sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, True) + _set_keepalive_times(sock) + # Socket needs to be non-blocking during connection to not block the event loop + sock.setblocking(False) + await asyncio.wait_for( + asyncio.get_running_loop().sock_connect(sock, sa), timeout=timeout + ) + sock.settimeout(timeout) + return sock + except asyncio.TimeoutError as e: + sock.close() + err = socket.timeout("timed out") + err.__cause__ = e + except OSError as e: + sock.close() + err = e # type: ignore[assignment] + + if err is not None: + raise err + else: + # This likely means we tried to connect to an IPv6 only + # host with an OS/kernel or Python interpreter that doesn't + # support IPv6. The test case is Jython2.5.1 which doesn't + # support IPv6 at all. + raise OSError("getaddrinfo failed") + + +async def _async_configured_socket( + address: _Address, options: PoolOptions +) -> Union[socket.socket, _sslConn]: + """Given (host, port) and PoolOptions, return a raw configured socket. + + Can raise socket.error, ConnectionFailure, or _CertificateError. + + Sets socket's SSL and timeout options. + """ + sock = await _async_create_connection(address, options) + ssl_context = options._ssl_context + + if ssl_context is None: + sock.settimeout(options.socket_timeout) + return sock + + host = address[0] + try: + # We have to pass hostname / ip address to wrap_socket + # to use SSLContext.check_hostname. + if _has_sni(False): + loop = asyncio.get_running_loop() + ssl_sock = await loop.run_in_executor( + None, + functools.partial(ssl_context.wrap_socket, sock, server_hostname=host), # type: ignore[assignment, misc, unused-ignore] + ) + else: + loop = asyncio.get_running_loop() + ssl_sock = await loop.run_in_executor(None, ssl_context.wrap_socket, sock) # type: ignore[assignment, misc, unused-ignore] + except _CertificateError: + sock.close() + # Raise _CertificateError directly like we do after match_hostname + # below. + raise + except (OSError, *SSLErrors) as exc: + sock.close() + # We raise AutoReconnect for transient and permanent SSL handshake + # failures alike. Permanent handshake failures, like protocol + # mismatch, will be turned into ServerSelectionTimeoutErrors later. + details = _get_timeout_details(options) + _raise_connection_failure(address, exc, "SSL handshake failed: ", timeout_details=details) + if ( + ssl_context.verify_mode + and not ssl_context.check_hostname + and not options.tls_allow_invalid_hostnames + ): + try: + ssl.match_hostname(ssl_sock.getpeercert(), hostname=host) # type:ignore[attr-defined, unused-ignore] + except _CertificateError: + ssl_sock.close() + raise + + ssl_sock.settimeout(options.socket_timeout) + return ssl_sock + + +async def _configured_protocol_interface( + address: _Address, options: PoolOptions +) -> AsyncNetworkingInterface: + """Given (host, port) and PoolOptions, return a configured AsyncNetworkingInterface. + + Can raise socket.error, ConnectionFailure, or _CertificateError. + + Sets protocol's SSL and timeout options. 
+ """ + sock = await _async_create_connection(address, options) + ssl_context = options._ssl_context + timeout = options.socket_timeout + + if ssl_context is None: + return AsyncNetworkingInterface( + await asyncio.get_running_loop().create_connection( + lambda: PyMongoProtocol(timeout=timeout), sock=sock + ) + ) + + host = address[0] + try: + # We have to pass hostname / ip address to wrap_socket + # to use SSLContext.check_hostname. + transport, protocol = await asyncio.get_running_loop().create_connection( # type: ignore[call-overload] + lambda: PyMongoProtocol(timeout=timeout), + sock=sock, + server_hostname=host, + ssl=ssl_context, + ) + except _CertificateError: + # Raise _CertificateError directly like we do after match_hostname + # below. + raise + except (OSError, *SSLErrors) as exc: + # We raise AutoReconnect for transient and permanent SSL handshake + # failures alike. Permanent handshake failures, like protocol + # mismatch, will be turned into ServerSelectionTimeoutErrors later. + details = _get_timeout_details(options) + _raise_connection_failure(address, exc, "SSL handshake failed: ", timeout_details=details) + if ( + ssl_context.verify_mode + and not ssl_context.check_hostname + and not options.tls_allow_invalid_hostnames + ): + try: + ssl.match_hostname(transport.get_extra_info("peercert"), hostname=host) # type:ignore[attr-defined,unused-ignore] + except _CertificateError: + transport.abort() + raise + + return AsyncNetworkingInterface((transport, protocol)) + + +def _create_connection(address: _Address, options: PoolOptions) -> socket.socket: + """Given (host, port) and PoolOptions, connect and return a raw socket object. + + Can raise socket.error. + + This is a modified version of create_connection from CPython >= 2.7. + """ + host, port = address + + # Check if dealing with a unix domain socket + if host.endswith(".sock"): + if not hasattr(socket, "AF_UNIX"): + raise ConnectionFailure("UNIX-sockets are not supported on this system") + sock = socket.socket(socket.AF_UNIX) + # SOCK_CLOEXEC not supported for Unix sockets. + _set_non_inheritable_non_atomic(sock.fileno()) + try: + sock.connect(host) + return sock + except OSError: + sock.close() + raise + + # Don't try IPv6 if we don't support it. Also skip it if host + # is 'localhost' (::1 is fine). Avoids slow connect issues + # like PYTHON-356. + family = socket.AF_INET + if socket.has_ipv6 and host != "localhost": + family = socket.AF_UNSPEC + + err = None + for res in socket.getaddrinfo(host, port, family=family, type=socket.SOCK_STREAM): # type: ignore[attr-defined, unused-ignore] + af, socktype, proto, dummy, sa = res + # SOCK_CLOEXEC was new in CPython 3.2, and only available on a limited + # number of platforms (newer Linux and *BSD). Starting with CPython 3.4 + # all file descriptors are created non-inheritable. See PEP 446. + try: + sock = socket.socket(af, socktype | getattr(socket, "SOCK_CLOEXEC", 0), proto) + except OSError: + # Can SOCK_CLOEXEC be defined even if the kernel doesn't support + # it? + sock = socket.socket(af, socktype, proto) + # Fallback when SOCK_CLOEXEC isn't available. + _set_non_inheritable_non_atomic(sock.fileno()) + try: + sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + # CSOT: apply timeout to socket connect. 
+ timeout = _csot.remaining() + if timeout is None: + timeout = options.connect_timeout + elif timeout <= 0: + raise socket.timeout("timed out") + sock.settimeout(timeout) + sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, True) + _set_keepalive_times(sock) + sock.connect(sa) + return sock + except OSError as e: + err = e + sock.close() + + if err is not None: + raise err + else: + # This likely means we tried to connect to an IPv6 only + # host with an OS/kernel or Python interpreter that doesn't + # support IPv6. The test case is Jython2.5.1 which doesn't + # support IPv6 at all. + raise OSError("getaddrinfo failed") + + +def _configured_socket(address: _Address, options: PoolOptions) -> Union[socket.socket, _sslConn]: + """Given (host, port) and PoolOptions, return a raw configured socket. + + Can raise socket.error, ConnectionFailure, or _CertificateError. + + Sets socket's SSL and timeout options. + """ + sock = _create_connection(address, options) + ssl_context = options._ssl_context + + if ssl_context is None: + sock.settimeout(options.socket_timeout) + return sock + + host = address[0] + try: + # We have to pass hostname / ip address to wrap_socket + # to use SSLContext.check_hostname. + if _has_sni(True): + ssl_sock = ssl_context.wrap_socket(sock, server_hostname=host) # type: ignore[assignment, misc, unused-ignore] + else: + ssl_sock = ssl_context.wrap_socket(sock) # type: ignore[assignment, misc, unused-ignore] + except _CertificateError: + sock.close() + # Raise _CertificateError directly like we do after match_hostname + # below. + raise + except (OSError, *SSLErrors) as exc: + sock.close() + # We raise AutoReconnect for transient and permanent SSL handshake + # failures alike. Permanent handshake failures, like protocol + # mismatch, will be turned into ServerSelectionTimeoutErrors later. + details = _get_timeout_details(options) + _raise_connection_failure(address, exc, "SSL handshake failed: ", timeout_details=details) + if ( + ssl_context.verify_mode + and not ssl_context.check_hostname + and not options.tls_allow_invalid_hostnames + ): + try: + ssl.match_hostname(ssl_sock.getpeercert(), hostname=host) # type:ignore[attr-defined, unused-ignore] + except _CertificateError: + ssl_sock.close() + raise + + ssl_sock.settimeout(options.socket_timeout) + return ssl_sock + + +def _configured_socket_interface(address: _Address, options: PoolOptions) -> NetworkingInterface: + """Given (host, port) and PoolOptions, return a NetworkingInterface wrapping a configured socket. + + Can raise socket.error, ConnectionFailure, or _CertificateError. + + Sets socket's SSL and timeout options. + """ + sock = _create_connection(address, options) + ssl_context = options._ssl_context + + if ssl_context is None: + sock.settimeout(options.socket_timeout) + return NetworkingInterface(sock) + + host = address[0] + try: + # We have to pass hostname / ip address to wrap_socket + # to use SSLContext.check_hostname. + if _has_sni(True): + ssl_sock = ssl_context.wrap_socket(sock, server_hostname=host) + else: + ssl_sock = ssl_context.wrap_socket(sock) + except _CertificateError: + sock.close() + # Raise _CertificateError directly like we do after match_hostname + # below. + raise + except (OSError, *SSLErrors) as exc: + sock.close() + # We raise AutoReconnect for transient and permanent SSL handshake + # failures alike. Permanent handshake failures, like protocol + # mismatch, will be turned into ServerSelectionTimeoutErrors later. 
+ details = _get_timeout_details(options) + _raise_connection_failure(address, exc, "SSL handshake failed: ", timeout_details=details) + if ( + ssl_context.verify_mode + and not ssl_context.check_hostname + and not options.tls_allow_invalid_hostnames + ): + try: + ssl.match_hostname(ssl_sock.getpeercert(), hostname=host) # type:ignore[attr-defined,unused-ignore] + except _CertificateError: + ssl_sock.close() + raise + + ssl_sock.settimeout(options.socket_timeout) + return NetworkingInterface(ssl_sock) diff --git a/pymongo/pyopenssl_context.py b/pymongo/pyopenssl_context.py index 50d8680a74..08fe99c889 100644 --- a/pymongo/pyopenssl_context.py +++ b/pymongo/pyopenssl_context.py @@ -4,7 +4,7 @@ # may not use this file except in compliance with the License. You # may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -14,10 +14,11 @@ """A CPython compatible SSLContext implementation wrapping PyOpenSSL's context. + +Due to limitations of the CPython asyncio.Protocol implementation for SSL, the async API does not support PyOpenSSL. """ from __future__ import annotations -import asyncio import socket as _socket import ssl as _stdlibssl import sys as _sys @@ -109,15 +110,12 @@ def __init__( ctx: _SSL.Context, sock: Optional[_socket.socket], suppress_ragged_eofs: bool, - is_async: bool = False, ): self.socket_checker = _SocketChecker() self.suppress_ragged_eofs = suppress_ragged_eofs super().__init__(ctx, sock) - self._is_async = is_async def _call(self, call: Callable[..., _T], *args: Any, **kwargs: Any) -> _T: - is_async = kwargs.pop("allow_async", True) and self._is_async timeout = self.gettimeout() if timeout: start = _time.monotonic() @@ -125,7 +123,8 @@ def _call(self, call: Callable[..., _T], *args: Any, **kwargs: Any) -> _T: try: return call(*args, **kwargs) except BLOCKING_IO_ERRORS as exc: - if is_async: + # Do not retry if the connection is in non-blocking mode. + if timeout == 0: raise exc # Check for closed socket. if self.fileno() == -1: @@ -147,7 +146,6 @@ def _call(self, call: Callable[..., _T], *args: Any, **kwargs: Any) -> _T: continue def do_handshake(self, *args: Any, **kwargs: Any) -> None: - kwargs["allow_async"] = False return self._call(super().do_handshake, *args, **kwargs) def recv(self, *args: Any, **kwargs: Any) -> bytes: @@ -272,7 +270,7 @@ def __set_check_ocsp_endpoint(self, value: bool) -> None: check_ocsp_endpoint = property(__get_check_ocsp_endpoint, __set_check_ocsp_endpoint) - def __get_options(self) -> None: + def __get_options(self) -> int: # Calling set_options adds the option to the existing bitmask and # returns the new bitmask. # https://www.pyopenssl.org/en/stable/api/ssl.html#OpenSSL.SSL.Context.set_options @@ -378,58 +376,6 @@ def set_default_verify_paths(self) -> None: # but not that same as CPython's. self._ctx.set_default_verify_paths() - async def a_wrap_socket( - self, - sock: _socket.socket, - server_side: bool = False, - do_handshake_on_connect: bool = True, - suppress_ragged_eofs: bool = True, - server_hostname: Optional[str] = None, - session: Optional[_SSL.Session] = None, - ) -> _sslConn: - """Wrap an existing Python socket connection and return a TLS socket - object. 
- """ - ssl_conn = _sslConn(self._ctx, sock, suppress_ragged_eofs, True) - loop = asyncio.get_running_loop() - if session: - ssl_conn.set_session(session) - if server_side is True: - ssl_conn.set_accept_state() - else: - # SNI - if server_hostname and not _is_ip_address(server_hostname): - # XXX: Do this in a callback registered with - # SSLContext.set_info_callback? See Twisted for an example. - ssl_conn.set_tlsext_host_name(server_hostname.encode("idna")) - if self.verify_mode != _stdlibssl.CERT_NONE: - # Request a stapled OCSP response. - await loop.run_in_executor(None, ssl_conn.request_ocsp) - ssl_conn.set_connect_state() - # If this wasn't true the caller of wrap_socket would call - # do_handshake() - if do_handshake_on_connect: - # XXX: If we do hostname checking in a callback we can get rid - # of this call to do_handshake() since the handshake - # will happen automatically later. - await loop.run_in_executor(None, ssl_conn.do_handshake) - # XXX: Do this in a callback registered with - # SSLContext.set_info_callback? See Twisted for an example. - if self.check_hostname and server_hostname is not None: - from service_identity import pyopenssl - - try: - if _is_ip_address(server_hostname): - pyopenssl.verify_ip_address(ssl_conn, server_hostname) - else: - pyopenssl.verify_hostname(ssl_conn, server_hostname) - except ( # type:ignore[misc] - service_identity.SICertificateError, - service_identity.SIVerificationError, - ) as exc: - raise _CertificateError(str(exc)) from None - return ssl_conn - def wrap_socket( self, sock: _socket.socket, @@ -474,9 +420,9 @@ def wrap_socket( pyopenssl.verify_ip_address(ssl_conn, server_hostname) else: pyopenssl.verify_hostname(ssl_conn, server_hostname) - except ( # type:ignore[misc] - service_identity.SICertificateError, - service_identity.SIVerificationError, + except ( + service_identity.CertificateError, + service_identity.VerificationError, ) as exc: raise _CertificateError(str(exc)) from None return ssl_conn diff --git a/pymongo/read_concern.py b/pymongo/read_concern.py index fa2f4a318a..2adc403366 100644 --- a/pymongo/read_concern.py +++ b/pymongo/read_concern.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -38,7 +38,7 @@ def __init__(self, level: Optional[str] = None) -> None: if level is None or isinstance(level, str): self.__level = level else: - raise TypeError("level must be a string or None.") + raise TypeError(f"level must be a string or None, not {type(level)}") @property def level(self) -> Optional[str]: diff --git a/pymongo/read_preferences.py b/pymongo/read_preferences.py index 8c6e6de45d..35b92c4d01 100644 --- a/pymongo/read_preferences.py +++ b/pymongo/read_preferences.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -19,6 +19,7 @@ from __future__ import annotations +import warnings from collections import abc from typing import TYPE_CHECKING, Any, Mapping, Optional, Sequence @@ -103,6 +104,11 @@ def _validate_hedge(hedge: Optional[_Hedge]) -> Optional[_Hedge]: if not isinstance(hedge, dict): raise TypeError(f"hedge must be a dictionary, not {hedge!r}") + warnings.warn( + "The read preference 'hedge' option is deprecated in PyMongo 4.12+ because hedged reads are deprecated in MongoDB version 8.0+. Support for 'hedge' will be removed in PyMongo 5.0.", + DeprecationWarning, + stacklevel=4, + ) return hedge @@ -183,7 +189,9 @@ def max_staleness(self) -> int: @property def hedge(self) -> Optional[_Hedge]: - """The read preference ``hedge`` parameter. + """**DEPRECATED** - The read preference 'hedge' option is deprecated in PyMongo 4.12+ because hedged reads are deprecated in MongoDB version 8.0+. Support for 'hedge' will be removed in PyMongo 5.0. + + The read preference ``hedge`` parameter. A dictionary that configures how the server will perform hedged reads. It consists of the following keys: @@ -203,6 +211,12 @@ def hedge(self) -> Optional[_Hedge]: .. versionadded:: 3.11 """ + if self.__hedge is not None: + warnings.warn( + "The read preference 'hedge' option is deprecated in PyMongo 4.12+ because hedged reads are deprecated in MongoDB version 8.0+. Support for 'hedge' will be removed in PyMongo 5.0.", + DeprecationWarning, + stacklevel=2, + ) return self.__hedge @property @@ -312,7 +326,7 @@ class PrimaryPreferred(_ServerMode): replication before it will no longer be selected for operations. Default -1, meaning no maximum. If it is set, it must be at least 90 seconds. - :param hedge: The :attr:`~hedge` to use if the primary is not available. + :param hedge: **DEPRECATED** - The :attr:`~hedge` for this read preference. .. versionchanged:: 3.11 Added ``hedge`` parameter. @@ -354,7 +368,7 @@ class Secondary(_ServerMode): replication before it will no longer be selected for operations. Default -1, meaning no maximum. If it is set, it must be at least 90 seconds. - :param hedge: The :attr:`~hedge` for this read preference. + :param hedge: **DEPRECATED** - The :attr:`~hedge` for this read preference. .. versionchanged:: 3.11 Added ``hedge`` parameter. @@ -397,7 +411,7 @@ class SecondaryPreferred(_ServerMode): replication before it will no longer be selected for operations. Default -1, meaning no maximum. If it is set, it must be at least 90 seconds. - :param hedge: The :attr:`~hedge` for this read preference. + :param hedge: **DEPRECATED** - The :attr:`~hedge` for this read preference. .. versionchanged:: 3.11 Added ``hedge`` parameter. @@ -441,7 +455,7 @@ class Nearest(_ServerMode): replication before it will no longer be selected for operations. Default -1, meaning no maximum. If it is set, it must be at least 90 seconds. - :param hedge: The :attr:`~hedge` for this read preference. + :param hedge: **DEPRECATED** - The :attr:`~hedge` for this read preference. .. versionchanged:: 3.11 Added ``hedge`` parameter. @@ -537,7 +551,7 @@ class ReadPreference: Nearest(tag_sets=[{"node":"analytics"}]) - See :doc:`/examples/high_availability` for code examples. + See `Read and Write Settings `_ for code examples. 
A read preference is used in three cases: diff --git a/pymongo/response.py b/pymongo/response.py index e47749423f..211ddf2354 100644 --- a/pymongo/response.py +++ b/pymongo/response.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/pymongo/results.py b/pymongo/results.py index d17ff1c3ea..bcce121fe7 100644 --- a/pymongo/results.py +++ b/pymongo/results.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/pymongo/saslprep.py b/pymongo/saslprep.py index 7fb546f61b..9cef22419e 100644 --- a/pymongo/saslprep.py +++ b/pymongo/saslprep.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/pymongo/server_api.py b/pymongo/server_api.py index 4a746008c4..40bb1aac3e 100644 --- a/pymongo/server_api.py +++ b/pymongo/server_api.py @@ -4,7 +4,7 @@ # may not use this file except in compliance with the License. You # may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/pymongo/server_description.py b/pymongo/server_description.py index 064ad43375..d038c04b1c 100644 --- a/pymongo/server_description.py +++ b/pymongo/server_description.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -69,7 +69,7 @@ class ServerDescription: def __init__( self, address: _Address, - hello: Optional[Hello] = None, + hello: Optional[Hello[dict[str, Any]]] = None, round_trip_time: Optional[float] = None, error: Optional[Exception] = None, min_round_trip_time: float = 0.0, @@ -299,4 +299,4 @@ def __repr__(self) -> str: ) # For unittesting only. Use under no circumstances! - _host_to_round_trip_time: dict = {} + _host_to_round_trip_time: dict = {} # type: ignore[type-arg] diff --git a/pymongo/server_selectors.py b/pymongo/server_selectors.py index c22ad599ee..0d1425ab31 100644 --- a/pymongo/server_selectors.py +++ b/pymongo/server_selectors.py @@ -4,7 +4,7 @@ # may not use this file except in compliance with the License. 
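The deprecation added above fires both when a ``hedge`` value is validated at construction and when the property is read; a quick illustration:

    import warnings
    from pymongo.read_preferences import SecondaryPreferred

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        SecondaryPreferred(hedge={"enabled": True})  # hedged reads are deprecated

    assert any(issubclass(w.category, DeprecationWarning) for w in caught)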
You # may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/pymongo/server_type.py b/pymongo/server_type.py index 937855cc7a..7a6d2aaf14 100644 --- a/pymongo/server_type.py +++ b/pymongo/server_type.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/pymongo/ssl_context.py b/pymongo/ssl_context.py index ee32145c02..2ff7428cab 100644 --- a/pymongo/ssl_context.py +++ b/pymongo/ssl_context.py @@ -4,7 +4,7 @@ # may not use this file except in compliance with the License. You # may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/pymongo/ssl_support.py b/pymongo/ssl_support.py index 580d71f9b0..7dbd0f2148 100644 --- a/pymongo/ssl_support.py +++ b/pymongo/ssl_support.py @@ -4,7 +4,7 @@ # may not use this file except in compliance with the License. You # may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -15,16 +15,19 @@ """Support for SSL in PyMongo.""" from __future__ import annotations +import types import warnings -from typing import Optional +from typing import Any, Optional, Union from pymongo.errors import ConfigurationError HAVE_SSL = True +HAVE_PYSSL = True try: - import pymongo.pyopenssl_context as _ssl + import pymongo.pyopenssl_context as _pyssl except (ImportError, AttributeError) as exc: + HAVE_PYSSL = False if isinstance(exc, AttributeError): warnings.warn( "Failed to use the installed version of PyOpenSSL. 
" @@ -35,10 +38,10 @@ UserWarning, stacklevel=2, ) - try: - import pymongo.ssl_context as _ssl # type: ignore[no-redef] - except ImportError: - HAVE_SSL = False +try: + import pymongo.ssl_context as _ssl +except ImportError: + HAVE_SSL = False if HAVE_SSL: @@ -49,14 +52,34 @@ import ssl as _stdlibssl # noqa: F401 from ssl import CERT_NONE, CERT_REQUIRED - HAS_SNI = _ssl.HAS_SNI IPADDR_SAFE = True + + if HAVE_PYSSL: + PYSSLError: Any = _pyssl.SSLError + BLOCKING_IO_ERRORS: tuple = ( # type: ignore[type-arg] + _ssl.BLOCKING_IO_ERRORS + _pyssl.BLOCKING_IO_ERRORS + ) + BLOCKING_IO_READ_ERROR: tuple = ( # type: ignore[type-arg] + _pyssl.BLOCKING_IO_READ_ERROR, + _ssl.BLOCKING_IO_READ_ERROR, + ) + BLOCKING_IO_WRITE_ERROR: tuple = ( # type: ignore[type-arg] + _pyssl.BLOCKING_IO_WRITE_ERROR, + _ssl.BLOCKING_IO_WRITE_ERROR, + ) + else: + PYSSLError = _ssl.SSLError + BLOCKING_IO_ERRORS: tuple = _ssl.BLOCKING_IO_ERRORS # type: ignore[type-arg, no-redef] + BLOCKING_IO_READ_ERROR: tuple = (_ssl.BLOCKING_IO_READ_ERROR,) # type: ignore[type-arg, no-redef] + BLOCKING_IO_WRITE_ERROR: tuple = (_ssl.BLOCKING_IO_WRITE_ERROR,) # type: ignore[type-arg, no-redef] SSLError = _ssl.SSLError - BLOCKING_IO_ERRORS = _ssl.BLOCKING_IO_ERRORS - BLOCKING_IO_READ_ERROR = _ssl.BLOCKING_IO_READ_ERROR - BLOCKING_IO_WRITE_ERROR = _ssl.BLOCKING_IO_WRITE_ERROR BLOCKING_IO_LOOKUP_ERROR = BLOCKING_IO_READ_ERROR + def _has_sni(is_sync: bool) -> bool: + if is_sync and HAVE_PYSSL: + return _pyssl.HAS_SNI + return _ssl.HAS_SNI + def get_ssl_context( certfile: Optional[str], passphrase: Optional[str], @@ -65,10 +88,15 @@ def get_ssl_context( allow_invalid_certificates: bool, allow_invalid_hostnames: bool, disable_ocsp_endpoint_check: bool, - ) -> _ssl.SSLContext: + is_sync: bool, + ) -> Union[_pyssl.SSLContext, _ssl.SSLContext]: # type: ignore[name-defined] """Create and return an SSLContext object.""" + if is_sync and HAVE_PYSSL: + ssl: types.ModuleType = _pyssl + else: + ssl = _ssl verify_mode = CERT_NONE if allow_invalid_certificates else CERT_REQUIRED - ctx = _ssl.SSLContext(_ssl.PROTOCOL_SSLv23) + ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23) if verify_mode != CERT_NONE: ctx.check_hostname = not allow_invalid_hostnames else: @@ -80,22 +108,20 @@ def get_ssl_context( # up to date versions of MongoDB 2.4 and above already disable # SSLv2 and SSLv3, python disables SSLv2 by default in >= 2.7.7 # and >= 3.3.4 and SSLv3 in >= 3.4.3. - ctx.options |= _ssl.OP_NO_SSLv2 - ctx.options |= _ssl.OP_NO_SSLv3 - ctx.options |= _ssl.OP_NO_COMPRESSION - ctx.options |= _ssl.OP_NO_RENEGOTIATION + ctx.options |= ssl.OP_NO_SSLv2 + ctx.options |= ssl.OP_NO_SSLv3 + ctx.options |= ssl.OP_NO_COMPRESSION + ctx.options |= ssl.OP_NO_RENEGOTIATION if certfile is not None: try: ctx.load_cert_chain(certfile, None, passphrase) - except _ssl.SSLError as exc: + except ssl.SSLError as exc: raise ConfigurationError(f"Private key doesn't match certificate: {exc}") from None if crlfile is not None: - if _ssl.IS_PYOPENSSL: + if ssl.IS_PYOPENSSL: raise ConfigurationError("tlsCRLFile cannot be used with PyOpenSSL") # Match the server's behavior. 
- ctx.verify_flags = getattr( # type:ignore[attr-defined] - _ssl, "VERIFY_CRL_CHECK_LEAF", 0 - ) + ctx.verify_flags = getattr(ssl, "VERIFY_CRL_CHECK_LEAF", 0) ctx.load_verify_locations(crlfile) if ca_certs is not None: ctx.load_verify_locations(ca_certs) @@ -109,10 +135,12 @@ def get_ssl_context( class SSLError(Exception): # type: ignore pass - HAS_SNI = False IPADDR_SAFE = False - BLOCKING_IO_ERRORS = () # type:ignore[assignment] + BLOCKING_IO_ERRORS: tuple = () # type: ignore[type-arg, no-redef] + + def _has_sni(is_sync: bool) -> bool: # noqa: ARG001 + return False def get_ssl_context(*dummy): # type: ignore """No ssl module, raise ConfigurationError.""" - raise ConfigurationError("The ssl module is not available.") + raise ConfigurationError("The ssl module is not available") diff --git a/pymongo/synchronous/aggregation.py b/pymongo/synchronous/aggregation.py index 7c7e6252f7..486768ab7d 100644 --- a/pymongo/synchronous/aggregation.py +++ b/pymongo/synchronous/aggregation.py @@ -4,7 +4,7 @@ # may not use this file except in compliance with the License. You # may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -46,11 +46,10 @@ class _AggregationCommand: def __init__( self, - target: Union[Database, Collection], - cursor_class: type[CommandCursor], + target: Union[Database[Any], Collection[Any]], + cursor_class: type[CommandCursor[Any]], pipeline: _Pipeline, options: MutableMapping[str, Any], - explicit_session: bool, let: Optional[Mapping[str, Any]] = None, user_fields: Optional[MutableMapping[str, Any]] = None, result_processor: Optional[Callable[[Mapping[str, Any], Connection], None]] = None, @@ -92,7 +91,6 @@ def __init__( self._options["cursor"]["batchSize"] = self._batch_size self._cursor_class = cursor_class - self._explicit_session = explicit_session self._user_fields = user_fields self._result_processor = result_processor @@ -111,12 +109,12 @@ def _cursor_namespace(self) -> str: """The namespace in which the aggregate command is run.""" raise NotImplementedError - def _cursor_collection(self, cursor_doc: Mapping[str, Any]) -> Collection: + def _cursor_collection(self, cursor_doc: Mapping[str, Any]) -> Collection[Any]: """The Collection used for the aggregate command cursor.""" raise NotImplementedError @property - def _database(self) -> Database: + def _database(self) -> Database[Any]: """The database against which the aggregation command is run.""" raise NotImplementedError @@ -197,7 +195,6 @@ def get_cursor( batch_size=self._batch_size or 0, max_await_time_ms=self._max_await_time_ms, session=session, - explicit_session=self._explicit_session, comment=self._options.get("comment"), ) cmd_cursor._maybe_pin_connection(conn) @@ -205,7 +202,7 @@ def get_cursor( class _CollectionAggregationCommand(_AggregationCommand): - _target: Collection + _target: Collection[Any] @property def _aggregation_target(self) -> str: @@ -215,12 +212,12 @@ def _aggregation_target(self) -> str: def _cursor_namespace(self) -> str: return self._target.full_name - def _cursor_collection(self, cursor: Mapping[str, Any]) -> Collection: + def _cursor_collection(self, cursor: Mapping[str, Any]) -> Collection[Any]: """The Collection used for the aggregate command cursor.""" return self._target @property - def _database(self) -> Database: + def _database(self) -> Database[Any]: return 
self._target.database @@ -234,7 +231,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class _DatabaseAggregationCommand(_AggregationCommand): - _target: Database + _target: Database[Any] @property def _aggregation_target(self) -> int: @@ -245,10 +242,10 @@ def _cursor_namespace(self) -> str: return f"{self._target.name}.$cmd.aggregate" @property - def _database(self) -> Database: + def _database(self) -> Database[Any]: return self._target - def _cursor_collection(self, cursor: Mapping[str, Any]) -> Collection: + def _cursor_collection(self, cursor: Mapping[str, Any]) -> Collection[Any]: """The Collection used for the aggregate command cursor.""" # Collection level aggregate may not always return the "ns" field # according to our MockupDB tests. Let's handle that case for db level diff --git a/pymongo/synchronous/auth.py b/pymongo/synchronous/auth.py index 0e51ff8b7f..650e25234d 100644 --- a/pymongo/synchronous/auth.py +++ b/pymongo/synchronous/auth.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -45,6 +45,7 @@ _authenticate_oidc, _get_authenticator, ) +from pymongo.synchronous.helpers import _getaddrinfo if TYPE_CHECKING: from pymongo.hello import Hello @@ -157,7 +158,7 @@ def _password_digest(username: str, password: str) -> str: if len(password) == 0: raise ValueError("password can't be empty") if not isinstance(username, str): - raise TypeError("username must be an instance of str") + raise TypeError(f"username must be an instance of str, not {type(username)}") md5hash = hashlib.md5() # noqa: S324 data = f"{username}:mongo:{password}" @@ -180,9 +181,16 @@ def _canonicalize_hostname(hostname: str, option: str | bool) -> str: if option in [False, "none"]: return hostname - af, socktype, proto, canonname, sockaddr = socket.getaddrinfo( - hostname, None, 0, 0, socket.IPPROTO_TCP, socket.AI_CANONNAME - )[0] + af, socktype, proto, canonname, sockaddr = ( + _getaddrinfo( + hostname, + None, + family=0, + type=0, + proto=socket.IPPROTO_TCP, + flags=socket.AI_CANONNAME, + ) + )[0] # type: ignore[index] # For forward just to resolve the cname as dns.lookup() will not return it. if option == "forward": diff --git a/pymongo/synchronous/auth_aws.py b/pymongo/synchronous/auth_aws.py index 7c0d24f3a1..c7ea47886f 100644 --- a/pymongo/synchronous/auth_aws.py +++ b/pymongo/synchronous/auth_aws.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/pymongo/synchronous/auth_oidc.py b/pymongo/synchronous/auth_oidc.py index 5a8967d96b..583ee39f67 100644 --- a/pymongo/synchronous/auth_oidc.py +++ b/pymongo/synchronous/auth_oidc.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
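The ``_canonicalize_hostname`` change above routes resolution through the driver's ``_getaddrinfo`` helper so the sync and async paths share one implementation; with the stdlib alone the equivalent lookup is roughly:

    import socket

    def canonical_name(hostname: str) -> str:
        # AI_CANONNAME asks the resolver for the canonical hostname; each
        # result tuple is (family, type, proto, canonname, sockaddr).
        res = socket.getaddrinfo(
            hostname, None, family=0, type=0,
            proto=socket.IPPROTO_TCP, flags=socket.AI_CANONNAME,
        )
        return res[0][3] or hostname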
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -15,6 +15,7 @@ """MONGODB-OIDC Authentication helpers.""" from __future__ import annotations +import asyncio import threading import time from dataclasses import dataclass, field @@ -36,6 +37,7 @@ ) from pymongo.errors import ConfigurationError, OperationFailure from pymongo.helpers_shared import _AUTHENTICATION_FAILURE_CODE +from pymongo.lock import Lock, _create_lock if TYPE_CHECKING: from pymongo.auth_shared import MongoCredential @@ -81,7 +83,11 @@ class _OIDCAuthenticator: access_token: Optional[str] = field(default=None) idp_info: Optional[OIDCIdPInfo] = field(default=None) token_gen_id: int = field(default=0) - lock: threading.Lock = field(default_factory=threading.Lock) + if not _IS_SYNC: + lock: Lock = field(default_factory=_create_lock) # type: ignore[assignment] + else: + lock: threading.Lock = field(default_factory=_create_lock) # type: ignore[assignment, no-redef] + last_call_time: float = field(default=0) def reauthenticate(self, conn: Connection) -> Optional[Mapping[str, Any]]: @@ -186,7 +192,7 @@ def _get_access_token(self) -> Optional[str]: return None if not prev_token and cb is not None: - with self.lock: + with self.lock: # type: ignore[attr-defined] # See if the token was changed while we were waiting for the # lock. new_token = self.access_token @@ -211,9 +217,14 @@ def _get_access_token(self) -> Optional[str]: idp_info=self.idp_info, username=self.properties.username, ) - resp = cb.fetch(context) + if not _IS_SYNC: + resp = asyncio.get_running_loop().run_in_executor(None, cb.fetch, context) # type: ignore[assignment] + else: + resp = cb.fetch(context) if not isinstance(resp, OIDCCallbackResult): - raise ValueError("Callback result must be of type OIDCCallbackResult") + raise ValueError( + f"Callback result must be of type OIDCCallbackResult, not {type(resp)}" + ) self.refresh_token = resp.refresh_token self.access_token = resp.access_token self.token_gen_id += 1 @@ -246,7 +257,7 @@ def _sasl_continue_jwt( ) -> Mapping[str, Any]: self.access_token = None self.refresh_token = None - start_payload: dict = bson.decode(start_resp["payload"]) + start_payload: dict[str, Any] = bson.decode(start_resp["payload"]) if "issuer" in start_payload: self.idp_info = OIDCIdPInfo(**start_payload) access_token = self._get_access_token() diff --git a/pymongo/synchronous/bulk.py b/pymongo/synchronous/bulk.py index 0b709f1acf..22d6a7a76a 100644 --- a/pymongo/synchronous/bulk.py +++ b/pymongo/synchronous/bulk.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
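The stricter result check above applies to user-supplied OIDC callbacks; a sketch of a conforming machine callback (import path per the driver's OIDC docs; the token file location is a placeholder):

    from pymongo import MongoClient
    from pymongo.auth_oidc import OIDCCallback, OIDCCallbackContext, OIDCCallbackResult

    class MyTokenCallback(OIDCCallback):
        def fetch(self, context: OIDCCallbackContext) -> OIDCCallbackResult:
            # Anything other than an OIDCCallbackResult now raises a
            # ValueError that names the offending type.
            with open("/var/run/secrets/token") as f:  # placeholder path
                return OIDCCallbackResult(access_token=f.read().strip())

    client = MongoClient(
        "mongodb://localhost:27017",
        authMechanism="MONGODB-OIDC",
        authMechanismProperties={"OIDC_CALLBACK": MyTokenCallback()},
    )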
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -87,7 +87,7 @@ def __init__( self, collection: Collection[_DocumentType], ordered: bool, - bypass_document_validation: bool, + bypass_document_validation: Optional[bool], comment: Optional[str] = None, let: Optional[Any] = None, ) -> None: @@ -248,15 +248,15 @@ def write_command( request_id: int, msg: bytes, docs: list[Mapping[str, Any]], - client: MongoClient, + client: MongoClient[Any], ) -> dict[str, Any]: """A proxy for SocketInfo.write_command that handles event publishing.""" cmd[bwc.field] = docs if _COMMAND_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _COMMAND_LOGGER, - clientId=client._topology_settings._topology_id, message=_CommandStatusMessage.STARTED, + clientId=client._topology_settings._topology_id, command=cmd, commandName=next(iter(cmd)), databaseName=bwc.db_name, @@ -276,8 +276,8 @@ def write_command( if _COMMAND_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _COMMAND_LOGGER, - clientId=client._topology_settings._topology_id, message=_CommandStatusMessage.SUCCEEDED, + clientId=client._topology_settings._topology_id, durationMS=duration, reply=reply, commandName=next(iter(cmd)), @@ -302,8 +302,8 @@ def write_command( if _COMMAND_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _COMMAND_LOGGER, - clientId=client._topology_settings._topology_id, message=_CommandStatusMessage.FAILED, + clientId=client._topology_settings._topology_id, durationMS=duration, failure=failure, commandName=next(iter(cmd)), @@ -334,14 +334,14 @@ def unack_write( msg: bytes, max_doc_size: int, docs: list[Mapping[str, Any]], - client: MongoClient, + client: MongoClient[Any], ) -> Optional[Mapping[str, Any]]: """A proxy for Connection.unack_write that handles event publishing.""" if _COMMAND_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _COMMAND_LOGGER, - clientId=client._topology_settings._topology_id, message=_CommandStatusMessage.STARTED, + clientId=client._topology_settings._topology_id, command=cmd, commandName=next(iter(cmd)), databaseName=bwc.db_name, @@ -366,8 +366,8 @@ def unack_write( if _COMMAND_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _COMMAND_LOGGER, - clientId=client._topology_settings._topology_id, message=_CommandStatusMessage.SUCCEEDED, + clientId=client._topology_settings._topology_id, durationMS=duration, reply=reply, commandName=next(iter(cmd)), @@ -393,8 +393,8 @@ def unack_write( if _COMMAND_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _COMMAND_LOGGER, - clientId=client._topology_settings._topology_id, message=_CommandStatusMessage.FAILED, + clientId=client._topology_settings._topology_id, durationMS=duration, failure=failure, commandName=next(iter(cmd)), @@ -419,7 +419,7 @@ def _execute_batch_unack( bwc: Union[_BulkWriteContext, _EncryptedBulkWriteContext], cmd: dict[str, Any], ops: list[Mapping[str, Any]], - client: MongoClient, + client: MongoClient[Any], ) -> list[Mapping[str, Any]]: if self.is_encrypted: _, batched_cmd, to_send = bwc.batch_command(cmd, ops) @@ -446,7 +446,7 @@ def _execute_batch( bwc: Union[_BulkWriteContext, _EncryptedBulkWriteContext], cmd: dict[str, Any], ops: list[Mapping[str, Any]], - client: MongoClient, + client: MongoClient[Any], ) -> tuple[dict[str, Any], list[Mapping[str, Any]]]: if self.is_encrypted: _, batched_cmd, to_send = bwc.batch_command(cmd, ops) @@ 
-516,8 +516,8 @@ def _execute_command( if self.comment: cmd["comment"] = self.comment _csot.apply_write_concern(cmd, write_concern) - if self.bypass_doc_val: - cmd["bypassDocumentValidation"] = True + if self.bypass_doc_val is not None: + cmd["bypassDocumentValidation"] = self.bypass_doc_val if self.let is not None and run.op_type in (_DELETE, _UPDATE): cmd["let"] = self.let if session: diff --git a/pymongo/synchronous/change_stream.py b/pymongo/synchronous/change_stream.py index a971ad08c0..7e34d7b848 100644 --- a/pymongo/synchronous/change_stream.py +++ b/pymongo/synchronous/change_stream.py @@ -4,7 +4,7 @@ # may not use this file except in compliance with the License. You # may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -164,7 +164,7 @@ def _aggregation_command_class(self) -> Type[_AggregationCommand]: raise NotImplementedError @property - def _client(self) -> MongoClient: + def _client(self) -> MongoClient: # type: ignore[type-arg] """The client against which the aggregation commands for this ChangeStream will be run. """ @@ -206,7 +206,7 @@ def _command_options(self) -> dict[str, Any]: def _aggregation_pipeline(self) -> list[dict[str, Any]]: """Return the full aggregation pipeline for this ChangeStream.""" options = self._change_stream_options() - full_pipeline: list = [{"$changeStream": options}] + full_pipeline: list[dict[str, Any]] = [{"$changeStream": options}] full_pipeline.extend(self._pipeline) return full_pipeline @@ -235,9 +235,7 @@ def _process_result(self, result: Mapping[str, Any], conn: Connection) -> None: f"response : {result!r}" ) - def _run_aggregation_cmd( - self, session: Optional[ClientSession], explicit_session: bool - ) -> CommandCursor: + def _run_aggregation_cmd(self, session: Optional[ClientSession]) -> CommandCursor: # type: ignore[type-arg] """Run the full aggregation pipeline for this ChangeStream and return the corresponding CommandCursor. """ @@ -246,7 +244,6 @@ def _run_aggregation_cmd( CommandCursor, self._aggregation_pipeline(), self._command_options(), - explicit_session, result_processor=self._process_result, comment=self._comment, ) @@ -257,9 +254,9 @@ def _run_aggregation_cmd( operation=_Op.AGGREGATE, ) - def _create_cursor(self) -> CommandCursor: - with self._client._tmp_session(self._session, close=False) as s: - return self._run_aggregation_cmd(session=s, explicit_session=self._session is not None) + def _create_cursor(self) -> CommandCursor: # type: ignore[type-arg] + with self._client._tmp_session(self._session) as s: + return self._run_aggregation_cmd(session=s) def _resume(self) -> None: """Reestablish this change stream after a resumable error.""" @@ -389,7 +386,8 @@ def try_next(self) -> Optional[_DocumentType]: if not _resumable(exc) and not exc.timeout: self.close() raise - except Exception: + # Catch KeyboardInterrupt, CancelledError, etc. and cleanup. + except BaseException: self.close() raise diff --git a/pymongo/synchronous/client_bulk.py b/pymongo/synchronous/client_bulk.py index 9f6e3f7cf0..a606d028e1 100644 --- a/pymongo/synchronous/client_bulk.py +++ b/pymongo/synchronous/client_bulk.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
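With the ``bypass_doc_val`` change above, the option is forwarded exactly as given instead of only when truthy, so an explicit ``False`` now reaches the server; for example (``coll`` is any collection with schema validation enabled):

    from pymongo import InsertOne

    coll.bulk_write([InsertOne({"x": 1})])                                    # option omitted
    coll.bulk_write([InsertOne({"x": 1})], bypass_document_validation=True)   # sent as true
    coll.bulk_write([InsertOne({"x": 1})], bypass_document_validation=False)  # sent as false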
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -88,7 +88,7 @@ class _ClientBulk: def __init__( self, - client: MongoClient, + client: MongoClient[Any], write_concern: WriteConcern, ordered: bool = True, bypass_document_validation: Optional[bool] = None, @@ -233,7 +233,7 @@ def write_command( msg: Union[bytes, dict[str, Any]], op_docs: list[Mapping[str, Any]], ns_docs: list[Mapping[str, Any]], - client: MongoClient, + client: MongoClient[Any], ) -> dict[str, Any]: """A proxy for Connection.write_command that handles event publishing.""" cmd["ops"] = op_docs @@ -241,8 +241,8 @@ def write_command( if _COMMAND_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _COMMAND_LOGGER, - clientId=client._topology_settings._topology_id, message=_CommandStatusMessage.STARTED, + clientId=client._topology_settings._topology_id, command=cmd, commandName=next(iter(cmd)), databaseName=bwc.db_name, @@ -262,8 +262,8 @@ def write_command( if _COMMAND_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _COMMAND_LOGGER, - clientId=client._topology_settings._topology_id, message=_CommandStatusMessage.SUCCEEDED, + clientId=client._topology_settings._topology_id, durationMS=duration, reply=reply, commandName=next(iter(cmd)), @@ -289,8 +289,8 @@ def write_command( if _COMMAND_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _COMMAND_LOGGER, - clientId=client._topology_settings._topology_id, message=_CommandStatusMessage.FAILED, + clientId=client._topology_settings._topology_id, durationMS=duration, failure=failure, commandName=next(iter(cmd)), @@ -324,14 +324,14 @@ def unack_write( msg: bytes, op_docs: list[Mapping[str, Any]], ns_docs: list[Mapping[str, Any]], - client: MongoClient, + client: MongoClient[Any], ) -> Optional[Mapping[str, Any]]: """A proxy for Connection.unack_write that handles event publishing.""" if _COMMAND_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _COMMAND_LOGGER, - clientId=client._topology_settings._topology_id, message=_CommandStatusMessage.STARTED, + clientId=client._topology_settings._topology_id, command=cmd, commandName=next(iter(cmd)), databaseName=bwc.db_name, @@ -356,8 +356,8 @@ def unack_write( if _COMMAND_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _COMMAND_LOGGER, - clientId=client._topology_settings._topology_id, message=_CommandStatusMessage.SUCCEEDED, + clientId=client._topology_settings._topology_id, durationMS=duration, reply=reply, commandName=next(iter(cmd)), @@ -383,8 +383,8 @@ def unack_write( if _COMMAND_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _COMMAND_LOGGER, - clientId=client._topology_settings._topology_id, message=_CommandStatusMessage.FAILED, + clientId=client._topology_settings._topology_id, durationMS=duration, failure=failure, commandName=next(iter(cmd)), @@ -438,6 +438,8 @@ def _process_results_cursor( ) -> None: """Internal helper for processing the server reply command cursor.""" if result.get("cursor"): + if session: + session._leave_alive = True coll = Collection( database=Database(self.client, "admin"), name="$cmd.bulkWrite", @@ -447,7 +449,6 @@ def _process_results_cursor( result["cursor"], conn.address, session=session, - explicit_session=session is not None, comment=self.comment, ) cmd_cursor._maybe_pin_connection(conn) diff --git a/pymongo/synchronous/client_session.py b/pymongo/synchronous/client_session.py index 
f1d680fc0a..9b547dc946 100644 --- a/pymongo/synchronous/client_session.py +++ b/pymongo/synchronous/client_session.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -165,7 +165,6 @@ WTimeoutError, ) from pymongo.helpers_shared import _RETRYABLE_ERROR_CODES -from pymongo.operations import _Op from pymongo.read_concern import ReadConcern from pymongo.read_preferences import ReadPreference, _ServerMode from pymongo.server_type import SERVER_TYPE @@ -309,7 +308,9 @@ def __init__( ) if max_commit_time_ms is not None: if not isinstance(max_commit_time_ms, int): - raise TypeError("max_commit_time_ms must be an integer or None") + raise TypeError( + f"max_commit_time_ms must be an integer or None, not {type(max_commit_time_ms)}" + ) @property def read_concern(self) -> Optional[ReadConcern]: @@ -393,7 +394,7 @@ class _TxnState: class _Transaction: """Internal class to hold transaction information in a ClientSession.""" - def __init__(self, opts: Optional[TransactionOptions], client: MongoClient): + def __init__(self, opts: Optional[TransactionOptions], client: MongoClient[Any]): self.opts = opts self.state = _TxnState.NONE self.sharded = False @@ -455,10 +456,10 @@ def _max_time_expired_error(exc: PyMongoError) -> bool: # From the transactions spec, all the retryable writes errors plus -# WriteConcernFailed. -_UNKNOWN_COMMIT_ERROR_CODES: frozenset = _RETRYABLE_ERROR_CODES | frozenset( +# WriteConcernTimeout. +_UNKNOWN_COMMIT_ERROR_CODES: frozenset = _RETRYABLE_ERROR_CODES | frozenset( # type: ignore[type-arg] [ - 64, # WriteConcernFailed + 64, # WriteConcernTimeout 50, # MaxTimeMSExpired ] ) @@ -496,13 +497,13 @@ class ClientSession: def __init__( self, - client: MongoClient, + client: MongoClient[Any], server_session: Any, options: SessionOptions, implicit: bool, ) -> None: # A MongoClient, a _ServerSession, a SessionOptions, and a set. - self._client: MongoClient = client + self._client: MongoClient[Any] = client self._server_session = server_session self._options = options self._cluster_time: Optional[Mapping[str, Any]] = None @@ -511,6 +512,10 @@ def __init__( # Is this an implicitly created session? self._implicit = implicit self._transaction = _Transaction(None, client) + # Is this session attached to a cursor? + self._attached_to_cursor = False + # Should we leave the session alive when the cursor is closed? + self._leave_alive = False def end_session(self) -> None: """Finish this session. If a transaction has started, abort it. @@ -533,7 +538,7 @@ def _end_session(self, lock: bool) -> None: def _end_implicit_session(self) -> None: # Implicit sessions can't be part of transactions or pinned connections - if self._server_session is not None: + if not self._leave_alive and self._server_session is not None: self._client._return_server_session(self._server_session) self._server_session = None @@ -548,7 +553,7 @@ def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None: self._end_session(lock=True) @property - def client(self) -> MongoClient: + def client(self) -> MongoClient[Any]: """The :class:`~pymongo.mongo_client.MongoClient` this session was created from. 
""" @@ -657,6 +662,12 @@ def callback(session, custom_arg, custom_kwarg=None): ``with_transaction`` starts a new transaction and re-executes the ``callback``. + The ``callback`` MUST NOT silently handle command errors + without allowing such errors to propagate. Command errors may abort the + transaction on the server, and an attempt to commit the transaction will + be rejected with a ``NoSuchTransaction`` error. For more information see + the `transactions specification`_. + When :meth:`~ClientSession.commit_transaction` raises an exception with the ``"UnknownTransactionCommitResult"`` error label, ``with_transaction`` retries the commit until the result of the @@ -686,13 +697,17 @@ def callback(session, custom_arg, custom_kwarg=None): :return: The return value of the ``callback``. .. versionadded:: 3.9 + + .. _transactions specification: + https://github.com/mongodb/specifications/blob/master/source/transactions-convenient-api/transactions-convenient-api.md#handling-errors-inside-the-callback """ start_time = time.monotonic() while True: self.start_transaction(read_concern, write_concern, read_preference, max_commit_time_ms) try: ret = callback(self) - except Exception as exc: + # Catch KeyboardInterrupt, CancelledError, etc. and cleanup. + except BaseException as exc: if self.in_transaction: self.abort_transaction() if ( @@ -736,7 +751,7 @@ def start_transaction( write_concern: Optional[WriteConcern] = None, read_preference: Optional[_ServerMode] = None, max_commit_time_ms: Optional[int] = None, - ) -> ContextManager: + ) -> ContextManager[Any]: """Start a multi-statement transaction. Takes the same arguments as :class:`TransactionOptions`. @@ -852,7 +867,9 @@ def func( ) -> dict[str, Any]: return self._finish_transaction(conn, command_name) - return self._client._retry_internal(func, self, None, retryable=True, operation=_Op.ABORT) + return self._client._retry_internal( + func, self, None, retryable=True, operation=command_name + ) def _finish_transaction(self, conn: Connection, command_name: str) -> dict[str, Any]: self._transaction.attempt += 1 @@ -897,7 +914,9 @@ def advance_cluster_time(self, cluster_time: Mapping[str, Any]) -> None: another `ClientSession` instance. """ if not isinstance(cluster_time, _Mapping): - raise TypeError("cluster_time must be a subclass of collections.Mapping") + raise TypeError( + f"cluster_time must be a subclass of collections.Mapping, not {type(cluster_time)}" + ) if not isinstance(cluster_time.get("clusterTime"), Timestamp): raise ValueError("Invalid cluster_time") self._advance_cluster_time(cluster_time) @@ -918,7 +937,9 @@ def advance_operation_time(self, operation_time: Timestamp) -> None: another `ClientSession` instance. """ if not isinstance(operation_time, Timestamp): - raise TypeError("operation_time must be an instance of bson.timestamp.Timestamp") + raise TypeError( + f"operation_time must be an instance of bson.timestamp.Timestamp, not {type(operation_time)}" + ) self._advance_operation_time(operation_time) def _process_response(self, reply: Mapping[str, Any]) -> None: @@ -1102,7 +1123,7 @@ def inc_transaction_id(self) -> None: self._transaction_id += 1 -class _ServerSessionPool(collections.deque): +class _ServerSessionPool(collections.deque): # type: ignore[type-arg] """Pool of _ServerSession objects. This class is thread-safe. 
diff --git a/pymongo/synchronous/collection.py b/pymongo/synchronous/collection.py index 6edfddc9a9..4e5f7d08fb 100644 --- a/pymongo/synchronous/collection.py +++ b/pymongo/synchronous/collection.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -231,7 +231,7 @@ def __init__( read_concern or database.read_concern, ) if not isinstance(name, str): - raise TypeError("name must be an instance of str") + raise TypeError(f"name must be an instance of str, not {type(name)}") from pymongo.synchronous.database import Database if not isinstance(database, Database): @@ -582,7 +582,7 @@ def _command( conn: Connection, command: MutableMapping[str, Any], read_preference: Optional[_ServerMode] = None, - codec_options: Optional[CodecOptions] = None, + codec_options: Optional[CodecOptions[Mapping[str, Any]]] = None, check: bool = True, allowable_errors: Optional[Sequence[Union[str, int]]] = None, read_concern: Optional[ReadConcern] = None, @@ -700,10 +700,10 @@ def bulk_write( self, requests: Sequence[_WriteOp[_DocumentType]], ordered: bool = True, - bypass_document_validation: bool = False, + bypass_document_validation: Optional[bool] = None, session: Optional[ClientSession] = None, comment: Optional[Any] = None, - let: Optional[Mapping] = None, + let: Optional[Mapping[str, Any]] = None, ) -> BulkWriteResult: """Send a batch of write operations to the server. @@ -761,7 +761,7 @@ def bulk_write( :return: An instance of :class:`~pymongo.results.BulkWriteResult`. - .. seealso:: :ref:`writes-and-ids` + .. seealso:: `Writes and ids `_ .. note:: `bypass_document_validation` requires server version **>= 3.2** @@ -799,7 +799,7 @@ def _insert_one( ordered: bool, write_concern: WriteConcern, op_id: Optional[int], - bypass_doc_val: bool, + bypass_doc_val: Optional[bool], session: Optional[ClientSession], comment: Optional[Any] = None, ) -> Any: @@ -813,8 +813,8 @@ def _insert_one( def _insert_command( session: Optional[ClientSession], conn: Connection, retryable_write: bool ) -> None: - if bypass_doc_val: - command["bypassDocumentValidation"] = True + if bypass_doc_val is not None: + command["bypassDocumentValidation"] = bypass_doc_val result = conn.command( self._database.name, @@ -839,7 +839,7 @@ def _insert_command( def insert_one( self, document: Union[_DocumentType, RawBSONDocument], - bypass_document_validation: bool = False, + bypass_document_validation: Optional[bool] = None, session: Optional[ClientSession] = None, comment: Optional[Any] = None, ) -> InsertOneResult: @@ -866,7 +866,7 @@ def insert_one( :return: - An instance of :class:`~pymongo.results.InsertOneResult`. - .. seealso:: :ref:`writes-and-ids` + .. seealso:: `Writes and ids `_ .. note:: `bypass_document_validation` requires server version **>= 3.2** @@ -905,7 +905,7 @@ def insert_many( self, documents: Iterable[Union[_DocumentType, RawBSONDocument]], ordered: bool = True, - bypass_document_validation: bool = False, + bypass_document_validation: Optional[bool] = None, session: Optional[ClientSession] = None, comment: Optional[Any] = None, ) -> InsertManyResult: @@ -935,7 +935,7 @@ def insert_many( :return: An instance of :class:`~pymongo.results.InsertManyResult`. - .. seealso:: :ref:`writes-and-ids` + .. 
seealso:: `Writes and ids `_ .. note:: `bypass_document_validation` requires server version **>= 3.2** @@ -985,7 +985,7 @@ def _update( write_concern: Optional[WriteConcern] = None, op_id: Optional[int] = None, ordered: bool = True, - bypass_doc_val: Optional[bool] = False, + bypass_doc_val: Optional[bool] = None, collation: Optional[_CollationIn] = None, array_filters: Optional[Sequence[Mapping[str, Any]]] = None, hint: Optional[_IndexKeyHint] = None, @@ -1040,8 +1040,8 @@ def _update( if comment is not None: command["comment"] = comment # Update command. - if bypass_doc_val: - command["bypassDocumentValidation"] = True + if bypass_doc_val is not None: + command["bypassDocumentValidation"] = bypass_doc_val # The command result has to be published for APM unmodified # so we make a shallow copy here before adding updatedExisting. @@ -1081,7 +1081,7 @@ def _update_retryable( write_concern: Optional[WriteConcern] = None, op_id: Optional[int] = None, ordered: bool = True, - bypass_doc_val: Optional[bool] = False, + bypass_doc_val: Optional[bool] = None, collation: Optional[_CollationIn] = None, array_filters: Optional[Sequence[Mapping[str, Any]]] = None, hint: Optional[_IndexKeyHint] = None, @@ -1127,7 +1127,7 @@ def replace_one( filter: Mapping[str, Any], replacement: Mapping[str, Any], upsert: bool = False, - bypass_document_validation: bool = False, + bypass_document_validation: Optional[bool] = None, collation: Optional[_CollationIn] = None, hint: Optional[_IndexKeyHint] = None, session: Optional[ClientSession] = None, @@ -1236,7 +1236,7 @@ def update_one( filter: Mapping[str, Any], update: Union[Mapping[str, Any], _Pipeline], upsert: bool = False, - bypass_document_validation: bool = False, + bypass_document_validation: Optional[bool] = None, collation: Optional[_CollationIn] = None, array_filters: Optional[Sequence[Mapping[str, Any]]] = None, hint: Optional[_IndexKeyHint] = None, @@ -1775,6 +1775,15 @@ def find(self, *args: Any, **kwargs: Any) -> Cursor[_DocumentType]: improper type. Returns an instance of :class:`~pymongo.cursor.Cursor` corresponding to this query. + Cursors are closed automatically when they are exhausted (the last batch of data is retrieved from the database). + If a cursor is not exhausted, it will be closed automatically upon garbage collection, which leaves resources open but unused for a potentially long period of time. + To avoid this, best practice is to call :meth:`Cursor.close` when the cursor is no longer needed, + or use the cursor in a with statement:: + + with collection.find() as cursor: + for doc in cursor: + print(doc) + The :meth:`find` method obeys the :attr:`read_preference` of this :class:`Collection`. @@ -2040,7 +2049,7 @@ def estimated_document_count(self, comment: Optional[Any] = None, **kwargs: Any) .. versionchanged:: 4.2 This method now always uses the `count`_ command. Due to an oversight in versions 5.0.0-5.0.8 of MongoDB, the count command was not included in V1 of the - :ref:`versioned-api-ref`. Users of the Stable API with estimated_document_count are + `versioned API `_. Users of the Stable API with estimated_document_count are recommended to upgrade their server version to 5.0.9+ or set :attr:`pymongo.server_api.ServerApi.strict` to ``False`` to avoid encountering errors. 
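The recurring ``bypass_document_validation`` change above turns the parameter into a tri-state ``Optional[bool]``: the ``bypassDocumentValidation`` field is attached to the command only when the caller sets the parameter explicitly, including an explicit ``False``. A sketch of the resulting wire behavior (``coll`` is an illustrative ``Collection``)::

    coll.insert_one({"x": 1})
    # default None: bypassDocumentValidation is omitted from the command

    coll.insert_one({"x": 1}, bypass_document_validation=True)
    # sends bypassDocumentValidation: true, as before

    coll.insert_one({"x": 1}, bypass_document_validation=False)
    # now sends bypassDocumentValidation: false instead of omitting it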
@@ -2134,11 +2143,9 @@ def count_documents( if comment is not None: kwargs["comment"] = comment pipeline.append({"$group": {"_id": 1, "n": {"$sum": 1}}}) - cmd = {"aggregate": self._name, "pipeline": pipeline, "cursor": {}} if "hint" in kwargs and not isinstance(kwargs["hint"], str): kwargs["hint"] = helpers_shared._index_document(kwargs["hint"]) collation = validate_collation_or_none(kwargs.pop("collation", None)) - cmd.update(kwargs) def _cmd( session: Optional[ClientSession], @@ -2146,6 +2153,8 @@ def _cmd( conn: Connection, read_preference: Optional[_ServerMode], ) -> int: + cmd: dict[str, Any] = {"aggregate": self._name, "pipeline": pipeline, "cursor": {}} + cmd.update(kwargs) result = self._aggregate_one_result(conn, read_preference, cmd, collation, session) if not result: return 0 @@ -2472,7 +2481,7 @@ def _drop_index( name = helpers_shared._gen_index_name(index_or_name) if not isinstance(name, str): - raise TypeError("index_or_name must be an instance of str or list") + raise TypeError(f"index_or_name must be an instance of str or list, not {type(name)}") cmd = {"dropIndexes": self._name, "index": name} cmd.update(kwargs) @@ -2500,6 +2509,15 @@ def list_indexes( ... SON([('v', 2), ('key', SON([('_id', 1)])), ('name', '_id_')]) + Cursors are closed automatically when they are exhausted (the last batch of data is retrieved from the database). + If a cursor is not exhausted, it will be closed automatically upon garbage collection, which leaves resources open but unused for a potentially long period of time. + To avoid this, best practice is to call :meth:`Cursor.close` when the cursor is no longer needed, + or use the cursor in a with statement:: + + with collection.list_indexes() as cursor: + for index in cursor: + print(index) + :param session: a :class:`~pymongo.client_session.ClientSession`. :param comment: A user-provided comment to attach to this @@ -2522,13 +2540,12 @@ def _list_indexes( session: Optional[ClientSession] = None, comment: Optional[Any] = None, ) -> CommandCursor[MutableMapping[str, Any]]: - codec_options: CodecOptions = CodecOptions(SON) + codec_options: CodecOptions[Mapping[str, Any]] = CodecOptions(SON) coll = cast( Collection[MutableMapping[str, Any]], self.with_options(codec_options=codec_options, read_preference=ReadPreference.PRIMARY), ) read_pref = (session and session._txn_read_preference()) or ReadPreference.PRIMARY - explicit_session = session is not None def _cmd( session: Optional[ClientSession], @@ -2555,13 +2572,12 @@ def _cmd( cursor, conn.address, session=session, - explicit_session=explicit_session, comment=cmd.get("comment"), ) cmd_cursor._maybe_pin_connection(conn) return cmd_cursor - with self._database.client._tmp_session(session, False) as s: + with self._database.client._tmp_session(session) as s: return self._database.client._retryable_read( _cmd, read_pref, s, operation=_Op.LIST_INDEXES ) @@ -2617,6 +2633,15 @@ def list_search_indexes( ) -> CommandCursor[Mapping[str, Any]]: """Return a cursor over search indexes for the current collection. + Cursors are closed automatically when they are exhausted (the last batch of data is retrieved from the database). + If a cursor is not exhausted, it will be closed automatically upon garbage collection, which leaves resources open but unused for a potentially long period of time. 
+ To avoid this, best practice is to call :meth:`Cursor.close` when the cursor is no longer needed, + or use the cursor in a with statement:: + + with collection.list_search_indexes() as cursor: + for index in cursor: + print(index) + :param name: If given, the name of the index to search for. Only indexes with matching index names will be returned. If not given, all search indexes for the current collection @@ -2648,7 +2673,6 @@ def list_search_indexes( CommandCursor, pipeline, kwargs, - explicit_session=session is not None, comment=comment, user_fields={"cursor": {"firstBatch": 1}}, ) @@ -2864,9 +2888,8 @@ def _aggregate( self, aggregation_command: Type[_AggregationCommand], pipeline: _Pipeline, - cursor_class: Type[CommandCursor], + cursor_class: Type[CommandCursor], # type: ignore[type-arg] session: Optional[ClientSession], - explicit_session: bool, let: Optional[Mapping[str, Any]] = None, comment: Optional[Any] = None, **kwargs: Any, @@ -2878,7 +2901,6 @@ def _aggregate( cursor_class, pipeline, kwargs, - explicit_session, let, user_fields={"cursor": {"firstBatch": 1}}, ) @@ -2909,12 +2931,21 @@ def aggregate( .. note:: This method does not support the 'explain' option. Please use `PyMongoExplain `_ - instead. An example is included in the :ref:`aggregate-examples` + instead. An example is included in the `aggregation example `_ documentation. .. note:: The :attr:`~pymongo.collection.Collection.write_concern` of this collection is automatically applied to this operation. + Cursors are closed automatically when they are exhausted (the last batch of data is retrieved from the database). + If a cursor is not exhausted, it will be closed automatically upon garbage collection, which leaves resources open but unused for a potentially long period of time. + To avoid this, best practice is to call :meth:`Cursor.close` when the cursor is no longer needed, + or use the cursor in a with statement:: + + with collection.aggregate(pipeline) as cursor: + for operation in cursor: + print(operation) + :param pipeline: a list of aggregation pipeline stages :param session: a :class:`~pymongo.client_session.ClientSession`. @@ -2941,6 +2972,7 @@ def aggregate( returning aggregate results using a cursor. - `collation` (optional): An instance of :class:`~pymongo.collation.Collation`. + - `bypassDocumentValidation` (bool): If ``True``, allows the write to opt out of document-level validation. :return: A :class:`~pymongo.command_cursor.CommandCursor` over the result @@ -2969,18 +3001,17 @@ The :meth:`aggregate` method always returns a CommandCursor. The pipeline argument must be a list. - .. seealso:: :doc:`/examples/aggregation` + .. seealso:: `Aggregation `_ ..
_aggregate command: https://mongodb.com/docs/manual/reference/command/aggregate """ - with self._database.client._tmp_session(session, close=False) as s: + with self._database.client._tmp_session(session) as s: return self._aggregate( _CollectionAggregationCommand, pipeline, CommandCursor, session=s, - explicit_session=session is not None, let=let, comment=comment, **kwargs, @@ -3021,7 +3052,7 @@ def aggregate_raw_batches( raise InvalidOperation("aggregate_raw_batches does not support auto encryption") if comment is not None: kwargs["comment"] = comment - with self._database.client._tmp_session(session, close=False) as s: + with self._database.client._tmp_session(session) as s: return cast( RawBatchCursor[_DocumentType], self._aggregate( @@ -3029,7 +3060,6 @@ def aggregate_raw_batches( pipeline, RawBatchCommandCursor, session=s, - explicit_session=session is not None, **kwargs, ), ) @@ -3071,7 +3101,7 @@ def rename( """ if not isinstance(new_name, str): - raise TypeError("new_name must be an instance of str") + raise TypeError(f"new_name must be an instance of str, not {type(new_name)}") if not new_name or ".." in new_name: raise InvalidName("collection names cannot be empty") @@ -3104,8 +3134,9 @@ def distinct( filter: Optional[Mapping[str, Any]] = None, session: Optional[ClientSession] = None, comment: Optional[Any] = None, + hint: Optional[_IndexKeyHint] = None, **kwargs: Any, - ) -> list: + ) -> list[Any]: """Get a list of distinct values for `key` among all documents in this collection. @@ -3131,8 +3162,15 @@ def distinct( :class:`~pymongo.client_session.ClientSession`. :param comment: A user-provided comment to attach to this command. + :param hint: An index to use to support the query + predicate specified either by its string name, or in the same + format as passed to :meth:`~pymongo.collection.Collection.create_index` + (e.g. ``[('field', ASCENDING)]``). :param kwargs: See list of options above. + .. versionchanged:: 4.12 + Added ``hint`` parameter. + .. versionchanged:: 3.6 Added ``session`` parameter. 
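Usage of the ``hint`` parameter documented above might look like the following sketch; the collection, index name, and key pattern are illustrative::

    from pymongo import ASCENDING

    coll.create_index([("category", ASCENDING)], name="category_1")

    # The hint may be the index name...
    values = coll.distinct("category", hint="category_1")
    # ...or a key pattern in create_index form, which the new code
    # normalizes with helpers_shared._index_document.
    values = coll.distinct("category", hint=[("category", ASCENDING)])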
@@ -3141,23 +3179,28 @@ def distinct( """ if not isinstance(key, str): - raise TypeError("key must be an instance of str") - cmd = {"distinct": self._name, "key": key} + raise TypeError(f"key must be an instance of str, not {type(key)}") if filter is not None: if "query" in kwargs: raise ConfigurationError("can't pass both filter and query") kwargs["query"] = filter collation = validate_collation_or_none(kwargs.pop("collation", None)) - cmd.update(kwargs) - if comment is not None: - cmd["comment"] = comment + if hint is not None: + if not isinstance(hint, str): + hint = helpers_shared._index_document(hint) def _cmd( session: Optional[ClientSession], _server: Server, conn: Connection, read_preference: Optional[_ServerMode], - ) -> list: + ) -> list: # type: ignore[type-arg] + cmd = {"distinct": self._name, "key": key} + cmd.update(kwargs) + if comment is not None: + cmd["comment"] = comment + if hint is not None: + cmd["hint"] = hint # type: ignore[assignment] return ( self._command( conn, @@ -3182,37 +3225,36 @@ def _find_and_modify( array_filters: Optional[Sequence[Mapping[str, Any]]] = None, hint: Optional[_IndexKeyHint] = None, session: Optional[ClientSession] = None, - let: Optional[Mapping] = None, + let: Optional[Mapping[str, Any]] = None, **kwargs: Any, ) -> Any: """Internal findAndModify helper.""" common.validate_is_mapping("filter", filter) if not isinstance(return_document, bool): raise ValueError( - "return_document must be ReturnDocument.BEFORE or ReturnDocument.AFTER" + f"return_document must be ReturnDocument.BEFORE or ReturnDocument.AFTER, not {type(return_document)}" ) collation = validate_collation_or_none(kwargs.pop("collation", None)) - cmd = {"findAndModify": self._name, "query": filter, "new": return_document} - if let is not None: - common.validate_is_mapping("let", let) - cmd["let"] = let - cmd.update(kwargs) - if projection is not None: - cmd["fields"] = helpers_shared._fields_list_to_dict(projection, "projection") - if sort is not None: - cmd["sort"] = helpers_shared._index_document(sort) - if upsert is not None: - validate_boolean("upsert", upsert) - cmd["upsert"] = upsert if hint is not None: if not isinstance(hint, str): hint = helpers_shared._index_document(hint) - - write_concern = self._write_concern_for_cmd(cmd, session) + write_concern = self._write_concern_for_cmd(kwargs, session) def _find_and_modify_helper( session: Optional[ClientSession], conn: Connection, retryable_write: bool ) -> Any: + cmd = {"findAndModify": self._name, "query": filter, "new": return_document} + if let is not None: + common.validate_is_mapping("let", let) + cmd["let"] = let + cmd.update(kwargs) + if projection is not None: + cmd["fields"] = helpers_shared._fields_list_to_dict(projection, "projection") + if sort is not None: + cmd["sort"] = helpers_shared._index_document(sort) + if upsert is not None: + validate_boolean("upsert", upsert) + cmd["upsert"] = upsert acknowledged = write_concern.acknowledged if array_filters is not None: if not acknowledged: diff --git a/pymongo/synchronous/command_cursor.py b/pymongo/synchronous/command_cursor.py index 3a4372856a..a09a67efc9 100644 --- a/pymongo/synchronous/command_cursor.py +++ b/pymongo/synchronous/command_cursor.py @@ -64,7 +64,6 @@ def __init__( batch_size: int = 0, max_await_time_ms: Optional[int] = None, session: Optional[ClientSession] = None, - explicit_session: bool = False, comment: Any = None, ) -> None: """Create a new command cursor.""" @@ -80,7 +79,8 @@ def __init__( self._max_await_time_ms = max_await_time_ms 
self._timeout = self._collection.database.client.options.timeout self._session = session - self._explicit_session = explicit_session + if self._session is not None: + self._session._attached_to_cursor = True self._killed = self._id == 0 self._comment = comment if self._killed: @@ -94,7 +94,9 @@ def __init__( self.batch_size(batch_size) if not isinstance(max_await_time_ms, int) and max_await_time_ms is not None: - raise TypeError("max_await_time_ms must be an integer or None") + raise TypeError( + f"max_await_time_ms must be an integer or None, not {type(max_await_time_ms)}" + ) def __del__(self) -> None: self._die_no_lock() @@ -115,7 +117,7 @@ def batch_size(self, batch_size: int) -> CommandCursor[_DocumentType]: :param batch_size: The size of each batch of results requested. """ if not isinstance(batch_size, int): - raise TypeError("batch_size must be an integer") + raise TypeError(f"batch_size must be an integer, not {type(batch_size)}") if batch_size < 0: raise ValueError("batch_size must be >= 0") @@ -195,7 +197,7 @@ def session(self) -> Optional[ClientSession]: .. versionadded:: 3.6 """ - if self._explicit_session: + if self._session and not self._session._implicit: return self._session return None @@ -216,9 +218,10 @@ def _die_no_lock(self) -> None: """Closes this cursor without acquiring a lock.""" cursor_id, address = self._prepare_to_die() self._collection.database.client._cleanup_cursor_no_lock( - cursor_id, address, self._sock_mgr, self._session, self._explicit_session + cursor_id, address, self._sock_mgr, self._session ) - if not self._explicit_session: + if self._session and self._session._implicit: + self._session._attached_to_cursor = False self._session = None self._sock_mgr = None @@ -230,14 +233,15 @@ def _die_lock(self) -> None: address, self._sock_mgr, self._session, - self._explicit_session, ) - if not self._explicit_session: + if self._session and self._session._implicit: + self._session._attached_to_cursor = False self._session = None self._sock_mgr = None def _end_session(self) -> None: - if self._session and not self._explicit_session: + if self._session and self._session._implicit: + self._session._attached_to_cursor = False self._session._end_implicit_session() self._session = None @@ -348,7 +352,7 @@ def _try_next(self, get_more_allowed: bool) -> Optional[_DocumentType]: else: return None - def _next_batch(self, result: list, total: Optional[int] = None) -> bool: + def _next_batch(self, result: list, total: Optional[int] = None) -> bool: # type: ignore[type-arg] """Get all or some available documents from the cursor.""" if not len(self._data) and not self._killed: self._refresh() @@ -428,7 +432,6 @@ def __init__( batch_size: int = 0, max_await_time_ms: Optional[int] = None, session: Optional[ClientSession] = None, - explicit_session: bool = False, comment: Any = None, ) -> None: """Create a new cursor / iterator over raw batches of BSON data. 
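The session bookkeeping above (and the matching ``Cursor`` changes in the next file) replaces the ``explicit_session`` flag with checks against ``session._implicit``, without changing the observable contract: a cursor only exposes a session the user supplied. A sketch, with ``client`` and ``coll`` as illustrative names::

    with client.start_session() as s:
        cursor = coll.find({}, session=s)
        assert cursor.session is s    # explicit sessions are exposed

    cursor = coll.find({})            # driver uses an implicit session
    assert cursor.session is None     # implicit sessions stay internal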
@@ -447,7 +450,6 @@ def __init__( batch_size, max_await_time_ms, session, - explicit_session, comment, ) @@ -455,7 +457,7 @@ def _unpack_response( # type: ignore[override] self, response: Union[_OpReply, _OpMsg], cursor_id: Optional[int], - codec_options: CodecOptions, + codec_options: CodecOptions[dict[str, Any]], user_fields: Optional[Mapping[str, Any]] = None, legacy_response: bool = False, ) -> list[Mapping[str, Any]]: diff --git a/pymongo/synchronous/cursor.py b/pymongo/synchronous/cursor.py index b35098a327..fcd8ebeb1d 100644 --- a/pymongo/synchronous/cursor.py +++ b/pymongo/synchronous/cursor.py @@ -55,7 +55,6 @@ _RawBatchQuery, ) from pymongo.response import PinnedResponse -from pymongo.synchronous.helpers import next from pymongo.typings import _Address, _CollationIn, _DocumentOut, _DocumentType from pymongo.write_concern import validate_boolean @@ -138,19 +137,18 @@ def __init__( if session: self._session = session - self._explicit_session = True + self._session._attached_to_cursor = True else: self._session = None - self._explicit_session = False spec: Mapping[str, Any] = filter or {} validate_is_mapping("filter", spec) if not isinstance(skip, int): - raise TypeError("skip must be an instance of int") + raise TypeError(f"skip must be an instance of int, not {type(skip)}") if not isinstance(limit, int): - raise TypeError("limit must be an instance of int") + raise TypeError(f"limit must be an instance of int, not {type(limit)}") validate_boolean("no_cursor_timeout", no_cursor_timeout) - if no_cursor_timeout and not self._explicit_session: + if no_cursor_timeout and self._session and self._session._implicit: warnings.warn( "use an explicit session with no_cursor_timeout=True " "otherwise the cursor may still timeout after " @@ -171,7 +169,7 @@ def __init__( validate_boolean("allow_partial_results", allow_partial_results) validate_boolean("oplog_replay", oplog_replay) if not isinstance(batch_size, int): - raise TypeError("batch_size must be an integer") + raise TypeError(f"batch_size must be an integer, not {type(batch_size)}") if batch_size < 0: raise ValueError("batch_size must be >= 0") # Only set if allow_disk_use is provided by the user, else None. @@ -216,7 +214,7 @@ def __init__( # it anytime we change __limit. 
self._empty = False - self._data: deque = deque() + self._data: deque = deque() # type: ignore[type-arg] self._address: Optional[_Address] = None self._retrieved = 0 @@ -280,10 +278,10 @@ def clone(self) -> Cursor[_DocumentType]: """ return self._clone(True) - def _clone(self, deepcopy: bool = True, base: Optional[Cursor] = None) -> Cursor: + def _clone(self, deepcopy: bool = True, base: Optional[Cursor] = None) -> Cursor: # type: ignore[type-arg] """Internal clone helper.""" if not base: - if self._explicit_session: + if self._session and not self._session._implicit: base = self._clone_base(self._session) else: base = self._clone_base(None) @@ -322,7 +320,7 @@ def _clone(self, deepcopy: bool = True, base: Optional[Cursor] = None) -> Cursor base.__dict__.update(data) return base - def _clone_base(self, session: Optional[ClientSession]) -> Cursor: + def _clone_base(self, session: Optional[ClientSession]) -> Cursor: # type: ignore[type-arg] """Creates an empty Cursor object for information to be copied into.""" return self.__class__(self._collection, session=session) @@ -388,7 +386,7 @@ def add_option(self, mask: int) -> Cursor[_DocumentType]: cursor.add_option(2) """ if not isinstance(mask, int): - raise TypeError("mask must be an int") + raise TypeError(f"mask must be an int, not {type(mask)}") self._check_okay_to_chain() if mask & _QUERY_OPTIONS["exhaust"]: @@ -408,7 +406,7 @@ def remove_option(self, mask: int) -> Cursor[_DocumentType]: cursor.remove_option(2) """ if not isinstance(mask, int): - raise TypeError("mask must be an int") + raise TypeError(f"mask must be an int, not {type(mask)}") self._check_okay_to_chain() if mask & _QUERY_OPTIONS["exhaust"]: @@ -432,7 +430,7 @@ def allow_disk_use(self, allow_disk_use: bool) -> Cursor[_DocumentType]: .. versionadded:: 3.11 """ if not isinstance(allow_disk_use, bool): - raise TypeError("allow_disk_use must be a bool") + raise TypeError(f"allow_disk_use must be a bool, not {type(allow_disk_use)}") self._check_okay_to_chain() self._allow_disk_use = allow_disk_use @@ -451,7 +449,7 @@ def limit(self, limit: int) -> Cursor[_DocumentType]: .. seealso:: The MongoDB documentation on `limit `_. """ if not isinstance(limit, int): - raise TypeError("limit must be an integer") + raise TypeError(f"limit must be an integer, not {type(limit)}") if self._exhaust: raise InvalidOperation("Can't use limit and exhaust together.") self._check_okay_to_chain() @@ -479,7 +477,7 @@ def batch_size(self, batch_size: int) -> Cursor[_DocumentType]: :param batch_size: The size of each batch of results requested. 
""" if not isinstance(batch_size, int): - raise TypeError("batch_size must be an integer") + raise TypeError(f"batch_size must be an integer, not {type(batch_size)}") if batch_size < 0: raise ValueError("batch_size must be >= 0") self._check_okay_to_chain() @@ -499,7 +497,7 @@ def skip(self, skip: int) -> Cursor[_DocumentType]: :param skip: the number of results to skip """ if not isinstance(skip, int): - raise TypeError("skip must be an integer") + raise TypeError(f"skip must be an integer, not {type(skip)}") if skip < 0: raise ValueError("skip must be >= 0") self._check_okay_to_chain() @@ -520,7 +518,7 @@ def max_time_ms(self, max_time_ms: Optional[int]) -> Cursor[_DocumentType]: :param max_time_ms: the time limit after which the operation is aborted """ if not isinstance(max_time_ms, int) and max_time_ms is not None: - raise TypeError("max_time_ms must be an integer or None") + raise TypeError(f"max_time_ms must be an integer or None, not {type(max_time_ms)}") self._check_okay_to_chain() self._max_time_ms = max_time_ms @@ -543,7 +541,9 @@ def max_await_time_ms(self, max_await_time_ms: Optional[int]) -> Cursor[_Documen .. versionadded:: 3.2 """ if not isinstance(max_await_time_ms, int) and max_await_time_ms is not None: - raise TypeError("max_await_time_ms must be an integer or None") + raise TypeError( + f"max_await_time_ms must be an integer or None, not {type(max_await_time_ms)}" + ) self._check_okay_to_chain() # Ignore max_await_time_ms if not tailable or await_data is False. @@ -677,7 +677,7 @@ def max(self, spec: _Sort) -> Cursor[_DocumentType]: .. versionadded:: 2.7 """ if not isinstance(spec, (list, tuple)): - raise TypeError("spec must be an instance of list or tuple") + raise TypeError(f"spec must be an instance of list or tuple, not {type(spec)}") self._check_okay_to_chain() self._max = dict(spec) @@ -699,7 +699,7 @@ def min(self, spec: _Sort) -> Cursor[_DocumentType]: .. versionadded:: 2.7 """ if not isinstance(spec, (list, tuple)): - raise TypeError("spec must be an instance of list or tuple") + raise TypeError(f"spec must be an instance of list or tuple, not {type(spec)}") self._check_okay_to_chain() self._min = dict(spec) @@ -763,6 +763,8 @@ def explain(self) -> _DocumentType: :meth:`~pymongo.database.Database.command` to run the explain command directly. + .. note:: The timeout of this method can be set using :func:`pymongo.timeout`. + .. seealso:: The MongoDB documentation on `explain `_. """ c = self.clone() @@ -858,7 +860,7 @@ def where(self, code: Union[str, Code]) -> Cursor[_DocumentType]: if self._has_filter: spec = dict(self._spec) else: - spec = cast(dict, self._spec) + spec = cast(dict, self._spec) # type: ignore[type-arg] spec["$where"] = code self._spec = spec return self @@ -882,7 +884,7 @@ def _unpack_response( self, response: Union[_OpReply, _OpMsg], cursor_id: Optional[int], - codec_options: CodecOptions, + codec_options: CodecOptions, # type: ignore[type-arg] user_fields: Optional[Mapping[str, Any]] = None, legacy_response: bool = False, ) -> Sequence[_DocumentOut]: @@ -939,7 +941,7 @@ def session(self) -> Optional[ClientSession]: .. 
versionadded:: 3.6 """ - if self._explicit_session: + if self._session and not self._session._implicit: return self._session return None @@ -958,29 +960,33 @@ def __deepcopy__(self, memo: Any) -> Any: return self._clone(deepcopy=True) @overload - def _deepcopy(self, x: Iterable, memo: Optional[dict[int, Union[list, dict]]] = None) -> list: + def _deepcopy(self, x: Iterable, memo: Optional[dict[int, Union[list, dict]]] = None) -> list: # type: ignore[type-arg] ... @overload def _deepcopy( - self, x: SupportsItems, memo: Optional[dict[int, Union[list, dict]]] = None - ) -> dict: + self, + x: SupportsItems, # type: ignore[type-arg] + memo: Optional[dict[int, Union[list, dict]]] = None, # type: ignore[type-arg] + ) -> dict: # type: ignore[type-arg] ... def _deepcopy( - self, x: Union[Iterable, SupportsItems], memo: Optional[dict[int, Union[list, dict]]] = None - ) -> Union[list, dict]: + self, + x: Union[Iterable, SupportsItems], # type: ignore[type-arg] + memo: Optional[dict[int, Union[list, dict]]] = None, # type: ignore[type-arg] + ) -> Union[list[Any], dict[str, Any]]: """Deepcopy helper for the data dictionary or list. Regular expressions cannot be deep copied but as they are immutable we don't have to copy them when cloning. """ - y: Union[list, dict] + y: Union[list[Any], dict[str, Any]] iterator: Iterable[tuple[Any, Any]] if not hasattr(x, "items"): y, is_list, iterator = [], True, enumerate(x) else: - y, is_list, iterator = {}, False, cast("SupportsItems", x).items() + y, is_list, iterator = {}, False, cast("SupportsItems", x).items() # type: ignore[type-arg] if memo is None: memo = {} val_id = id(x) @@ -999,7 +1005,7 @@ def _deepcopy( else: if not isinstance(key, RE_TYPE): key = copy.deepcopy(key, memo) # noqa: PLW2901 - y[key] = value + y[key] = value # type:ignore[index] return y def _prepare_to_die(self, already_killed: bool) -> tuple[int, Optional[_CursorAddress]]: @@ -1024,9 +1030,10 @@ def _die_no_lock(self) -> None: cursor_id, address = self._prepare_to_die(already_killed) self._collection.database.client._cleanup_cursor_no_lock( - cursor_id, address, self._sock_mgr, self._session, self._explicit_session + cursor_id, address, self._sock_mgr, self._session ) - if not self._explicit_session: + if self._session and self._session._implicit: + self._session._attached_to_cursor = False self._session = None self._sock_mgr = None @@ -1044,9 +1051,9 @@ def _die_lock(self) -> None: address, self._sock_mgr, self._session, - self._explicit_session, ) - if not self._explicit_session: + if self._session and self._session._implicit: + self._session._attached_to_cursor = False self._session = None self._sock_mgr = None @@ -1054,7 +1061,7 @@ def close(self) -> None: """Explicitly close / kill this cursor.""" self._die_lock() - def distinct(self, key: str) -> list: + def distinct(self, key: str) -> list[Any]: """Get a list of distinct values for `key` among all documents in the result set of this query. @@ -1122,10 +1129,10 @@ def _send_message(self, operation: Union[_Query, _GetMore]) -> None: self._killed = True self.close() raise - except Exception: + # Catch KeyboardInterrupt, CancelledError, etc. and cleanup. 
+ except BaseException: self.close() raise - self._address = response.address if isinstance(response, PinnedResponse): if not self._sock_mgr: @@ -1259,7 +1266,7 @@ def next(self) -> _DocumentType: else: raise StopIteration - def _next_batch(self, result: list, total: Optional[int] = None) -> bool: + def _next_batch(self, result: list, total: Optional[int] = None) -> bool: # type: ignore[type-arg] """Get all or some documents from the cursor.""" if not self._exhaust_checked: self._exhaust_checked = True @@ -1319,7 +1326,7 @@ def to_list(self, length: Optional[int] = None) -> list[_DocumentType]: return res -class RawBatchCursor(Cursor, Generic[_DocumentType]): +class RawBatchCursor(Cursor, Generic[_DocumentType]): # type: ignore[type-arg] """A cursor / iterator over raw batches of BSON data from a query result.""" _query_class = _RawBatchQuery diff --git a/pymongo/synchronous/database.py b/pymongo/synchronous/database.py index a0bef55343..0d129ba972 100644 --- a/pymongo/synchronous/database.py +++ b/pymongo/synchronous/database.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -122,7 +122,7 @@ def __init__( from pymongo.synchronous.mongo_client import MongoClient if not isinstance(name, str): - raise TypeError("name must be an instance of str") + raise TypeError(f"name must be an instance of str, not {type(name)}") if not isinstance(client, MongoClient): # This is for compatibility with mocked and subclassed types, such as in Motor. @@ -611,6 +611,8 @@ def create_collection( common.validate_is_mapping("clusteredIndex", clustered_index) with self._client._tmp_session(session) as s: + if s and not s.in_transaction: + s._leave_alive = True # Skip this check in a transaction where listCollections is not # supported. if ( @@ -619,6 +621,8 @@ def create_collection( and name in self._list_collection_names(filter={"name": name}, session=s) ): raise CollectionInvalid("collection %s already exists" % name) + if s: + s._leave_alive = False coll = Collection( self, name, @@ -652,6 +656,11 @@ def aggregate( which case :attr:`~pymongo.read_preferences.ReadPreference.PRIMARY` is used. + Cursors are closed automatically when they are exhausted (the last batch of data is retrieved from the database). + If a cursor is not exhausted, it will be closed automatically upon garbage collection, which leaves resources open but unused for a potentially long period of time. + To avoid this, best practice is to call :meth:`Cursor.close` when the cursor is no longer needed, + or use the cursor in a with statement. + .. note:: This method does not support the 'explain' option. Please use :meth:`~pymongo.database.Database.command` instead. @@ -694,13 +703,12 @@ def aggregate( .. 
_aggregate command: https://mongodb.com/docs/manual/reference/command/aggregate """ - with self.client._tmp_session(session, close=False) as s: + with self.client._tmp_session(session) as s: cmd = _DatabaseAggregationCommand( self, CommandCursor, pipeline, kwargs, - session is not None, user_fields={"cursor": {"firstBatch": 1}}, ) return self.client._retryable_read( @@ -771,7 +779,7 @@ def _command( self._name, command, read_preference, - codec_options, + codec_options, # type: ignore[arg-type] check, allowable_errors, write_concern=write_concern, @@ -893,7 +901,7 @@ def command( when decoding the command response. .. note:: If this client has been configured to use MongoDB Stable - API (see :ref:`versioned-api-ref`), then :meth:`command` will + API (see `versioned API `_), then :meth:`command` will automatically add API versioning options to the given command. Explicitly adding API versioning options in the command and declaring an API version on the client is not supported. @@ -992,7 +1000,7 @@ def cursor_command( when decoding the command response. .. note:: If this client has been configured to use MongoDB Stable - API (see :ref:`versioned-api-ref`), then :meth:`command` will + API (see `versioned API `_), then :meth:`command` will automatically add API versioning options to the given command. Explicitly adding API versioning options in the command and declaring an API version on the client is not supported. @@ -1004,7 +1012,7 @@ def cursor_command( else: command_name = next(iter(command)) - with self._client._tmp_session(session, close=False) as tmp_session: + with self._client._tmp_session(session) as tmp_session: opts = codec_options or DEFAULT_CODEC_OPTIONS if read_preference is None: @@ -1034,7 +1042,6 @@ def cursor_command( conn.address, max_await_time_ms=max_await_time_ms, session=tmp_session, - explicit_session=session is not None, comment=comment, ) cmd_cursor._maybe_pin_connection(conn) @@ -1080,7 +1087,7 @@ def _list_collections( ) cmd = {"listCollections": 1, "cursor": {}} cmd.update(kwargs) - with self._client._tmp_session(session, close=False) as tmp_session: + with self._client._tmp_session(session) as tmp_session: cursor = ( self._command(conn, cmd, read_preference=read_preference, session=tmp_session) )["cursor"] @@ -1089,7 +1096,6 @@ def _list_collections( cursor, conn.address, session=tmp_session, - explicit_session=session is not None, comment=cmd.get("comment"), ) cmd_cursor._maybe_pin_connection(conn) @@ -1148,6 +1154,15 @@ def list_collections( ) -> CommandCursor[MutableMapping[str, Any]]: """Get a cursor over the collections of this database. + Cursors are closed automatically when they are exhausted (the last batch of data is retrieved from the database). + If a cursor is not exhausted, it will be closed automatically upon garbage collection, which leaves resources open but unused for a potentially long period of time. + To avoid this, best practice is to call :meth:`Cursor.close` when the cursor is no longer needed, + or use the cursor in a with statement:: + + with database.list_collections() as cursor: + for collection in cursor: + print(collection) + :param session: a :class:`~pymongo.client_session.ClientSession`. 
:param filter: A query document to filter the list of @@ -1303,7 +1318,7 @@ def drop_collection( name = name.name if not isinstance(name, str): - raise TypeError("name_or_collection must be an instance of str") + raise TypeError(f"name_or_collection must be an instance of str, not {type(name)}") encrypted_fields = self._get_encrypted_fields( {"encryptedFields": encrypted_fields}, name, @@ -1367,7 +1382,9 @@ def validate_collection( name = name.name if not isinstance(name, str): - raise TypeError("name_or_collection must be an instance of str or Collection") + raise TypeError( + f"name_or_collection must be an instance of str or Collection, not {type(name)}" + ) cmd = {"validate": name, "scandata": scandata, "full": full} if comment is not None: cmd["comment"] = comment diff --git a/pymongo/synchronous/encryption.py b/pymongo/synchronous/encryption.py index ef49855059..2d666b9763 100644 --- a/pymongo/synchronous/encryption.py +++ b/pymongo/synchronous/encryption.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -15,7 +15,6 @@ """Support for explicit client-side field level encryption.""" from __future__ import annotations -import asyncio import contextlib import enum import socket @@ -62,7 +61,12 @@ from pymongo import _csot from pymongo.common import CONNECT_TIMEOUT from pymongo.daemon import _spawn_daemon -from pymongo.encryption_options import AutoEncryptionOpts, RangeOpts +from pymongo.encryption_options import ( + AutoEncryptionOpts, + RangeOpts, + TextOpts, + check_min_pymongocrypt, +) from pymongo.errors import ( ConfigurationError, EncryptedCollectionError, @@ -71,23 +75,23 @@ NetworkTimeout, ServerSelectionTimeoutError, ) -from pymongo.network_layer import BLOCKING_IO_ERRORS, sendall +from pymongo.helpers_shared import _get_timeout_details +from pymongo.network_layer import sendall from pymongo.operations import UpdateOne from pymongo.pool_options import PoolOptions +from pymongo.pool_shared import ( + _configured_socket, + _raise_connection_failure, +) from pymongo.read_concern import ReadConcern from pymongo.results import BulkWriteResult, DeleteResult -from pymongo.ssl_support import get_ssl_context +from pymongo.ssl_support import BLOCKING_IO_ERRORS, get_ssl_context from pymongo.synchronous.collection import Collection from pymongo.synchronous.cursor import Cursor from pymongo.synchronous.database import Database from pymongo.synchronous.mongo_client import MongoClient -from pymongo.synchronous.pool import ( - _configured_socket, - _get_timeout_details, - _raise_connection_failure, -) from pymongo.typings import _DocumentType, _DocumentTypeArg -from pymongo.uri_parser import parse_host +from pymongo.uri_parser_shared import _parse_kms_tls_options, parse_host from pymongo.write_concern import WriteConcern if TYPE_CHECKING: @@ -127,8 +131,6 @@ def _wrap_encryption_errors() -> Iterator[None]: # BSON encoding/decoding errors are unrelated to encryption so # we should propagate them unchanged. 
raise - except asyncio.CancelledError: - raise except Exception as exc: raise EncryptionError(exc) from exc @@ -159,6 +161,7 @@ def __init__( self.mongocryptd_client = mongocryptd_client self.opts = opts self._spawned = False + self._kms_ssl_contexts = opts._kms_ssl_contexts(_IS_SYNC) def kms_request(self, kms_context: MongoCryptKmsContext) -> None: """Complete a KMS request. @@ -170,7 +173,7 @@ def kms_request(self, kms_context: MongoCryptKmsContext) -> None: endpoint = kms_context.endpoint message = kms_context.message provider = kms_context.kms_provider - ctx = self.opts._kms_ssl_contexts.get(provider) + ctx = self._kms_ssl_contexts.get(provider) if ctx is None: # Enable strict certificate verification, OCSP, match hostname, and # SNI using the system default CA certificates. @@ -182,6 +185,7 @@ def kms_request(self, kms_context: MongoCryptKmsContext) -> None: False, # allow_invalid_certificates False, # allow_invalid_hostnames False, # disable_ocsp_endpoint_check + _IS_SYNC, ) # CSOT: set timeout for socket creation. connect_timeout = max(_csot.clamp_remaining(_KMS_CONNECT_TIMEOUT), 0.001) @@ -202,6 +206,7 @@ def kms_request(self, kms_context: MongoCryptKmsContext) -> None: while kms_context.bytes_needed > 0: # CSOT: update timeout. conn.settimeout(max(_csot.clamp_remaining(_KMS_CONNECT_TIMEOUT), 0)) + data: memoryview | bytes if _IS_SYNC: data = conn.recv(kms_context.bytes_needed) else: @@ -219,7 +224,14 @@ def kms_request(self, kms_context: MongoCryptKmsContext) -> None: # Wrap I/O errors in PyMongo exceptions. if isinstance(exc, BLOCKING_IO_ERRORS): exc = socket.timeout("timed out") - _raise_connection_failure(address, exc, timeout_details=_get_timeout_details(opts)) + # Async raises an OSError instead of returning empty bytes. + if isinstance(exc, OSError): + msg_prefix = "KMS connection closed" + else: + msg_prefix = None + _raise_connection_failure( + address, exc, msg_prefix=msg_prefix, timeout_details=_get_timeout_details(opts) + ) finally: conn.close() except MongoCryptError: @@ -237,7 +249,7 @@ def kms_request(self, kms_context: MongoCryptKmsContext) -> None: ) raise exc from final_err - def collection_info(self, database: str, filter: bytes) -> Optional[bytes]: + def collection_info(self, database: str, filter: bytes) -> Optional[list[bytes]]: """Get the collection info for a namespace. The returned collection info is passed to libmongocrypt which reads @@ -246,12 +258,10 @@ def collection_info(self, database: str, filter: bytes) -> Optional[bytes]: :param database: The database on which to run listCollections. :param filter: The filter to pass to listCollections. - :return: The first document from the listCollections command response as BSON. + :return: All documents from the listCollections command response as BSON. """ with self.client_ref()[database].list_collections(filter=RawBSONDocument(filter)) as cursor: - for doc in cursor: - return _dict_to_bson(doc, False, _DATA_KEY_OPTS) - return None + return [_dict_to_bson(doc, False, _DATA_KEY_OPTS) for doc in cursor] def spawn(self) -> None: """Spawn mongocryptd. @@ -264,7 +274,7 @@ def spawn(self) -> None: args.extend(self.opts._mongocryptd_spawn_args) _spawn_daemon(args) - def mark_command(self, database: str, cmd: bytes) -> bytes: + def mark_command(self, database: str, cmd: bytes) -> bytes | memoryview: """Mark a command for encryption. :param database: The database on which to run this command. 
@@ -291,7 +301,7 @@ def mark_command(self, database: str, cmd: bytes) -> bytes: ) return res.raw - def fetch_keys(self, filter: bytes) -> Generator[bytes, None]: + def fetch_keys(self, filter: bytes) -> Generator[bytes | memoryview, None]: """Yields one or more keys from the key vault. :param filter: The filter to pass to find. @@ -313,7 +323,9 @@ def insert_data_key(self, data_key: bytes) -> Binary: raw_doc = RawBSONDocument(data_key, _KEY_VAULT_OPTS) data_key_id = raw_doc.get("_id") if not isinstance(data_key_id, Binary) or data_key_id.subtype != UUID_SUBTYPE: - raise TypeError("data_key _id must be Binary with a UUID subtype") + raise TypeError( + f"data_key _id must be Binary with a UUID subtype, not {type(data_key_id)}" + ) assert self.key_vault_coll is not None self.key_vault_coll.insert_one(raw_doc) @@ -389,6 +401,8 @@ def __init__(self, client: MongoClient[_DocumentTypeArg], opts: AutoEncryptionOp encrypted_fields_map = _dict_to_bson(opts._encrypted_fields_map, False, _DATA_KEY_OPTS) self._bypass_auto_encryption = opts._bypass_auto_encryption self._internal_client = None + # parsing kms_ssl_contexts here so that parsing errors will be raised before internal clients are created + opts._kms_ssl_contexts(_IS_SYNC) def _get_internal_client( encrypter: _Encrypter, mongo_client: MongoClient[_DocumentTypeArg] @@ -436,6 +450,7 @@ def _get_internal_client( bypass_encryption=opts._bypass_auto_encryption, encrypted_fields_map=encrypted_fields_map, bypass_query_analysis=opts._bypass_query_analysis, + key_expiration_ms=opts._key_expiration_ms, ), ) self._closed = False @@ -458,7 +473,7 @@ def encrypt( # TODO: PYTHON-1922 avoid decoding the encrypted_cmd. return _inflate_bson(encrypted_cmd, DEFAULT_RAW_BSON_OPTIONS) - def decrypt(self, response: bytes) -> Optional[bytes]: + def decrypt(self, response: bytes | memoryview) -> Optional[bytes]: """Decrypt a MongoDB command response. :param response: A MongoDB command response as BSON. @@ -511,6 +526,11 @@ class Algorithm(str, enum.Enum): .. versionadded:: 4.4 """ + TEXTPREVIEW = "TextPreview" + """**BETA** - TextPreview. + + .. versionadded:: 4.15 + """ class QueryType(str, enum.Enum): @@ -536,13 +556,30 @@ class QueryType(str, enum.Enum): .. versionadded:: 4.4 """ + PREFIXPREVIEW = "prefixPreview" + """**BETA** - Used to encrypt a value for a prefixPreview query. + + .. versionadded:: 4.15 + """ + + SUFFIXPREVIEW = "suffixPreview" + """**BETA** - Used to encrypt a value for a suffixPreview query. + + .. versionadded:: 4.15 + """ + + SUBSTRINGPREVIEW = "substringPreview" + """**BETA** - Used to encrypt a value for a substringPreview query. + + .. versionadded:: 4.15 + """ + def _create_mongocrypt_options(**kwargs: Any) -> MongoCryptOptions: - opts = MongoCryptOptions(**kwargs) - # Opt into range V2 encryption. - if hasattr(opts, "enable_range_v2"): - opts.enable_range_v2 = True - return opts + # For compat with pymongocrypt <1.13, avoid setting the default key_expiration_ms. + if kwargs.get("key_expiration_ms") is None: + kwargs.pop("key_expiration_ms", None) + return MongoCryptOptions(**kwargs, enable_multiple_collinfo=True) class ClientEncryption(Generic[_DocumentType]): @@ -555,6 +592,7 @@ def __init__( key_vault_client: MongoClient[_DocumentTypeArg], codec_options: CodecOptions[_DocumentTypeArg], kms_tls_options: Optional[Mapping[str, Any]] = None, + key_expiration_ms: Optional[int] = None, ) -> None: """Explicit client-side field level encryption. @@ -567,7 +605,7 @@ def __init__( creating data keys. 
It does not provide an API to query keys from the key vault collection, as this can be done directly on the MongoClient. - See :ref:`explicit-client-side-encryption` for an example. + See `explicit client-side encryption `_ for an example. :param kms_providers: Map of KMS provider options. The `kms_providers` map values differ by provider: @@ -596,7 +634,7 @@ def __init__( KMS providers may be specified with an optional name suffix separated by a colon, for example "kmip:name" or "aws:name". - Named KMS providers do not support :ref:`CSFLE on-demand credentials`. + Named KMS providers do not support `CSFLE on-demand credentials `_. :param key_vault_namespace: The namespace for the key vault collection. The key vault collection contains all data keys used for encryption and decryption. Data keys are stored as documents in this MongoDB @@ -621,7 +659,12 @@ def __init__( Or to supply a client certificate:: kms_tls_options={'kmip': {'tlsCertificateKeyFile': 'client.pem'}} + :param key_expiration_ms: The cache expiration time for data encryption keys. + Defaults to ``None``, which defers to libmongocrypt's default (currently 60000). + Set to 0 to disable key expiration. + .. versionchanged:: 4.12 + Added the `key_expiration_ms` parameter. .. versionchanged:: 4.0 Added the `kms_tls_options` parameter and the "kmip" KMS provider. @@ -634,8 +677,12 @@ def __init__( "python -m pip install --upgrade 'pymongo[encryption]'" ) + check_min_pymongocrypt() + if not isinstance(codec_options, CodecOptions): - raise TypeError("codec_options must be an instance of bson.codec_options.CodecOptions") + raise TypeError( + f"codec_options must be an instance of bson.codec_options.CodecOptions, not {type(codec_options)}" + ) if not isinstance(key_vault_client, MongoClient): # This is for compatibility with mocked and subclassed types, such as in Motor. @@ -651,14 +698,20 @@ def __init__( key_vault_coll = key_vault_client[db][coll] opts = AutoEncryptionOpts( - kms_providers, key_vault_namespace, kms_tls_options=kms_tls_options + kms_providers, + key_vault_namespace, + kms_tls_options=kms_tls_options, + key_expiration_ms=key_expiration_ms, ) + self._kms_ssl_contexts = _parse_kms_tls_options(opts._kms_tls_options, _IS_SYNC) self._io_callbacks: Optional[_EncryptionIO] = _EncryptionIO( None, key_vault_coll, None, opts ) self._encryption = ExplicitEncrypter( self._io_callbacks, - _create_mongocrypt_options(kms_providers=kms_providers, schema_map=None), + _create_mongocrypt_options( + kms_providers=kms_providers, schema_map=None, key_expiration_ms=key_expiration_ms + ), ) # Use the same key vault collection as the callback. assert self._io_callbacks.key_vault_coll is not None @@ -685,6 +738,7 @@ def create_encrypted_collection( creation. :class:`~pymongo.errors.EncryptionError` will be raised if the collection already exists. + :param database: the database in which to create the collection :param name: the name of the collection to create :param encrypted_fields: Document that describes the encrypted fields for Queryable Encryption. The "keyId" may be set to ``None`` to auto-generate the data keys.
For example: @@ -749,8 +803,6 @@ def create_encrypted_collection( database.create_collection(name=name, **kwargs), encrypted_fields, ) - except asyncio.CancelledError: - raise except Exception as exc: raise EncryptedCollectionError(exc, encrypted_fields) from exc @@ -855,6 +907,7 @@ def _encrypt_helper( contention_factor: Optional[int] = None, range_opts: Optional[RangeOpts] = None, is_expression: bool = False, + text_opts: Optional[TextOpts] = None, ) -> Any: self._check_closed() if isinstance(key_id, uuid.UUID): @@ -874,6 +927,12 @@ def _encrypt_helper( range_opts.document, codec_options=self._codec_options, ) + text_opts_bytes = None + if text_opts: + text_opts_bytes = encode( + text_opts.document, + codec_options=self._codec_options, + ) with _wrap_encryption_errors(): encrypted_doc = self._encryption.encrypt( value=doc, @@ -884,6 +943,8 @@ def _encrypt_helper( contention_factor=contention_factor, range_opts=range_opts_bytes, is_expression=is_expression, + # For compatibility with pymongocrypt < 1.16: + **{"text_opts": text_opts_bytes} if text_opts_bytes else {}, ) return decode(encrypted_doc)["v"] @@ -896,6 +957,7 @@ def encrypt( query_type: Optional[str] = None, contention_factor: Optional[int] = None, range_opts: Optional[RangeOpts] = None, + text_opts: Optional[TextOpts] = None, ) -> Binary: """Encrypt a BSON value with a given key and algorithm. @@ -916,9 +978,14 @@ def encrypt( used. :param range_opts: Index options for `range` queries. See :class:`RangeOpts` for some valid options. + :param text_opts: Index options for `textPreview` queries. See + :class:`TextOpts` for some valid options. :return: The encrypted value, a :class:`~bson.binary.Binary` with subtype 6. + .. versionchanged:: 4.15 + Added the `text_opts` parameter. + .. versionchanged:: 4.9 Added the `range_opts` parameter. @@ -939,6 +1006,7 @@ def encrypt( contention_factor=contention_factor, range_opts=range_opts, is_expression=False, + text_opts=text_opts, ), ) diff --git a/pymongo/synchronous/helpers.py b/pymongo/synchronous/helpers.py index 064583dad3..c1b75a3c95 100644 --- a/pymongo/synchronous/helpers.py +++ b/pymongo/synchronous/helpers.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -15,8 +15,8 @@ """Miscellaneous pieces that need to be synchronized.""" from __future__ import annotations -import builtins -import sys +import asyncio +import socket from typing import ( Any, Callable, @@ -68,15 +68,19 @@ def inner(*args: Any, **kwargs: Any) -> Any: return cast(F, inner) -if sys.version_info >= (3, 10): - next = builtins.next - iter = builtins.iter -else: - - def next(cls: Any) -> Any: - """Compatibility function until we drop 3.9 support: https://docs.python.org/3/library/functions.html#next.""" - return cls.__next__() - - def iter(cls: Any) -> Any: - """Compatibility function until we drop 3.9 support: https://docs.python.org/3/library/functions.html#next.""" - return cls.__iter__() +def _getaddrinfo( + host: Any, port: Any, **kwargs: Any +) -> list[ + tuple[ + socket.AddressFamily, + socket.SocketKind, + int, + str, + tuple[str, int] | tuple[str, int, int, int] | tuple[int, bytes], + ] +]: + if not _IS_SYNC: + loop = asyncio.get_running_loop() + return loop.getaddrinfo(host, port, **kwargs) # type: ignore[return-value] + else: + return socket.getaddrinfo(host, port, **kwargs) diff --git a/pymongo/synchronous/mongo_client.py b/pymongo/synchronous/mongo_client.py index a694a58c1e..6e716402f4 100644 --- a/pymongo/synchronous/mongo_client.py +++ b/pymongo/synchronous/mongo_client.py @@ -4,7 +4,7 @@ # may not use this file except in compliance with the License. You # may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -14,7 +14,7 @@ """Tools for connecting to MongoDB. -.. seealso:: :doc:`/examples/high_availability` for examples of connecting +.. seealso:: `Read and Write Settings `_ for examples of connecting to replica sets or sets of mongos servers. 
To get a :class:`~pymongo.database.Database` instance from a @@ -42,6 +42,7 @@ TYPE_CHECKING, Any, Callable, + Collection, ContextManager, FrozenSet, Generator, @@ -59,8 +60,9 @@ from bson.codec_options import DEFAULT_CODEC_OPTIONS, CodecOptions, TypeRegistry from bson.timestamp import Timestamp -from pymongo import _csot, common, helpers_shared, periodic_executor, uri_parser +from pymongo import _csot, common, helpers_shared, periodic_executor from pymongo.client_options import ClientOptions +from pymongo.driver_info import DriverInfo from pymongo.errors import ( AutoReconnect, BulkWriteError, @@ -80,9 +82,15 @@ _create_lock, _release_locks, ) -from pymongo.logger import _CLIENT_LOGGER, _log_or_warn +from pymongo.logger import ( + _CLIENT_LOGGER, + _COMMAND_LOGGER, + _debug_log, + _log_client_error, + _log_or_warn, +) from pymongo.message import _CursorAddress, _GetMore, _Query -from pymongo.monitoring import ConnectionClosedReason +from pymongo.monitoring import ConnectionClosedReason, _EventListeners from pymongo.operations import ( DeleteMany, DeleteOne, @@ -94,9 +102,10 @@ ) from pymongo.read_preferences import ReadPreference, _ServerMode from pymongo.results import ClientBulkWriteResult +from pymongo.server_description import ServerDescription from pymongo.server_selectors import writable_server_selector from pymongo.server_type import SERVER_TYPE -from pymongo.synchronous import client_session, database +from pymongo.synchronous import client_session, database, uri_parser from pymongo.synchronous.change_stream import ChangeStream, ClusterChangeStream from pymongo.synchronous.client_bulk import _ClientBulk from pymongo.synchronous.client_session import _EmptyServerSession @@ -112,11 +121,14 @@ _DocumentTypeArg, _Pipeline, ) -from pymongo.uri_parser import ( +from pymongo.uri_parser_shared import ( + SRV_SCHEME, _check_options, _handle_option_deprecations, _handle_security_options, _normalize_options, + _validate_uri, + split_hosts, ) from pymongo.write_concern import DEFAULT_WRITE_CONCERN, WriteConcern @@ -130,6 +142,7 @@ from pymongo.synchronous.bulk import _Bulk from pymongo.synchronous.client_session import ClientSession, _ServerSession from pymongo.synchronous.cursor import _ConnectionManager + from pymongo.synchronous.encryption import _Encrypter from pymongo.synchronous.pool import Connection from pymongo.synchronous.server import Server @@ -145,10 +158,10 @@ _IS_SYNC = True _WriteOp = Union[ - InsertOne, + InsertOne, # type: ignore[type-arg] DeleteOne, DeleteMany, - ReplaceOne, + ReplaceOne, # type: ignore[type-arg] UpdateOne, UpdateMany, ] @@ -160,7 +173,7 @@ class MongoClient(common.BaseObject, Generic[_DocumentType]): # Define order to retrieve options from ClientOptions for __repr__. # No host/port; these are retrieved from TopologySettings. _constructor_args = ("document_class", "tz_aware", "connect") - _clients: weakref.WeakValueDictionary = weakref.WeakValueDictionary() + _clients: weakref.WeakValueDictionary = weakref.WeakValueDictionary() # type: ignore[type-arg] def __init__( self, @@ -186,8 +199,14 @@ def __init__( exception (recognizing that the operation failed) and then continue to execute. + Best practice is to call :meth:`MongoClient.close` when the client is no longer needed, + or use the client in a with statement:: + + with MongoClient(url) as client: + # Use client here. + The `host` parameter can be a full `mongodb URI - `_, in addition to + `_, in addition to a simple hostname. It can also be a list of hostnames but no more than one URI. 
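For instance, a minimal sketch of the two equivalent forms (the hostnames shown are hypothetical)::

    from pymongo import MongoClient

    # One full MongoDB URI naming every seed:
    client = MongoClient("mongodb://db1.example.com:27017,db2.example.com:27018/?replicaSet=rs0")

    # Or a list of "host:port" strings:
    client = MongoClient(["db1.example.com:27017", "db2.example.com:27018"], replicaSet="rs0")
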
Any port specified in the host string(s) will override the `port` parameter. For username and @@ -247,7 +266,7 @@ def __init__( print("Server not available") .. warning:: When using PyMongo in a multiprocessing context, please - read :ref:`multiprocessing` first. + read `PyMongo multiprocessing `_ first. .. note:: Many of the following options can be passed using a MongoDB URI or keyword parameters. If the same option is passed in a URI and @@ -274,17 +293,16 @@ def __init__( :param type_registry: instance of :class:`~bson.codec_options.TypeRegistry` to enable encoding and decoding of custom types. - :param datetime_conversion: Specifies how UTC datetimes should be decoded + :param kwargs: **Additional optional parameters available as keyword arguments:** + + - `datetime_conversion` (optional): Specifies how UTC datetimes should be decoded within BSON. Valid options include 'datetime_ms' to return as a DatetimeMS, 'datetime' to return as a datetime.datetime and raising a ValueError for out-of-range values, 'datetime_auto' to return DatetimeMS objects when the underlying datetime is out-of-range and 'datetime_clamp' to clamp to the minimum and maximum possible datetimes. Defaults to 'datetime'. See - :ref:`handling-out-of-range-datetimes` for details. - - | **Other optional parameters can be passed as keyword arguments:** - + `handling out of range datetimes `_ for details. - `directConnection` (optional): if ``True``, forces this client to connect directly to the specified MongoDB host as a standalone. If ``false``, the client connects to the entire replica set of @@ -409,7 +427,7 @@ def __init__( package. By default no compression is used. Compression support must also be enabled on the server. MongoDB 3.6+ supports snappy and zlib compression. MongoDB 4.2+ adds support for zstd. - See :ref:`network-compression-example` for details. + See `compress network traffic `_ for details. - `zlibCompressionLevel`: (int) The zlib compression level to use when zlib is used as the wire protocol compressor. Supported values are -1 through 9. -1 tells the zlib library to use its default @@ -420,7 +438,7 @@ def __init__( values are the strings: "standard", "pythonLegacy", "javaLegacy", "csharpLegacy", and "unspecified" (the default). New applications should consider setting this to "standard" for cross language - compatibility. See :ref:`handling-uuid-data-example` for details. + compatibility. See `handling UUID data `_ for details. - `unicode_decode_error_handler`: The error handler to apply when a Unicode-related error occurs during BSON decoding that would otherwise raise :exc:`UnicodeDecodeError`. Valid options include @@ -484,7 +502,7 @@ def __init__( is set, it must be a positive integer greater than or equal to 90 seconds. - .. seealso:: :doc:`/examples/server_selection` + .. seealso:: `Customize Server Selection `_ | **Authentication:** @@ -510,7 +528,7 @@ def __init__( To specify the session token for MONGODB-AWS authentication pass ``authMechanismProperties='AWS_SESSION_TOKEN:'``. - .. seealso:: :doc:`/examples/authentication` + .. seealso:: `Authentication `_ | **TLS/SSL configuration:** @@ -573,7 +591,7 @@ def __init__( :class:`~pymongo.encryption_options.AutoEncryptionOpts` which configures this client to automatically encrypt collection commands and automatically decrypt results. See - :ref:`automatic-client-side-encryption` for an example. + `client-side field level encryption `_ for an example. 
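As a minimal sketch of wiring this option up (the local KMS key material below is a hypothetical placeholder; production deployments would typically use a managed KMS provider)::

    from pymongo import MongoClient
    from pymongo.encryption_options import AutoEncryptionOpts

    # 96 zero bytes stand in for a real locally-managed master key.
    kms_providers = {"local": {"key": bytes(96)}}
    opts = AutoEncryptionOpts(kms_providers, "keyvault.datakeys")
    client = MongoClient(auto_encryption_opts=opts)
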
If a :class:`MongoClient` is configured with ``auto_encryption_opts`` and a non-None ``maxPoolSize``, a separate internal ``MongoClient`` is created if any of the @@ -589,7 +607,7 @@ def __init__( - `server_api`: A :class:`~pymongo.server_api.ServerApi` which configures this - client to use Stable API. See :ref:`versioned-api-ref` for + client to use Stable API. See `versioned API `_ for details. .. seealso:: The MongoDB documentation on `connections `_. @@ -700,15 +718,15 @@ def __init__( reconnect to one of them. In PyMongo 3, the client monitors its network latency to all the mongoses continuously, and distributes operations evenly among those with the lowest latency. See - :ref:`mongos-load-balancing` for more information. + `load balancing `_ for more information. The ``connect`` option is added. The ``start_request``, ``in_request``, and ``end_request`` methods are removed, as well as the ``auto_start_request`` option. - The ``copy_database`` method is removed, see the - :doc:`copy_database examples ` for alternatives. + The ``copy_database`` method is removed, see + `Copy and Clone Databases `_ for alternatives. The :meth:`MongoClient.disconnect` method is removed; it was a synonym for :meth:`~pymongo.MongoClient.close`. @@ -748,7 +766,13 @@ def __init__( if port is None: port = self.PORT if not isinstance(port, int): - raise TypeError("port must be an instance of int") + raise TypeError(f"port must be an instance of int, not {type(port)}") + self._host = host + self._port = port + self._topology: Topology = None # type: ignore[assignment] + self._timeout: float | None = None + self._topology_settings: TopologySettings = None # type: ignore[assignment] + self._event_listeners: _EventListeners | None = None # _pool_class, _monitor_class, and _condition_class are for deep # customization of PyMongo, e.g. Motor. @@ -759,8 +783,10 @@ def __init__( # Parse options passed as kwargs. keyword_opts = common._CaseInsensitiveDictionary(kwargs) keyword_opts["document_class"] = doc_class + self._resolve_srv_info: dict[str, Any] = {"keyword_opts": keyword_opts} - seeds = set() + self._seeds = set() + is_srv = False username = None password = None dbase = None @@ -768,41 +794,34 @@ def __init__( fqdn = None srv_service_name = keyword_opts.get("srvservicename") srv_max_hosts = keyword_opts.get("srvmaxhosts") - if len([h for h in host if "/" in h]) > 1: + if len([h for h in self._host if "/" in h]) > 1: raise ConfigurationError("host must not contain multiple MongoDB URIs") - for entity in host: + for entity in self._host: # A hostname can only include a-z, 0-9, '-' and '.'. If we find a '/' # it must be a URI, # https://en.wikipedia.org/wiki/Hostname#Restrictions_on_valid_host_names if "/" in entity: - # Determine connection timeout from kwargs. 
- timeout = keyword_opts.get("connecttimeoutms") - if timeout is not None: - timeout = common.validate_timeout_or_none_or_zero( - keyword_opts.cased_key("connecttimeoutms"), timeout - ) - res = uri_parser.parse_uri( + res = _validate_uri( entity, port, validate=True, warn=True, normalize=False, - connect_timeout=timeout, - srv_service_name=srv_service_name, srv_max_hosts=srv_max_hosts, ) - seeds.update(res["nodelist"]) + is_srv = entity.startswith(SRV_SCHEME) + self._seeds.update(res["nodelist"]) username = res["username"] or username password = res["password"] or password dbase = res["database"] or dbase opts = res["options"] fqdn = res["fqdn"] else: - seeds.update(uri_parser.split_hosts(entity, port)) - if not seeds: + self._seeds.update(split_hosts(entity, self._port)) + if not self._seeds: raise ConfigurationError("need to specify at least one host") - for hostname in [node[0] for node in seeds]: + for hostname in [node[0] for node in self._seeds]: if _detect_external_db(hostname): break @@ -819,80 +838,180 @@ def __init__( keyword_opts["tz_aware"] = tz_aware keyword_opts["connect"] = connect - # Handle deprecated options in kwarg options. - keyword_opts = _handle_option_deprecations(keyword_opts) - # Validate kwarg options. - keyword_opts = common._CaseInsensitiveDictionary( - dict(common.validate(keyword_opts.cased_key(k), v) for k, v in keyword_opts.items()) - ) - - # Override connection string options with kwarg options. - opts.update(keyword_opts) + opts = self._validate_kwargs_and_update_opts(keyword_opts, opts) if srv_service_name is None: srv_service_name = opts.get("srvServiceName", common.SRV_SERVICE_NAME) srv_max_hosts = srv_max_hosts or opts.get("srvmaxhosts") - # Handle security-option conflicts in combined options. - opts = _handle_security_options(opts) - # Normalize combined options. - opts = _normalize_options(opts) - _check_options(seeds, opts) + opts = self._normalize_and_validate_options(opts, self._seeds) # Username and password passed as kwargs override user info in URI. 
username = opts.get("username", username) password = opts.get("password", password) - self._options = options = ClientOptions(username, password, dbase, opts, _IS_SYNC) + self._options = ClientOptions(username, password, dbase, opts, _IS_SYNC) self._default_database_name = dbase self._lock = _create_lock() - self._kill_cursors_queue: list = [] + self._kill_cursors_queue: list = [] # type: ignore[type-arg] + + self._encrypter: Optional[_Encrypter] = None + + self._resolve_srv_info.update( + { + "is_srv": is_srv, + "username": username, + "password": password, + "dbase": dbase, + "seeds": self._seeds, + "fqdn": fqdn, + "srv_service_name": srv_service_name, + "pool_class": pool_class, + "monitor_class": monitor_class, + "condition_class": condition_class, + } + ) - self._event_listeners = options.pool_options._event_listeners super().__init__( - options.codec_options, - options.read_preference, - options.write_concern, - options.read_concern, + self._options.codec_options, + self._options.read_preference, + self._options.write_concern, + self._options.read_concern, ) - self._topology_settings = TopologySettings( - seeds=seeds, - replica_set_name=options.replica_set_name, - pool_class=pool_class, - pool_options=options.pool_options, - monitor_class=monitor_class, - condition_class=condition_class, - local_threshold_ms=options.local_threshold_ms, - server_selection_timeout=options.server_selection_timeout, - server_selector=options.server_selector, - heartbeat_frequency=options.heartbeat_frequency, - fqdn=fqdn, - direct_connection=options.direct_connection, - load_balanced=options.load_balanced, - srv_service_name=srv_service_name, - srv_max_hosts=srv_max_hosts, - server_monitoring_mode=options.server_monitoring_mode, - ) + self._init_based_on_options(self._seeds, srv_max_hosts, srv_service_name) self._opened = False self._closed = False - self._init_background() + self._loop: Optional[asyncio.AbstractEventLoop] = None + if not is_srv: + self._init_background() if _IS_SYNC and connect: self._get_topology() # type: ignore[unused-coroutine] - self._encrypter = None + def _resolve_srv(self) -> None: + keyword_opts = self._resolve_srv_info["keyword_opts"] + seeds = set() + opts = common._CaseInsensitiveDictionary() + srv_service_name = keyword_opts.get("srvservicename") + srv_max_hosts = keyword_opts.get("srvmaxhosts") + for entity in self._host: + # A hostname can only include a-z, 0-9, '-' and '.'. If we find a '/' + # it must be a URI, + # https://en.wikipedia.org/wiki/Hostname#Restrictions_on_valid_host_names + if "/" in entity: + # Determine connection timeout from kwargs. + timeout = keyword_opts.get("connecttimeoutms") + if timeout is not None: + timeout = common.validate_timeout_or_none_or_zero( + keyword_opts.cased_key("connecttimeoutms"), timeout + ) + res = uri_parser._parse_srv( + entity, + self._port, + validate=True, + warn=True, + normalize=False, + connect_timeout=timeout, + srv_service_name=srv_service_name, + srv_max_hosts=srv_max_hosts, + ) + seeds.update(res["nodelist"]) + opts = res["options"] + else: + seeds.update(split_hosts(entity, self._port)) + + if not seeds: + raise ConfigurationError("need to specify at least one host") + + for hostname in [node[0] for node in seeds]: + if _detect_external_db(hostname): + break + + # Add options with named keyword arguments to the parsed kwarg options. 
+ tz_aware = keyword_opts["tz_aware"] + connect = keyword_opts["connect"] + if tz_aware is None: + tz_aware = opts.get("tz_aware", False) + if connect is None: + # Default to connect=True unless on a FaaS system, which might use fork. + from pymongo.pool_options import _is_faas + + connect = opts.get("connect", not _is_faas()) + keyword_opts["tz_aware"] = tz_aware + keyword_opts["connect"] = connect + + opts = self._validate_kwargs_and_update_opts(keyword_opts, opts) + + if srv_service_name is None: + srv_service_name = opts.get("srvServiceName", common.SRV_SERVICE_NAME) + + srv_max_hosts = srv_max_hosts or opts.get("srvmaxhosts") + opts = self._normalize_and_validate_options(opts, seeds) + + # Username and password passed as kwargs override user info in URI. + username = opts.get("username", self._resolve_srv_info["username"]) + password = opts.get("password", self._resolve_srv_info["password"]) + self._options = ClientOptions( + username, password, self._resolve_srv_info["dbase"], opts, _IS_SYNC + ) + + self._init_based_on_options(seeds, srv_max_hosts, srv_service_name) + + def _init_based_on_options( + self, seeds: Collection[tuple[str, int]], srv_max_hosts: Any, srv_service_name: Any + ) -> None: + self._event_listeners = self._options.pool_options._event_listeners + self._topology_settings = TopologySettings( + seeds=seeds, + replica_set_name=self._options.replica_set_name, + pool_class=self._resolve_srv_info["pool_class"], + pool_options=self._options.pool_options, + monitor_class=self._resolve_srv_info["monitor_class"], + condition_class=self._resolve_srv_info["condition_class"], + local_threshold_ms=self._options.local_threshold_ms, + server_selection_timeout=self._options.server_selection_timeout, + server_selector=self._options.server_selector, + heartbeat_frequency=self._options.heartbeat_frequency, + fqdn=self._resolve_srv_info["fqdn"], + direct_connection=self._options.direct_connection, + load_balanced=self._options.load_balanced, + srv_service_name=srv_service_name, + srv_max_hosts=srv_max_hosts, + server_monitoring_mode=self._options.server_monitoring_mode, + topology_id=self._topology_settings._topology_id if self._topology_settings else None, + ) if self._options.auto_encryption_opts: from pymongo.synchronous.encryption import _Encrypter self._encrypter = _Encrypter(self, self._options.auto_encryption_opts) self._timeout = self._options.timeout - if _HAS_REGISTER_AT_FORK: - # Add this client to the list of weakly referenced items. - # This will be used later if we fork. - MongoClient._clients[self._topology._topology_id] = self + def _normalize_and_validate_options( + self, opts: common._CaseInsensitiveDictionary, seeds: set[tuple[str, int | None]] + ) -> common._CaseInsensitiveDictionary: + # Handle security-option conflicts in combined options. + opts = _handle_security_options(opts) + # Normalize combined options. + opts = _normalize_options(opts) + _check_options(seeds, opts) + return opts + + def _validate_kwargs_and_update_opts( + self, + keyword_opts: common._CaseInsensitiveDictionary, + opts: common._CaseInsensitiveDictionary, + ) -> common._CaseInsensitiveDictionary: + # Handle deprecated options in kwarg options. + keyword_opts = _handle_option_deprecations(keyword_opts) + # Validate kwarg options. + keyword_opts = common._CaseInsensitiveDictionary( + dict(common.validate(keyword_opts.cased_key(k), v) for k, v in keyword_opts.items()) + ) + # Override connection string options with kwarg options. 
+ opts.update(keyword_opts) + return opts def _connect(self) -> None: """Explicitly connect to MongoDB synchronously instead of on the first operation.""" @@ -900,6 +1019,10 @@ def _connect(self) -> None: def _init_background(self, old_pid: Optional[int] = None) -> None: self._topology = Topology(self._topology_settings) + if _HAS_REGISTER_AT_FORK: + # Add this client to the list of weakly referenced items. + # This will be used later if we fork. + MongoClient._clients[self._topology._topology_id] = self # Seed the topology with the old one's pid so we can detect clients # that are opened before a fork and used after. self._topology._pid = old_pid @@ -924,6 +1047,20 @@ def target() -> bool: self._kill_cursors_executor = executor self._opened = False + def append_metadata(self, driver_info: DriverInfo) -> None: + """Appends the given metadata to existing driver metadata. + + :param driver_info: a :class:`~pymongo.driver_info.DriverInfo` + + .. versionadded:: 4.14 + """ + + if not isinstance(driver_info, DriverInfo): + raise TypeError( + f"driver_info must be an instance of DriverInfo, not {type(driver_info)}" + ) + self._options.pool_options._update_metadata(driver_info) + def _should_pin_cursor(self, session: Optional[ClientSession]) -> Optional[bool]: return self._options.load_balanced and not (session and session.in_transaction) @@ -933,7 +1070,7 @@ def _after_fork(self) -> None: # Reset the session pool to avoid duplicate sessions in the child process. self._topology._session_pool.reset() - def _duplicate(self, **kwargs: Any) -> MongoClient: + def _duplicate(self, **kwargs: Any) -> MongoClient: # type: ignore[type-arg] args = self._init_kwargs.copy() args.update(kwargs) return MongoClient(**args) @@ -1088,6 +1225,16 @@ def topology_description(self) -> TopologyDescription: .. versionadded:: 4.0 """ + if self._topology is None: + servers = {(host, port): ServerDescription((host, port)) for host, port in self._seeds} + return TopologyDescription( + TOPOLOGY_TYPE.Unknown, + servers, + None, + None, + None, + self._topology_settings, + ) return self._topology.description @property @@ -1101,6 +1248,8 @@ def nodes(self) -> FrozenSet[_Address]: to any servers, or a network partition causes it to lose connection to all servers. """ + if self._topology is None: + return frozenset() description = self._topology.description return frozenset(s.address for s in description.known_servers) @@ -1114,16 +1263,24 @@ def options(self) -> ClientOptions: """ return self._options + def eq_props(self) -> tuple[tuple[_Address, ...], Optional[str], Optional[str], str]: + return ( + tuple(sorted(self._resolve_srv_info["seeds"])), + self._options.replica_set_name, + self._resolve_srv_info["fqdn"], + self._resolve_srv_info["srv_service_name"], + ) + def __eq__(self, other: Any) -> bool: if isinstance(other, self.__class__): - return self._topology == other._topology + return self.eq_props() == other.eq_props() return NotImplemented def __ne__(self, other: Any) -> bool: return not self == other def __hash__(self) -> int: - return hash(self._topology) + return hash(self.eq_props()) def _repr_helper(self) -> str: def option_repr(option: str, value: Any) -> str: @@ -1139,13 +1296,16 @@ def option_repr(option: str, value: Any) -> str: return f"{option}={value!r}" # Host first... 
- options = [ - "host=%r" - % [ - "%s:%d" % (host, port) if port is not None else host - for host, port in self._topology_settings.seeds + if self._topology is None: + options = [f"host='mongodb+srv://{self._resolve_srv_info['fqdn']}'"] + else: + options = [ + "host=%r" + % [ + "%s:%d" % (host, port) if port is not None else host + for host, port in self._topology_settings.seeds + ] ] - ] # ... then everything in self._constructor_args... options.extend( option_repr(key, self._options._options[key]) for key in self._constructor_args @@ -1392,7 +1552,7 @@ def get_database( self, name, codec_options, read_preference, write_concern, read_concern ) - def _database_default_options(self, name: str) -> database.Database: + def _database_default_options(self, name: str) -> database.Database: # type: ignore[type-arg] """Get a Database instance with the default settings.""" return self.get_database( name, @@ -1444,6 +1604,8 @@ def address(self) -> Optional[tuple[str, int]]: .. versionadded:: 3.0 """ + if self._topology is None: + self._get_topology() topology_type = self._topology._description.topology_type if ( topology_type == TOPOLOGY_TYPE.Sharded @@ -1466,6 +1628,8 @@ def primary(self) -> Optional[tuple[str, int]]: .. versionadded:: 3.0 MongoClient gained this property in version 3.0. """ + if self._topology is None: + self._get_topology() return self._topology.get_primary() # type: ignore[return-value] @property @@ -1479,6 +1643,8 @@ def secondaries(self) -> set[_Address]: .. versionadded:: 3.0 MongoClient gained this property in version 3.0. """ + if self._topology is None: + self._get_topology() return self._topology.get_secondaries() @property @@ -1489,6 +1655,8 @@ def arbiters(self) -> set[_Address]: connected to a replica set, there are no arbiters, or this client was created without the `replicaSet` option. """ + if self._topology is None: + self._get_topology() return self._topology.get_arbiters() @property @@ -1547,6 +1715,8 @@ def close(self) -> None: .. versionchanged:: 3.6 End all server sessions created by this client. """ + if self._topology is None: + return session_ids = self._topology.pop_all_sessions() if session_ids: self._end_sessions(session_ids) @@ -1559,6 +1729,12 @@ def close(self) -> None: # TODO: PYTHON-1921 Encrypted MongoClients cannot be re-opened. self._encrypter.close() self._closed = True + if not _IS_SYNC: + asyncio.gather( + self._topology.cleanup_monitors(), # type: ignore[func-returns-value] + self._kill_cursors_executor.join(), # type: ignore[func-returns-value] + return_exceptions=True, + ) if not _IS_SYNC: # Add support for contextlib.closing. @@ -1570,7 +1746,17 @@ def _get_topology(self) -> Topology: If this client was created with "connect=False", calling _get_topology launches the connection process in the background. """ + if not _IS_SYNC: + if self._loop is None: + self._loop = asyncio.get_running_loop() + elif self._loop != asyncio.get_running_loop(): + raise RuntimeError( + "Cannot use MongoClient in different event loop. MongoClient uses low-level asyncio APIs that bind it to the event loop it was created on." 
+ ) if not self._opened: + if self._resolve_srv_info["is_srv"]: + self._resolve_srv() + self._init_background() self._topology.open() with self._lock: self._kill_cursors_executor.open() @@ -1703,7 +1889,7 @@ def _conn_for_reads( def _run_operation( self, operation: Union[_Query, _GetMore], - unpack_res: Callable, + unpack_res: Callable, # type: ignore[type-arg] address: Optional[_Address] = None, ) -> Response: """Run a _Query/_GetMore operation and return a Response. @@ -1858,17 +2044,18 @@ def _retryable_read( retryable = bool( retryable and self.options.retry_reads and not (session and session.in_transaction) ) - return self._retry_internal( - func, - session, - None, - operation, - is_read=True, - address=address, - read_pref=read_pref, - retryable=retryable, - operation_id=operation_id, - ) + with self._tmp_session(session) as s: + return self._retry_internal( + func, + s, + None, + operation, + is_read=True, + address=address, + read_pref=read_pref, + retryable=retryable, + operation_id=operation_id, + ) def _retryable_write( self, @@ -1901,7 +2088,6 @@ def _cleanup_cursor_no_lock( address: Optional[_CursorAddress], conn_mgr: _ConnectionManager, session: Optional[ClientSession], - explicit_session: bool, ) -> None: """Cleanup a cursor from __del__ without locking. @@ -1916,7 +2102,7 @@ def _cleanup_cursor_no_lock( # The cursor will be closed later in a different session. if cursor_id or conn_mgr: self._close_cursor_soon(cursor_id, address, conn_mgr) - if session and not explicit_session: + if session and session._implicit and not session._leave_alive: session._end_implicit_session() def _cleanup_cursor_lock( @@ -1925,7 +2111,6 @@ def _cleanup_cursor_lock( address: Optional[_CursorAddress], conn_mgr: _ConnectionManager, session: Optional[ClientSession], - explicit_session: bool, ) -> None: """Cleanup a cursor from cursor.close() using a lock. @@ -1937,7 +2122,6 @@ def _cleanup_cursor_lock( :param address: The _CursorAddress. :param conn_mgr: The _ConnectionManager for the pinned connection or None. :param session: The cursor's session. - :param explicit_session: True if the session was passed explicitly. """ if cursor_id: if conn_mgr and conn_mgr.more_to_come: @@ -1950,7 +2134,7 @@ def _cleanup_cursor_lock( self._close_cursor_now(cursor_id, address, session=session, conn_mgr=conn_mgr) if conn_mgr: conn_mgr.close() - if session and not explicit_session: + if session and session._implicit and not session._leave_alive: session._end_implicit_session() def _close_cursor_now( @@ -1965,7 +2149,7 @@ def _close_cursor_now( The cursor is closed synchronously on the current thread. """ if not isinstance(cursor_id, int): - raise TypeError("cursor_id must be an instance of int") + raise TypeError(f"cursor_id must be an instance of int, not {type(cursor_id)}") try: if conn_mgr: @@ -2031,16 +2215,14 @@ def _process_kill_cursors(self) -> None: for address, cursor_id, conn_mgr in pinned_cursors: try: - self._cleanup_cursor_lock(cursor_id, address, conn_mgr, None, False) - except asyncio.CancelledError: - raise + self._cleanup_cursor_lock(cursor_id, address, conn_mgr, None) except Exception as exc: if isinstance(exc, InvalidOperation) and self._topology._closed: # Raise the exception when client is closed so that it # can be caught in _process_periodic_tasks raise else: - helpers_shared._handle_exception() + _log_client_error() # Don't re-open topology if it's closed and there's no pending cursors. 
if address_to_cursor_ids: @@ -2048,13 +2230,11 @@ def _process_kill_cursors(self) -> None: for address, cursor_ids in address_to_cursor_ids.items(): try: self._kill_cursors(cursor_ids, address, topology, session=None) - except asyncio.CancelledError: - raise except Exception as exc: if isinstance(exc, InvalidOperation) and self._topology._closed: raise else: - helpers_shared._handle_exception() + _log_client_error() # This method is run periodically by a background thread. def _process_periodic_tasks(self) -> None: @@ -2064,13 +2244,11 @@ def _process_periodic_tasks(self) -> None: try: self._process_kill_cursors() self._topology.update_pool() - except asyncio.CancelledError: - raise except Exception as exc: if isinstance(exc, InvalidOperation) and self._topology._closed: return else: - helpers_shared._handle_exception() + _log_client_error() def _return_server_session( self, server_session: Union[_ServerSession, _EmptyServerSession] @@ -2082,12 +2260,14 @@ def _return_server_session( @contextlib.contextmanager def _tmp_session( - self, session: Optional[client_session.ClientSession], close: bool = True - ) -> Generator[Optional[client_session.ClientSession], None, None]: + self, session: Optional[client_session.ClientSession] + ) -> Generator[Optional[client_session.ClientSession], None]: """If provided session is None, lend a temporary session.""" if session is not None: if not isinstance(session, client_session.ClientSession): - raise ValueError("'session' argument must be a ClientSession or None.") + raise ValueError( + f"'session' argument must be a ClientSession or None, not {type(session)}" + ) # Don't call end_session. yield session return @@ -2105,7 +2285,7 @@ def _tmp_session( raise finally: # Call end_session when we exit this scope. - if close: + if not s._attached_to_cursor: s.end_session() else: yield None @@ -2124,8 +2304,8 @@ def server_info(self, session: Optional[client_session.ClientSession] = None) -> .. versionchanged:: 3.6 Added ``session`` parameter. """ - return cast( - dict, + return cast( # type: ignore[redundant-cast] + dict[str, Any], self.admin.command( "buildinfo", read_preference=ReadPreference.PRIMARY, session=session ), @@ -2159,6 +2339,15 @@ def list_databases( ) -> CommandCursor[dict[str, Any]]: """Get a cursor over the databases of the connected server. + Cursors are closed automatically when they are exhausted (the last batch of data is retrieved from the database). + If a cursor is not exhausted, it will be closed automatically upon garbage collection, which leaves resources open but unused for a potentially long period of time. + To avoid this, best practice is to call :meth:`Cursor.close` when the cursor is no longer needed, + or use the cursor in a with statement:: + + with client.list_databases() as cursor: + for database in cursor: + print(database) + :param session: a :class:`~pymongo.client_session.ClientSession`. 
:param comment: A user-provided comment to attach to this @@ -2235,7 +2424,9 @@ def drop_database( name = name.name if not isinstance(name, str): - raise TypeError("name_or_database must be an instance of str or a Database") + raise TypeError( + f"name_or_database must be an instance of str or a Database, not {type(name)}" + ) with self._conn_for_writes(session, operation=_Op.DROP_DATABASE) as conn: self[name]._command( @@ -2250,13 +2441,13 @@ def drop_database( @_csot.apply def bulk_write( self, - models: Sequence[_WriteOp[_DocumentType]], + models: Sequence[_WriteOp], session: Optional[ClientSession] = None, ordered: bool = True, verbose_results: bool = False, bypass_document_validation: Optional[bool] = None, comment: Optional[Any] = None, - let: Optional[Mapping] = None, + let: Optional[Mapping[str, Any]] = None, write_concern: Optional[WriteConcern] = None, ) -> ClientBulkWriteResult: """Send a batch of write operations, potentially across multiple namespaces, to the server. @@ -2331,9 +2522,9 @@ def bulk_write( :return: An instance of :class:`~pymongo.results.ClientBulkWriteResult`. - .. seealso:: For more info, see :doc:`/examples/client_bulk`. + .. seealso:: For more info, see `Client Bulk Write `_. - .. seealso:: :ref:`writes-and-ids` + .. seealso:: `Writes and ids `_ .. note:: requires MongoDB server version 8.0+. @@ -2442,7 +2633,12 @@ class _MongoClientErrorHandler: "handled", ) - def __init__(self, client: MongoClient, server: Server, session: Optional[ClientSession]): + def __init__( + self, + client: MongoClient, # type: ignore[type-arg] + server: Server, + session: Optional[ClientSession], + ): if not isinstance(client, MongoClient): # This is for compatibility with mocked and subclassed types, such as in Motor. if not any(cls.__name__ == "MongoClient" for cls in type(client).__mro__): @@ -2494,6 +2690,7 @@ def handle( self.completed_handshake, self.service_id, ) + assert self.client._topology is not None self.client._topology.handle_error(self.server_address, err_ctx) def __enter__(self) -> _MongoClientErrorHandler: @@ -2513,7 +2710,7 @@ class _ClientConnectionRetryable(Generic[T]): def __init__( self, - mongo_client: MongoClient, + mongo_client: MongoClient, # type: ignore[type-arg] func: _WriteCall[T] | _ReadCall[T], bulk: Optional[Union[_Bulk, _ClientBulk]], operation: str, @@ -2543,6 +2740,7 @@ def __init__( self._deprioritized_servers: list[Server] = [] self._operation = operation self._operation_id = operation_id + self._attempt_number = 0 def run(self) -> T: """Runs the supplied func() and attempts a retry @@ -2585,6 +2783,7 @@ def run(self) -> T: raise self._retrying = True self._last_error = exc + self._attempt_number += 1 else: raise @@ -2606,6 +2805,7 @@ def run(self) -> T: raise self._last_error from exc else: raise + self._attempt_number += 1 if self._bulk: self._bulk.retrying = True else: @@ -2684,6 +2884,14 @@ def _write(self) -> T: # not support sessions raise the last error. 
self._check_last_error() self._retryable = False + if self._retrying: + _debug_log( + _COMMAND_LOGGER, + message=f"Retrying write attempt number {self._attempt_number}", + clientId=self._client._topology_settings._topology_id, + commandName=self._operation, + operationId=self._operation_id, + ) return self._func(self._session, conn, self._retryable) # type: ignore except PyMongoError as exc: if not self._retryable: @@ -2705,6 +2913,14 @@ def _read(self) -> T: ): if self._retrying and not self._retryable: self._check_last_error() + if self._retrying: + _debug_log( + _COMMAND_LOGGER, + message=f"Retrying read attempt number {self._attempt_number}", + clientId=self._client._topology_settings._topology_id, + commandName=self._operation, + operationId=self._operation_id, + ) return self._func(self._session, self._server, conn, read_pref) # type: ignore diff --git a/pymongo/synchronous/monitor.py b/pymongo/synchronous/monitor.py index df4130d4ab..f395588814 100644 --- a/pymongo/synchronous/monitor.py +++ b/pymongo/synchronous/monitor.py @@ -4,7 +4,7 @@ # may not use this file except in compliance with the License. You # may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -21,11 +21,11 @@ import logging import time import weakref -from typing import TYPE_CHECKING, Any, Mapping, Optional, cast +from typing import TYPE_CHECKING, Any, Optional from pymongo import common, periodic_executor from pymongo._csot import MovingMinimum -from pymongo.errors import NetworkTimeout, NotPrimaryError, OperationFailure, _OperationCancelled +from pymongo.errors import NetworkTimeout, _OperationCancelled from pymongo.hello import Hello from pymongo.lock import _create_lock from pymongo.logger import _SDAM_LOGGER, _debug_log, _SDAMStatusMessage @@ -33,10 +33,14 @@ from pymongo.pool_options import _is_faas from pymongo.read_preferences import MovingAverage from pymongo.server_description import ServerDescription -from pymongo.srv_resolver import _SrvResolver +from pymongo.synchronous.srv_resolver import _SrvResolver if TYPE_CHECKING: - from pymongo.synchronous.pool import Connection, Pool, _CancellationContext + from pymongo.synchronous.pool import ( # type: ignore[attr-defined] + Connection, + Pool, + _CancellationContext, + ) from pymongo.synchronous.settings import TopologySettings from pymongo.synchronous.topology import Topology @@ -112,9 +116,9 @@ def close(self) -> None: """ self.gc_safe_close() - def join(self, timeout: Optional[int] = None) -> None: + def join(self) -> None: """Wait for the monitor to stop.""" - self._executor.join(timeout) + self._executor.join() def request_check(self) -> None: """If the monitor is sleeping, wake it soon.""" @@ -189,6 +193,9 @@ def gc_safe_close(self) -> None: self._rtt_monitor.gc_safe_close() self.cancel_check() + def join(self) -> None: + asyncio.gather(self._executor.join(), self._rtt_monitor.join(), return_exceptions=True) # type: ignore[func-returns-value] + def close(self) -> None: self.gc_safe_close() self._rtt_monitor.close() @@ -250,15 +257,7 @@ def _check_server(self) -> ServerDescription: self._conn_id = None start = time.monotonic() try: - try: - return self._check_once() - except (OperationFailure, NotPrimaryError) as exc: - # Update max cluster time even when hello fails. 
- details = cast(Mapping[str, Any], exc.details) - self._topology.receive_cluster_time(details.get("$clusterTime")) - raise - except asyncio.CancelledError: - raise + return self._check_once() except ReferenceError: raise except Exception as error: @@ -273,6 +272,7 @@ def _check_server(self) -> ServerDescription: if _SDAM_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _SDAM_LOGGER, + message=_SDAMStatusMessage.HEARTBEAT_FAIL, topologyId=self._topology._topology_id, serverHost=address[0], serverPort=address[1], @@ -280,7 +280,6 @@ def _check_server(self) -> ServerDescription: durationMS=duration * 1000, failure=error, driverConnectionId=self._conn_id, - message=_SDAMStatusMessage.HEARTBEAT_FAIL, ) self._reset_connection() if isinstance(error, _OperationCancelled): @@ -312,13 +311,13 @@ def _check_once(self) -> ServerDescription: if _SDAM_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _SDAM_LOGGER, + message=_SDAMStatusMessage.HEARTBEAT_START, topologyId=self._topology._topology_id, driverConnectionId=conn.id, serverConnectionId=conn.server_connection_id, serverHost=address[0], serverPort=address[1], awaited=awaited, - message=_SDAMStatusMessage.HEARTBEAT_START, ) self._cancel_context = conn.cancel_context @@ -338,6 +337,7 @@ def _check_once(self) -> ServerDescription: if _SDAM_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _SDAM_LOGGER, + message=_SDAMStatusMessage.HEARTBEAT_SUCCESS, topologyId=self._topology._topology_id, driverConnectionId=conn.id, serverConnectionId=conn.server_connection_id, @@ -346,16 +346,14 @@ def _check_once(self) -> ServerDescription: awaited=awaited, durationMS=round_trip_time * 1000, reply=response.document, - message=_SDAMStatusMessage.HEARTBEAT_SUCCESS, ) return sd - def _check_with_socket(self, conn: Connection) -> tuple[Hello, float]: + def _check_with_socket(self, conn: Connection) -> tuple[Hello, float]: # type: ignore[type-arg] """Return (Hello, round_trip_time). Can raise ConnectionFailure or OperationFailure. """ - cluster_time = self._topology.max_cluster_time() start = time.monotonic() if conn.more_to_come: # Read the next streaming hello (MongoDB 4.4+). @@ -365,13 +363,12 @@ def _check_with_socket(self, conn: Connection) -> tuple[Hello, float]: ): # Initiate streaming hello (MongoDB 4.4+). response = conn._hello( - cluster_time, self._server_description.topology_version, self._settings.heartbeat_frequency, ) else: # New connection handshake or polling hello (MongoDB <4.4). - response = conn._hello(cluster_time, None, None) + response = conn._hello(None, None) duration = _monotonic_duration(start) return response, duration @@ -424,14 +421,13 @@ def _get_seedlist(self) -> Optional[list[tuple[str, Any]]]: if len(seedlist) == 0: # As per the spec: this should be treated as a failure. raise Exception - except asyncio.CancelledError: - raise - except Exception: + except Exception as exc: # As per the spec, upon encountering an error: # - An error must not be raised # - SRV records must be rescanned every heartbeatFrequencyMS # - Topology must be left unchanged self.request_check() + _debug_log(_SDAM_LOGGER, message="SRV monitor check failed", failure=repr(exc)) return None else: self._executor.update_interval(max(ttl, common.MIN_SRV_RESCAN_INTERVAL)) @@ -489,8 +485,6 @@ def _run(self) -> None: except ReferenceError: # Topology was garbage-collected. 
self.close() - except asyncio.CancelledError: - raise except Exception: self._pool.reset() diff --git a/pymongo/synchronous/network.py b/pymongo/synchronous/network.py index 7206dca735..7d9bca4d58 100644 --- a/pymongo/synchronous/network.py +++ b/pymongo/synchronous/network.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -17,7 +17,6 @@ import datetime import logging -import time from typing import ( TYPE_CHECKING, Any, @@ -31,20 +30,16 @@ from bson import _decode_all_selective from pymongo import _csot, helpers_shared, message -from pymongo.common import MAX_MESSAGE_SIZE -from pymongo.compression_support import _NO_COMPRESSION, decompress +from pymongo.compression_support import _NO_COMPRESSION from pymongo.errors import ( NotPrimaryError, OperationFailure, - ProtocolError, ) from pymongo.logger import _COMMAND_LOGGER, _CommandStatusMessage, _debug_log -from pymongo.message import _UNPACK_REPLY, _OpMsg, _OpReply +from pymongo.message import _OpMsg from pymongo.monitoring import _is_speculative_authenticate from pymongo.network_layer import ( - _UNPACK_COMPRESSION_HEADER, - _UNPACK_HEADER, - receive_data, + receive_message, sendall, ) @@ -71,7 +66,7 @@ def command( read_preference: Optional[_ServerMode], codec_options: CodecOptions[_DocumentType], session: Optional[ClientSession], - client: Optional[MongoClient], + client: Optional[MongoClient[Any]], check: bool = True, allowable_errors: Optional[Sequence[Union[str, int]]] = None, address: Optional[_Address] = None, @@ -168,8 +163,8 @@ def command( if _COMMAND_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _COMMAND_LOGGER, - clientId=client._topology_settings._topology_id, message=_CommandStatusMessage.STARTED, + clientId=client._topology_settings._topology_id, command=spec, commandName=next(iter(spec)), databaseName=dbname, @@ -194,7 +189,7 @@ def command( ) try: - sendall(conn.conn, msg) + sendall(conn.conn.get_conn, msg) if use_op_msg and unacknowledged: # Unacknowledged, fake a successful command response. 
reply = None @@ -207,6 +202,10 @@ def command( ) response_doc = unpacked_docs[0] + if not conn.ready: + cluster_time = response_doc.get("$clusterTime") + if cluster_time: + conn._cluster_time = cluster_time if client: client._process_response(response_doc, session) if check: @@ -226,8 +225,8 @@ def command( if _COMMAND_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _COMMAND_LOGGER, - clientId=client._topology_settings._topology_id, message=_CommandStatusMessage.FAILED, + clientId=client._topology_settings._topology_id, durationMS=duration, failure=failure, commandName=next(iter(spec)), @@ -260,8 +259,8 @@ def command( if _COMMAND_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _COMMAND_LOGGER, - clientId=client._topology_settings._topology_id, message=_CommandStatusMessage.SUCCEEDED, + clientId=client._topology_settings._topology_id, durationMS=duration, reply=response_doc, commandName=next(iter(spec)), @@ -297,45 +296,3 @@ def command( ) return response_doc # type: ignore[return-value] - - -def receive_message( - conn: Connection, request_id: Optional[int], max_message_size: int = MAX_MESSAGE_SIZE -) -> Union[_OpReply, _OpMsg]: - """Receive a raw BSON message or raise socket.error.""" - if _csot.get_timeout(): - deadline = _csot.get_deadline() - else: - timeout = conn.conn.gettimeout() - if timeout: - deadline = time.monotonic() + timeout - else: - deadline = None - # Ignore the response's request id. - length, _, response_to, op_code = _UNPACK_HEADER(receive_data(conn, 16, deadline)) - # No request_id for exhaust cursor "getMore". - if request_id is not None: - if request_id != response_to: - raise ProtocolError(f"Got response id {response_to!r} but expected {request_id!r}") - if length <= 16: - raise ProtocolError( - f"Message length ({length!r}) not longer than standard message header size (16)" - ) - if length > max_message_size: - raise ProtocolError( - f"Message length ({length!r}) is larger than server max " - f"message size ({max_message_size!r})" - ) - if op_code == 2012: - op_code, _, compressor_id = _UNPACK_COMPRESSION_HEADER(receive_data(conn, 9, deadline)) - data = decompress(receive_data(conn, length - 25, deadline), compressor_id) - else: - data = receive_data(conn, length - 16, deadline) - - try: - unpack_reply = _UNPACK_REPLY[op_code] - except KeyError: - raise ProtocolError( - f"Got opcode {op_code!r} but expected {_UNPACK_REPLY.keys()!r}" - ) from None - return unpack_reply(data) diff --git a/pymongo/synchronous/pool.py b/pymongo/synchronous/pool.py index 1a155c82d7..66258fda18 100644 --- a/pymongo/synchronous/pool.py +++ b/pymongo/synchronous/pool.py @@ -4,7 +4,7 @@ # may not use this file except in compliance with the License. 
You # may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -17,11 +17,8 @@ import asyncio import collections import contextlib -import functools import logging import os -import socket -import ssl import sys import time import weakref @@ -49,18 +46,16 @@ from pymongo.errors import ( # type:ignore[attr-defined] AutoReconnect, ConfigurationError, - ConnectionFailure, DocumentTooLarge, ExecutionTimeout, InvalidOperation, - NetworkTimeout, NotPrimaryError, OperationFailure, PyMongoError, WaitQueueTimeoutError, - _CertificateError, ) from pymongo.hello import Hello, HelloCompat +from pymongo.helpers_shared import _get_timeout_details, format_timeout_details from pymongo.lock import ( _cond_wait, _create_condition, @@ -76,16 +71,21 @@ ConnectionCheckOutFailedReason, ConnectionClosedReason, ) -from pymongo.network_layer import sendall +from pymongo.network_layer import NetworkingInterface, receive_message, sendall from pymongo.pool_options import PoolOptions +from pymongo.pool_shared import ( + SSLErrors, + _CancellationContext, + _configured_socket_interface, + _raise_connection_failure, +) from pymongo.read_preferences import ReadPreference from pymongo.server_api import _add_to_command from pymongo.server_type import SERVER_TYPE from pymongo.socket_checker import SocketChecker -from pymongo.ssl_support import HAS_SNI, SSLError from pymongo.synchronous.client_session import _validate_session_write_concern from pymongo.synchronous.helpers import _handle_reauth -from pymongo.synchronous.network import command, receive_message +from pymongo.synchronous.network import command if TYPE_CHECKING: from bson import CodecOptions @@ -96,13 +96,12 @@ ZstdContext, ) from pymongo.message import _OpMsg, _OpReply - from pymongo.pyopenssl_context import _sslConn from pymongo.read_concern import ReadConcern from pymongo.read_preferences import _ServerMode from pymongo.synchronous.auth import _AuthContext from pymongo.synchronous.client_session import ClientSession from pymongo.synchronous.mongo_client import MongoClient, _MongoClientErrorHandler - from pymongo.typings import ClusterTime, _Address, _CollationIn + from pymongo.typings import _Address, _CollationIn from pymongo.write_concern import WriteConcern try: @@ -123,133 +122,6 @@ def _set_non_inheritable_non_atomic(fd: int) -> None: # noqa: ARG001 _IS_SYNC = True -_MAX_TCP_KEEPIDLE = 120 -_MAX_TCP_KEEPINTVL = 10 -_MAX_TCP_KEEPCNT = 9 - -if sys.platform == "win32": - try: - import _winreg as winreg - except ImportError: - import winreg - - def _query(key, name, default): - try: - value, _ = winreg.QueryValueEx(key, name) - # Ensure the value is a number or raise ValueError. - return int(value) - except (OSError, ValueError): - # QueryValueEx raises OSError when the key does not exist (i.e. - # the system is using the Windows default value). - return default - - try: - with winreg.OpenKey( - winreg.HKEY_LOCAL_MACHINE, r"SYSTEM\CurrentControlSet\Services\Tcpip\Parameters" - ) as key: - _WINDOWS_TCP_IDLE_MS = _query(key, "KeepAliveTime", 7200000) - _WINDOWS_TCP_INTERVAL_MS = _query(key, "KeepAliveInterval", 1000) - except OSError: - # We could not check the default values because winreg.OpenKey failed. - # Assume the system is using the default values. 
- _WINDOWS_TCP_IDLE_MS = 7200000 - _WINDOWS_TCP_INTERVAL_MS = 1000 - - def _set_keepalive_times(sock): - idle_ms = min(_WINDOWS_TCP_IDLE_MS, _MAX_TCP_KEEPIDLE * 1000) - interval_ms = min(_WINDOWS_TCP_INTERVAL_MS, _MAX_TCP_KEEPINTVL * 1000) - if idle_ms < _WINDOWS_TCP_IDLE_MS or interval_ms < _WINDOWS_TCP_INTERVAL_MS: - sock.ioctl(socket.SIO_KEEPALIVE_VALS, (1, idle_ms, interval_ms)) - -else: - - def _set_tcp_option(sock: socket.socket, tcp_option: str, max_value: int) -> None: - if hasattr(socket, tcp_option): - sockopt = getattr(socket, tcp_option) - try: - # PYTHON-1350 - NetBSD doesn't implement getsockopt for - # TCP_KEEPIDLE and friends. Don't attempt to set the - # values there. - default = sock.getsockopt(socket.IPPROTO_TCP, sockopt) - if default > max_value: - sock.setsockopt(socket.IPPROTO_TCP, sockopt, max_value) - except OSError: - pass - - def _set_keepalive_times(sock: socket.socket) -> None: - _set_tcp_option(sock, "TCP_KEEPIDLE", _MAX_TCP_KEEPIDLE) - _set_tcp_option(sock, "TCP_KEEPINTVL", _MAX_TCP_KEEPINTVL) - _set_tcp_option(sock, "TCP_KEEPCNT", _MAX_TCP_KEEPCNT) - - -def _raise_connection_failure( - address: Any, - error: Exception, - msg_prefix: Optional[str] = None, - timeout_details: Optional[dict[str, float]] = None, -) -> NoReturn: - """Convert a socket.error to ConnectionFailure and raise it.""" - host, port = address - # If connecting to a Unix socket, port will be None. - if port is not None: - msg = "%s:%d: %s" % (host, port, error) - else: - msg = f"{host}: {error}" - if msg_prefix: - msg = msg_prefix + msg - if "configured timeouts" not in msg: - msg += format_timeout_details(timeout_details) - if isinstance(error, socket.timeout): - raise NetworkTimeout(msg) from error - elif isinstance(error, SSLError) and "timed out" in str(error): - # Eventlet does not distinguish TLS network timeouts from other - # SSLErrors (https://github.com/eventlet/eventlet/issues/692). - # Luckily, we can work around this limitation because the phrase - # 'timed out' appears in all the timeout related SSLErrors raised. - raise NetworkTimeout(msg) from error - else: - raise AutoReconnect(msg) from error - - -def _get_timeout_details(options: PoolOptions) -> dict[str, float]: - details = {} - timeout = _csot.get_timeout() - socket_timeout = options.socket_timeout - connect_timeout = options.connect_timeout - if timeout: - details["timeoutMS"] = timeout * 1000 - if socket_timeout and not timeout: - details["socketTimeoutMS"] = socket_timeout * 1000 - if connect_timeout: - details["connectTimeoutMS"] = connect_timeout * 1000 - return details - - -def format_timeout_details(details: Optional[dict[str, float]]) -> str: - result = "" - if details: - result += " (configured timeouts:" - for timeout in ["socketTimeoutMS", "timeoutMS", "connectTimeoutMS"]: - if timeout in details: - result += f" {timeout}: {details[timeout]}ms," - result = result[:-1] - result += ")" - return result - - -class _CancellationContext: - def __init__(self) -> None: - self._cancelled = False - - def cancel(self) -> None: - """Cancel this context.""" - self._cancelled = True - - @property - def cancelled(self) -> bool: - """Was cancel called?""" - return self._cancelled - class Connection: """Store a connection with some metadata. 
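The connection-failure and timeout-detail helpers removed above now come from ``pymongo.pool_shared`` and ``pymongo.helpers_shared`` (see the imports added at the top of this file). A rough sketch of the error-message suffix they produce, assuming the relocated ``format_timeout_details`` keeps the behavior of the removed copy (the option values below are hypothetical)::

    from pymongo.helpers_shared import format_timeout_details

    details = {"timeoutMS": 5000.0, "connectTimeoutMS": 20000.0}
    print(format_timeout_details(details))
    # -> " (configured timeouts: timeoutMS: 5000.0ms, connectTimeoutMS: 20000.0ms)"

In the removed ``_raise_connection_failure``, this suffix was appended to the messages of :class:`~pymongo.errors.NetworkTimeout` and :class:`~pymongo.errors.AutoReconnect` so a failed operation reports which configured timeouts were in effect.
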
@@ -258,15 +130,22 @@ class Connection: :param pool: a Pool instance :param address: the server's (host, port) :param id: the id of this socket in it's pool + :param is_sdam: SDAM connections do not call hello on creation """ def __init__( - self, conn: Union[socket.socket, _sslConn], pool: Pool, address: tuple[str, int], id: int + self, + conn: NetworkingInterface, + pool: Pool, + address: tuple[str, int], + id: int, + is_sdam: bool, ): self.pool_ref = weakref.ref(pool) self.conn = conn self.address = address self.id = id + self.is_sdam = is_sdam self.closed = False self.last_checkin_time = time.monotonic() self.performed_handshake = False @@ -310,16 +189,18 @@ def __init__( self.connect_rtt = 0.0 self._client_id = pool._client_id self.creation_time = time.monotonic() + # For gossiping $clusterTime from the connection handshake to the client. + self._cluster_time = None def set_conn_timeout(self, timeout: Optional[float]) -> None: """Cache last timeout to avoid duplicate calls to conn.settimeout.""" if timeout == self.last_timeout: return self.last_timeout = timeout - self.conn.settimeout(timeout) + self.conn.get_conn.settimeout(timeout) def apply_timeout( - self, client: MongoClient, cmd: Optional[MutableMapping[str, Any]] + self, client: MongoClient[Any], cmd: Optional[MutableMapping[str, Any]] ) -> Optional[float]: # CSOT: use remaining timeout when set. timeout = _csot.remaining() @@ -373,12 +254,11 @@ def hello_cmd(self) -> dict[str, Any]: else: return {HelloCompat.LEGACY_CMD: 1, "helloOk": True} - def hello(self) -> Hello: - return self._hello(None, None, None) + def hello(self) -> Hello[dict[str, Any]]: + return self._hello(None, None) def _hello( self, - cluster_time: Optional[ClusterTime], topology_version: Optional[Any], heartbeat_frequency: Optional[int], ) -> Hello[dict[str, Any]]: @@ -401,9 +281,6 @@ def _hello( if self.opts.connect_timeout: self.set_conn_timeout(self.opts.connect_timeout + heartbeat_frequency) - if not performing_handshake and cluster_time is not None: - cmd["$clusterTime"] = cluster_time - creds = self.opts._credentials if creds: if creds.mechanism == "DEFAULT" and creds.username: @@ -479,7 +356,7 @@ def command( dbname: str, spec: MutableMapping[str, Any], read_preference: _ServerMode = ReadPreference.PRIMARY, - codec_options: CodecOptions = DEFAULT_CODEC_OPTIONS, + codec_options: CodecOptions[Mapping[str, Any]] = DEFAULT_CODEC_OPTIONS, # type: ignore[assignment] check: bool = True, allowable_errors: Optional[Sequence[Union[str, int]]] = None, read_concern: Optional[ReadConcern] = None, @@ -487,7 +364,7 @@ def command( parse_write_concern_error: bool = False, collation: Optional[_CollationIn] = None, session: Optional[ClientSession] = None, - client: Optional[MongoClient] = None, + client: Optional[MongoClient[Any]] = None, retryable_write: bool = False, publish_events: bool = True, user_fields: Optional[Mapping[str, Any]] = None, @@ -539,7 +416,7 @@ def command( spec, self.is_mongos, read_preference, - codec_options, + codec_options, # type: ignore[arg-type] session, client, check, @@ -559,7 +436,7 @@ def command( ) except (OperationFailure, NotPrimaryError): raise - # Catch socket.error, KeyboardInterrupt, etc. and close ourselves. + # Catch socket.error, KeyboardInterrupt, CancelledError, etc. and close ourselves. 
except BaseException as error: self._raise_connection_failure(error) @@ -575,7 +452,8 @@ def send_message(self, message: bytes, max_doc_size: int) -> None: ) try: - sendall(self.conn, message) + sendall(self.conn.get_conn, message) + # Catch KeyboardInterrupt, CancelledError, etc. and cleanup. except BaseException as error: self._raise_connection_failure(error) @@ -586,6 +464,7 @@ def receive_message(self, request_id: Optional[int]) -> Union[_OpReply, _OpMsg]: """ try: return receive_message(self, request_id, self.max_message_size) + # Catch KeyboardInterrupt, CancelledError, etc. and cleanup. except BaseException as error: self._raise_connection_failure(error) @@ -609,7 +488,7 @@ def unack_write(self, msg: bytes, max_doc_size: int) -> None: self.send_message(msg, max_doc_size) def write_command( - self, request_id: int, msg: bytes, codec_options: CodecOptions + self, request_id: int, msg: bytes, codec_options: CodecOptions[Mapping[str, Any]] ) -> dict[str, Any]: """Send "insert" etc. command, returning response as a dict. @@ -652,8 +531,8 @@ def authenticate(self, reauthenticate: bool = False) -> None: if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _CONNECTION_LOGGER, - clientId=self._client_id, message=_ConnectionStatusMessage.CONN_READY, + clientId=self._client_id, serverHost=self.address[0], serverPort=self.address[1], driverConnectionId=self.id, @@ -661,7 +540,7 @@ def authenticate(self, reauthenticate: bool = False) -> None: ) def validate_session( - self, client: Optional[MongoClient], session: Optional[ClientSession] + self, client: Optional[MongoClient[Any]], session: Optional[ClientSession] ) -> None: """Validate this session before use with client. @@ -683,8 +562,8 @@ def close_conn(self, reason: Optional[str]) -> None: if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _CONNECTION_LOGGER, - clientId=self._client_id, message=_ConnectionStatusMessage.CONN_CLOSED, + clientId=self._client_id, serverHost=self.address[0], serverPort=self.address[1], driverConnectionId=self.id, @@ -702,20 +581,21 @@ def _close_conn(self) -> None: # shutdown. try: self.conn.close() - except asyncio.CancelledError: - raise except Exception: # noqa: S110 pass def conn_closed(self) -> bool: """Return True if we know socket has been closed, False otherwise.""" - return self.socket_checker.socket_closed(self.conn) + if _IS_SYNC: + return self.socket_checker.socket_closed(self.conn.get_conn) + else: + return self.conn.is_closing() def send_cluster_time( self, command: MutableMapping[str, Any], session: Optional[ClientSession], - client: Optional[MongoClient], + client: Optional[MongoClient[Any]], ) -> None: """Add $clusterTime.""" if client: @@ -746,7 +626,7 @@ def _raise_connection_failure(self, error: BaseException) -> NoReturn: # signals and throws KeyboardInterrupt into the current frame on the # main thread. # - # But in Gevent and Eventlet, the polling mechanism (epoll, kqueue, + # But in Gevent, the polling mechanism (epoll, kqueue, # ..) is called in Python code, which experiences the signal as a # KeyboardInterrupt from the start, rather than as an initial # socket.error, so we catch that, close the socket, and reraise it. @@ -758,7 +638,7 @@ def _raise_connection_failure(self, error: BaseException) -> NoReturn: reason = ConnectionClosedReason.ERROR self.close_conn(reason) # SSLError from PyOpenSSL inherits directly from Exception. 
- if isinstance(error, (IOError, OSError, SSLError)): + if isinstance(error, (IOError, OSError, *SSLErrors)): details = _get_timeout_details(self.opts) _raise_connection_failure(self.address, error, timeout_details=details) else: @@ -781,143 +661,6 @@ def __repr__(self) -> str: ) -def _create_connection(address: _Address, options: PoolOptions) -> socket.socket: - """Given (host, port) and PoolOptions, connect and return a socket object. - - Can raise socket.error. - - This is a modified version of create_connection from CPython >= 2.7. - """ - host, port = address - - # Check if dealing with a unix domain socket - if host.endswith(".sock"): - if not hasattr(socket, "AF_UNIX"): - raise ConnectionFailure("UNIX-sockets are not supported on this system") - sock = socket.socket(socket.AF_UNIX) - # SOCK_CLOEXEC not supported for Unix sockets. - _set_non_inheritable_non_atomic(sock.fileno()) - try: - sock.connect(host) - return sock - except OSError: - sock.close() - raise - - # Don't try IPv6 if we don't support it. Also skip it if host - # is 'localhost' (::1 is fine). Avoids slow connect issues - # like PYTHON-356. - family = socket.AF_INET - if socket.has_ipv6 and host != "localhost": - family = socket.AF_UNSPEC - - err = None - for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM): - af, socktype, proto, dummy, sa = res - # SOCK_CLOEXEC was new in CPython 3.2, and only available on a limited - # number of platforms (newer Linux and *BSD). Starting with CPython 3.4 - # all file descriptors are created non-inheritable. See PEP 446. - try: - sock = socket.socket(af, socktype | getattr(socket, "SOCK_CLOEXEC", 0), proto) - except OSError: - # Can SOCK_CLOEXEC be defined even if the kernel doesn't support - # it? - sock = socket.socket(af, socktype, proto) - # Fallback when SOCK_CLOEXEC isn't available. - _set_non_inheritable_non_atomic(sock.fileno()) - try: - sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) - # CSOT: apply timeout to socket connect. - timeout = _csot.remaining() - if timeout is None: - timeout = options.connect_timeout - elif timeout <= 0: - raise socket.timeout("timed out") - sock.settimeout(timeout) - sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, True) - _set_keepalive_times(sock) - sock.connect(sa) - return sock - except OSError as e: - err = e - sock.close() - - if err is not None: - raise err - else: - # This likely means we tried to connect to an IPv6 only - # host with an OS/kernel or Python interpreter that doesn't - # support IPv6. The test case is Jython2.5.1 which doesn't - # support IPv6 at all. - raise OSError("getaddrinfo failed") - - -def _configured_socket(address: _Address, options: PoolOptions) -> Union[socket.socket, _sslConn]: - """Given (host, port) and PoolOptions, return a configured socket. - - Can raise socket.error, ConnectionFailure, or _CertificateError. - - Sets socket's SSL and timeout options. - """ - sock = _create_connection(address, options) - ssl_context = options._ssl_context - - if ssl_context is None: - sock.settimeout(options.socket_timeout) - return sock - - host = address[0] - try: - # We have to pass hostname / ip address to wrap_socket - # to use SSLContext.check_hostname. 
-        if HAS_SNI:
-            if _IS_SYNC:
-                ssl_sock = ssl_context.wrap_socket(sock, server_hostname=host)
-            else:
-                if hasattr(ssl_context, "a_wrap_socket"):
-                    ssl_sock = ssl_context.a_wrap_socket(sock, server_hostname=host)  # type: ignore[assignment, misc]
-                else:
-                    loop = asyncio.get_running_loop()
-                    ssl_sock = loop.run_in_executor(
-                        None,
-                        functools.partial(ssl_context.wrap_socket, sock, server_hostname=host),  # type: ignore[assignment, misc]
-                    )
-        else:
-            if _IS_SYNC:
-                ssl_sock = ssl_context.wrap_socket(sock)
-            else:
-                if hasattr(ssl_context, "a_wrap_socket"):
-                    ssl_sock = ssl_context.a_wrap_socket(sock)  # type: ignore[assignment, misc]
-                else:
-                    loop = asyncio.get_running_loop()
-                    ssl_sock = loop.run_in_executor(None, ssl_context.wrap_socket, sock)  # type: ignore[assignment, misc]
-    except _CertificateError:
-        sock.close()
-        # Raise _CertificateError directly like we do after match_hostname
-        # below.
-        raise
-    except (OSError, SSLError) as exc:
-        sock.close()
-        # We raise AutoReconnect for transient and permanent SSL handshake
-        # failures alike. Permanent handshake failures, like protocol
-        # mismatch, will be turned into ServerSelectionTimeoutErrors later.
-        details = _get_timeout_details(options)
-        _raise_connection_failure(address, exc, "SSL handshake failed: ", timeout_details=details)
-    if (
-        ssl_context.verify_mode
-        and not ssl_context.check_hostname
-        and not options.tls_allow_invalid_hostnames
-    ):
-        try:
-            ssl.match_hostname(ssl_sock.getpeercert(), hostname=host)  # type:ignore[attr-defined]
-        except _CertificateError:
-            ssl_sock.close()
-            raise
-
-    ssl_sock.settimeout(options.socket_timeout)
-    return ssl_sock
-
-
 class _PoolClosedError(PyMongoError):
     """Internal error raised when a thread tries to get a connection from a
     closed pool.
@@ -962,19 +705,19 @@ class PoolState:
 # Do *not* explicitly inherit from object or Jython won't call __del__
-# http://bugs.jython.org/issue1057
+# https://bugs.jython.org/issue1057
 class Pool:
     def __init__(
         self,
         address: _Address,
         options: PoolOptions,
-        handshake: bool = True,
+        is_sdam: bool = False,
         client_id: Optional[ObjectId] = None,
     ):
         """
        :param address: a (hostname, port) tuple
        :param options: a PoolOptions instance
-        :param handshake: whether to call hello for each new Connection
+        :param is_sdam: whether this pool is for SDAM monitor connections, which skip the hello handshake
        """
        if options.pause_enabled:
            self.state = PoolState.PAUSED
@@ -986,7 +729,7 @@ def __init__(
         # LIFO pool. Sockets are ordered on idle time. Sockets claimed
         # and returned to pool from the left side. Stale sockets removed
         # from the right side.
-        self.conns: collections.deque = collections.deque()
+        self.conns: collections.deque[Connection] = collections.deque()
         self.active_contexts: set[_CancellationContext] = set()
         self.lock = _create_lock()
         self._max_connecting_cond = _create_condition(self.lock)
@@ -1003,14 +746,14 @@ def __init__(
         self.pid = os.getpid()
         self.address = address
         self.opts = options
-        self.handshake = handshake
+        self.is_sdam = is_sdam
         # Don't publish events or logs in Monitor pools.
         self.enabled_for_cmap = (
-            self.handshake
+            not self.is_sdam
             and self.opts._event_listeners is not None
             and self.opts._event_listeners.enabled_for_cmap
         )
-        self.enabled_for_logging = self.handshake
+        self.enabled_for_logging = not self.is_sdam
         # The first portion of the wait queue.
# Enforces: maxPoolSize @@ -1035,8 +778,8 @@ def __init__( if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _CONNECTION_LOGGER, - clientId=self._client_id, message=_ConnectionStatusMessage.POOL_CREATED, + clientId=self._client_id, serverHost=self.address[0], serverPort=self.address[1], **self.opts.non_default_options, @@ -1061,8 +804,8 @@ def ready(self) -> None: if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _CONNECTION_LOGGER, - clientId=self._client_id, message=_ConnectionStatusMessage.POOL_READY, + clientId=self._client_id, serverHost=self.address[0], serverPort=self.address[1], ) @@ -1093,8 +836,8 @@ def _reset( if service_id is None: sockets, self.conns = self.conns, collections.deque() else: - discard: collections.deque = collections.deque() - keep: collections.deque = collections.deque() + discard: collections.deque = collections.deque() # type: ignore[type-arg] + keep: collections.deque = collections.deque() # type: ignore[type-arg] for conn in self.conns: if conn.service_id == service_id: discard.append(conn) @@ -1118,16 +861,22 @@ def _reset( # PoolClosedEvent but that reset() SHOULD close sockets *after* # publishing the PoolClearedEvent. if close: - for conn in sockets: - conn.close_conn(ConnectionClosedReason.POOL_CLOSED) + if not _IS_SYNC: + asyncio.gather( + *[conn.close_conn(ConnectionClosedReason.POOL_CLOSED) for conn in sockets], # type: ignore[func-returns-value] + return_exceptions=True, + ) + else: + for conn in sockets: + conn.close_conn(ConnectionClosedReason.POOL_CLOSED) if self.enabled_for_cmap: assert listeners is not None listeners.publish_pool_closed(self.address) if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _CONNECTION_LOGGER, - clientId=self._client_id, message=_ConnectionStatusMessage.POOL_CLOSED, + clientId=self._client_id, serverHost=self.address[0], serverPort=self.address[1], ) @@ -1143,14 +892,20 @@ def _reset( if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _CONNECTION_LOGGER, - clientId=self._client_id, message=_ConnectionStatusMessage.POOL_CLEARED, + clientId=self._client_id, serverHost=self.address[0], serverPort=self.address[1], serviceId=service_id, ) - for conn in sockets: - conn.close_conn(ConnectionClosedReason.STALE) + if not _IS_SYNC: + asyncio.gather( + *[conn.close_conn(ConnectionClosedReason.STALE) for conn in sockets], # type: ignore[func-returns-value] + return_exceptions=True, + ) + else: + for conn in sockets: + conn.close_conn(ConnectionClosedReason.STALE) def update_is_writable(self, is_writable: Optional[bool]) -> None: """Updates the is_writable attribute on all sockets currently in the @@ -1159,7 +914,7 @@ def update_is_writable(self, is_writable: Optional[bool]) -> None: self.is_writable = is_writable with self.lock: for _socket in self.conns: - _socket.update_is_writable(self.is_writable) + _socket.update_is_writable(self.is_writable) # type: ignore[arg-type] def reset( self, service_id: Optional[ObjectId] = None, interrupt_connections: bool = False @@ -1187,12 +942,20 @@ def remove_stale_sockets(self, reference_generation: int) -> None: return if self.opts.max_idle_time_seconds is not None: + close_conns = [] with self.lock: while ( self.conns and self.conns[-1].idle_time_seconds() > self.opts.max_idle_time_seconds ): - conn = self.conns.pop() + close_conns.append(self.conns.pop()) + if not _IS_SYNC: + asyncio.gather( + 
*[conn.close_conn(ConnectionClosedReason.IDLE) for conn in close_conns], # type: ignore[func-returns-value] + return_exceptions=True, + ) + else: + for conn in close_conns: conn.close_conn(ConnectionClosedReason.IDLE) while True: @@ -1213,14 +976,18 @@ def remove_stale_sockets(self, reference_generation: int) -> None: self._pending += 1 incremented = True conn = self.connect() + close_conn = False with self.lock: # Close connection and return if the pool was reset during # socket creation or while acquiring the pool lock. if self.gen.get_overall() != reference_generation: - conn.close_conn(ConnectionClosedReason.STALE) - return - self.conns.appendleft(conn) - self.active_contexts.discard(conn.cancel_context) + close_conn = True + if not close_conn: + self.conns.appendleft(conn) + self.active_contexts.discard(conn.cancel_context) + if close_conn: + conn.close_conn(ConnectionClosedReason.STALE) + return finally: if incremented: # Notify after adding the socket to the pool. @@ -1254,15 +1021,16 @@ def connect(self, handler: Optional[_MongoClientErrorHandler] = None) -> Connect if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _CONNECTION_LOGGER, - clientId=self._client_id, message=_ConnectionStatusMessage.CONN_CREATED, + clientId=self._client_id, serverHost=self.address[0], serverPort=self.address[1], driverConnectionId=conn_id, ) try: - sock = _configured_socket(self.address, self.opts) + networking_interface = _configured_socket_interface(self.address, self.opts) + # Catch KeyboardInterrupt, CancelledError, etc. and cleanup. except BaseException as error: with self.lock: self.active_contexts.discard(tmp_context) @@ -1274,40 +1042,44 @@ def connect(self, handler: Optional[_MongoClientErrorHandler] = None) -> Connect if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _CONNECTION_LOGGER, - clientId=self._client_id, message=_ConnectionStatusMessage.CONN_CLOSED, + clientId=self._client_id, serverHost=self.address[0], serverPort=self.address[1], driverConnectionId=conn_id, reason=_verbose_connection_error_reason(ConnectionClosedReason.ERROR), error=ConnectionClosedReason.ERROR, ) - if isinstance(error, (IOError, OSError, SSLError)): + if isinstance(error, (IOError, OSError, *SSLErrors)): details = _get_timeout_details(self.opts) _raise_connection_failure(self.address, error, timeout_details=details) raise - conn = Connection(sock, self, self.address, conn_id) # type: ignore[arg-type] + conn = Connection(networking_interface, self, self.address, conn_id, self.is_sdam) # type: ignore[arg-type] with self.lock: self.active_contexts.add(conn.cancel_context) self.active_contexts.discard(tmp_context) if tmp_context.cancelled: conn.cancel_context.cancel() try: - if self.handshake: + if not self.is_sdam: conn.hello() self.is_writable = conn.is_writable if handler: handler.contribute_socket(conn, completed_handshake=False) conn.authenticate() + # Catch KeyboardInterrupt, CancelledError, etc. and cleanup. 
except BaseException: with self.lock: self.active_contexts.discard(conn.cancel_context) conn.close_conn(ConnectionClosedReason.ERROR) raise + if handler: + handler.client._topology.receive_cluster_time(conn._cluster_time) + return conn @contextlib.contextmanager @@ -1337,8 +1109,8 @@ def checkout( if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _CONNECTION_LOGGER, - clientId=self._client_id, message=_ConnectionStatusMessage.CHECKOUT_STARTED, + clientId=self._client_id, serverHost=self.address[0], serverPort=self.address[1], ) @@ -1352,8 +1124,8 @@ def checkout( if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _CONNECTION_LOGGER, - clientId=self._client_id, message=_ConnectionStatusMessage.CHECKOUT_SUCCEEDED, + clientId=self._client_id, serverHost=self.address[0], serverPort=self.address[1], driverConnectionId=conn.id, @@ -1363,6 +1135,7 @@ def checkout( with self.lock: self.active_contexts.add(conn.cancel_context) yield conn + # Catch KeyboardInterrupt, CancelledError, etc. and cleanup. except BaseException: # Exception in caller. Ensure the connection gets returned. # Note that when pinned is True, the session owns the @@ -1400,8 +1173,8 @@ def _raise_if_not_ready(self, checkout_started_time: float, emit_event: bool) -> if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _CONNECTION_LOGGER, - clientId=self._client_id, message=_ConnectionStatusMessage.CHECKOUT_FAILED, + clientId=self._client_id, serverHost=self.address[0], serverPort=self.address[1], reason="An error occurred while trying to establish a new connection", @@ -1434,8 +1207,8 @@ def _get_conn( if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _CONNECTION_LOGGER, - clientId=self._client_id, message=_ConnectionStatusMessage.CHECKOUT_FAILED, + clientId=self._client_id, serverHost=self.address[0], serverPort=self.address[1], reason="Connection pool was closed", @@ -1509,6 +1282,7 @@ def _get_conn( with self._max_connecting_cond: self._pending -= 1 self._max_connecting_cond.notify() + # Catch KeyboardInterrupt, CancelledError, etc. and cleanup. except BaseException: if conn: # We checked out a socket but authentication failed. 
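To summarize the `is_sdam` split threaded through the pool hunks above: monitor (SDAM) pools skip the hello handshake and CMAP events entirely, while application pools complete the handshake, authenticate, and now gossip the handshake's `$clusterTime` back to the topology. A minimal sketch of that flow, using stand-in `DemoConn`/`DemoTopology` classes of my own rather than driver types:

```python
class DemoTopology:
    def __init__(self) -> None:
        self.cluster_time = None

    def receive_cluster_time(self, ct) -> None:
        # Mirrors Topology.receive_cluster_time called via the handler above.
        self.cluster_time = ct


class DemoConn:
    def __init__(self) -> None:
        self._cluster_time = None

    def hello(self) -> None:
        # A real connection parses $clusterTime out of the hello response.
        self._cluster_time = {"clusterTime": "..."}  # placeholder value


def establish(conn: DemoConn, topology: DemoTopology, is_sdam: bool) -> DemoConn:
    # Mirrors Pool.connect() above: SDAM connections do not call hello on creation.
    if not is_sdam:
        conn.hello()
        topology.receive_cluster_time(conn._cluster_time)
    return conn


topo = DemoTopology()
establish(DemoConn(), topo, is_sdam=False)  # application pool: handshake + gossip
establish(DemoConn(), topo, is_sdam=True)   # monitor pool: no hello on creation
```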
@@ -1529,8 +1303,8 @@ def _get_conn( if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _CONNECTION_LOGGER, - clientId=self._client_id, message=_ConnectionStatusMessage.CHECKOUT_FAILED, + clientId=self._client_id, serverHost=self.address[0], serverPort=self.address[1], reason="An error occurred while trying to establish a new connection", @@ -1562,8 +1336,8 @@ def checkin(self, conn: Connection) -> None: if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _CONNECTION_LOGGER, - clientId=self._client_id, message=_ConnectionStatusMessage.CHECKEDIN, + clientId=self._client_id, serverHost=self.address[0], serverPort=self.address[1], driverConnectionId=conn.id, @@ -1583,8 +1357,8 @@ def checkin(self, conn: Connection) -> None: if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _CONNECTION_LOGGER, - clientId=self._client_id, message=_ConnectionStatusMessage.CONN_CLOSED, + clientId=self._client_id, serverHost=self.address[0], serverPort=self.address[1], driverConnectionId=conn.id, @@ -1592,17 +1366,20 @@ def checkin(self, conn: Connection) -> None: error=ConnectionClosedReason.ERROR, ) else: + close_conn = False with self.lock: # Hold the lock to ensure this section does not race with # Pool.reset(). if self.stale_generation(conn.generation, conn.service_id): - conn.close_conn(ConnectionClosedReason.STALE) + close_conn = True else: conn.update_last_checkin_time() conn.update_is_writable(bool(self.is_writable)) self.conns.appendleft(conn) # Notify any threads waiting to create a connection. self._max_connecting_cond.notify() + if close_conn: + conn.close_conn(ConnectionClosedReason.STALE) with self.size_cond: if txn: @@ -1661,8 +1438,8 @@ def _raise_wait_queue_timeout(self, checkout_started_time: float) -> NoReturn: if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _CONNECTION_LOGGER, - clientId=self._client_id, message=_ConnectionStatusMessage.CHECKOUT_FAILED, + clientId=self._client_id, serverHost=self.address[0], serverPort=self.address[1], reason="Wait queue timeout elapsed without a connection becoming available", @@ -1693,5 +1470,6 @@ def __del__(self) -> None: # Avoid ResourceWarnings in Python 3 # Close all sockets without calling reset() or close() because it is # not safe to acquire a lock in __del__. - for conn in self.conns: - conn.close_conn(None) + if _IS_SYNC: + for conn in self.conns: + conn.close_conn(None) # type: ignore[unused-coroutine] diff --git a/pymongo/synchronous/server.py b/pymongo/synchronous/server.py index ed48cc6cc8..f57420918b 100644 --- a/pymongo/synchronous/server.py +++ b/pymongo/synchronous/server.py @@ -4,7 +4,7 @@ # may not use this file except in compliance with the License. 
You # may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -66,7 +66,7 @@ def __init__( monitor: Monitor, topology_id: Optional[ObjectId] = None, listeners: Optional[_EventListeners] = None, - events: Optional[ReferenceType[Queue]] = None, + events: Optional[ReferenceType[Queue[Any]]] = None, ) -> None: """Represent one MongoDB server.""" self._description = server_description @@ -108,10 +108,10 @@ def close(self) -> None: if _SDAM_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _SDAM_LOGGER, + message=_SDAMStatusMessage.STOP_SERVER, topologyId=self._topology_id, serverHost=self._description.address[0], serverPort=self._description.address[1], - message=_SDAMStatusMessage.STOP_SERVER, ) self._monitor.close() @@ -142,7 +142,7 @@ def run_operation( read_preference: _ServerMode, listeners: Optional[_EventListeners], unpack_res: Callable[..., list[_DocumentOut]], - client: MongoClient, + client: MongoClient[Any], ) -> Response: """Run a _Query or _GetMore operation and return a Response object. @@ -173,8 +173,8 @@ def run_operation( if _COMMAND_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _COMMAND_LOGGER, - clientId=client._topology_settings._topology_id, message=_CommandStatusMessage.STARTED, + clientId=client._topology_settings._topology_id, command=cmd, commandName=next(iter(cmd)), databaseName=dbn, @@ -224,7 +224,7 @@ def run_operation( if use_cmd: first = docs[0] operation.client._process_response(first, operation.session) # type: ignore[misc, arg-type] - _check_command_response(first, conn.max_wire_version) + _check_command_response(first, conn.max_wire_version, pool_opts=conn.opts) # type:ignore[has-type] except Exception as exc: duration = datetime.now() - start if isinstance(exc, (NotPrimaryError, OperationFailure)): @@ -234,8 +234,8 @@ def run_operation( if _COMMAND_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _COMMAND_LOGGER, - clientId=client._topology_settings._topology_id, message=_CommandStatusMessage.FAILED, + clientId=client._topology_settings._topology_id, durationMS=duration, failure=failure, commandName=next(iter(cmd)), @@ -278,8 +278,8 @@ def run_operation( if _COMMAND_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _COMMAND_LOGGER, - clientId=client._topology_settings._topology_id, message=_CommandStatusMessage.SUCCEEDED, + clientId=client._topology_settings._topology_id, durationMS=duration, reply=res, commandName=next(iter(cmd)), diff --git a/pymongo/synchronous/settings.py b/pymongo/synchronous/settings.py index 040776713f..61b86fa18d 100644 --- a/pymongo/synchronous/settings.py +++ b/pymongo/synchronous/settings.py @@ -4,7 +4,7 @@ # may not use this file except in compliance with the License. You # may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -51,6 +51,7 @@ def __init__( srv_service_name: str = common.SRV_SERVICE_NAME, srv_max_hosts: int = 0, server_monitoring_mode: str = common.SERVER_MONITORING_MODE, + topology_id: Optional[ObjectId] = None, ): """Represent MongoClient's configuration. 
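The `topology_id` parameter just added to `TopologySettings` follows a reuse-or-create pattern whose logic lands in the next hunk; presumably it lets a caller carry one topology id across rebuilt settings, though that motivation is my inference, not stated in the patch. A minimal sketch of just the pattern:

```python
from typing import Optional

from bson.objectid import ObjectId


def resolve_topology_id(topology_id: Optional[ObjectId] = None) -> ObjectId:
    # Mirrors the TopologySettings.__init__ change in the next hunk:
    # reuse the id the caller supplied, otherwise mint a fresh one.
    return topology_id if topology_id is not None else ObjectId()


shared = ObjectId()
assert resolve_topology_id(shared) == shared  # reused
assert resolve_topology_id() != shared        # newly minted
```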
@@ -78,8 +79,10 @@ def __init__( self._srv_service_name = srv_service_name self._srv_max_hosts = srv_max_hosts or 0 self._server_monitoring_mode = server_monitoring_mode - - self._topology_id = ObjectId() + if topology_id is not None: + self._topology_id = topology_id + else: + self._topology_id = ObjectId() # Store the allocation traceback to catch unclosed clients in the # test suite. self._stack = "".join(traceback.format_stack()[:-2]) diff --git a/pymongo/srv_resolver.py b/pymongo/synchronous/srv_resolver.py similarity index 84% rename from pymongo/srv_resolver.py rename to pymongo/synchronous/srv_resolver.py index 5be6cb98db..4802310698 100644 --- a/pymongo/srv_resolver.py +++ b/pymongo/synchronous/srv_resolver.py @@ -25,6 +25,8 @@ if TYPE_CHECKING: from dns import resolver +_IS_SYNC = True + def _have_dnspython() -> bool: try: @@ -45,13 +47,14 @@ def maybe_decode(text: Union[str, bytes]) -> str: # PYTHON-2667 Lazily call dns.resolver methods for compatibility with eventlet. def _resolve(*args: Any, **kwargs: Any) -> resolver.Answer: - from dns import resolver + if _IS_SYNC: + from dns import resolver - if hasattr(resolver, "resolve"): - # dnspython >= 2 return resolver.resolve(*args, **kwargs) - # dnspython 1.X - return resolver.query(*args, **kwargs) + else: + from dns import asyncresolver + + return asyncresolver.resolve(*args, **kwargs) # type:ignore[return-value] _INVALID_HOST_MSG = ( @@ -78,14 +81,13 @@ def __init__( raise ConfigurationError(_INVALID_HOST_MSG % ("an IP address",)) except ValueError: pass - try: - self.__plist = self.__fqdn.split(".")[1:] + split_fqdn = self.__fqdn.split(".") + self.__plist = split_fqdn[1:] if len(split_fqdn) > 2 else split_fqdn except Exception: raise ConfigurationError(_INVALID_HOST_MSG % (fqdn,)) from None self.__slen = len(self.__plist) - if self.__slen < 2: - raise ConfigurationError(_INVALID_HOST_MSG % (fqdn,)) + self.nparts = len(split_fqdn) def get_options(self) -> Optional[str]: from dns import resolver @@ -96,7 +98,7 @@ def get_options(self) -> Optional[str]: # No TXT records return None except Exception as exc: - raise ConfigurationError(str(exc)) from None + raise ConfigurationError(str(exc)) from exc if len(results) > 1: raise ConfigurationError("Only one TXT record is supported") return (b"&".join([b"".join(res.strings) for res in results])).decode("utf-8") # type: ignore[attr-defined] @@ -111,7 +113,7 @@ def _resolve_uri(self, encapsulate_errors: bool) -> resolver.Answer: # Raise the original error. raise # Else, raise all errors as ConfigurationError. 
- raise ConfigurationError(str(exc)) from None + raise ConfigurationError(str(exc)) from exc return results def _get_srv_response_and_hosts( @@ -127,10 +129,15 @@ def _get_srv_response_and_hosts( # Validate hosts for node in nodes: + srv_host = node[0].lower() + if self.__fqdn == srv_host and self.nparts < 3: + raise ConfigurationError( + "Invalid SRV host: return address is identical to SRV hostname" + ) try: - nlist = node[0].lower().split(".")[1:][-self.__slen :] - except Exception: - raise ConfigurationError(f"Invalid SRV host: {node[0]}") from None + nlist = srv_host.split(".")[1:][-self.__slen :] + except Exception as exc: + raise ConfigurationError(f"Invalid SRV host: {node[0]}") from exc if self.__plist != nlist: raise ConfigurationError(f"Invalid SRV host: {node[0]}") if self.__srv_max_hosts: diff --git a/pymongo/synchronous/topology.py b/pymongo/synchronous/topology.py index b03269ae43..a4ca0e6e0f 100644 --- a/pymongo/synchronous/topology.py +++ b/pymongo/synchronous/topology.py @@ -4,7 +4,7 @@ # may not use this file except in compliance with the License. You # may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -16,6 +16,7 @@ from __future__ import annotations +import asyncio import logging import os import queue @@ -36,6 +37,7 @@ OperationFailure, PyMongoError, ServerSelectionTimeoutError, + WaitQueueTimeoutError, WriteError, ) from pymongo.hello import Hello @@ -61,7 +63,7 @@ writable_server_selector, ) from pymongo.synchronous.client_session import _ServerSession, _ServerSessionPool -from pymongo.synchronous.monitor import SrvMonitor +from pymongo.synchronous.monitor import MonitorBase, SrvMonitor from pymongo.synchronous.pool import Pool from pymongo.synchronous.server import Server from pymongo.topology_description import ( @@ -82,7 +84,7 @@ _pymongo_dir = str(Path(__file__).parent) -def process_events_queue(queue_ref: weakref.ReferenceType[queue.Queue]) -> bool: +def process_events_queue(queue_ref: weakref.ReferenceType[queue.Queue]) -> bool: # type: ignore[type-arg] q = queue_ref() if not q: return False # Cancel PeriodicExecutor. @@ -118,8 +120,8 @@ def __init__(self, topology_settings: TopologySettings): if _SDAM_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _SDAM_LOGGER, - topologyId=self._topology_id, message=_SDAMStatusMessage.START_TOPOLOGY, + topologyId=self._topology_id, ) if self._publish_tp: @@ -150,10 +152,10 @@ def __init__(self, topology_settings: TopologySettings): if _SDAM_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _SDAM_LOGGER, - topologyId=self._topology_id, - previousDescription=initial_td, - newDescription=self._description, message=_SDAMStatusMessage.TOPOLOGY_CHANGE, + topologyId=self._topology_id, + previousDescription=repr(initial_td), + newDescription=repr(self._description), ) for seed in topology_settings.seeds: @@ -163,10 +165,10 @@ def __init__(self, topology_settings: TopologySettings): if _SDAM_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _SDAM_LOGGER, + message=_SDAMStatusMessage.START_SERVER, topologyId=self._topology_id, serverHost=seed[0], serverPort=seed[1], - message=_SDAMStatusMessage.START_SERVER, ) # Store the seed list to help diagnose errors in _error_message(). 
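Stepping back to the `srv_resolver` hunks above: they tighten SRV target validation so that a returned target must share the queried name's parent labels, and (new here) a target identical to a short, fewer-than-three-label FQDN is rejected outright. A standalone paraphrase of those rules, in a helper of my own rather than the driver's API:

```python
def validate_srv_target(fqdn: str, srv_host: str) -> None:
    # Mirrors _SrvResolver above: keep every label after the first unless the
    # FQDN has too few labels to strip one.
    fqdn_parts = fqdn.lower().split(".")
    plist = fqdn_parts[1:] if len(fqdn_parts) > 2 else fqdn_parts
    srv_host = srv_host.lower()
    if fqdn.lower() == srv_host and len(fqdn_parts) < 3:
        raise ValueError("Invalid SRV host: return address is identical to SRV hostname")
    if srv_host.split(".")[1:][-len(plist):] != plist:
        raise ValueError(f"Invalid SRV host: {srv_host}")


validate_srv_target("cluster0.example.com", "shard0.example.com")  # ok: shares example.com
# validate_srv_target("example.com", "example.com")  # raises: identical 2-label FQDN
```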
@@ -184,7 +186,7 @@ def __init__(self, topology_settings: TopologySettings): if self._publish_server or self._publish_tp: assert self._events is not None - weak: weakref.ReferenceType[queue.Queue] + weak: weakref.ReferenceType[queue.Queue[Any]] def target() -> bool: return process_events_queue(weak) @@ -207,6 +209,9 @@ def target() -> bool: if self._settings.fqdn is not None and not self._settings.load_balanced: self._srv_monitor = SrvMonitor(self, self._settings) + # Stores all monitor tasks that need to be joined on close or server selection + self._monitor_tasks: list[MonitorBase] = [] + def open(self) -> None: """Start monitoring, or restart after a fork. @@ -232,9 +237,7 @@ def open(self) -> None: warnings.warn( # type: ignore[call-overload] # noqa: B028 "MongoClient opened before fork. May not be entirely fork-safe, " "proceed with caution. See PyMongo's documentation for details: " - "https://www.mongodb.com/docs/languages/" - "python/pymongo-driver/current/faq/" - "#is-pymongo-fork-safe-", + "https://dochub.mongodb.org/core/pymongo-fork-deadlock", **kwargs, ) with self._lock: @@ -283,6 +286,10 @@ def select_servers( else: server_timeout = server_selection_timeout + # Cleanup any completed monitor tasks safely + if not _IS_SYNC and self._monitor_tasks: + self.cleanup_monitors() + with self._lock: server_descriptions = self._select_servers_loop( selector, server_timeout, operation, operation_id, address @@ -347,7 +354,7 @@ def _select_servers_loop( operationId=operation_id, topologyDescription=self.description, clientId=self.description._topology_settings._topology_id, - remainingTimeMS=int(end_time - time.monotonic()), + remainingTimeMS=int(1000 * (end_time - time.monotonic())), ) logged_waiting = True @@ -493,7 +500,6 @@ def _process_change( self._description = new_td self._update_servers() - self._receive_cluster_time_no_lock(server_description.cluster_time) if self._publish_tp and not suppress_event: assert self._events is not None @@ -506,10 +512,10 @@ def _process_change( if _SDAM_LOGGER.isEnabledFor(logging.DEBUG) and not suppress_event: _debug_log( _SDAM_LOGGER, - topologyId=self._topology_id, - previousDescription=td_old, - newDescription=self._description, message=_SDAMStatusMessage.TOPOLOGY_CHANGE, + topologyId=self._topology_id, + previousDescription=repr(td_old), + newDescription=repr(self._description), ) # Shutdown SRV polling for unsupported cluster types. @@ -520,12 +526,8 @@ def _process_change( and self._description.topology_type not in SRV_POLLING_TOPOLOGIES ): self._srv_monitor.close() - - # Clear the pool from a failed heartbeat. - if reset_pool: - server = self._servers.get(server_description.address) - if server: - server.pool.reset(interrupt_connections=interrupt_connections) + if not _IS_SYNC: + self._monitor_tasks.append(self._srv_monitor) # Wake anything waiting in select_servers(). self._condition.notify_all() @@ -549,6 +551,11 @@ def on_change( # that didn't include this server. if self._opened and self._description.has_server(server_description.address): self._process_change(server_description, reset_pool, interrupt_connections) + # Clear the pool from a failed heartbeat, done outside the lock to avoid blocking on connection close. + if reset_pool: + server = self._servers.get(server_description.address) + if server: + server.pool.reset(interrupt_connections=interrupt_connections) def _process_srv_update(self, seedlist: list[tuple[str, Any]]) -> None: """Process a new seedlist on an opened topology. 
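One small but easy-to-miss fix in the `_select_servers_loop` hunk above: `remainingTimeMS` was previously computed as `int(end_time - time.monotonic())`, i.e. whole seconds truncated to an int but logged under a millisecond label. A quick illustration of the difference:

```python
import time

end_time = time.monotonic() + 1.5  # pretend 1.5s of selection budget remains

remaining = end_time - time.monotonic()
print(int(remaining))          # old behavior: 1, logged as if it were 1ms
print(int(1000 * remaining))   # fixed: ~1500, actual milliseconds remaining
```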
@@ -572,10 +579,10 @@ def _process_srv_update(self, seedlist: list[tuple[str, Any]]) -> None: if _SDAM_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _SDAM_LOGGER, - topologyId=self._topology_id, - previousDescription=td_old, - newDescription=self._description, message=_SDAMStatusMessage.TOPOLOGY_CHANGE, + topologyId=self._topology_id, + previousDescription=repr(td_old), + newDescription=repr(self._description), ) def on_srv_update(self, seedlist: list[tuple[str, Any]]) -> None: @@ -693,6 +700,8 @@ def close(self) -> None: old_td = self._description for server in self._servers.values(): server.close() + if not _IS_SYNC: + self._monitor_tasks.append(server._monitor) # Mark all servers Unknown. self._description = self._description.reset() @@ -703,6 +712,8 @@ def close(self) -> None: # Stop SRV polling thread. if self._srv_monitor: self._srv_monitor.close() + if not _IS_SYNC: + self._monitor_tasks.append(self._srv_monitor) self._opened = False self._closed = True @@ -732,13 +743,13 @@ def close(self) -> None: if _SDAM_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _SDAM_LOGGER, - topologyId=self._topology_id, - previousDescription=old_td, - newDescription=self._description, message=_SDAMStatusMessage.TOPOLOGY_CHANGE, + topologyId=self._topology_id, + previousDescription=repr(old_td), + newDescription=repr(self._description), ) _debug_log( - _SDAM_LOGGER, topologyId=self._topology_id, message=_SDAMStatusMessage.STOP_TOPOLOGY + _SDAM_LOGGER, message=_SDAMStatusMessage.STOP_TOPOLOGY, topologyId=self._topology_id ) if self._publish_server or self._publish_tp: @@ -877,6 +888,8 @@ def _handle_error(self, address: _Address, err_ctx: _ErrorContext) -> None: # Clear the pool. server.reset(service_id) elif isinstance(error, ConnectionFailure): + if isinstance(error, WaitQueueTimeoutError): + return # "Client MUST replace the server's description with type Unknown # ... MUST NOT request an immediate check of the server." if not self._settings.load_balanced: @@ -942,6 +955,8 @@ def _update_servers(self) -> None: for address, server in list(self._servers.items()): if not self._description.has_server(address): server.close() + if not _IS_SYNC: + self._monitor_tasks.append(server._monitor) self._servers.pop(address) def _create_pool_for_server(self, address: _Address) -> Pool: @@ -968,7 +983,7 @@ def _create_pool_for_monitor(self, address: _Address) -> Pool: ) return self._settings.pool_class( - address, monitor_pool_options, handshake=False, client_id=self._topology_id + address, monitor_pool_options, is_sdam=True, client_id=self._topology_id ) def _error_message(self, selector: Callable[[Selection], Selection]) -> str: @@ -1029,6 +1044,15 @@ def _error_message(self, selector: Callable[[Selection], Selection]) -> str: else: return ",".join(str(server.error) for server in servers if server.error) + def cleanup_monitors(self) -> None: + tasks = [] + try: + while self._monitor_tasks: + tasks.append(self._monitor_tasks.pop()) + except IndexError: + pass + asyncio.gather(*[t.join() for t in tasks], return_exceptions=True) # type: ignore[func-returns-value] + def __repr__(self) -> str: msg = "" if not self._opened: diff --git a/pymongo/synchronous/uri_parser.py b/pymongo/synchronous/uri_parser.py new file mode 100644 index 0000000000..45c1752953 --- /dev/null +++ b/pymongo/synchronous/uri_parser.py @@ -0,0 +1,193 @@ +# Copyright 2011-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you +# may not use this file except in compliance with the License. 
You
+# may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied. See the License for the specific language governing
+# permissions and limitations under the License.
+
+
+"""Tools to parse and validate a MongoDB URI."""
+from __future__ import annotations
+
+from typing import Any, Optional
+from urllib.parse import unquote_plus
+
+from pymongo.common import SRV_SERVICE_NAME, _CaseInsensitiveDictionary
+from pymongo.errors import ConfigurationError, InvalidURI
+from pymongo.synchronous.srv_resolver import _SrvResolver
+from pymongo.uri_parser_shared import (
+    _ALLOWED_TXT_OPTS,
+    DEFAULT_PORT,
+    SCHEME,
+    SCHEME_LEN,
+    SRV_SCHEME_LEN,
+    _check_options,
+    _make_options_case_sensitive,
+    _validate_uri,
+    split_hosts,
+    split_options,
+)
+
+_IS_SYNC = True
+
+
+def parse_uri(
+    uri: str,
+    default_port: Optional[int] = DEFAULT_PORT,
+    validate: bool = True,
+    warn: bool = False,
+    normalize: bool = True,
+    connect_timeout: Optional[float] = None,
+    srv_service_name: Optional[str] = None,
+    srv_max_hosts: Optional[int] = None,
+) -> dict[str, Any]:
+    """Parse and validate a MongoDB URI.
+
+    Returns a dict of the form::
+
+        {
+            'nodelist': <list of (host, port) tuples>,
+            'username': <username> or None,
+            'password': <password> or None,
+            'database': <database name> or None,
+            'collection': <collection name> or None,
+            'options': <dict of MongoDB URI options>,
+            'fqdn': <fqdn of the MongoDB+SRV URI> or None
+        }
+
+    If the URI scheme is "mongodb+srv://" DNS SRV and TXT lookups will be done
+    to build nodelist and options.
+
+    :param uri: The MongoDB URI to parse.
+    :param default_port: The port number to use when one wasn't specified
+        for a host in the URI.
+    :param validate: If ``True`` (the default), validate and
+        normalize all options. Default: ``True``.
+    :param warn: When validating, if ``True`` then will warn
+        the user and then ignore any invalid options or values. If ``False``,
+        validation will error when options are unsupported or values are
+        invalid. Default: ``False``.
+    :param normalize: If ``True``, convert names of URI options
+        to their internally-used names. Default: ``True``.
+    :param connect_timeout: The maximum time in milliseconds to
+        wait for a response from the DNS server.
+    :param srv_service_name: A custom SRV service name.
+
+    .. versionchanged:: 4.14
+        ``options`` is now type ``dict`` as opposed to a ``_CaseInsensitiveDictionary``.
+
+    .. versionchanged:: 4.6
+        The delimiting slash (``/``) between hosts and connection options is now optional.
+        For example, "mongodb://example.com?tls=true" is now a valid URI.
+
+    .. versionchanged:: 4.0
+        To better follow RFC 3986, unquoted percent signs ("%") are no longer
+        supported.
+
+    .. versionchanged:: 3.9
+        Added the ``normalize`` parameter.
+
+    .. versionchanged:: 3.6
+        Added support for mongodb+srv:// URIs.
+
+    .. versionchanged:: 3.5
+        Return the original value of the ``readPreference`` MongoDB URI option
+        instead of the validated read preference mode.
+
+    .. versionchanged:: 3.1
+        ``warn`` added so invalid options can be ignored.
+ """ + result = _validate_uri(uri, default_port, validate, warn, normalize, srv_max_hosts) + result.update( + _parse_srv( + uri, + default_port, + validate, + warn, + normalize, + connect_timeout, + srv_service_name, + srv_max_hosts, + ) + ) + result["options"] = _make_options_case_sensitive(result["options"]) + return result + + +def _parse_srv( + uri: str, + default_port: Optional[int] = DEFAULT_PORT, + validate: bool = True, + warn: bool = False, + normalize: bool = True, + connect_timeout: Optional[float] = None, + srv_service_name: Optional[str] = None, + srv_max_hosts: Optional[int] = None, +) -> dict[str, Any]: + if uri.startswith(SCHEME): + is_srv = False + scheme_free = uri[SCHEME_LEN:] + else: + is_srv = True + scheme_free = uri[SRV_SCHEME_LEN:] + + options = _CaseInsensitiveDictionary() + + host_plus_db_part, _, opts = scheme_free.partition("?") + if "/" in host_plus_db_part: + host_part, _, _ = host_plus_db_part.partition("/") + else: + host_part = host_plus_db_part + + if opts: + options.update(split_options(opts, validate, warn, normalize)) + if srv_service_name is None: + srv_service_name = options.get("srvServiceName", SRV_SERVICE_NAME) + if "@" in host_part: + _, _, hosts = host_part.rpartition("@") + else: + hosts = host_part + + hosts = unquote_plus(hosts) + srv_max_hosts = srv_max_hosts or options.get("srvMaxHosts") + if is_srv: + nodes = split_hosts(hosts, default_port=None) + fqdn, port = nodes[0] + + # Use the connection timeout. connectTimeoutMS passed as a keyword + # argument overrides the same option passed in the connection string. + connect_timeout = connect_timeout or options.get("connectTimeoutMS") + dns_resolver = _SrvResolver(fqdn, connect_timeout, srv_service_name, srv_max_hosts) + nodes = dns_resolver.get_hosts() + dns_options = dns_resolver.get_options() + if dns_options: + parsed_dns_options = split_options(dns_options, validate, warn, normalize) + if set(parsed_dns_options) - _ALLOWED_TXT_OPTS: + raise ConfigurationError( + "Only authSource, replicaSet, and loadBalanced are supported from DNS" + ) + for opt, val in parsed_dns_options.items(): + if opt not in options: + options[opt] = val + if options.get("loadBalanced") and srv_max_hosts: + raise InvalidURI("You cannot specify loadBalanced with srvMaxHosts") + if options.get("replicaSet") and srv_max_hosts: + raise InvalidURI("You cannot specify replicaSet with srvMaxHosts") + if "tls" not in options and "ssl" not in options: + options["tls"] = True if validate else "true" + else: + nodes = split_hosts(hosts, default_port=default_port) + + _check_options(nodes, options) + + return { + "nodelist": nodes, + "options": options, + } diff --git a/pymongo/topology_description.py b/pymongo/topology_description.py index f669fefd2e..de67a8f94a 100644 --- a/pymongo/topology_description.py +++ b/pymongo/topology_description.py @@ -4,7 +4,7 @@ # may not use this file except in compliance with the License. 
You # may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -33,8 +33,8 @@ from bson.min_key import MinKey from bson.objectid import ObjectId from pymongo import common -from pymongo.errors import ConfigurationError -from pymongo.read_preferences import ReadPreference, _AggWritePref, _ServerMode +from pymongo.errors import ConfigurationError, PyMongoError +from pymongo.read_preferences import Primary, ReadPreference, _AggWritePref, _ServerMode from pymongo.server_description import ServerDescription from pymongo.server_selectors import Selection from pymongo.server_type import SERVER_TYPE @@ -324,6 +324,17 @@ def apply_selector( description = self.server_descriptions().get(address) return [description] if description else [] + # Primary selection fast path. + if self.topology_type == TOPOLOGY_TYPE.ReplicaSetWithPrimary and type(selector) is Primary: + for sd in self._server_descriptions.values(): + if sd.server_type == SERVER_TYPE.RSPrimary: + sds = [sd] + if custom_selector: + sds = custom_selector(sds) + return sds + # No primary found, return an empty list. + return [] + selection = Selection.from_topology_description(self) # Ignore read preference for sharded clusters. if self.topology_type != TOPOLOGY_TYPE.Sharded: @@ -558,12 +569,16 @@ def _update_rs_from_primary( return _check_has_primary(sds), replica_set_name, max_set_version, max_election_id if server_description.max_wire_version is None or server_description.max_wire_version < 17: - new_election_tuple: tuple = (server_description.set_version, server_description.election_id) - max_election_tuple: tuple = (max_set_version, max_election_id) + new_election_tuple: tuple = (server_description.set_version, server_description.election_id) # type: ignore[type-arg] + max_election_tuple: tuple = (max_set_version, max_election_id) # type: ignore[type-arg] if None not in new_election_tuple: if None not in max_election_tuple and new_election_tuple < max_election_tuple: # Stale primary, set to type Unknown. - sds[server_description.address] = server_description.to_unknown() + sds[server_description.address] = server_description.to_unknown( + PyMongoError( + f"primary marked stale due to electionId/setVersion mismatch, {new_election_tuple} is stale compared to {max_election_tuple}" + ) + ) return _check_has_primary(sds), replica_set_name, max_set_version, max_election_id max_election_id = server_description.election_id @@ -578,7 +593,11 @@ def _update_rs_from_primary( max_election_safe = tuple(MinKey() if i is None else i for i in max_election_tuple) if new_election_safe < max_election_safe: # Stale primary, set to type Unknown. - sds[server_description.address] = server_description.to_unknown() + sds[server_description.address] = server_description.to_unknown( + PyMongoError( + f"primary marked stale due to electionId/setVersion mismatch, {new_election_tuple} is stale compared to {max_election_tuple}" + ) + ) return _check_has_primary(sds), replica_set_name, max_set_version, max_election_id else: max_election_id = server_description.election_id @@ -591,7 +610,9 @@ def _update_rs_from_primary( and server.address != server_description.address ): # Reset old primary's type to Unknown. 
- sds[server.address] = server.to_unknown() + sds[server.address] = server.to_unknown( + PyMongoError("primary marked stale due to discovery of newer primary") + ) # There can be only one prior primary. break diff --git a/pymongo/typings.py b/pymongo/typings.py index 68962eb540..e678720db9 100644 --- a/pymongo/typings.py +++ b/pymongo/typings.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -51,7 +51,7 @@ _T = TypeVar("_T") # Type hinting types for compatibility between async and sync classes -_AgnosticMongoClient = Union["AsyncMongoClient", "MongoClient"] +_AgnosticMongoClient = Union["AsyncMongoClient", "MongoClient"] # type: ignore[type-arg] _AgnosticConnection = Union["AsyncConnection", "Connection"] _AgnosticClientSession = Union["AsyncClientSession", "ClientSession"] _AgnosticBulk = Union["_AsyncBulk", "_Bulk"] diff --git a/pymongo/uri_parser.py b/pymongo/uri_parser.py index 7018dad7d8..fe253b9bbf 100644 --- a/pymongo/uri_parser.py +++ b/pymongo/uri_parser.py @@ -4,7 +4,7 @@ # may not use this file except in compliance with the License. You # may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -13,627 +13,32 @@ # permissions and limitations under the License. -"""Tools to parse and validate a MongoDB URI. - -.. seealso:: This module is compatible with both the synchronous and asynchronous PyMongo APIs. -""" +"""Re-import of synchronous URI Parser API for compatibility.""" from __future__ import annotations -import re import sys -import warnings -from typing import ( - TYPE_CHECKING, - Any, - Mapping, - MutableMapping, - Optional, - Sized, - Union, - cast, -) -from urllib.parse import unquote_plus - -from pymongo.client_options import _parse_ssl_options -from pymongo.common import ( - INTERNAL_URI_OPTION_NAME_MAP, - SRV_SERVICE_NAME, - URI_OPTIONS_DEPRECATION_MAP, - _CaseInsensitiveDictionary, - get_validated_options, -) -from pymongo.errors import ConfigurationError, InvalidURI -from pymongo.srv_resolver import _have_dnspython, _SrvResolver -from pymongo.typings import _Address - -if TYPE_CHECKING: - from pymongo.pyopenssl_context import SSLContext - -SCHEME = "mongodb://" -SCHEME_LEN = len(SCHEME) -SRV_SCHEME = "mongodb+srv://" -SRV_SCHEME_LEN = len(SRV_SCHEME) -DEFAULT_PORT = 27017 - - -def _unquoted_percent(s: str) -> bool: - """Check for unescaped percent signs. - - :param s: A string. `s` can have things like '%25', '%2525', - and '%E2%85%A8' but cannot have unquoted percent like '%foo'. - """ - for i in range(len(s)): - if s[i] == "%": - sub = s[i : i + 3] - # If unquoting yields the same string this means there was an - # unquoted %. - if unquote_plus(sub) == sub: - return True - return False - - -def parse_userinfo(userinfo: str) -> tuple[str, str]: - """Validates the format of user information in a MongoDB URI. - Reserved characters that are gen-delimiters (":", "/", "?", "#", "[", - "]", "@") as per RFC 3986 must be escaped. - - Returns a 2-tuple containing the unescaped username followed - by the unescaped password. 
-
-    :param userinfo: A string of the form <username>:<password>
-    """
-    if "@" in userinfo or userinfo.count(":") > 1 or _unquoted_percent(userinfo):
-        raise InvalidURI(
-            "Username and password must be escaped according to "
-            "RFC 3986, use urllib.parse.quote_plus"
-        )
-
-    user, _, passwd = userinfo.partition(":")
-    # No password is expected with GSSAPI authentication.
-    if not user:
-        raise InvalidURI("The empty string is not valid username.")
-
-    return unquote_plus(user), unquote_plus(passwd)
-
-
-def parse_ipv6_literal_host(
-    entity: str, default_port: Optional[int]
-) -> tuple[str, Optional[Union[str, int]]]:
-    """Validates an IPv6 literal host:port string.
-
-    Returns a 2-tuple of IPv6 literal followed by port where
-    port is default_port if it wasn't specified in entity.
-
-    :param entity: A string that represents an IPv6 literal enclosed
-        in braces (e.g. '[::1]' or '[::1]:27017').
-    :param default_port: The port number to use when one wasn't
-        specified in entity.
-    """
-    if entity.find("]") == -1:
-        raise ValueError(
-            "an IPv6 address literal must be enclosed in '[' and ']' according to RFC 2732."
-        )
-    i = entity.find("]:")
-    if i == -1:
-        return entity[1:-1], default_port
-    return entity[1:i], entity[i + 2 :]
-
-
-def parse_host(entity: str, default_port: Optional[int] = DEFAULT_PORT) -> _Address:
-    """Validates a host string
-
-    Returns a 2-tuple of host followed by port where port is default_port
-    if it wasn't specified in the string.
-
-    :param entity: A host or host:port string where host could be a
-        hostname or IP address.
-    :param default_port: The port number to use when one wasn't
-        specified in entity.
-    """
-    host = entity
-    port: Optional[Union[str, int]] = default_port
-    if entity[0] == "[":
-        host, port = parse_ipv6_literal_host(entity, default_port)
-    elif entity.endswith(".sock"):
-        return entity, default_port
-    elif entity.find(":") != -1:
-        if entity.count(":") > 1:
-            raise ValueError(
-                "Reserved characters such as ':' must be "
-                "escaped according RFC 2396. An IPv6 "
-                "address literal must be enclosed in '[' "
-                "and ']' according to RFC 2732."
-            )
-        host, port = host.split(":", 1)
-    if isinstance(port, str):
-        if not port.isdigit():
-            # Special case check for mistakes like "mongodb://localhost:27017 ".
-            if all(c.isspace() or c.isdigit() for c in port):
-                for c in port:
-                    if c.isspace():
-                        raise ValueError(f"Port contains whitespace character: {c!r}")
-
-            # A non-digit port indicates that the URI is invalid, likely because the password
-            # or username were not escaped.
-            raise ValueError(
-                "Port contains non-digit characters. Hint: username and password must be escaped according to "
-                "RFC 3986, use urllib.parse.quote_plus"
-            )
-        if int(port) > 65535 or int(port) <= 0:
-            raise ValueError("Port must be an integer between 0 and 65535")
-        port = int(port)
-
-    # Normalize hostname to lowercase, since DNS is case-insensitive:
-    # http://tools.ietf.org/html/rfc4343
-    # This prevents useless rediscovery if "foo.com" is in the seed list but
-    # "FOO.com" is in the hello response.
-    return host.lower(), port
-
-
-# Options whose values are implicitly determined by tlsInsecure.
-_IMPLICIT_TLSINSECURE_OPTS = {
-    "tlsallowinvalidcertificates",
-    "tlsallowinvalidhostnames",
-    "tlsdisableocspendpointcheck",
-}
-
-
-def _parse_options(opts: str, delim: Optional[str]) -> _CaseInsensitiveDictionary:
-    """Helper method for split_options which creates the options dict.
- Also handles the creation of a list for the URI tag_sets/ - readpreferencetags portion, and the use of a unicode options string. - """ - options = _CaseInsensitiveDictionary() - for uriopt in opts.split(delim): - key, value = uriopt.split("=") - if key.lower() == "readpreferencetags": - options.setdefault(key, []).append(value) - else: - if key in options: - warnings.warn(f"Duplicate URI option '{key}'.", stacklevel=2) - if key.lower() == "authmechanismproperties": - val = value - else: - val = unquote_plus(value) - options[key] = val - - return options - - -def _handle_security_options(options: _CaseInsensitiveDictionary) -> _CaseInsensitiveDictionary: - """Raise appropriate errors when conflicting TLS options are present in - the options dictionary. - - :param options: Instance of _CaseInsensitiveDictionary containing - MongoDB URI options. - """ - # Implicitly defined options must not be explicitly specified. - tlsinsecure = options.get("tlsinsecure") - if tlsinsecure is not None: - for opt in _IMPLICIT_TLSINSECURE_OPTS: - if opt in options: - err_msg = "URI options %s and %s cannot be specified simultaneously." - raise InvalidURI( - err_msg % (options.cased_key("tlsinsecure"), options.cased_key(opt)) - ) - - # Handle co-occurence of OCSP & tlsAllowInvalidCertificates options. - tlsallowinvalidcerts = options.get("tlsallowinvalidcertificates") - if tlsallowinvalidcerts is not None: - if "tlsdisableocspendpointcheck" in options: - err_msg = "URI options %s and %s cannot be specified simultaneously." - raise InvalidURI( - err_msg - % ("tlsallowinvalidcertificates", options.cased_key("tlsdisableocspendpointcheck")) - ) - if tlsallowinvalidcerts is True: - options["tlsdisableocspendpointcheck"] = True - - # Handle co-occurence of CRL and OCSP-related options. - tlscrlfile = options.get("tlscrlfile") - if tlscrlfile is not None: - for opt in ("tlsinsecure", "tlsallowinvalidcertificates", "tlsdisableocspendpointcheck"): - if options.get(opt) is True: - err_msg = "URI option %s=True cannot be specified when CRL checking is enabled." - raise InvalidURI(err_msg % (opt,)) - - if "ssl" in options and "tls" in options: - - def truth_value(val: Any) -> Any: - if val in ("true", "false"): - return val == "true" - if isinstance(val, bool): - return val - return val - - if truth_value(options.get("ssl")) != truth_value(options.get("tls")): - err_msg = "Can not specify conflicting values for URI options %s and %s." - raise InvalidURI(err_msg % (options.cased_key("ssl"), options.cased_key("tls"))) - - return options - - -def _handle_option_deprecations(options: _CaseInsensitiveDictionary) -> _CaseInsensitiveDictionary: - """Issue appropriate warnings when deprecated options are present in the - options dictionary. Removes deprecated option key, value pairs if the - options dictionary is found to also have the renamed option. - - :param options: Instance of _CaseInsensitiveDictionary containing - MongoDB URI options. - """ - for optname in list(options): - if optname in URI_OPTIONS_DEPRECATION_MAP: - mode, message = URI_OPTIONS_DEPRECATION_MAP[optname] - if mode == "renamed": - newoptname = message - if newoptname in options: - warn_msg = "Deprecated option '%s' ignored in favor of '%s'." - warnings.warn( - warn_msg % (options.cased_key(optname), options.cased_key(newoptname)), - DeprecationWarning, - stacklevel=2, - ) - options.pop(optname) - continue - warn_msg = "Option '%s' is deprecated, use '%s' instead." 
- warnings.warn( - warn_msg % (options.cased_key(optname), newoptname), - DeprecationWarning, - stacklevel=2, - ) - elif mode == "removed": - warn_msg = "Option '%s' is deprecated. %s." - warnings.warn( - warn_msg % (options.cased_key(optname), message), - DeprecationWarning, - stacklevel=2, - ) - - return options - - -def _normalize_options(options: _CaseInsensitiveDictionary) -> _CaseInsensitiveDictionary: - """Normalizes option names in the options dictionary by converting them to - their internally-used names. - - :param options: Instance of _CaseInsensitiveDictionary containing - MongoDB URI options. - """ - # Expand the tlsInsecure option. - tlsinsecure = options.get("tlsinsecure") - if tlsinsecure is not None: - for opt in _IMPLICIT_TLSINSECURE_OPTS: - # Implicit options are logically the same as tlsInsecure. - options[opt] = tlsinsecure - - for optname in list(options): - intname = INTERNAL_URI_OPTION_NAME_MAP.get(optname, None) - if intname is not None: - options[intname] = options.pop(optname) - - return options - - -def validate_options(opts: Mapping[str, Any], warn: bool = False) -> MutableMapping[str, Any]: - """Validates and normalizes options passed in a MongoDB URI. - - Returns a new dictionary of validated and normalized options. If warn is - False then errors will be thrown for invalid options, otherwise they will - be ignored and a warning will be issued. - - :param opts: A dict of MongoDB URI options. - :param warn: If ``True`` then warnings will be logged and - invalid options will be ignored. Otherwise invalid options will - cause errors. - """ - return get_validated_options(opts, warn) - - -def split_options( - opts: str, validate: bool = True, warn: bool = False, normalize: bool = True -) -> MutableMapping[str, Any]: - """Takes the options portion of a MongoDB URI, validates each option - and returns the options in a dictionary. - - :param opt: A string representing MongoDB URI options. - :param validate: If ``True`` (the default), validate and normalize all - options. - :param warn: If ``False`` (the default), suppress all warnings raised - during validation of options. - :param normalize: If ``True`` (the default), renames all options to their - internally-used names. - """ - and_idx = opts.find("&") - semi_idx = opts.find(";") - try: - if and_idx >= 0 and semi_idx >= 0: - raise InvalidURI("Can not mix '&' and ';' for option separators.") - elif and_idx >= 0: - options = _parse_options(opts, "&") - elif semi_idx >= 0: - options = _parse_options(opts, ";") - elif opts.find("=") != -1: - options = _parse_options(opts, None) - else: - raise ValueError - except ValueError: - raise InvalidURI("MongoDB URI options are key=value pairs.") from None - - options = _handle_security_options(options) - - options = _handle_option_deprecations(options) - - if normalize: - options = _normalize_options(options) - - if validate: - options = cast(_CaseInsensitiveDictionary, validate_options(options, warn)) - if options.get("authsource") == "": - raise InvalidURI("the authSource database cannot be an empty string") - - return options - - -def split_hosts(hosts: str, default_port: Optional[int] = DEFAULT_PORT) -> list[_Address]: - """Takes a string of the form host1[:port],host2[:port]... and - splits it into (host, port) tuples. If [:port] isn't present the - default_port is used. - - Returns a set of 2-tuples containing the host name (or IP) followed by - port number. - - :param hosts: A string of the form host1[:port],host2[:port],... 
-    :param default_port: The port number to use when one wasn't specified
-        for a host.
-    """
-    nodes = []
-    for entity in hosts.split(","):
-        if not entity:
-            raise ConfigurationError("Empty host (or extra comma in host list).")
-        port = default_port
-        # Unix socket entities don't have ports
-        if entity.endswith(".sock"):
-            port = None
-        nodes.append(parse_host(entity, port))
-    return nodes
-
-
-# Prohibited characters in database name. DB names also can't have ".", but for
-# backward-compat we allow "db.collection" in URI.
-_BAD_DB_CHARS = re.compile("[" + re.escape(r'/ "$') + "]")
-
-_ALLOWED_TXT_OPTS = frozenset(
-    ["authsource", "authSource", "replicaset", "replicaSet", "loadbalanced", "loadBalanced"]
-)
-
-
-def _check_options(nodes: Sized, options: Mapping[str, Any]) -> None:
-    # Ensure directConnection was not True if there are multiple seeds.
-    if len(nodes) > 1 and options.get("directconnection"):
-        raise ConfigurationError("Cannot specify multiple hosts with directConnection=true")
-
-    if options.get("loadbalanced"):
-        if len(nodes) > 1:
-            raise ConfigurationError("Cannot specify multiple hosts with loadBalanced=true")
-        if options.get("directconnection"):
-            raise ConfigurationError("Cannot specify directConnection=true with loadBalanced=true")
-        if options.get("replicaset"):
-            raise ConfigurationError("Cannot specify replicaSet with loadBalanced=true")
-
-
-def parse_uri(
-    uri: str,
-    default_port: Optional[int] = DEFAULT_PORT,
-    validate: bool = True,
-    warn: bool = False,
-    normalize: bool = True,
-    connect_timeout: Optional[float] = None,
-    srv_service_name: Optional[str] = None,
-    srv_max_hosts: Optional[int] = None,
-) -> dict[str, Any]:
-    """Parse and validate a MongoDB URI.
-
-    Returns a dict of the form::
-
-        {
-            'nodelist': <list of (host, port) tuples>,
-            'username': <username> or None,
-            'password': <password> or None,
-            'database': <database name> or None,
-            'collection': <collection name> or None,
-            'options': <dict of MongoDB URI options>,
-            'fqdn': <fqdn of the MongoDB+SRV URI> or None
-        }
-
-    If the URI scheme is "mongodb+srv://" DNS SRV and TXT lookups will be done
-    to build nodelist and options.
-
-    :param uri: The MongoDB URI to parse.
-    :param default_port: The port number to use when one wasn't specified
-        for a host in the URI.
-    :param validate: If ``True`` (the default), validate and
-        normalize all options. Default: ``True``.
-    :param warn: When validating, if ``True`` then will warn
-        the user then ignore any invalid options or values. If ``False``,
-        validation will error when options are unsupported or values are
-        invalid. Default: ``False``.
-    :param normalize: If ``True``, convert names of URI options
-        to their internally-used names. Default: ``True``.
-    :param connect_timeout: The maximum time in milliseconds to
-        wait for a response from the DNS server.
-    :param srv_service_name: A custom SRV service name
-
-    .. versionchanged:: 4.6
-       The delimiting slash (``/``) between hosts and connection options is now optional.
-       For example, "mongodb://example.com?tls=true" is now a valid URI.
-
-    .. versionchanged:: 4.0
-       To better follow RFC 3986, unquoted percent signs ("%") are no longer
-       supported.
-
-    .. versionchanged:: 3.9
-       Added the ``normalize`` parameter.
-
-    .. versionchanged:: 3.6
-       Added support for mongodb+srv:// URIs.
-
-    .. versionchanged:: 3.5
-       Return the original value of the ``readPreference`` MongoDB URI option
-       instead of the validated read preference mode.
-
-    .. versionchanged:: 3.1
-       ``warn`` added so invalid options can be ignored.
- """ - if uri.startswith(SCHEME): - is_srv = False - scheme_free = uri[SCHEME_LEN:] - elif uri.startswith(SRV_SCHEME): - if not _have_dnspython(): - python_path = sys.executable or "python" - raise ConfigurationError( - 'The "dnspython" module must be ' - "installed to use mongodb+srv:// URIs. " - "To fix this error install pymongo again:\n " - "%s -m pip install pymongo>=4.3" % (python_path) - ) - is_srv = True - scheme_free = uri[SRV_SCHEME_LEN:] - else: - raise InvalidURI(f"Invalid URI scheme: URI must begin with '{SCHEME}' or '{SRV_SCHEME}'") - - if not scheme_free: - raise InvalidURI("Must provide at least one hostname or IP.") - - user = None - passwd = None - dbase = None - collection = None - options = _CaseInsensitiveDictionary() - - host_plus_db_part, _, opts = scheme_free.partition("?") - if "/" in host_plus_db_part: - host_part, _, dbase = host_plus_db_part.partition("/") - else: - host_part = host_plus_db_part - - if dbase: - dbase = unquote_plus(dbase) - if "." in dbase: - dbase, collection = dbase.split(".", 1) - if _BAD_DB_CHARS.search(dbase): - raise InvalidURI('Bad database name "%s"' % dbase) - else: - dbase = None - - if opts: - options.update(split_options(opts, validate, warn, normalize)) - if srv_service_name is None: - srv_service_name = options.get("srvServiceName", SRV_SERVICE_NAME) - if "@" in host_part: - userinfo, _, hosts = host_part.rpartition("@") - user, passwd = parse_userinfo(userinfo) - else: - hosts = host_part - - if "/" in hosts: - raise InvalidURI("Any '/' in a unix domain socket must be percent-encoded: %s" % host_part) - - hosts = unquote_plus(hosts) - fqdn = None - srv_max_hosts = srv_max_hosts or options.get("srvMaxHosts") - if is_srv: - if options.get("directConnection"): - raise ConfigurationError(f"Cannot specify directConnection=true with {SRV_SCHEME} URIs") - nodes = split_hosts(hosts, default_port=None) - if len(nodes) != 1: - raise InvalidURI(f"{SRV_SCHEME} URIs must include one, and only one, hostname") - fqdn, port = nodes[0] - if port is not None: - raise InvalidURI(f"{SRV_SCHEME} URIs must not include a port number") - - # Use the connection timeout. connectTimeoutMS passed as a keyword - # argument overrides the same option passed in the connection string. 
- connect_timeout = connect_timeout or options.get("connectTimeoutMS") - dns_resolver = _SrvResolver(fqdn, connect_timeout, srv_service_name, srv_max_hosts) - nodes = dns_resolver.get_hosts() - dns_options = dns_resolver.get_options() - if dns_options: - parsed_dns_options = split_options(dns_options, validate, warn, normalize) - if set(parsed_dns_options) - _ALLOWED_TXT_OPTS: - raise ConfigurationError( - "Only authSource, replicaSet, and loadBalanced are supported from DNS" - ) - for opt, val in parsed_dns_options.items(): - if opt not in options: - options[opt] = val - if options.get("loadBalanced") and srv_max_hosts: - raise InvalidURI("You cannot specify loadBalanced with srvMaxHosts") - if options.get("replicaSet") and srv_max_hosts: - raise InvalidURI("You cannot specify replicaSet with srvMaxHosts") - if "tls" not in options and "ssl" not in options: - options["tls"] = True if validate else "true" - elif not is_srv and options.get("srvServiceName") is not None: - raise ConfigurationError( - "The srvServiceName option is only allowed with 'mongodb+srv://' URIs" - ) - elif not is_srv and srv_max_hosts: - raise ConfigurationError( - "The srvMaxHosts option is only allowed with 'mongodb+srv://' URIs" - ) - else: - nodes = split_hosts(hosts, default_port=default_port) - - _check_options(nodes, options) - - return { - "nodelist": nodes, - "username": user, - "password": passwd, - "database": dbase, - "collection": collection, - "options": options, - "fqdn": fqdn, - } - - -def _parse_kms_tls_options(kms_tls_options: Optional[Mapping[str, Any]]) -> dict[str, SSLContext]: - """Parse KMS TLS connection options.""" - if not kms_tls_options: - return {} - if not isinstance(kms_tls_options, dict): - raise TypeError("kms_tls_options must be a dict") - contexts = {} - for provider, options in kms_tls_options.items(): - if not isinstance(options, dict): - raise TypeError(f'kms_tls_options["{provider}"] must be a dict') - options.setdefault("tls", True) - opts = _CaseInsensitiveDictionary(options) - opts = _handle_security_options(opts) - opts = _normalize_options(opts) - opts = cast(_CaseInsensitiveDictionary, validate_options(opts)) - ssl_context, allow_invalid_hostnames = _parse_ssl_options(opts) - if ssl_context is None: - raise ConfigurationError("TLS is required for KMS providers") - if allow_invalid_hostnames: - raise ConfigurationError("Insecure TLS options prohibited") - - for n in [ - "tlsInsecure", - "tlsAllowInvalidCertificates", - "tlsAllowInvalidHostnames", - "tlsDisableCertificateRevocationCheck", - ]: - if n in opts: - raise ConfigurationError(f"Insecure TLS options prohibited: {n}") - contexts[provider] = ssl_context - return contexts +from pymongo.errors import InvalidURI +from pymongo.synchronous.uri_parser import * # noqa: F403 +from pymongo.synchronous.uri_parser import __doc__ as original_doc +from pymongo.uri_parser_shared import * # noqa: F403 + +__doc__ = original_doc +__all__ = [ # noqa: F405 + "parse_userinfo", + "parse_ipv6_literal_host", + "parse_host", + "validate_options", + "split_options", + "split_hosts", + "parse_uri", +] if __name__ == "__main__": import pprint try: - pprint.pprint(parse_uri(sys.argv[1])) # noqa: T203 + pprint.pprint(parse_uri(sys.argv[1])) # noqa: F405, T203 except InvalidURI as exc: print(exc) # noqa: T201 sys.exit(0) diff --git a/pymongo/uri_parser_shared.py b/pymongo/uri_parser_shared.py new file mode 100644 index 0000000000..59168d1e9f --- /dev/null +++ b/pymongo/uri_parser_shared.py @@ -0,0 +1,614 @@ +# Copyright 2011-present MongoDB, Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); you +# may not use this file except in compliance with the License. You +# may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. See the License for the specific language governing +# permissions and limitations under the License. + + +"""Tools to parse and validate a MongoDB URI. + +.. seealso:: This module is compatible with both the synchronous and asynchronous PyMongo APIs. +""" +from __future__ import annotations + +import re +import sys +import warnings +from typing import ( + TYPE_CHECKING, + Any, + Mapping, + MutableMapping, + Optional, + Sized, + Union, + cast, +) +from urllib.parse import unquote_plus + +from pymongo.asynchronous.srv_resolver import _have_dnspython +from pymongo.client_options import _parse_ssl_options +from pymongo.common import ( + INTERNAL_URI_OPTION_NAME_MAP, + URI_OPTIONS_DEPRECATION_MAP, + _CaseInsensitiveDictionary, + get_validated_options, +) +from pymongo.errors import ConfigurationError, InvalidURI +from pymongo.typings import _Address + +if TYPE_CHECKING: + from pymongo.pyopenssl_context import SSLContext + +SCHEME = "mongodb://" +SCHEME_LEN = len(SCHEME) +SRV_SCHEME = "mongodb+srv://" +SRV_SCHEME_LEN = len(SRV_SCHEME) +DEFAULT_PORT = 27017 + +URI_OPTIONS = frozenset( + [ + "appname", + "authMechanism", + "authMechanismProperties", + "authSource", + "compressors", + "connectTimeoutMS", + "directConnection", + "heartbeatFrequencyMS", + "journal", + "loadBalanced", + "localThresholdMS", + "maxIdleTimeMS", + "maxPoolSize", + "maxConnecting", + "maxStalenessSeconds", + "minPoolSize", + "proxyHost", + "proxyPort", + "proxyUsername", + "proxyPassword", + "readConcernLevel", + "readPreference", + "readPreferenceTags", + "replicaSet", + "retryReads", + "retryWrites", + "serverMonitoringMode", + "serverSelectionTimeoutMS", + "serverSelectionTryOnce", + "socketTimeoutMS", + "srvMaxHosts", + "srvServiceName", + "ssl", + "tls", + "tlsAllowInvalidCertificates", + "tlsAllowInvalidHostnames", + "tlsCAFile", + "tlsCertificateKeyFile", + "tlsCertificateKeyFilePassword", + "tlsDisableCertificateRevocationCheck", + "tlsDisableOCSPEndpointCheck", + "tlsInsecure", + "w", + "waitQueueTimeoutMS", + "wTimeoutMS", + "zlibCompressionLevel", + ] +) + + +def _unquoted_percent(s: str) -> bool: + """Check for unescaped percent signs. + + :param s: A string. `s` can have things like '%25', '%2525', + and '%E2%85%A8' but cannot have unquoted percent like '%foo'. + """ + for i in range(len(s)): + if s[i] == "%": + sub = s[i : i + 3] + # If unquoting yields the same string this means there was an + # unquoted %. + if unquote_plus(sub) == sub: + return True + return False + + +def parse_userinfo(userinfo: str) -> tuple[str, str]: + """Validates the format of user information in a MongoDB URI. + Reserved characters that are gen-delimiters (":", "/", "?", "#", "[", + "]", "@") as per RFC 3986 must be escaped. + + Returns a 2-tuple containing the unescaped username followed + by the unescaped password. 
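+
+    A sketch of the expected round trip, with purely illustrative
+    credentials (the password below is hypothetical and percent-escaped
+    with ``urllib.parse.quote_plus``)::
+
+        >>> parse_userinfo("alice:p%40ssword")
+        ('alice', 'p@ssword')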
+
+    :param userinfo: A string of the form <username>:<password>
+    """
+    if "@" in userinfo or userinfo.count(":") > 1 or _unquoted_percent(userinfo):
+        raise InvalidURI(
+            "Username and password must be escaped according to "
+            "RFC 3986, use urllib.parse.quote_plus"
+        )
+
+    user, _, passwd = userinfo.partition(":")
+    # No password is expected with GSSAPI authentication.
+    if not user:
+        raise InvalidURI("The empty string is not a valid username")
+
+    return unquote_plus(user), unquote_plus(passwd)
+
+
+def parse_ipv6_literal_host(
+    entity: str, default_port: Optional[int]
+) -> tuple[str, Optional[Union[str, int]]]:
+    """Validates an IPv6 literal host:port string.
+
+    Returns a 2-tuple of IPv6 literal followed by port where
+    port is default_port if it wasn't specified in entity.
+
+    :param entity: A string that represents an IPv6 literal enclosed
+        in braces (e.g. '[::1]' or '[::1]:27017').
+    :param default_port: The port number to use when one wasn't
+        specified in entity.
+    """
+    if entity.find("]") == -1:
+        raise ValueError(
+            "an IPv6 address literal must be enclosed in '[' and ']' according to RFC 2732."
+        )
+    i = entity.find("]:")
+    if i == -1:
+        return entity[1:-1], default_port
+    return entity[1:i], entity[i + 2 :]
+
+
+def parse_host(entity: str, default_port: Optional[int] = DEFAULT_PORT) -> _Address:
+    """Validates a host string
+
+    Returns a 2-tuple of host followed by port where port is default_port
+    if it wasn't specified in the string.
+
+    :param entity: A host or host:port string where host could be a
+        hostname or IP address.
+    :param default_port: The port number to use when one wasn't
+        specified in entity.
+    """
+    host = entity
+    port: Optional[Union[str, int]] = default_port
+    if entity[0] == "[":
+        host, port = parse_ipv6_literal_host(entity, default_port)
+    elif entity.endswith(".sock"):
+        return entity, default_port
+    elif entity.find(":") != -1:
+        if entity.count(":") > 1:
+            raise ValueError(
+                "Reserved characters such as ':' must be "
+                "escaped according to RFC 2396. An IPv6 "
+                "address literal must be enclosed in '[' "
+                "and ']' according to RFC 2732."
+            )
+        host, port = host.split(":", 1)
+    if isinstance(port, str):
+        if not port.isdigit():
+            # Special case check for mistakes like "mongodb://localhost:27017 ".
+            if all(c.isspace() or c.isdigit() for c in port):
+                for c in port:
+                    if c.isspace():
+                        raise ValueError(f"Port contains whitespace character: {c!r}")
+
+            # A non-digit port indicates that the URI is invalid, likely because the password
+            # or username were not escaped.
+            raise ValueError(
+                "Port contains non-digit characters. Hint: username and password must be escaped according to "
+                "RFC 3986, use urllib.parse.quote_plus"
+            )
+        if int(port) > 65535 or int(port) <= 0:
+            raise ValueError("Port must be an integer between 0 and 65535")
+        port = int(port)
+
+    # Normalize hostname to lowercase, since DNS is case-insensitive:
+    # https://tools.ietf.org/html/rfc4343
+    # This prevents useless rediscovery if "foo.com" is in the seed list but
+    # "FOO.com" is in the hello response.
+    return host.lower(), port
+
+
+# Options whose values are implicitly determined by tlsInsecure.
+_IMPLICIT_TLSINSECURE_OPTS = {
+    "tlsallowinvalidcertificates",
+    "tlsallowinvalidhostnames",
+    "tlsdisableocspendpointcheck",
+}
+
+
+def _parse_options(opts: str, delim: Optional[str]) -> _CaseInsensitiveDictionary:
+    """Helper method for split_options which creates the options dict.
+ Also handles the creation of a list for the URI tag_sets/ + readpreferencetags portion, and the use of a unicode options string. + """ + options = _CaseInsensitiveDictionary() + for uriopt in opts.split(delim): + key, value = uriopt.split("=") + if key.lower() == "readpreferencetags": + options.setdefault(key, []).append(value) + else: + if key in options: + warnings.warn(f"Duplicate URI option '{key}'.", stacklevel=2) + if key.lower() == "authmechanismproperties": + val = value + else: + val = unquote_plus(value) + options[key] = val + + return options + + +def _handle_security_options(options: _CaseInsensitiveDictionary) -> _CaseInsensitiveDictionary: + """Raise appropriate errors when conflicting TLS options are present in + the options dictionary. + + :param options: Instance of _CaseInsensitiveDictionary containing + MongoDB URI options. + """ + # Implicitly defined options must not be explicitly specified. + tlsinsecure = options.get("tlsinsecure") + if tlsinsecure is not None: + for opt in _IMPLICIT_TLSINSECURE_OPTS: + if opt in options: + err_msg = "URI options %s and %s cannot be specified simultaneously." + raise InvalidURI( + err_msg % (options.cased_key("tlsinsecure"), options.cased_key(opt)) + ) + + # Handle co-occurence of OCSP & tlsAllowInvalidCertificates options. + tlsallowinvalidcerts = options.get("tlsallowinvalidcertificates") + if tlsallowinvalidcerts is not None: + if "tlsdisableocspendpointcheck" in options: + err_msg = "URI options %s and %s cannot be specified simultaneously." + raise InvalidURI( + err_msg + % ("tlsallowinvalidcertificates", options.cased_key("tlsdisableocspendpointcheck")) + ) + if tlsallowinvalidcerts is True: + options["tlsdisableocspendpointcheck"] = True + + # Handle co-occurence of CRL and OCSP-related options. + tlscrlfile = options.get("tlscrlfile") + if tlscrlfile is not None: + for opt in ("tlsinsecure", "tlsallowinvalidcertificates", "tlsdisableocspendpointcheck"): + if options.get(opt) is True: + err_msg = "URI option %s=True cannot be specified when CRL checking is enabled." + raise InvalidURI(err_msg % (opt,)) + + if "ssl" in options and "tls" in options: + + def truth_value(val: Any) -> Any: + if val in ("true", "false"): + return val == "true" + if isinstance(val, bool): + return val + return val + + if truth_value(options.get("ssl")) != truth_value(options.get("tls")): + err_msg = "Can not specify conflicting values for URI options %s and %s." + raise InvalidURI(err_msg % (options.cased_key("ssl"), options.cased_key("tls"))) + + return options + + +def _handle_option_deprecations(options: _CaseInsensitiveDictionary) -> _CaseInsensitiveDictionary: + """Issue appropriate warnings when deprecated options are present in the + options dictionary. Removes deprecated option key, value pairs if the + options dictionary is found to also have the renamed option. + + :param options: Instance of _CaseInsensitiveDictionary containing + MongoDB URI options. + """ + for optname in list(options): + if optname in URI_OPTIONS_DEPRECATION_MAP: + mode, message = URI_OPTIONS_DEPRECATION_MAP[optname] + if mode == "renamed": + newoptname = message + if newoptname in options: + warn_msg = "Deprecated option '%s' ignored in favor of '%s'." + warnings.warn( + warn_msg % (options.cased_key(optname), options.cased_key(newoptname)), + DeprecationWarning, + stacklevel=2, + ) + options.pop(optname) + continue + warn_msg = "Option '%s' is deprecated, use '%s' instead." 
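+                # Only the deprecated spelling was provided; warn but keep
+                # the option so its value is still honored.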
+ warnings.warn( + warn_msg % (options.cased_key(optname), newoptname), + DeprecationWarning, + stacklevel=2, + ) + elif mode == "removed": + warn_msg = "Option '%s' is deprecated. %s." + warnings.warn( + warn_msg % (options.cased_key(optname), message), + DeprecationWarning, + stacklevel=2, + ) + + return options + + +def _normalize_options(options: _CaseInsensitiveDictionary) -> _CaseInsensitiveDictionary: + """Normalizes option names in the options dictionary by converting them to + their internally-used names. + + :param options: Instance of _CaseInsensitiveDictionary containing + MongoDB URI options. + """ + # Expand the tlsInsecure option. + tlsinsecure = options.get("tlsinsecure") + if tlsinsecure is not None: + for opt in _IMPLICIT_TLSINSECURE_OPTS: + # Implicit options are logically the same as tlsInsecure. + options[opt] = tlsinsecure + + for optname in list(options): + intname = INTERNAL_URI_OPTION_NAME_MAP.get(optname, None) + if intname is not None: + options[intname] = options.pop(optname) + + return options + + +def validate_options(opts: Mapping[str, Any], warn: bool = False) -> MutableMapping[str, Any]: + """Validates and normalizes options passed in a MongoDB URI. + + Returns a new dictionary of validated and normalized options. If warn is + False then errors will be thrown for invalid options, otherwise they will + be ignored and a warning will be issued. + + :param opts: A dict of MongoDB URI options. + :param warn: If ``True`` then warnings will be logged and + invalid options will be ignored. Otherwise invalid options will + cause errors. + """ + return get_validated_options(opts, warn) + + +def split_options( + opts: str, validate: bool = True, warn: bool = False, normalize: bool = True +) -> MutableMapping[str, Any]: + """Takes the options portion of a MongoDB URI, validates each option + and returns the options in a dictionary. + + :param opt: A string representing MongoDB URI options. + :param validate: If ``True`` (the default), validate and normalize all + options. + :param warn: If ``False`` (the default), suppress all warnings raised + during validation of options. + :param normalize: If ``True`` (the default), renames all options to their + internally-used names. + """ + and_idx = opts.find("&") + semi_idx = opts.find(";") + try: + if and_idx >= 0 and semi_idx >= 0: + raise InvalidURI("Can not mix '&' and ';' for option separators") + elif and_idx >= 0: + options = _parse_options(opts, "&") + elif semi_idx >= 0: + options = _parse_options(opts, ";") + elif opts.find("=") != -1: + options = _parse_options(opts, None) + else: + raise ValueError + except ValueError: + raise InvalidURI("MongoDB URI options are key=value pairs") from None + + options = _handle_security_options(options) + + options = _handle_option_deprecations(options) + + if normalize: + options = _normalize_options(options) + + if validate: + options = cast(_CaseInsensitiveDictionary, validate_options(options, warn)) + if options.get("authsource") == "": + raise InvalidURI("the authSource database cannot be an empty string") + + return options + + +def split_hosts(hosts: str, default_port: Optional[int] = DEFAULT_PORT) -> list[_Address]: + """Takes a string of the form host1[:port],host2[:port]... and + splits it into (host, port) tuples. If [:port] isn't present the + default_port is used. + + Returns a set of 2-tuples containing the host name (or IP) followed by + port number. + + :param hosts: A string of the form host1[:port],host2[:port],... 
+ :param default_port: The port number to use when one wasn't specified + for a host. + """ + nodes = [] + for entity in hosts.split(","): + if not entity: + raise ConfigurationError("Empty host (or extra comma in host list)") + port = default_port + # Unix socket entities don't have ports + if entity.endswith(".sock"): + port = None + nodes.append(parse_host(entity, port)) + return nodes + + +# Prohibited characters in database name. DB names also can't have ".", but for +# backward-compat we allow "db.collection" in URI. +_BAD_DB_CHARS = re.compile("[" + re.escape(r'/ "$') + "]") + +_ALLOWED_TXT_OPTS = frozenset( + ["authsource", "authSource", "replicaset", "replicaSet", "loadbalanced", "loadBalanced"] +) + + +def _check_options(nodes: Sized, options: Mapping[str, Any]) -> None: + # Ensure directConnection was not True if there are multiple seeds. + if len(nodes) > 1 and options.get("directconnection"): + raise ConfigurationError("Cannot specify multiple hosts with directConnection=true") + + if options.get("loadbalanced"): + if len(nodes) > 1: + raise ConfigurationError("Cannot specify multiple hosts with loadBalanced=true") + if options.get("directconnection"): + raise ConfigurationError("Cannot specify directConnection=true with loadBalanced=true") + if options.get("replicaset"): + raise ConfigurationError("Cannot specify replicaSet with loadBalanced=true") + + +def _parse_kms_tls_options( + kms_tls_options: Optional[Mapping[str, Any]], + is_sync: bool, +) -> dict[str, SSLContext]: + """Parse KMS TLS connection options.""" + if not kms_tls_options: + return {} + if not isinstance(kms_tls_options, dict): + raise TypeError("kms_tls_options must be a dict") + contexts = {} + for provider, options in kms_tls_options.items(): + if not isinstance(options, dict): + raise TypeError(f'kms_tls_options["{provider}"] must be a dict') + options.setdefault("tls", True) + opts = _CaseInsensitiveDictionary(options) + opts = _handle_security_options(opts) + opts = _normalize_options(opts) + opts = cast(_CaseInsensitiveDictionary, validate_options(opts)) + ssl_context, allow_invalid_hostnames = _parse_ssl_options(opts, is_sync) + if ssl_context is None: + raise ConfigurationError("TLS is required for KMS providers") + if allow_invalid_hostnames: + raise ConfigurationError("Insecure TLS options prohibited") + + for n in [ + "tlsInsecure", + "tlsAllowInvalidCertificates", + "tlsAllowInvalidHostnames", + "tlsDisableCertificateRevocationCheck", + ]: + if n in opts: + raise ConfigurationError(f"Insecure TLS options prohibited: {n}") + contexts[provider] = ssl_context + return contexts + + +def _validate_uri( + uri: str, + default_port: Optional[int] = DEFAULT_PORT, + validate: bool = True, + warn: bool = False, + normalize: bool = True, + srv_max_hosts: Optional[int] = None, +) -> dict[str, Any]: + if uri.startswith(SCHEME): + is_srv = False + scheme_free = uri[SCHEME_LEN:] + elif uri.startswith(SRV_SCHEME): + if not _have_dnspython(): + python_path = sys.executable or "python" + raise ConfigurationError( + 'The "dnspython" module must be ' + "installed to use mongodb+srv:// URIs. 
" + "To fix this error install pymongo again:\n " + "%s -m pip install pymongo>=4.3" % (python_path) + ) + is_srv = True + scheme_free = uri[SRV_SCHEME_LEN:] + else: + raise InvalidURI(f"Invalid URI scheme: URI must begin with '{SCHEME}' or '{SRV_SCHEME}'") + + if not scheme_free: + raise InvalidURI("Must provide at least one hostname or IP") + + user = None + passwd = None + dbase = None + collection = None + options = _CaseInsensitiveDictionary() + + host_plus_db_part, _, opts = scheme_free.partition("?") + if "/" in host_plus_db_part: + host_part, _, dbase = host_plus_db_part.partition("/") + else: + host_part = host_plus_db_part + + if dbase: + dbase = unquote_plus(dbase) + if "." in dbase: + dbase, collection = dbase.split(".", 1) + if _BAD_DB_CHARS.search(dbase): + raise InvalidURI('Bad database name "%s"' % dbase) + else: + dbase = None + + if opts: + options.update(split_options(opts, validate, warn, normalize)) + if "@" in host_part: + userinfo, _, hosts = host_part.rpartition("@") + user, passwd = parse_userinfo(userinfo) + else: + hosts = host_part + + if "/" in hosts: + raise InvalidURI("Any '/' in a unix domain socket must be percent-encoded: %s" % host_part) + + hosts = unquote_plus(hosts) + fqdn = None + srv_max_hosts = srv_max_hosts or options.get("srvMaxHosts") + if is_srv: + if options.get("directConnection"): + raise ConfigurationError(f"Cannot specify directConnection=true with {SRV_SCHEME} URIs") + nodes = split_hosts(hosts, default_port=None) + if len(nodes) != 1: + raise InvalidURI(f"{SRV_SCHEME} URIs must include one, and only one, hostname") + fqdn, port = nodes[0] + if port is not None: + raise InvalidURI(f"{SRV_SCHEME} URIs must not include a port number") + elif not is_srv and options.get("srvServiceName") is not None: + raise ConfigurationError( + "The srvServiceName option is only allowed with 'mongodb+srv://' URIs" + ) + elif not is_srv and srv_max_hosts: + raise ConfigurationError( + "The srvMaxHosts option is only allowed with 'mongodb+srv://' URIs" + ) + else: + nodes = split_hosts(hosts, default_port=default_port) + + _check_options(nodes, options) + + return { + "nodelist": nodes, + "username": user, + "password": passwd, + "database": dbase, + "collection": collection, + "options": options, + "fqdn": fqdn, + } + + +def _make_options_case_sensitive(options: _CaseInsensitiveDictionary) -> dict[str, Any]: + case_sensitive = {} + for option in URI_OPTIONS: + if option.lower() in options: + case_sensitive[option] = options[option] + options.pop(option) + for k, v in options.items(): + case_sensitive[k] = v + return case_sensitive diff --git a/pymongo/write_concern.py b/pymongo/write_concern.py index 67c9549897..1f9da7af2e 100644 --- a/pymongo/write_concern.py +++ b/pymongo/write_concern.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -74,7 +74,7 @@ def __init__( if wtimeout is not None: if not isinstance(wtimeout, int): - raise TypeError("wtimeout must be an integer") + raise TypeError(f"wtimeout must be an integer, not {type(wtimeout)}") if wtimeout < 0: raise ValueError("wtimeout cannot be less than 0") self.__document["wtimeout"] = wtimeout @@ -98,7 +98,7 @@ def __init__( raise ValueError("w cannot be less than 0") self.__acknowledged = w > 0 elif not isinstance(w, str): - raise TypeError("w must be an integer or string") + raise TypeError(f"w must be an integer or string, not {type(w)}") self.__document["w"] = w self.__server_default = not self.__document @@ -127,7 +127,7 @@ def acknowledged(self) -> bool: def __repr__(self) -> str: return "WriteConcern({})".format( - ", ".join("{}={}".format(*kvt) for kvt in self.__document.items()) + ", ".join(f"{k}={v!r}" for k, v in self.__document.items()) ) def __eq__(self, other: Any) -> bool: diff --git a/pyproject.toml b/pyproject.toml index 9a29a777fc..623eb6c164 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,7 +5,7 @@ build-backend = "hatchling.build" [project] name = "pymongo" dynamic = ["version", "dependencies", "optional-dependencies"] -description = "Python driver for MongoDB " +description = "PyMongo - the Official MongoDB Python driver" readme = "README.md" license = {file="LICENSE"} requires-python = ">=3.9" @@ -35,6 +35,7 @@ classifiers = [ "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", "Topic :: Database", "Typing :: Typed", ] @@ -45,6 +46,25 @@ Documentation = "https://www.mongodb.com/docs/languages/python/pymongo-driver/cu Source = "https://github.com/mongodb/mongo-python-driver" Tracker = "https://jira.mongodb.org/projects/PYTHON/issues" +[dependency-groups] +dev = [] +pip = ["pip"] +gevent = ["gevent>=20.6.0"] +coverage = [ + "pytest-cov", + "coverage>=5,<=7.10.6" +] +mockupdb = [ + "mockupdb@git+https://github.com/mongodb-labs/mongo-mockup-db@master" +] +perf = ["simplejson>=3.17.0"] +typing = [ + "mypy==1.18.2", + "pyright==1.1.406", + "typing_extensions", + "pip" +] + # Used to call hatch_build.py [tool.hatch.build.hooks.custom] @@ -86,26 +106,28 @@ filterwarnings = [ "module:Wire protocol compression with:UserWarning", "module:GridIn property:DeprecationWarning", "module:GridOut property:DeprecationWarning", + # pytest-asyncio known issue: https://github.com/pytest-dev/pytest-asyncio/issues/1032 + "module:.*WindowsSelectorEventLoopPolicy:DeprecationWarning", + "module:.*et_event_loop_policy:DeprecationWarning", # TODO: Remove as part of PYTHON-3923. 
- "module:unclosed =1.16.0,<3.0.0 +dnspython>=2.6.1,<3.0.0 diff --git a/requirements/docs.txt b/requirements/docs.txt index 7d52c1cb3e..54ebf3625d 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -3,4 +3,4 @@ sphinx_rtd_theme>=2,<4 readthedocs-sphinx-search~=0.3 sphinxcontrib-shellcheck>=1,<2 sphinx-autobuild>=2020.9.1 -furo==2024.8.6 +furo==2025.9.25 diff --git a/requirements/encryption.txt b/requirements/encryption.txt index 1a8c14844c..eec1c990f7 100644 --- a/requirements/encryption.txt +++ b/requirements/encryption.txt @@ -1,3 +1,3 @@ pymongo-auth-aws>=1.1.0,<2.0.0 -pymongocrypt>=1.10.0,<2.0.0 -certifi;os.name=='nt' or sys_platform=='darwin' +pymongocrypt>=1.13.0,<2.0.0 +certifi>=2023.7.22;os.name=='nt' or sys_platform=='darwin' diff --git a/requirements/ocsp.txt b/requirements/ocsp.txt index 6570b0905a..39dbddef14 100644 --- a/requirements/ocsp.txt +++ b/requirements/ocsp.txt @@ -5,7 +5,7 @@ # Fallback to certifi on Windows if we can't load CA certs from the system # store and just use certifi on macOS. # https://www.pyopenssl.org/en/stable/api/ssl.html#OpenSSL.SSL.Context.set_default_verify_paths -certifi;os.name=='nt' or sys_platform=='darwin' +certifi>=2023.7.22;os.name=='nt' or sys_platform=='darwin' pyopenssl>=17.2.0 requests<3.0.0 cryptography>=2.5 diff --git a/requirements/test.txt b/requirements/test.txt index 135114feff..566cade7ec 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,2 +1,3 @@ pytest>=8.2 pytest-asyncio>=0.24.0 +importlib_metadata>=7.0;python_version < "3.13" diff --git a/requirements/typing.txt b/requirements/typing.txt deleted file mode 100644 index 189f4f8719..0000000000 --- a/requirements/typing.txt +++ /dev/null @@ -1,7 +0,0 @@ -mypy==1.13.0 -pyright==1.1.391 -typing_extensions --r ./encryption.txt --r ./ocsp.txt --r ./zstd.txt --r ./aws.txt diff --git a/strict_pyrightconfig.json b/strict_pyrightconfig.json new file mode 100644 index 0000000000..9684598cd9 --- /dev/null +++ b/strict_pyrightconfig.json @@ -0,0 +1 @@ +{"strict": ["tests/test_typing_strict.py"]} \ No newline at end of file diff --git a/test/__init__.py b/test/__init__.py index d3a63db2d5..1ee2c283d6 100644 --- a/test/__init__.py +++ b/test/__init__.py @@ -17,6 +17,7 @@ import asyncio import gc +import inspect import logging import multiprocessing import os @@ -29,31 +30,11 @@ import traceback import unittest import warnings -from asyncio import iscoroutinefunction -from test.helpers import ( - COMPRESSORS, - IS_SRV, - MONGODB_API_VERSION, - MULTI_MONGOS_LB_URI, - TEST_LOADBALANCER, - TEST_SERVERLESS, - TLS_OPTIONS, - SystemCertsPatcher, - client_knobs, - db_pwd, - db_user, - global_knobs, - host, - is_server_resolvable, - port, - print_running_topology, - print_thread_stacks, - print_thread_tracebacks, - sanitize_cmd, - sanitize_reply, -) +from inspect import iscoroutinefunction -from pymongo.uri_parser import parse_uri +from pymongo.encryption_options import _HAVE_PYMONGOCRYPT +from pymongo.errors import AutoReconnect +from pymongo.synchronous.uri_parser import parse_uri try: import ipaddress @@ -63,7 +44,6 @@ HAVE_IPADDRESS = False from contextlib import contextmanager from functools import partial, wraps -from test.version import Version from typing import Any, Callable, Dict, Generator, overload from unittest import SkipTest from urllib.parse import quote_plus @@ -78,6 +58,30 @@ from pymongo.synchronous.database import Database from pymongo.synchronous.mongo_client import MongoClient +sys.path[0:0] = [""] + +from test.helpers import client_knobs, 
global_knobs +from test.helpers_shared import ( + COMPRESSORS, + IS_SRV, + MONGODB_API_VERSION, + MULTI_MONGOS_LB_URI, + TEST_LOADBALANCER, + TLS_OPTIONS, + SystemCertsPatcher, + db_pwd, + db_user, + host, + is_server_resolvable, + port, + print_running_topology, + print_thread_stacks, + print_thread_tracebacks, + sanitize_cmd, + sanitize_reply, +) +from test.version import Version + _IS_SYNC = True @@ -117,11 +121,9 @@ def __init__(self): self.sessions_enabled = False self.client = None # type: ignore self.conn_lock = threading.Lock() - self.is_data_lake = False self.load_balancer = TEST_LOADBALANCER - self.serverless = TEST_SERVERLESS self._fips_enabled = None - if self.load_balancer or self.serverless: + if self.load_balancer: self.default_client_options["loadBalanced"] = True if COMPRESSORS: self.default_client_options["compressors"] = COMPRESSORS @@ -163,7 +165,7 @@ def uri(self): @property def hello(self): if not self._hello: - if self.serverless or self.load_balancer: + if self.load_balancer: self._hello = self.client.admin.command(HelloCompat.CMD) else: self._hello = self.client.admin.command(HelloCompat.LEGACY_CMD) @@ -196,15 +198,6 @@ def _init_client(self): self.mongoses = [] self.connection_attempts = [] self.client = self._connect(host, port) - if self.client is not None: - # Return early when connected to dataLake as mongohoused does not - # support the getCmdLineOpts command and is tested without TLS. - if os.environ.get("TEST_DATA_LAKE"): - self.is_data_lake = True - self.auth_enabled = True - self.client = self._connect(host, port, username=db_user, password=db_pwd) - self.connected = True - return if HAVE_SSL and not self.client: # Is MongoDB configured for SSL? @@ -217,24 +210,21 @@ def _init_client(self): if self.client: self.connected = True - if self.serverless: - self.auth_enabled = True - else: - try: - self.cmd_line = self.client.admin.command("getCmdLineOpts") - except pymongo.errors.OperationFailure as e: - assert e.details is not None - msg = e.details.get("errmsg", "") - if e.code == 13 or "unauthorized" in msg or "login" in msg: - # Unauthorized. - self.auth_enabled = True - else: - raise + try: + self.cmd_line = self.client.admin.command("getCmdLineOpts") + except pymongo.errors.OperationFailure as e: + assert e.details is not None + msg = e.details.get("errmsg", "") + if e.code == 13 or "unauthorized" in msg or "login" in msg: + # Unauthorized. + self.auth_enabled = True else: - self.auth_enabled = self._server_started_with_auth() + raise + else: + self.auth_enabled = self._server_started_with_auth() if self.auth_enabled: - if not self.serverless and not IS_SRV: + if not IS_SRV: # See if db_user already exists. if not self._check_user_provided(): _create_user(self.client.admin, db_user, db_pwd) @@ -254,13 +244,10 @@ def _init_client(self): # May not have this if OperationFailure was raised earlier. self.cmd_line = self.client.admin.command("getCmdLineOpts") - if self.serverless: - self.server_status = {} - else: - self.server_status = self.client.admin.command("serverStatus") - if self.storage_engine == "mmapv1": - # MMAPv1 does not support retryWrites=True. - self.default_client_options["retryWrites"] = False + self.server_status = self.client.admin.command("serverStatus") + if self.storage_engine == "mmapv1": + # MMAPv1 does not support retryWrites=True. 
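+            # Flip the default off here so every test client built from
+            # these options inherits retryWrites=False.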
+ self.default_client_options["retryWrites"] = False hello = self.hello self.sessions_enabled = "logicalSessionTimeoutMinutes" in hello @@ -297,42 +284,33 @@ def _init_client(self): self.w = len(hello.get("hosts", [])) or 1 self.version = Version.from_client(self.client) - if self.serverless: - self.server_parameters = { - "requireApiVersion": False, - "enableTestCommands": True, - } + self.server_parameters = self.client.admin.command("getParameter", "*") + assert self.cmd_line is not None + if self.server_parameters["enableTestCommands"]: self.test_commands_enabled = True - self.has_ipv6 = False - else: - self.server_parameters = self.client.admin.command("getParameter", "*") - assert self.cmd_line is not None - if self.server_parameters["enableTestCommands"]: + elif "parsed" in self.cmd_line: + params = self.cmd_line["parsed"].get("setParameter", []) + if "enableTestCommands=1" in params: self.test_commands_enabled = True - elif "parsed" in self.cmd_line: - params = self.cmd_line["parsed"].get("setParameter", []) - if "enableTestCommands=1" in params: + else: + params = self.cmd_line["parsed"].get("setParameter", {}) + if params.get("enableTestCommands") == "1": self.test_commands_enabled = True - else: - params = self.cmd_line["parsed"].get("setParameter", {}) - if params.get("enableTestCommands") == "1": - self.test_commands_enabled = True - self.has_ipv6 = self._server_started_with_ipv6() + self.has_ipv6 = self._server_started_with_ipv6() self.is_mongos = (self.hello).get("msg") == "isdbgrid" if self.is_mongos: address = self.client.address self.mongoses.append(address) - if not self.serverless: - # Check for another mongos on the next port. - assert address is not None - next_address = address[0], address[1] + 1 - mongos_client = self._connect(*next_address, **self.default_client_options) - if mongos_client: - hello = mongos_client.admin.command(HelloCompat.LEGACY_CMD) - if hello.get("msg") == "isdbgrid": - self.mongoses.append(next_address) - mongos_client.close() + # Check for another mongos on the next port. + assert address is not None + next_address = address[0], address[1] + 1 + mongos_client = self._connect(*next_address, **self.default_client_options) + if mongos_client: + hello = mongos_client.admin.command(HelloCompat.LEGACY_CMD) + if hello.get("msg") == "isdbgrid": + self.mongoses.append(next_address) + mongos_client.close() def init(self): with self.conn_lock: @@ -381,10 +359,12 @@ def fips_enabled(self): if self._fips_enabled is not None: return self._fips_enabled try: - subprocess.check_call(["fips-mode-setup", "--is-enabled"]) + subprocess.run(["fips-mode-setup", "--is-enabled"], check=True) self._fips_enabled = True except (subprocess.SubprocessError, FileNotFoundError): self._fips_enabled = False + if os.environ.get("REQUIRE_FIPS") and not self._fips_enabled: + raise RuntimeError("Expected FIPS to be enabled") return self._fips_enabled def check_auth_type(self, auth_type): @@ -510,27 +490,6 @@ def require_connection(self, func): func=func, ) - def require_data_lake(self, func): - """Run a test only if we are connected to Atlas Data Lake.""" - return self._require( - lambda: self.is_data_lake, - "Not connected to Atlas Data Lake on self.pair", - func=func, - ) - - def require_no_mmap(self, func): - """Run a test only if the server is not using the MMAPv1 storage - engine. Only works for standalone and replica sets; tests are - run regardless of storage engine on sharded clusters. 
- """ - - def is_not_mmap(): - if self.is_mongos: - return True - return self.storage_engine != "mmapv1" - - return self._require(is_not_mmap, "Storage engine must not be MMAPv1", func=func) - def require_version_min(self, *ver): """Run a test only if the server version is at least ``version``.""" other_version = Version(*ver) @@ -547,6 +506,32 @@ def require_version_max(self, *ver): "Server version must be at most %s" % str(other_version), ) + def require_libmongocrypt_min(self, *ver): + other_version = Version(*ver) + if not _HAVE_PYMONGOCRYPT: + version = Version.from_string("0.0.0") + else: + from pymongocrypt import libmongocrypt_version + + version = Version.from_string(libmongocrypt_version()) + return self._require( + lambda: version >= other_version, + "Libmongocrypt version must be at least %s" % str(other_version), + ) + + def require_pymongocrypt_min(self, *ver): + other_version = Version(*ver) + if not _HAVE_PYMONGOCRYPT: + version = Version.from_string("0.0.0") + else: + from pymongocrypt import __version__ as pymongocrypt_version + + version = Version.from_string(pymongocrypt_version) + return self._require( + lambda: version >= other_version, + "PyMongoCrypt version must be at least %s" % str(other_version), + ) + def require_auth(self, func): """Run a test only if the server is running with auth enabled.""" return self._require( @@ -589,7 +574,7 @@ def supports_secondary_read_pref(self): if self.has_secondaries: return True if self.is_mongos: - shard = self.client.config.shards.find_one()["host"] # type:ignore[index] + shard = (self.client.config.shards.find_one())["host"] # type:ignore[index] num_members = shard.count(",") + 1 return num_members > 1 return False @@ -659,22 +644,15 @@ def require_no_load_balancer(self, func): lambda: not self.load_balancer, "Must not be connected to a load balancer", func=func ) - def require_no_serverless(self, func): - """Run a test only if the client is not connected to serverless.""" - return self._require( - lambda: not self.serverless, "Must not be connected to serverless", func=func - ) - def require_change_streams(self, func): """Run a test only if the server supports change streams.""" - return self.require_no_mmap(self.require_no_standalone(self.require_no_serverless(func))) + return self.require_no_standalone(func) def is_topology_type(self, topologies): unknown = set(topologies) - { "single", "replicaset", "sharded", - "sharded-replicaset", "load-balanced", } if unknown: @@ -689,16 +667,6 @@ def is_topology_type(self, topologies): return True if "sharded" in topologies and self.is_mongos: return True - if "sharded-replicaset" in topologies and self.is_mongos: - shards = client_context.client.config.shards.find().to_list() - for shard in shards: - # For a 3-member RS-backed sharded cluster, shard['host'] - # will be 'replicaName/ip1:port1,ip2:port2,ip3:port3' - # Otherwise it will be 'ip1:port1' - host_spec = shard["host"] - if not len(host_spec.split("/")) > 1: - return False - return True return False def require_cluster_type(self, topologies=None): @@ -781,8 +749,6 @@ def require_sessions(self, func): return self._require(lambda: self.sessions_enabled, "Sessions not supported", func=func) def supports_retryable_writes(self): - if self.storage_engine == "mmapv1": - return False if not self.sessions_enabled: return False return self.is_mongos or self.is_rs @@ -796,9 +762,6 @@ def require_retryable_writes(self, func): ) def supports_transactions(self): - if self.storage_engine == "mmapv1": - return False - if 
self.version.at_least(4, 1, 8): return self.is_mongos or self.is_rs @@ -830,6 +793,14 @@ def require_sync(self, func): lambda: _IS_SYNC, "This test only works with the synchronous API", func=func ) + def require_async(self, func): + """Run a test only if using the asynchronous API.""" # unasync: off + return self._require( + lambda: not _IS_SYNC, + "This test only works with the asynchronous API", # unasync: off + func=func, + ) + def mongos_seeds(self): return ",".join("{}:{}".format(*address) for address in self.mongoses) @@ -863,35 +834,88 @@ def max_message_size_bytes(self): # Reusable client context client_context = ClientContext() +# Global event loop for async tests. +LOOP = None -def reset_client_context(): - if _IS_SYNC: - # sync tests don't need to reset a client context - return - elif client_context.client is not None: - client_context.client.close() - client_context.client = None - client_context._init_client() + +def get_loop() -> asyncio.AbstractEventLoop: + """Get the test suite's global event loop.""" + global LOOP + if LOOP is None: + try: + LOOP = asyncio.get_running_loop() + except RuntimeError: + # no running event loop, fallback to get_event_loop. + try: + # Ignore DeprecationWarning: There is no current event loop + with warnings.catch_warnings(): + warnings.simplefilter("ignore", DeprecationWarning) + LOOP = asyncio.get_event_loop() + except RuntimeError: + LOOP = asyncio.new_event_loop() + asyncio.set_event_loop(LOOP) + return LOOP class PyMongoTestCase(unittest.TestCase): + if not _IS_SYNC: + # An async TestCase that uses a single event loop for all tests. + # Inspired by TestCase. + def setUp(self): + pass + + def tearDown(self): + pass + + def addCleanup(self, func, /, *args, **kwargs): + self.addCleanup(*(func, *args), **kwargs) + + def _callSetUp(self): + self.setUp() + self._callAsync(self.setUp) + + def _callTestMethod(self, method): + self._callMaybeAsync(method) + + def _callTearDown(self): + self._callAsync(self.tearDown) + self.tearDown() + + def _callCleanup(self, function, *args, **kwargs): + self._callMaybeAsync(function, *args, **kwargs) + + def _callAsync(self, func, /, *args, **kwargs): + assert inspect.iscoroutinefunction(func), f"{func!r} is not an async function" + return get_loop().run_until_complete(func(*args, **kwargs)) + + def _callMaybeAsync(self, func, /, *args, **kwargs): + if inspect.iscoroutinefunction(func): + return get_loop().run_until_complete(func(*args, **kwargs)) + else: + return func(*args, **kwargs) + def assertEqualCommand(self, expected, actual, msg=None): self.assertEqual(sanitize_cmd(expected), sanitize_cmd(actual), msg) def assertEqualReply(self, expected, actual, msg=None): self.assertEqual(sanitize_reply(expected), sanitize_reply(actual), msg) + @staticmethod + def configure_fail_point(client, command_args, off=False): + cmd = {"configureFailPoint": "failCommand"} + cmd.update(command_args) + if off: + cmd["mode"] = "off" + cmd.pop("data", None) + client.admin.command(cmd) + @contextmanager def fail_point(self, command_args): - cmd_on = SON([("configureFailPoint", "failCommand")]) - cmd_on.update(command_args) - client_context.client.admin.command(cmd_on) + self.configure_fail_point(client_context.client, command_args) try: yield finally: - client_context.client.admin.command( - "configureFailPoint", cmd_on["configureFailPoint"], mode="off" - ) + self.configure_fail_point(client_context.client, command_args, off=True) @contextmanager def fork( @@ -1136,12 +1160,8 @@ class IntegrationTest(PyMongoTestCase): 
@client_context.require_connection def setUp(self) -> None: - if not _IS_SYNC: - reset_client_context() if client_context.load_balancer and not getattr(self, "RUN_ON_LOAD_BALANCER", False): raise SkipTest("this test does not support load balancers") - if client_context.serverless and not getattr(self, "RUN_ON_SERVERLESS", False): - raise SkipTest("this test does not support serverless") self.client = client_context.client self.db = self.client.pymongo_test if client_context.auth_enabled: @@ -1186,6 +1206,9 @@ def tearDown(self) -> None: def setup(): + if not _IS_SYNC: + # Set up the event loop. + get_loop() client_context.init() warnings.resetwarnings() warnings.simplefilter("always") @@ -1201,19 +1224,16 @@ def teardown(): garbage.append(f" gc.get_referrers: {gc.get_referrers(g)!r}") if garbage: raise AssertionError("\n".join(garbage)) - c = client_context.client - if c: - if not client_context.is_data_lake: - c.drop_database("pymongo-pooling-tests") - c.drop_database("pymongo_test") - c.drop_database("pymongo_test1") - c.drop_database("pymongo_test2") - c.drop_database("pymongo_test_mike") - c.drop_database("pymongo_test_bernie") - c.close() print_running_clients() +@contextmanager +def simple_test_client(): + client_context.init() + yield client_context.client + client_context.client.close() + + def test_cases(suite): """Iterator over all TestCases within a TestSuite.""" for suite_or_case in suite._tests: diff --git a/test/asynchronous/__init__.py b/test/asynchronous/__init__.py index 73e2824742..78d0576add 100644 --- a/test/asynchronous/__init__.py +++ b/test/asynchronous/__init__.py @@ -17,6 +17,7 @@ import asyncio import gc +import inspect import logging import multiprocessing import os @@ -29,31 +30,11 @@ import traceback import unittest import warnings -from asyncio import iscoroutinefunction -from test.helpers import ( - COMPRESSORS, - IS_SRV, - MONGODB_API_VERSION, - MULTI_MONGOS_LB_URI, - TEST_LOADBALANCER, - TEST_SERVERLESS, - TLS_OPTIONS, - SystemCertsPatcher, - client_knobs, - db_pwd, - db_user, - global_knobs, - host, - is_server_resolvable, - port, - print_running_topology, - print_thread_stacks, - print_thread_tracebacks, - sanitize_cmd, - sanitize_reply, -) +from inspect import iscoroutinefunction -from pymongo.uri_parser import parse_uri +from pymongo.asynchronous.uri_parser import parse_uri +from pymongo.encryption_options import _HAVE_PYMONGOCRYPT +from pymongo.errors import AutoReconnect try: import ipaddress @@ -63,7 +44,6 @@ HAVE_IPADDRESS = False from contextlib import asynccontextmanager, contextmanager from functools import partial, wraps -from test.version import Version from typing import Any, Callable, Dict, Generator, overload from unittest import SkipTest from urllib.parse import quote_plus @@ -78,6 +58,30 @@ from pymongo.server_api import ServerApi from pymongo.ssl_support import HAVE_SSL, _ssl # type:ignore[attr-defined] +sys.path[0:0] = [""] + +from test.asynchronous.helpers import client_knobs, global_knobs +from test.helpers_shared import ( + COMPRESSORS, + IS_SRV, + MONGODB_API_VERSION, + MULTI_MONGOS_LB_URI, + TEST_LOADBALANCER, + TLS_OPTIONS, + SystemCertsPatcher, + db_pwd, + db_user, + host, + is_server_resolvable, + port, + print_running_topology, + print_thread_stacks, + print_thread_tracebacks, + sanitize_cmd, + sanitize_reply, +) +from test.version import Version + _IS_SYNC = False @@ -117,11 +121,9 @@ def __init__(self): self.sessions_enabled = False self.client = None # type: ignore self.conn_lock = threading.Lock() - self.is_data_lake = False 
self.load_balancer = TEST_LOADBALANCER - self.serverless = TEST_SERVERLESS self._fips_enabled = None - if self.load_balancer or self.serverless: + if self.load_balancer: self.default_client_options["loadBalanced"] = True if COMPRESSORS: self.default_client_options["compressors"] = COMPRESSORS @@ -163,7 +165,7 @@ async def uri(self): @property async def hello(self): if not self._hello: - if self.serverless or self.load_balancer: + if self.load_balancer: self._hello = await self.client.admin.command(HelloCompat.CMD) else: self._hello = await self.client.admin.command(HelloCompat.LEGACY_CMD) @@ -196,15 +198,6 @@ async def _init_client(self): self.mongoses = [] self.connection_attempts = [] self.client = await self._connect(host, port) - if self.client is not None: - # Return early when connected to dataLake as mongohoused does not - # support the getCmdLineOpts command and is tested without TLS. - if os.environ.get("TEST_DATA_LAKE"): - self.is_data_lake = True - self.auth_enabled = True - self.client = await self._connect(host, port, username=db_user, password=db_pwd) - self.connected = True - return if HAVE_SSL and not self.client: # Is MongoDB configured for SSL? @@ -217,24 +210,21 @@ async def _init_client(self): if self.client: self.connected = True - if self.serverless: - self.auth_enabled = True - else: - try: - self.cmd_line = await self.client.admin.command("getCmdLineOpts") - except pymongo.errors.OperationFailure as e: - assert e.details is not None - msg = e.details.get("errmsg", "") - if e.code == 13 or "unauthorized" in msg or "login" in msg: - # Unauthorized. - self.auth_enabled = True - else: - raise + try: + self.cmd_line = await self.client.admin.command("getCmdLineOpts") + except pymongo.errors.OperationFailure as e: + assert e.details is not None + msg = e.details.get("errmsg", "") + if e.code == 13 or "unauthorized" in msg or "login" in msg: + # Unauthorized. + self.auth_enabled = True else: - self.auth_enabled = self._server_started_with_auth() + raise + else: + self.auth_enabled = self._server_started_with_auth() if self.auth_enabled: - if not self.serverless and not IS_SRV: + if not IS_SRV: # See if db_user already exists. if not await self._check_user_provided(): await _create_user(self.client.admin, db_user, db_pwd) @@ -254,13 +244,10 @@ async def _init_client(self): # May not have this if OperationFailure was raised earlier. self.cmd_line = await self.client.admin.command("getCmdLineOpts") - if self.serverless: - self.server_status = {} - else: - self.server_status = await self.client.admin.command("serverStatus") - if self.storage_engine == "mmapv1": - # MMAPv1 does not support retryWrites=True. - self.default_client_options["retryWrites"] = False + self.server_status = await self.client.admin.command("serverStatus") + if self.storage_engine == "mmapv1": + # MMAPv1 does not support retryWrites=True. 
+ self.default_client_options["retryWrites"] = False hello = await self.hello self.sessions_enabled = "logicalSessionTimeoutMinutes" in hello @@ -297,44 +284,33 @@ async def _init_client(self): self.w = len(hello.get("hosts", [])) or 1 self.version = await Version.async_from_client(self.client) - if self.serverless: - self.server_parameters = { - "requireApiVersion": False, - "enableTestCommands": True, - } + self.server_parameters = await self.client.admin.command("getParameter", "*") + assert self.cmd_line is not None + if self.server_parameters["enableTestCommands"]: self.test_commands_enabled = True - self.has_ipv6 = False - else: - self.server_parameters = await self.client.admin.command("getParameter", "*") - assert self.cmd_line is not None - if self.server_parameters["enableTestCommands"]: + elif "parsed" in self.cmd_line: + params = self.cmd_line["parsed"].get("setParameter", []) + if "enableTestCommands=1" in params: self.test_commands_enabled = True - elif "parsed" in self.cmd_line: - params = self.cmd_line["parsed"].get("setParameter", []) - if "enableTestCommands=1" in params: + else: + params = self.cmd_line["parsed"].get("setParameter", {}) + if params.get("enableTestCommands") == "1": self.test_commands_enabled = True - else: - params = self.cmd_line["parsed"].get("setParameter", {}) - if params.get("enableTestCommands") == "1": - self.test_commands_enabled = True - self.has_ipv6 = await self._server_started_with_ipv6() + self.has_ipv6 = await self._server_started_with_ipv6() self.is_mongos = (await self.hello).get("msg") == "isdbgrid" if self.is_mongos: address = await self.client.address self.mongoses.append(address) - if not self.serverless: - # Check for another mongos on the next port. - assert address is not None - next_address = address[0], address[1] + 1 - mongos_client = await self._connect( - *next_address, **self.default_client_options - ) - if mongos_client: - hello = await mongos_client.admin.command(HelloCompat.LEGACY_CMD) - if hello.get("msg") == "isdbgrid": - self.mongoses.append(next_address) - await mongos_client.close() + # Check for another mongos on the next port. + assert address is not None + next_address = address[0], address[1] + 1 + mongos_client = await self._connect(*next_address, **self.default_client_options) + if mongos_client: + hello = await mongos_client.admin.command(HelloCompat.LEGACY_CMD) + if hello.get("msg") == "isdbgrid": + self.mongoses.append(next_address) + await mongos_client.close() async def init(self): with self.conn_lock: @@ -383,10 +359,12 @@ def fips_enabled(self): if self._fips_enabled is not None: return self._fips_enabled try: - subprocess.check_call(["fips-mode-setup", "--is-enabled"]) + subprocess.run(["fips-mode-setup", "--is-enabled"], check=True) self._fips_enabled = True except (subprocess.SubprocessError, FileNotFoundError): self._fips_enabled = False + if os.environ.get("REQUIRE_FIPS") and not self._fips_enabled: + raise RuntimeError("Expected FIPS to be enabled") return self._fips_enabled def check_auth_type(self, auth_type): @@ -512,27 +490,6 @@ def require_connection(self, func): func=func, ) - def require_data_lake(self, func): - """Run a test only if we are connected to Atlas Data Lake.""" - return self._require( - lambda: self.is_data_lake, - "Not connected to Atlas Data Lake on self.pair", - func=func, - ) - - def require_no_mmap(self, func): - """Run a test only if the server is not using the MMAPv1 storage - engine. 
Only works for standalone and replica sets; tests are - run regardless of storage engine on sharded clusters. - """ - - def is_not_mmap(): - if self.is_mongos: - return True - return self.storage_engine != "mmapv1" - - return self._require(is_not_mmap, "Storage engine must not be MMAPv1", func=func) - def require_version_min(self, *ver): """Run a test only if the server version is at least ``version``.""" other_version = Version(*ver) @@ -549,6 +506,32 @@ def require_version_max(self, *ver): "Server version must be at most %s" % str(other_version), ) + def require_libmongocrypt_min(self, *ver): + other_version = Version(*ver) + if not _HAVE_PYMONGOCRYPT: + version = Version.from_string("0.0.0") + else: + from pymongocrypt import libmongocrypt_version + + version = Version.from_string(libmongocrypt_version()) + return self._require( + lambda: version >= other_version, + "Libmongocrypt version must be at least %s" % str(other_version), + ) + + def require_pymongocrypt_min(self, *ver): + other_version = Version(*ver) + if not _HAVE_PYMONGOCRYPT: + version = Version.from_string("0.0.0") + else: + from pymongocrypt import __version__ as pymongocrypt_version + + version = Version.from_string(pymongocrypt_version) + return self._require( + lambda: version >= other_version, + "PyMongoCrypt version must be at least %s" % str(other_version), + ) + def require_auth(self, func): """Run a test only if the server is running with auth enabled.""" return self._require( @@ -588,10 +571,10 @@ async def check(): @property async def supports_secondary_read_pref(self): - if self.has_secondaries: + if await self.has_secondaries: return True if self.is_mongos: - shard = await self.client.config.shards.find_one()["host"] # type:ignore[index] + shard = (await self.client.config.shards.find_one())["host"] # type:ignore[index] num_members = shard.count(",") + 1 return num_members > 1 return False @@ -661,22 +644,15 @@ def require_no_load_balancer(self, func): lambda: not self.load_balancer, "Must not be connected to a load balancer", func=func ) - def require_no_serverless(self, func): - """Run a test only if the client is not connected to serverless.""" - return self._require( - lambda: not self.serverless, "Must not be connected to serverless", func=func - ) - def require_change_streams(self, func): """Run a test only if the server supports change streams.""" - return self.require_no_mmap(self.require_no_standalone(self.require_no_serverless(func))) + return self.require_no_standalone(func) async def is_topology_type(self, topologies): unknown = set(topologies) - { "single", "replicaset", "sharded", - "sharded-replicaset", "load-balanced", } if unknown: @@ -691,16 +667,6 @@ async def is_topology_type(self, topologies): return True if "sharded" in topologies and self.is_mongos: return True - if "sharded-replicaset" in topologies and self.is_mongos: - shards = await async_client_context.client.config.shards.find().to_list() - for shard in shards: - # For a 3-member RS-backed sharded cluster, shard['host'] - # will be 'replicaName/ip1:port1,ip2:port2,ip3:port3' - # Otherwise it will be 'ip1:port1' - host_spec = shard["host"] - if not len(host_spec.split("/")) > 1: - return False - return True return False def require_cluster_type(self, topologies=None): @@ -783,8 +749,6 @@ def require_sessions(self, func): return self._require(lambda: self.sessions_enabled, "Sessions not supported", func=func) def supports_retryable_writes(self): - if self.storage_engine == "mmapv1": - return False if not self.sessions_enabled: 
return False return self.is_mongos or self.is_rs @@ -798,9 +762,6 @@ def require_retryable_writes(self, func): ) def supports_transactions(self): - if self.storage_engine == "mmapv1": - return False - if self.version.at_least(4, 1, 8): return self.is_mongos or self.is_rs @@ -832,6 +793,14 @@ def require_sync(self, func): lambda: _IS_SYNC, "This test only works with the synchronous API", func=func ) + def require_async(self, func): + """Run a test only if using the asynchronous API.""" # unasync: off + return self._require( + lambda: not _IS_SYNC, + "This test only works with the asynchronous API", # unasync: off + func=func, + ) + def mongos_seeds(self): return ",".join("{}:{}".format(*address) for address in self.mongoses) @@ -865,35 +834,88 @@ async def max_message_size_bytes(self): # Reusable client context async_client_context = AsyncClientContext() +# Global event loop for async tests. +LOOP = None + + +def get_loop() -> asyncio.AbstractEventLoop: + """Get the test suite's global event loop.""" + global LOOP + if LOOP is None: + try: + LOOP = asyncio.get_running_loop() + except RuntimeError: + # no running event loop, fallback to get_event_loop. + try: + # Ignore DeprecationWarning: There is no current event loop + with warnings.catch_warnings(): + warnings.simplefilter("ignore", DeprecationWarning) + LOOP = asyncio.get_event_loop() + except RuntimeError: + LOOP = asyncio.new_event_loop() + asyncio.set_event_loop(LOOP) + return LOOP + + +class AsyncPyMongoTestCase(unittest.TestCase): + if not _IS_SYNC: + # An async TestCase that uses a single event loop for all tests. + # Inspired by IsolatedAsyncioTestCase. + async def asyncSetUp(self): + pass + + async def asyncTearDown(self): + pass + + def addAsyncCleanup(self, func, /, *args, **kwargs): + self.addCleanup(*(func, *args), **kwargs) + + def _callSetUp(self): + self.setUp() + self._callAsync(self.asyncSetUp) + + def _callTestMethod(self, method): + self._callMaybeAsync(method) + + def _callTearDown(self): + self._callAsync(self.asyncTearDown) + self.tearDown() -async def reset_client_context(): - if _IS_SYNC: - # sync tests don't need to reset a client context - return - elif async_client_context.client is not None: - await async_client_context.client.close() - async_client_context.client = None - await async_client_context._init_client() + def _callCleanup(self, function, *args, **kwargs): + self._callMaybeAsync(function, *args, **kwargs) + def _callAsync(self, func, /, *args, **kwargs): + assert inspect.iscoroutinefunction(func), f"{func!r} is not an async function" + return get_loop().run_until_complete(func(*args, **kwargs)) + + def _callMaybeAsync(self, func, /, *args, **kwargs): + if inspect.iscoroutinefunction(func): + return get_loop().run_until_complete(func(*args, **kwargs)) + else: + return func(*args, **kwargs) -class AsyncPyMongoTestCase(unittest.IsolatedAsyncioTestCase): def assertEqualCommand(self, expected, actual, msg=None): self.assertEqual(sanitize_cmd(expected), sanitize_cmd(actual), msg) def assertEqualReply(self, expected, actual, msg=None): self.assertEqual(sanitize_reply(expected), sanitize_reply(actual), msg) + @staticmethod + async def configure_fail_point(client, command_args, off=False): + cmd = {"configureFailPoint": "failCommand"} + cmd.update(command_args) + if off: + cmd["mode"] = "off" + cmd.pop("data", None) + await client.admin.command(cmd) + @asynccontextmanager async def fail_point(self, command_args): - cmd_on = SON([("configureFailPoint", "failCommand")]) - cmd_on.update(command_args) - await 
async_client_context.client.admin.command(cmd_on) + await self.configure_fail_point(async_client_context.client, command_args) try: yield finally: - await async_client_context.client.admin.command( - "configureFailPoint", cmd_on["configureFailPoint"], mode="off" - ) + await self.configure_fail_point(async_client_context.client, command_args, off=True) @contextmanager def fork( @@ -970,7 +992,7 @@ async def _unmanaged_async_mongo_client( auth_mech = kwargs.get("authMechanism", "") if async_client_context.auth_enabled and authenticate and auth_mech != "MONGODB-OIDC": # Only add the default username or password if one is not provided. - res = parse_uri(uri) + res = await parse_uri(uri) if ( not res["username"] and not res["password"] @@ -1001,7 +1023,7 @@ async def _async_mongo_client( auth_mech = kwargs.get("authMechanism", "") if async_client_context.auth_enabled and authenticate and auth_mech != "MONGODB-OIDC": # Only add the default username or password if one is not provided. - res = parse_uri(uri) + res = await parse_uri(uri) if ( not res["username"] and not res["password"] @@ -1124,15 +1146,15 @@ def unmanaged_simple_client( async def disable_replication(self, client): """Disable replication on all secondaries.""" - for h, p in client.secondaries: + for h, p in await client.secondaries: secondary = await self.async_single_client(h, p) - secondary.admin.command("configureFailPoint", "stopReplProducer", mode="alwaysOn") + await secondary.admin.command("configureFailPoint", "stopReplProducer", mode="alwaysOn") async def enable_replication(self, client): """Enable replication on all secondaries.""" - for h, p in client.secondaries: + for h, p in await client.secondaries: secondary = await self.async_single_client(h, p) - secondary.admin.command("configureFailPoint", "stopReplProducer", mode="off") + await secondary.admin.command("configureFailPoint", "stopReplProducer", mode="off") class AsyncUnitTest(AsyncPyMongoTestCase): @@ -1154,12 +1176,8 @@ class AsyncIntegrationTest(AsyncPyMongoTestCase): @async_client_context.require_connection async def asyncSetUp(self) -> None: - if not _IS_SYNC: - await reset_client_context() if async_client_context.load_balancer and not getattr(self, "RUN_ON_LOAD_BALANCER", False): raise SkipTest("this test does not support load balancers") - if async_client_context.serverless and not getattr(self, "RUN_ON_SERVERLESS", False): - raise SkipTest("this test does not support serverless") self.client = async_client_context.client self.db = self.client.pymongo_test if async_client_context.auth_enabled: @@ -1204,6 +1222,9 @@ async def asyncTearDown(self) -> None: async def async_setup(): + if not _IS_SYNC: + # Set up the event loop. 
+ get_loop() await async_client_context.init() warnings.resetwarnings() warnings.simplefilter("always") @@ -1219,19 +1240,16 @@ async def async_teardown(): garbage.append(f" gc.get_referrers: {gc.get_referrers(g)!r}") if garbage: raise AssertionError("\n".join(garbage)) - c = async_client_context.client - if c: - if not async_client_context.is_data_lake: - await c.drop_database("pymongo-pooling-tests") - await c.drop_database("pymongo_test") - await c.drop_database("pymongo_test1") - await c.drop_database("pymongo_test2") - await c.drop_database("pymongo_test_mike") - await c.drop_database("pymongo_test_bernie") - await c.close() print_running_clients() +@asynccontextmanager +async def async_simple_test_client(): + await async_client_context.init() + yield async_client_context.client + await async_client_context.client.close() + + def test_cases(suite): """Iterator over all TestCases within a TestSuite.""" for suite_or_case in suite._tests: diff --git a/test/asynchronous/helpers.py b/test/asynchronous/helpers.py index b5fc5d8ac4..892c629631 100644 --- a/test/asynchronous/helpers.py +++ b/test/asynchronous/helpers.py @@ -12,144 +12,22 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""Shared constants and helper methods for pymongo, bson, and gridfs test suites.""" +"""Shared helper methods for pymongo, bson, and gridfs test suites.""" from __future__ import annotations -import base64 -import gc -import multiprocessing -import os -import signal -import socket -import subprocess -import sys +import asyncio import threading -import time import traceback -import unittest -import warnings -from asyncio import iscoroutinefunction - -try: - import ipaddress - - HAVE_IPADDRESS = True -except ImportError: - HAVE_IPADDRESS = False from functools import wraps -from typing import Any, Callable, Dict, Generator, no_type_check -from unittest import SkipTest +from typing import Optional, no_type_check -from bson.son import SON -from pymongo import common, message +from bson import SON +from pymongo import common +from pymongo._asyncio_task import create_task from pymongo.read_preferences import ReadPreference -from pymongo.ssl_support import HAVE_SSL, _ssl # type:ignore[attr-defined] -from pymongo.uri_parser import parse_uri - -if HAVE_SSL: - import ssl _IS_SYNC = False -# Enable debug output for uncollectable objects. PyPy does not have set_debug. -if hasattr(gc, "set_debug"): - gc.set_debug( - gc.DEBUG_UNCOLLECTABLE | getattr(gc, "DEBUG_OBJECTS", 0) | getattr(gc, "DEBUG_INSTANCES", 0) - ) - -# The host and port of a single mongod or mongos, or the seed host -# for a replica set. 
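`async_setup()` above primes the suite's shared event loop through `get_loop()`, and `AsyncPyMongoTestCase` then runs every coroutine test on that one loop rather than letting `unittest.IsolatedAsyncioTestCase` build and tear down a loop per test. A minimal sketch of the bridging idea, with illustrative names rather than the patch's exact code:

```python
import asyncio
import unittest

LOOP = asyncio.new_event_loop()  # one loop reused across the whole suite


class SingleLoopTestCase(unittest.TestCase):
    """Sketch: drive async test methods from sync unittest plumbing."""

    def _callTestMethod(self, method):
        if asyncio.iscoroutinefunction(method):
            # Block the synchronous test runner while the coroutine runs.
            LOOP.run_until_complete(method())
        else:
            method()
```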
-host = os.environ.get("DB_IP", "localhost") -port = int(os.environ.get("DB_PORT", 27017)) -IS_SRV = "mongodb+srv" in host - -db_user = os.environ.get("DB_USER", "user") -db_pwd = os.environ.get("DB_PASSWORD", "password") - -CERT_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), "certificates") -CLIENT_PEM = os.environ.get("CLIENT_PEM", os.path.join(CERT_PATH, "client.pem")) -CA_PEM = os.environ.get("CA_PEM", os.path.join(CERT_PATH, "ca.pem")) - -TLS_OPTIONS: Dict = {"tls": True} -if CLIENT_PEM: - TLS_OPTIONS["tlsCertificateKeyFile"] = CLIENT_PEM -if CA_PEM: - TLS_OPTIONS["tlsCAFile"] = CA_PEM - -COMPRESSORS = os.environ.get("COMPRESSORS") -MONGODB_API_VERSION = os.environ.get("MONGODB_API_VERSION") -TEST_LOADBALANCER = bool(os.environ.get("TEST_LOADBALANCER")) -TEST_SERVERLESS = bool(os.environ.get("TEST_SERVERLESS")) -SINGLE_MONGOS_LB_URI = os.environ.get("SINGLE_MONGOS_LB_URI") -MULTI_MONGOS_LB_URI = os.environ.get("MULTI_MONGOS_LB_URI") - -if TEST_LOADBALANCER: - res = parse_uri(SINGLE_MONGOS_LB_URI or "") - host, port = res["nodelist"][0] - db_user = res["username"] or db_user - db_pwd = res["password"] or db_pwd -elif TEST_SERVERLESS: - TEST_LOADBALANCER = True - res = parse_uri(SINGLE_MONGOS_LB_URI or "") - host, port = res["nodelist"][0] - db_user = res["username"] or db_user - db_pwd = res["password"] or db_pwd - TLS_OPTIONS = {"tls": True} - # Spec says serverless tests must be run with compression. - COMPRESSORS = COMPRESSORS or "zlib" - - -# Shared KMS data. -LOCAL_MASTER_KEY = base64.b64decode( - b"Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ" - b"5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" -) -AWS_CREDS = { - "accessKeyId": os.environ.get("FLE_AWS_KEY", ""), - "secretAccessKey": os.environ.get("FLE_AWS_SECRET", ""), -} -AWS_CREDS_2 = { - "accessKeyId": os.environ.get("FLE_AWS_KEY2", ""), - "secretAccessKey": os.environ.get("FLE_AWS_SECRET2", ""), -} -AZURE_CREDS = { - "tenantId": os.environ.get("FLE_AZURE_TENANTID", ""), - "clientId": os.environ.get("FLE_AZURE_CLIENTID", ""), - "clientSecret": os.environ.get("FLE_AZURE_CLIENTSECRET", ""), -} -GCP_CREDS = { - "email": os.environ.get("FLE_GCP_EMAIL", ""), - "privateKey": os.environ.get("FLE_GCP_PRIVATEKEY", ""), -} -KMIP_CREDS = {"endpoint": os.environ.get("FLE_KMIP_ENDPOINT", "localhost:5698")} - -# Ensure Evergreen metadata doesn't result in truncation -os.environ.setdefault("MONGOB_LOG_MAX_DOCUMENT_LENGTH", "2000") - - -def is_server_resolvable(): - """Returns True if 'server' is resolvable.""" - socket_timeout = socket.getdefaulttimeout() - socket.setdefaulttimeout(1) - try: - try: - socket.gethostbyname("server") - return True - except OSError: - return False - finally: - socket.setdefaulttimeout(socket_timeout) - - -def _create_user(authdb, user, pwd=None, roles=None, **kwargs): - cmd = SON([("createUser", user)]) - # X509 doesn't use a password - if pwd: - cmd["pwd"] = pwd - cmd["roles"] = roles or ["root"] - cmd.update(**kwargs) - return authdb.command(cmd) - async def async_repl_set_step_down(client, **kwargs): """Run replSetStepDown, first unfreezing a secondary with replSetFreeze.""" @@ -244,128 +122,55 @@ def __del__(self): raise Exception(msg) -def _all_users(db): - return {u["user"] for u in db.command("usersInfo").get("users", [])} - - -def sanitize_cmd(cmd): - cp = cmd.copy() - cp.pop("$clusterTime", None) - cp.pop("$db", None) - cp.pop("$readPreference", None) - cp.pop("lsid", None) - if MONGODB_API_VERSION: - # Stable API parameters - 
cp.pop("apiVersion", None) - # OP_MSG encoding may move the payload type one field to the - # end of the command. Do the same here. - name = next(iter(cp)) - try: - identifier = message._FIELD_MAP[name] - docs = cp.pop(identifier) - cp[identifier] = docs - except KeyError: - pass - return cp - - -def sanitize_reply(reply): - cp = reply.copy() - cp.pop("$clusterTime", None) - cp.pop("operationTime", None) - return cp - - -def print_thread_tracebacks() -> None: - """Print all Python thread tracebacks.""" - for thread_id, frame in sys._current_frames().items(): - sys.stderr.write(f"\n--- Traceback for thread {thread_id} ---\n") - traceback.print_stack(frame, file=sys.stderr) - - -def print_thread_stacks(pid: int) -> None: - """Print all C-level thread stacks for a given process id.""" - if sys.platform == "darwin": - cmd = ["lldb", "--attach-pid", f"{pid}", "--batch", "--one-line", '"thread backtrace all"'] - else: - cmd = ["gdb", f"--pid={pid}", "--batch", '--eval-command="thread apply all bt"'] - - try: - res = subprocess.run( - cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, encoding="utf-8" - ) - except Exception as exc: - sys.stderr.write(f"Could not print C-level thread stacks because {cmd[0]} failed: {exc}") - else: - sys.stderr.write(res.stdout) - - # Global knobs to speed up the test suite. global_knobs = client_knobs(events_queue_frequency=0.05) -def _get_executors(topology): - executors = [] - for server in topology._servers.values(): - # Some MockMonitor do not have an _executor. - if hasattr(server._monitor, "_executor"): - executors.append(server._monitor._executor) - if hasattr(server._monitor, "_rtt_monitor"): - executors.append(server._monitor._rtt_monitor._executor) - executors.append(topology._Topology__events_executor) - if topology._srv_monitor: - executors.append(topology._srv_monitor._executor) - - return [e for e in executors if e is not None] - - -def print_running_topology(topology): - running = [e for e in _get_executors(topology) if not e._stopped] - if running: - print( - "WARNING: found Topology with running threads:\n" - f" Threads: {running}\n" - f" Topology: {topology}\n" - f" Creation traceback:\n{topology._settings._stack}" - ) - - -def test_cases(suite): - """Iterator over all TestCases within a TestSuite.""" - for suite_or_case in suite._tests: - if isinstance(suite_or_case, unittest.TestCase): - # unittest.TestCase - yield suite_or_case - else: - # unittest.TestSuite - yield from test_cases(suite_or_case) - - -# Helper method to workaround https://bugs.python.org/issue21724 -def clear_warning_registry(): - """Clear the __warningregistry__ for all modules.""" - for _, module in list(sys.modules.items()): - if hasattr(module, "__warningregistry__"): - module.__warningregistry__ = {} # type:ignore[attr-defined] - - -class SystemCertsPatcher: - def __init__(self, ca_certs): - if ( - ssl.OPENSSL_VERSION.lower().startswith("libressl") - and sys.platform == "darwin" - and not _ssl.IS_PYOPENSSL - ): - raise SkipTest( - "LibreSSL on OSX doesn't support setting CA certificates " - "using SSL_CERT_FILE environment variable." - ) - self.original_certs = os.environ.get("SSL_CERT_FILE") - # Tell OpenSSL where CA certificates live. 
- os.environ["SSL_CERT_FILE"] = ca_certs +if _IS_SYNC: + PARENT = threading.Thread +else: + PARENT = object - def disable(self): - if self.original_certs is None: - os.environ.pop("SSL_CERT_FILE") - else: - os.environ["SSL_CERT_FILE"] = self.original_certs + +class ConcurrentRunner(PARENT): + def __init__(self, **kwargs): + if _IS_SYNC: + super().__init__(**kwargs) + self.name = kwargs.get("name", "ConcurrentRunner") + self.stopped = False + self.task = None + self.target = kwargs.get("target", None) + self.args = kwargs.get("args", []) + + if not _IS_SYNC: + + async def start(self): + self.task = create_task(self.run(), name=self.name) + + async def join(self, timeout: Optional[float] = None): # type: ignore[override] + if self.task is not None: + await asyncio.wait([self.task], timeout=timeout) + + def is_alive(self): + return not self.stopped + + async def run(self): + try: + await self.target(*self.args) + finally: + self.stopped = True + + +class ExceptionCatchingTask(ConcurrentRunner): + """A Task that stores any exception encountered while running.""" + + def __init__(self, **kwargs): + super().__init__(**kwargs) + self.exc = None + + async def run(self): + try: + await super().run() + except BaseException as exc: + self.exc = exc + raise diff --git a/test/asynchronous/pymongo_mocks.py b/test/asynchronous/pymongo_mocks.py index ed2395bc98..40beb3c0dc 100644 --- a/test/asynchronous/pymongo_mocks.py +++ b/test/asynchronous/pymongo_mocks.py @@ -66,7 +66,7 @@ def __init__(self, server_description, topology, pool, topology_settings): def cancel_check(self): pass - def join(self): + async def join(self): pass def open(self): @@ -75,7 +75,7 @@ def open(self): def request_check(self): pass - def close(self): + async def close(self): self.opened = False diff --git a/test/asynchronous/test_async_cancellation.py b/test/asynchronous/test_async_cancellation.py new file mode 100644 index 0000000000..f450ea23cc --- /dev/null +++ b/test/asynchronous/test_async_cancellation.py @@ -0,0 +1,129 @@ +# Copyright 2025-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Test that async cancellation performed by users clean up resources correctly.""" +from __future__ import annotations + +import asyncio +import sys +from test.asynchronous.utils import async_get_pool +from test.utils_shared import delay, one + +sys.path[0:0] = [""] + +from test.asynchronous import AsyncIntegrationTest, async_client_context, connected + + +class TestAsyncCancellation(AsyncIntegrationTest): + async def test_async_cancellation_closes_connection(self): + pool = await async_get_pool(self.client) + await self.client.db.test.insert_one({"x": 1}) + self.addAsyncCleanup(self.client.db.test.delete_many, {}) + + conn = one(pool.conns) + + async def task(): + await self.client.db.test.find_one({"$where": delay(0.2)}) + + task = asyncio.create_task(task()) + + await asyncio.sleep(0.1) + + task.cancel() + with self.assertRaises(asyncio.CancelledError): + await task + + self.assertTrue(conn.closed) + + @async_client_context.require_transactions + async def test_async_cancellation_aborts_transaction(self): + await self.client.db.test.insert_one({"x": 1}) + self.addAsyncCleanup(self.client.db.test.delete_many, {}) + + session = self.client.start_session() + + async def callback(session): + await self.client.db.test.find_one({"$where": delay(0.2)}, session=session) + + async def task(): + await session.with_transaction(callback) + + task = asyncio.create_task(task()) + + await asyncio.sleep(0.1) + + task.cancel() + with self.assertRaises(asyncio.CancelledError): + await task + + self.assertFalse(session.in_transaction) + + @async_client_context.require_failCommand_blockConnection + async def test_async_cancellation_closes_cursor(self): + await self.client.db.test.insert_many([{"x": 1}, {"x": 2}]) + self.addAsyncCleanup(self.client.db.test.delete_many, {}) + + cursor = self.client.db.test.find({}, batch_size=1) + await cursor.next() + + # Make sure getMore commands block + fail_command = { + "configureFailPoint": "failCommand", + "mode": "alwaysOn", + "data": {"failCommands": ["getMore"], "blockConnection": True, "blockTimeMS": 200}, + } + + async def task(): + async with self.fail_point(fail_command): + await cursor.next() + + task = asyncio.create_task(task()) + + await asyncio.sleep(0.1) + + task.cancel() + with self.assertRaises(asyncio.CancelledError): + await task + + self.assertTrue(cursor._killed) + + @async_client_context.require_change_streams + @async_client_context.require_failCommand_blockConnection + async def test_async_cancellation_closes_change_stream(self): + self.addAsyncCleanup(self.client.db.test.delete_many, {}) + change_stream = await self.client.db.test.watch(batch_size=2) + event = asyncio.Event() + + # Make sure getMore commands block + fail_command = { + "configureFailPoint": "failCommand", + "mode": "alwaysOn", + "data": {"failCommands": ["getMore"], "blockConnection": True, "blockTimeMS": 200}, + } + + async def task(): + async with self.fail_point(fail_command): + await self.client.db.test.insert_many([{"x": 1}, {"x": 2}]) + event.set() + await change_stream.next() + + task = asyncio.create_task(task()) + + await event.wait() + + task.cancel() + with self.assertRaises(asyncio.CancelledError): + await task + + self.assertTrue(change_stream._closed) diff --git a/test/asynchronous/test_async_contextvars_reset.py b/test/asynchronous/test_async_contextvars_reset.py new file mode 100644 index 0000000000..c6e825bbdf --- /dev/null +++ b/test/asynchronous/test_async_contextvars_reset.py @@ -0,0 +1,41 @@ +# Copyright 2025-present MongoDB, Inc. 
diff --git a/test/asynchronous/test_async_contextvars_reset.py b/test/asynchronous/test_async_contextvars_reset.py
new file mode 100644
index 0000000000..c6e825bbdf
--- /dev/null
+++ b/test/asynchronous/test_async_contextvars_reset.py
@@ -0,0 +1,41 @@
+# Copyright 2025-present MongoDB, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Test that AsyncPeriodicExecutors do not copy ContextVars from their parents."""
+from __future__ import annotations
+
+import asyncio
+import sys
+from test.asynchronous.utils import async_get_pool
+from test.utils_shared import delay, one
+
+sys.path[0:0] = [""]
+
+from test.asynchronous import AsyncIntegrationTest
+
+
+class TestAsyncContextVarsReset(AsyncIntegrationTest):
+    async def test_context_vars_are_reset_in_executor(self):
+        if sys.version_info < (3, 12):
+            self.skipTest("Test requires asyncio.Task.get_context (added in Python 3.12)")
+
+        await self.client.db.test.insert_one({"x": 1})
+        for server in self.client._topology._servers.values():
+            for context in [
+                c
+                for c in server._monitor._executor._task.get_context()
+                if c.name in ["TIMEOUT", "RTT", "DEADLINE"]
+            ]:
+                self.assertIn(context.get(), [None, float("inf"), 0.0])
+        await self.client.db.test.delete_many({})
diff --git a/test/asynchronous/test_async_loop_safety.py b/test/asynchronous/test_async_loop_safety.py
new file mode 100644
index 0000000000..7516cb8eeb
--- /dev/null
+++ b/test/asynchronous/test_async_loop_safety.py
@@ -0,0 +1,36 @@
+# Copyright 2025-present MongoDB, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Test that the asynchronous API detects event loop changes and fails correctly."""
+from __future__ import annotations
+
+import asyncio
+import unittest
+
+from pymongo import AsyncMongoClient
+
+
+class TestClientLoopSafety(unittest.TestCase):
+    def test_client_errors_on_different_loop(self):
+        client = AsyncMongoClient()
+        loop1 = asyncio.new_event_loop()
+        loop1.run_until_complete(client.aconnect())
+        loop2 = asyncio.new_event_loop()
+        with self.assertRaisesRegex(
+            RuntimeError, "Cannot use AsyncMongoClient in different event loop"
+        ):
+            loop2.run_until_complete(client.aconnect())
+        loop1.run_until_complete(client.close())
+        loop1.close()
+        loop2.close()
diff --git a/test/asynchronous/test_async_loop_unblocked.py b/test/asynchronous/test_async_loop_unblocked.py
new file mode 100644
index 0000000000..86f934b798
--- /dev/null
+++ b/test/asynchronous/test_async_loop_unblocked.py
@@ -0,0 +1,56 @@
+# Copyright 2025-present MongoDB, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Test that the asynchronous API does not block the event loop."""
+from __future__ import annotations
+
+import asyncio
+import time
+from test.asynchronous import AsyncIntegrationTest
+
+from pymongo.errors import ServerSelectionTimeoutError
+
+
+class TestClientLoopUnblocked(AsyncIntegrationTest):
+    async def test_client_does_not_block_loop(self):
+        # Use an unreachable TEST-NET host to ensure that the client times out attempting to create a connection.
+        client = self.simple_client("192.0.2.1", serverSelectionTimeoutMS=500)
+        latencies = []
+
+        # If the loop is being blocked, at least one iteration will have a latency much greater than 0.1 seconds.
+        async def background_task():
+            start = time.monotonic()
+            try:
+                while True:
+                    start = time.monotonic()
+                    await asyncio.sleep(0.1)
+                    latencies.append(time.monotonic() - start)
+            except asyncio.CancelledError:
+                latencies.append(time.monotonic() - start)
+                raise
+
+        t = asyncio.create_task(background_task())
+
+        with self.assertRaisesRegex(ServerSelectionTimeoutError, "No servers found yet"):
+            await client.admin.command("ping")
+
+        t.cancel()
+        with self.assertRaises(asyncio.CancelledError):
+            await t
+
+        self.assertLessEqual(
+            sorted(latencies, reverse=True)[0],
+            1.0,
+            "Background task was blocked from running",
+        )
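The loop-blocking test above is, at heart, a latency probe: an `asyncio.sleep(0.1)` heartbeat whose overruns reveal that something starved the event loop. The same idea as a reusable sketch (a hypothetical helper, not part of the patch):

```python
import asyncio
import time


async def sample_loop_latency(samples: list[float], interval: float = 0.1) -> None:
    """Record how long each sleep really took; values far above `interval`
    mean the loop was blocked by synchronous or CPU-bound work."""
    while True:
        start = time.monotonic()
        await asyncio.sleep(interval)
        samples.append(time.monotonic() - start)
```

Start it with `asyncio.create_task`, run the workload under test, cancel the probe, and inspect `max(samples)`.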
+ +"""Test MONGODB-OIDC Authentication.""" +from __future__ import annotations + +import os +import sys +import time +import unittest +import warnings +from contextlib import asynccontextmanager +from pathlib import Path +from test.asynchronous import AsyncPyMongoTestCase +from test.asynchronous.helpers import ConcurrentRunner +from typing import Dict + +import pytest + +sys.path[0:0] = [""] + +from test.asynchronous.unified_format import generate_test_classes +from test.utils_shared import EventListener, OvertCommandListener + +from bson import SON +from pymongo import AsyncMongoClient +from pymongo._azure_helpers import _get_azure_response +from pymongo._gcp_helpers import _get_gcp_response +from pymongo.asynchronous.auth_oidc import ( + OIDCCallback, + OIDCCallbackContext, + OIDCCallbackResult, + _get_authenticator, +) +from pymongo.auth_oidc_shared import _get_k8s_token +from pymongo.auth_shared import _build_credentials_tuple +from pymongo.cursor_shared import CursorType +from pymongo.errors import AutoReconnect, ConfigurationError, OperationFailure +from pymongo.hello import HelloCompat +from pymongo.operations import InsertOne +from pymongo.synchronous.uri_parser import parse_uri + +_IS_SYNC = False + +ROOT = Path(__file__).parent.parent.resolve() +TEST_PATH = ROOT / "auth" / "unified" +ENVIRON = os.environ.get("OIDC_ENV", "test") +DOMAIN = os.environ.get("OIDC_DOMAIN", "") +TOKEN_DIR = os.environ.get("OIDC_TOKEN_DIR", "") +TOKEN_FILE = os.environ.get("OIDC_TOKEN_FILE", "") + +# Generate unified tests. +globals().update(generate_test_classes(str(TEST_PATH), module=__name__)) + +pytestmark = pytest.mark.auth_oidc + + +class OIDCTestBase(AsyncPyMongoTestCase): + @classmethod + def setUpClass(cls): + cls.uri_single = os.environ["MONGODB_URI_SINGLE"] + cls.uri_multiple = os.environ.get("MONGODB_URI_MULTI") + cls.uri_admin = os.environ["MONGODB_URI"] + if ENVIRON == "test": + if not TOKEN_DIR: + raise ValueError("Please set OIDC_TOKEN_DIR") + if not TOKEN_FILE: + raise ValueError("Please set OIDC_TOKEN_FILE") + + async def asyncSetUp(self): + self.request_called = 0 + + def get_token(self, username=None): + """Get a token for the current provider.""" + if ENVIRON == "test": + if username is None: + token_file = TOKEN_FILE + else: + token_file = os.path.join(TOKEN_DIR, username) + with open(token_file) as fid: # noqa: ASYNC101,RUF100 + return fid.read() + elif ENVIRON == "azure": + opts = parse_uri(self.uri_single)["options"] + token_aud = opts["authMechanismProperties"]["TOKEN_RESOURCE"] + return _get_azure_response(token_aud, username)["access_token"] + elif ENVIRON == "gcp": + opts = parse_uri(self.uri_single)["options"] + token_aud = opts["authMechanismProperties"]["TOKEN_RESOURCE"] + return _get_gcp_response(token_aud, username)["access_token"] + elif ENVIRON == "k8s": + return _get_k8s_token() + else: + raise ValueError(f"Unknown ENVIRON: {ENVIRON}") + + @asynccontextmanager + async def fail_point(self, command_args): + cmd_on = SON([("configureFailPoint", "failCommand")]) + cmd_on.update(command_args) + client = AsyncMongoClient(self.uri_admin) + await client.admin.command(cmd_on) + try: + yield + finally: + await client.admin.command( + "configureFailPoint", cmd_on["configureFailPoint"], mode="off" + ) + await client.close() + + +class TestAuthOIDCHuman(OIDCTestBase): + uri: str + + @classmethod + def setUpClass(cls): + if ENVIRON != "test": + raise unittest.SkipTest("Human workflows are only tested with the test environment") + if DOMAIN is None: + raise ValueError("Missing 
OIDC_DOMAIN") + super().setUpClass() + + async def asyncSetUp(self): + self.refresh_present = 0 + await super().asyncSetUp() + + def create_request_cb(self, username="test_user1", sleep=0): + def request_token(context: OIDCCallbackContext): + # Validate the info. + self.assertIsInstance(context.idp_info.issuer, str) + if context.idp_info.clientId is not None: + self.assertIsInstance(context.idp_info.clientId, str) + + # Validate the timeout. + timeout_seconds = context.timeout_seconds + self.assertEqual(timeout_seconds, 60 * 5) + + if context.refresh_token: + self.refresh_present += 1 + + token = self.get_token(username) + resp = OIDCCallbackResult(access_token=token, refresh_token=token) + + time.sleep(sleep) + self.request_called += 1 + return resp + + class Inner(OIDCCallback): + def fetch(self, context): + return request_token(context) + + return Inner() + + async def create_client(self, *args, **kwargs): + username = kwargs.get("username", "test_user1") + if kwargs.get("username") in ["test_user1", "test_user2"]: + kwargs["username"] = f"{username}@{DOMAIN}" + request_cb = kwargs.pop("request_cb", self.create_request_cb(username=username)) + props = kwargs.pop("authmechanismproperties", {"OIDC_HUMAN_CALLBACK": request_cb}) + kwargs["retryReads"] = False + if not len(args): + args = [self.uri_single] + + client = self.simple_client(*args, authmechanismproperties=props, **kwargs) + + return client + + async def test_1_1_single_principal_implicit_username(self): + # Create default OIDC client with authMechanism=MONGODB-OIDC. + client = await self.create_client() + # Perform a find operation that succeeds. + await client.test.test.find_one() + # Close the client. + await client.close() + + async def test_1_2_single_principal_explicit_username(self): + # Create a client with MONGODB_URI_SINGLE, a username of test_user1, authMechanism=MONGODB-OIDC, and the OIDC human callback. + client = await self.create_client(username="test_user1") + # Perform a find operation that succeeds. + await client.test.test.find_one() + # Close the client. + await client.close() + + async def test_1_3_multiple_principal_user_1(self): + if not self.uri_multiple: + raise unittest.SkipTest("Test Requires Server with Multiple Workflow IdPs") + # Create a client with MONGODB_URI_MULTI, a username of test_user1, authMechanism=MONGODB-OIDC, and the OIDC human callback. + client = await self.create_client(self.uri_multiple, username="test_user1") + # Perform a find operation that succeeds. + await client.test.test.find_one() + # Close the client. + await client.close() + + async def test_1_4_multiple_principal_user_2(self): + if not self.uri_multiple: + raise unittest.SkipTest("Test Requires Server with Multiple Workflow IdPs") + # Create a human callback that reads in the generated test_user2 token file. + # Create a client with MONGODB_URI_MULTI, a username of test_user2, authMechanism=MONGODB-OIDC, and the OIDC human callback. + client = await self.create_client(self.uri_multiple, username="test_user2") + # Perform a find operation that succeeds. + await client.test.test.find_one() + # Close the client. + await client.close() + + async def test_1_5_multiple_principal_no_user(self): + if not self.uri_multiple: + raise unittest.SkipTest("Test Requires Server with Multiple Workflow IdPs") + # Create a client with MONGODB_URI_MULTI, no username, authMechanism=MONGODB-OIDC, and the OIDC human callback. + client = await self.create_client(self.uri_multiple) + # Assert that a find operation fails. 
+        with self.assertRaises(OperationFailure):
+            await client.test.test.find_one()
+        # Close the client.
+        await client.close()
+
+    async def test_1_6_allowed_hosts_blocked(self):
+        # Create a default OIDC client with an ALLOWED_HOSTS that is an empty list.
+        request_token = self.create_request_cb()
+        props: Dict = {"OIDC_HUMAN_CALLBACK": request_token, "ALLOWED_HOSTS": []}
+        client = await self.create_client(authmechanismproperties=props)
+        # Assert that a find operation fails with a client-side error.
+        with self.assertRaises(ConfigurationError):
+            await client.test.test.find_one()
+        # Close the client.
+        await client.close()
+
+        # Create a client that uses the URL mongodb://localhost/?authMechanism=MONGODB-OIDC&ignored=example.com,
+        # a human callback, and an ALLOWED_HOSTS that contains ["example.com"].
+        props: Dict = {
+            "OIDC_HUMAN_CALLBACK": request_token,
+            "ALLOWED_HOSTS": ["example.com"],
+        }
+        with warnings.catch_warnings():
+            warnings.simplefilter("default")
+            client = await self.create_client(
+                self.uri_single + "&ignored=example.com",
+                authmechanismproperties=props,
+                connect=False,
+            )
+        # Assert that a find operation fails with a client-side error.
+        with self.assertRaises(ConfigurationError):
+            await client.test.test.find_one()
+        # Close the client.
+        await client.close()
+
+    async def test_1_7_allowed_hosts_in_connection_string_ignored(self):
+        # Create an OIDC-configured client with the connection string: `mongodb+srv://example.com/?authMechanism=MONGODB-OIDC&authMechanismProperties=ALLOWED_HOSTS:%5B%22example.com%22%5D` and a Human Callback.
+        # Assert that the creation of the client raises a configuration error.
+        uri = "mongodb+srv://example.com?authMechanism=MONGODB-OIDC&authMechanismProperties=ALLOWED_HOSTS:%5B%22example.com%22%5D"
+        with self.assertRaises(ConfigurationError), warnings.catch_warnings():
+            warnings.simplefilter("ignore")
+            c = AsyncMongoClient(
+                uri,
+                authmechanismproperties=dict(OIDC_HUMAN_CALLBACK=self.create_request_cb()),
+            )
+            await c.aconnect()
+
+    async def test_1_8_machine_idp_human_callback(self):
+        if not os.environ.get("OIDC_IS_LOCAL"):
+            raise unittest.SkipTest("Test Requires Local OIDC server")
+        # Create a client with MONGODB_URI_SINGLE, a username of test_machine, authMechanism=MONGODB-OIDC, and the OIDC human callback.
+        client = await self.create_client(username="test_machine")
+        # Perform a find operation that succeeds.
+        await client.test.test.find_one()
+        # Close the client.
+        await client.close()
+
+    async def test_2_1_valid_callback_inputs(self):
+        # Create an AsyncMongoClient with a human callback that validates its inputs and returns a valid access token.
+        client = await self.create_client()
+        # Perform a find operation that succeeds. Verify that the human callback was called with the appropriate inputs, including the timeout parameter if possible.
+        # Ensure that there are no unexpected fields.
+        await client.test.test.find_one()
+        # Close the client.
+        await client.close()
+
+    async def test_2_2_callback_returns_missing_data(self):
+        # Create an AsyncMongoClient with a human callback that returns data not conforming to the OIDCCredential, with missing fields.
+        class CustomCB(OIDCCallback):
+            def fetch(self, ctx):
+                return dict()
+
+        client = await self.create_client(request_cb=CustomCB())
+        # Perform a find operation that fails.
+        with self.assertRaises(ValueError):
+            await client.test.test.find_one()
+        # Close the client.
+        await client.close()
+
+    async def test_2_3_refresh_token_is_passed_to_the_callback(self):
+        # Create an AsyncMongoClient with a human callback that checks for the presence of a refresh token.
+        client = await self.create_client()
+
+        # Perform a find operation that succeeds.
+        await client.test.test.find_one()
+
+        # Set a fail point for ``find`` commands.
+        async with self.fail_point(
+            {
+                "mode": {"times": 1},
+                "data": {"failCommands": ["find"], "errorCode": 391},
+            }
+        ):
+            # Perform a ``find`` operation that succeeds.
+            await client.test.test.find_one()
+
+        # Assert that the callback has been called twice.
+        self.assertEqual(self.request_called, 2)
+
+        # Assert that the refresh token was used once.
+        self.assertEqual(self.refresh_present, 1)
+
+    async def test_3_1_uses_speculative_authentication_if_there_is_a_cached_token(self):
+        # Create a client with a human callback that returns a valid token.
+        client = await self.create_client()
+
+        # Set a fail point for ``find`` commands.
+        async with self.fail_point(
+            {
+                "mode": {"times": 1},
+                "data": {"failCommands": ["find"], "errorCode": 391, "closeConnection": True},
+            }
+        ):
+            # Perform a ``find`` operation that fails.
+            with self.assertRaises(AutoReconnect):
+                await client.test.test.find_one()
+
+        # Set a fail point for ``saslStart`` commands.
+        async with self.fail_point(
+            {
+                "mode": {"times": 1},
+                "data": {"failCommands": ["saslStart"], "errorCode": 18},
+            }
+        ):
+            # Perform a ``find`` operation that succeeds.
+            await client.test.test.find_one()
+
+        # Close the client.
+        await client.close()
+
+    async def test_3_2_does_not_use_speculative_authentication_if_there_is_no_cached_token(self):
+        # Create an ``AsyncMongoClient`` with a human callback that returns a valid token.
+        client = await self.create_client()
+
+        # Set a fail point for ``saslStart`` commands.
+        async with self.fail_point(
+            {
+                "mode": {"times": 1},
+                "data": {"failCommands": ["saslStart"], "errorCode": 18},
+            }
+        ):
+            # Perform a ``find`` operation that fails.
+            with self.assertRaises(OperationFailure):
+                await client.test.test.find_one()
+
+        # Close the client.
+        await client.close()
+
+    async def test_4_1_reauthenticate_succeeds(self):
+        # Create a default OIDC client and add an event listener.
+        # The following assumes that the driver does not emit saslStart or saslContinue events.
+        # If the driver does emit those events, ignore/filter them for the purposes of this test.
+        listener = OvertCommandListener()
+        client = await self.create_client(event_listeners=[listener])
+
+        # Perform a find operation that succeeds.
+        await client.test.test.find_one()
+
+        # Assert that the human callback has been called once.
+        self.assertEqual(self.request_called, 1)
+
+        # Clear the listener state if possible.
+        listener.reset()
+
+        # Force a reauthentication using a fail point.
+        async with self.fail_point(
+            {
+                "mode": {"times": 1},
+                "data": {"failCommands": ["find"], "errorCode": 391},
+            }
+        ):
+            # Perform another find operation that succeeds.
+            await client.test.test.find_one()
+
+        # Assert that the human callback has been called twice.
+        self.assertEqual(self.request_called, 2)
+
+        # Assert that the list of command started events is [find, find].
+        # Note that if the listener state could not be cleared then there will be an extra find command.
+        started_events = [
+            i.command_name for i in listener.started_events if not i.command_name.startswith("sasl")
+        ]
+        succeeded_events = [
+            i.command_name
+            for i in listener.succeeded_events
+            if not i.command_name.startswith("sasl")
+        ]
+        failed_events = [
+            i.command_name for i in listener.failed_events if not i.command_name.startswith("sasl")
+        ]
+
+        self.assertEqual(
+            started_events,
+            [
+                "find",
+                "find",
+            ],
+        )
+        # Assert that the list of command succeeded events is [find].
+        self.assertEqual(succeeded_events, ["find"])
+        # Assert that a find operation failed once during the command execution.
+        self.assertEqual(failed_events, ["find"])
+        # Close the client.
+        await client.close()
+
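Every `test_4_*` case below forces reauthentication the same way: exactly one user command is failed with server error code 391 (reauthentication required), which makes the driver invoke the OIDC callback again. Condensed, the pattern is (a sketch assuming a `client` from `create_client` and this class's `fail_point` helper):

```python
async def force_one_reauth(self, client):
    calls_before = self.request_called
    # Fail the next find with 391 so the driver must fetch a fresh token.
    async with self.fail_point(
        {"mode": {"times": 1}, "data": {"failCommands": ["find"], "errorCode": 391}}
    ):
        await client.test.test.find_one()  # succeeds after reauthenticating
    assert self.request_called == calls_before + 1
```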
+    async def test_4_2_reauthenticate_succeeds_no_refresh(self):
+        # Create a default OIDC client with a human callback that does not return a refresh token.
+        cb = self.create_request_cb()
+
+        class CustomRequest(OIDCCallback):
+            def fetch(self, *args, **kwargs):
+                result = cb.fetch(*args, **kwargs)
+                result.refresh_token = None
+                return result
+
+        client = await self.create_client(request_cb=CustomRequest())
+
+        # Perform a find operation that succeeds.
+        await client.test.test.find_one()
+
+        # Assert that the human callback has been called once.
+        self.assertEqual(self.request_called, 1)
+
+        # Force a reauthentication using a fail point.
+        async with self.fail_point(
+            {
+                "mode": {"times": 1},
+                "data": {"failCommands": ["find"], "errorCode": 391},
+            }
+        ):
+            # Perform a find operation that succeeds.
+            await client.test.test.find_one()
+
+        # Assert that the human callback has been called twice.
+        self.assertEqual(self.request_called, 2)
+        # Close the client.
+        await client.close()
+
+    async def test_4_3_reauthenticate_succeeds_after_refresh_fails(self):
+        # Create a default OIDC client with a human callback that returns an invalid refresh token.
+        cb = self.create_request_cb()
+
+        class CustomRequest(OIDCCallback):
+            def fetch(self, *args, **kwargs):
+                result = cb.fetch(*args, **kwargs)
+                result.refresh_token = "bad"
+                return result
+
+        client = await self.create_client(request_cb=CustomRequest())
+
+        # Perform a find operation that succeeds.
+        await client.test.test.find_one()
+
+        # Assert that the human callback has been called once.
+        self.assertEqual(self.request_called, 1)
+
+        # Force a reauthentication using a fail point.
+        async with self.fail_point(
+            {
+                "mode": {"times": 1},
+                "data": {"failCommands": ["find"], "errorCode": 391},
+            }
+        ):
+            # Perform a find operation that succeeds.
+            await client.test.test.find_one()
+
+        # Assert that the human callback has been called twice.
+        self.assertEqual(self.request_called, 2)
+
+        # Close the client.
+        await client.close()
+
+    async def test_4_4_reauthenticate_fails(self):
+        # Create a default OIDC client with a human callback that returns invalid refresh tokens and
+        # returns invalid access tokens after the first access.
+        cb = self.create_request_cb()
+
+        class CustomRequest(OIDCCallback):
+            fetch_called = 0
+
+            def fetch(self, *args, **kwargs):
+                self.fetch_called += 1
+                result = cb.fetch(*args, **kwargs)
+                result.refresh_token = "bad"
+                if self.fetch_called > 1:
+                    result.access_token = "bad"
+                return result
+
+        client = await self.create_client(request_cb=CustomRequest())
+
+        # Perform a find operation that succeeds (to force a speculative auth).
+        await client.test.test.find_one()
+        # Assert that the human callback has been called once.
+        self.assertEqual(self.request_called, 1)
+
+        # Force a reauthentication using a failCommand.
+ async with self.fail_point( + { + "mode": {"times": 1}, + "data": {"failCommands": ["find"], "errorCode": 391}, + } + ): + # Perform a find operation that fails. + with self.assertRaises(OperationFailure): + await client.test.test.find_one() + + # Assert that the human callback has been called three times. + self.assertEqual(self.request_called, 3) + + # Close the client. + await client.close() + + async def test_request_callback_returns_null(self): + class RequestTokenNull(OIDCCallback): + def fetch(self, a): + return None + + client = await self.create_client(request_cb=RequestTokenNull()) + with self.assertRaises(ValueError): + await client.test.test.find_one() + await client.close() + + async def test_request_callback_invalid_result(self): + class CallbackInvalidToken(OIDCCallback): + def fetch(self, a): + return {} + + client = await self.create_client(request_cb=CallbackInvalidToken()) + with self.assertRaises(ValueError): + await client.test.test.find_one() + await client.close() + + async def test_reauthentication_succeeds_multiple_connections(self): + request_cb = self.create_request_cb() + + # Create a client with the callback. + client1 = await self.create_client(request_cb=request_cb) + client2 = await self.create_client(request_cb=request_cb) + + # Perform an insert operation. + await client1.test.test.insert_many([{"a": 1}, {"a": 1}]) + await client2.test.test.find_one() + self.assertEqual(self.request_called, 2) + + # Use the same authenticator for both clients + # to simulate a race condition with separate connections. + # We should only see one extra callback despite both connections + # needing to reauthenticate. + client2.options.pool_options._credentials.cache.data = ( + client1.options.pool_options._credentials.cache.data + ) + + await client1.test.test.find_one() + await client2.test.test.find_one() + + async with self.fail_point( + { + "mode": {"times": 1}, + "data": {"failCommands": ["find"], "errorCode": 391}, + } + ): + await client1.test.test.find_one() + + self.assertEqual(self.request_called, 3) + + async with self.fail_point( + { + "mode": {"times": 1}, + "data": {"failCommands": ["find"], "errorCode": 391}, + } + ): + await client2.test.test.find_one() + + self.assertEqual(self.request_called, 3) + await client1.close() + await client2.close() + + # PyMongo specific tests, since we have multiple code paths for reauth handling. + + async def test_reauthenticate_succeeds_bulk_write(self): + # Create a client. + client = await self.create_client() + + # Perform a find operation. + await client.test.test.find_one() + + # Assert that the request callback has been called once. + self.assertEqual(self.request_called, 1) + + async with self.fail_point( + { + "mode": {"times": 1}, + "data": {"failCommands": ["insert"], "errorCode": 391}, + } + ): + # Perform a bulk write operation. + await client.test.test.bulk_write([InsertOne({})]) # type:ignore[type-var] + + # Assert that the request callback has been called twice. + self.assertEqual(self.request_called, 2) + await client.close() + + async def test_reauthenticate_succeeds_bulk_read(self): + # Create a client. + client = await self.create_client() + + # Perform a find operation. + await client.test.test.find_one() + + # Perform a bulk write operation. + await client.test.test.bulk_write([InsertOne({})]) # type:ignore[type-var] + + # Assert that the request callback has been called once. 
+ self.assertEqual(self.request_called, 1) + + async with self.fail_point( + { + "mode": {"times": 1}, + "data": {"failCommands": ["find"], "errorCode": 391}, + } + ): + # Perform a bulk read operation. + cursor = client.test.test.find_raw_batches({}) + await cursor.to_list() + + # Assert that the request callback has been called twice. + self.assertEqual(self.request_called, 2) + await client.close() + + async def test_reauthenticate_succeeds_cursor(self): + # Create a client. + client = await self.create_client() + + # Perform an insert operation. + await client.test.test.insert_one({"a": 1}) + + # Assert that the request callback has been called once. + self.assertEqual(self.request_called, 1) + + async with self.fail_point( + { + "mode": {"times": 1}, + "data": {"failCommands": ["find"], "errorCode": 391}, + } + ): + # Perform a find operation. + cursor = client.test.test.find({"a": 1}) + self.assertGreaterEqual(len(await cursor.to_list()), 1) + + # Assert that the request callback has been called twice. + self.assertEqual(self.request_called, 2) + await client.close() + + async def test_reauthenticate_succeeds_get_more(self): + # Create a client. + client = await self.create_client() + + # Perform an insert operation. + await client.test.test.insert_many([{"a": 1}, {"a": 1}]) + + # Assert that the request callback has been called once. + self.assertEqual(self.request_called, 1) + + async with self.fail_point( + { + "mode": {"times": 1}, + "data": {"failCommands": ["getMore"], "errorCode": 391}, + } + ): + # Perform a find operation. + cursor = client.test.test.find({"a": 1}, batch_size=1) + self.assertGreaterEqual(len(await cursor.to_list()), 1) + + # Assert that the request callback has been called twice. + self.assertEqual(self.request_called, 2) + await client.close() + + async def test_reauthenticate_succeeds_get_more_exhaust(self): + # Ensure no mongos + client = await self.create_client() + hello = await client.admin.command(HelloCompat.LEGACY_CMD) + if hello.get("msg") != "isdbgrid": + raise unittest.SkipTest("Must not be a mongos") + + # Create a client with the callback. + client = await self.create_client() + + # Perform an insert operation. + await client.test.test.insert_many([{"a": 1}, {"a": 1}]) + + # Assert that the request callback has been called once. + self.assertEqual(self.request_called, 1) + + async with self.fail_point( + { + "mode": {"times": 1}, + "data": {"failCommands": ["getMore"], "errorCode": 391}, + } + ): + # Perform a find operation. + cursor = client.test.test.find({"a": 1}, batch_size=1, cursor_type=CursorType.EXHAUST) + self.assertGreaterEqual(len(await cursor.to_list()), 1) + + # Assert that the request callback has been called twice. + self.assertEqual(self.request_called, 2) + await client.close() + + async def test_reauthenticate_succeeds_command(self): + # Create a client. + client = await self.create_client() + + # Perform an insert operation. + await client.test.test.insert_one({"a": 1}) + + # Assert that the request callback has been called once. + self.assertEqual(self.request_called, 1) + + async with self.fail_point( + { + "mode": {"times": 1}, + "data": {"failCommands": ["count"], "errorCode": 391}, + } + ): + # Perform a count operation. + cursor = await client.test.command({"count": "test"}) + + self.assertGreaterEqual(len(cursor), 1) + + # Assert that the request callback has been called twice. 
+        self.assertEqual(self.request_called, 2)
+        await client.close()
+
+
+class TestAuthOIDCMachine(OIDCTestBase):
+    uri: str
+
+    async def asyncSetUp(self):
+        self.request_called = 0
+
+    def create_request_cb(self, username=None, sleep=0):
+        def request_token(context):
+            assert isinstance(context.timeout_seconds, int)
+            assert context.version == 1
+            assert context.refresh_token is None
+            assert context.idp_info is None
+            token = self.get_token(username)
+            time.sleep(sleep)
+            self.request_called += 1
+            return OIDCCallbackResult(access_token=token)
+
+        class Inner(OIDCCallback):
+            def fetch(self, context):
+                return request_token(context)
+
+        return Inner()
+
+    async def create_client(self, *args, **kwargs):
+        request_cb = kwargs.pop("request_cb", self.create_request_cb())
+        props = kwargs.pop("authmechanismproperties", {"OIDC_CALLBACK": request_cb})
+        kwargs["retryReads"] = False
+        if not len(args):
+            args = [self.uri_single]
+        client = AsyncMongoClient(*args, authmechanismproperties=props, **kwargs)
+        self.addAsyncCleanup(client.close)
+        return client
+
+    async def test_1_1_callback_is_called_during_reauthentication(self):
+        # Create an ``AsyncMongoClient`` configured with a custom OIDC callback that
+        # implements the provider logic.
+        client = await self.create_client()
+        # Perform a ``find`` operation that succeeds.
+        await client.test.test.find_one()
+        # Assert that the callback was called 1 time.
+        self.assertEqual(self.request_called, 1)
+
+    async def test_1_2_callback_is_called_once_for_multiple_connections(self):
+        # Create an ``AsyncMongoClient`` configured with a custom OIDC callback that
+        # implements the provider logic.
+        client = await self.create_client()
+        await client.aconnect()
+
+        # Start 10 tasks and run 100 find operations that all succeed in each task.
+        async def target():
+            for _ in range(100):
+                await client.test.test.find_one()
+
+        tasks = []
+        for i in range(10):
+            tasks.append(ConcurrentRunner(target=target))
+        for t in tasks:
+            await t.start()
+        for t in tasks:
+            await t.join()
+        # Assert that the callback was called 1 time.
+        self.assertEqual(self.request_called, 1)
+
+    async def test_2_1_valid_callback_inputs(self):
+        # Create an AsyncMongoClient configured with an OIDC callback that validates its inputs and returns a valid access token.
+        client = await self.create_client()
+        # Perform a find operation that succeeds.
+        await client.test.test.find_one()
+        # Assert that the OIDC callback was called with the appropriate inputs, including the timeout parameter if possible. Ensure that there are no unexpected fields.
+        self.assertEqual(self.request_called, 1)
+
+    async def test_2_2_oidc_callback_returns_null(self):
+        # Create an AsyncMongoClient configured with an OIDC callback that returns null.
+        class CallbackNullToken(OIDCCallback):
+            def fetch(self, a):
+                return None
+
+        client = await self.create_client(request_cb=CallbackNullToken())
+        # Perform a find operation that fails.
+        with self.assertRaises(ValueError):
+            await client.test.test.find_one()
+
+    async def test_2_3_oidc_callback_returns_missing_data(self):
+        # Create an AsyncMongoClient configured with an OIDC callback that returns data not conforming to the OIDCCredential with missing fields.
+        class CustomCallback(OIDCCallback):
+            count = 0
+
+            def fetch(self, a):
+                self.count += 1
+                return object()
+
+        client = await self.create_client(request_cb=CustomCallback())
+        # Perform a find operation that fails.
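+        # (The non-conforming result is rejected client-side: the driver raises
+        # ValueError rather than attempting authentication with it.)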
+        with self.assertRaises(ValueError):
+            await client.test.test.find_one()
+
+    async def test_2_4_invalid_client_configuration_with_callback(self):
+        # Create an AsyncMongoClient configured with an OIDC callback and auth mechanism property ENVIRONMENT:test.
+        request_cb = self.create_request_cb()
+        props: Dict = {"OIDC_CALLBACK": request_cb, "ENVIRONMENT": "test"}
+        # Assert it returns a client configuration error.
+        with self.assertRaises(ConfigurationError):
+            await self.create_client(authmechanismproperties=props)
+
+    async def test_2_5_invalid_use_of_ALLOWED_HOSTS(self):
+        # Create an OIDC configured client with auth mechanism properties `{"ENVIRONMENT": "test", "ALLOWED_HOSTS": []}`.
+        props: Dict = {"ENVIRONMENT": "test", "ALLOWED_HOSTS": []}
+        # Assert it returns a client configuration error.
+        with self.assertRaises(ConfigurationError):
+            await self.create_client(authmechanismproperties=props)
+
+        # Create an OIDC configured client with auth mechanism properties `{"OIDC_CALLBACK": "", "ALLOWED_HOSTS": []}`.
+        props: Dict = {"OIDC_CALLBACK": self.create_request_cb(), "ALLOWED_HOSTS": []}
+        # Assert it returns a client configuration error.
+        with self.assertRaises(ConfigurationError):
+            await self.create_client(authmechanismproperties=props)
+
+    async def test_2_6_ALLOWED_HOSTS_defaults_ignored(self):
+        # Create a MongoCredential for OIDC with a machine callback.
+        props = {"OIDC_CALLBACK": self.create_request_cb()}
+        extra = dict(authmechanismproperties=props)
+        mongo_creds = _build_credentials_tuple("MONGODB-OIDC", None, "foo", None, extra, "test")
+        # Assert that creating an authenticator for example.com does not result in an error.
+        authenticator = _get_authenticator(mongo_creds, ("example.com", 30))
+        assert authenticator.properties.username == "foo"
+
+        # Create a MongoCredential for OIDC with an ENVIRONMENT.
+        props = {"ENVIRONMENT": "test"}
+        extra = dict(authmechanismproperties=props)
+        mongo_creds = _build_credentials_tuple("MONGODB-OIDC", None, None, None, extra, "test")
+        # Assert that creating an authenticator for example.com does not result in an error.
+        authenticator = _get_authenticator(mongo_creds, ("example.com", 30))
+        assert authenticator.properties.username == ""
+
+    async def test_3_1_authentication_failure_with_cached_tokens_fetch_a_new_token_and_retry(self):
+        # Create an AsyncMongoClient and an OIDC callback that implements the provider logic.
+        client = await self.create_client()
+        await client.aconnect()
+        # Poison the cache with an invalid access token.
+        # Set a fail point for ``find`` command.
+        async with self.fail_point(
+            {
+                "mode": {"times": 1},
+                "data": {"failCommands": ["find"], "errorCode": 391, "closeConnection": True},
+            }
+        ):
+            # Perform a ``find`` operation that fails. This is to force the ``AsyncMongoClient``
+            # to cache an access token.
+            with self.assertRaises(AutoReconnect):
+                await client.test.test.find_one()
+        # Poison the cache of the client.
+        client.options.pool_options._credentials.cache.data.access_token = "bad"
+        # Reset the request count.
+        self.request_called = 0
+        # Verify that a find succeeds.
+        await client.test.test.find_one()
+        # Verify that the callback was called 1 time.
+        self.assertEqual(self.request_called, 1)
+
+    async def test_3_2_authentication_failures_without_cached_tokens_returns_an_error(self):
+        # Create an AsyncMongoClient configured with retryReads=false and an OIDC callback that always returns invalid access tokens.
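+        # (``create_client`` in this class already sets retryReads=False, so the
+        # failing ``find`` below is not retried at the operation layer.)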
+        class CustomCallback(OIDCCallback):
+            count = 0
+
+            def fetch(self, a):
+                self.count += 1
+                return OIDCCallbackResult(access_token="bad value")
+
+        callback = CustomCallback()
+        client = await self.create_client(request_cb=callback)
+        # Perform a ``find`` operation that fails.
+        with self.assertRaises(OperationFailure):
+            await client.test.test.find_one()
+        # Verify that the callback was called 1 time.
+        self.assertEqual(callback.count, 1)
+
+    async def test_3_3_unexpected_error_code_does_not_clear_cache(self):
+        # Create an ``AsyncMongoClient`` with an OIDC callback that returns a valid token.
+        client = await self.create_client()
+
+        # Set a fail point for ``saslStart`` commands.
+        async with self.fail_point(
+            {
+                "mode": {"times": 1},
+                "data": {"failCommands": ["saslStart"], "errorCode": 20},
+            }
+        ):
+            # Perform a ``find`` operation that fails.
+            with self.assertRaises(OperationFailure):
+                await client.test.test.find_one()
+
+        # Assert that the callback has been called once.
+        self.assertEqual(self.request_called, 1)
+
+        # Perform a ``find`` operation that succeeds.
+        await client.test.test.find_one()
+
+        # Assert that the callback has been called once.
+        self.assertEqual(self.request_called, 1)
+
+    async def test_4_1_reauthentication_succeeds(self):
+        # Create an ``AsyncMongoClient`` configured with a custom OIDC callback that
+        # implements the provider logic.
+        client = await self.create_client()
+        await client.aconnect()
+
+        # Set a fail point for the find command.
+        async with self.fail_point(
+            {
+                "mode": {"times": 1},
+                "data": {"failCommands": ["find"], "errorCode": 391},
+            }
+        ):
+            # Perform a ``find`` operation that succeeds.
+            await client.test.test.find_one()
+
+        # Verify that the callback was called 2 times (once during the connection
+        # handshake, and again during reauthentication).
+        self.assertEqual(self.request_called, 2)
+
+    async def test_4_2_read_commands_fail_if_reauthentication_fails(self):
+        # Create an ``AsyncMongoClient`` whose OIDC callback returns one good token and then
+        # bad tokens after the first call.
+        get_token = self.get_token
+
+        class CustomCallback(OIDCCallback):
+            count = 0
+
+            def fetch(self, _):
+                self.count += 1
+                if self.count == 1:
+                    access_token = get_token()
+                else:
+                    access_token = "bad value"
+                return OIDCCallbackResult(access_token=access_token)
+
+        callback = CustomCallback()
+        client = await self.create_client(request_cb=callback)
+
+        # Perform a read operation that succeeds.
+        await client.test.test.find_one()
+
+        # Set a fail point for the find command.
+        async with self.fail_point(
+            {
+                "mode": {"times": 1},
+                "data": {"failCommands": ["find"], "errorCode": 391},
+            }
+        ):
+            # Perform a ``find`` operation that fails.
+            with self.assertRaises(OperationFailure):
+                await client.test.test.find_one()
+
+        # Verify that the callback was called 2 times.
+        self.assertEqual(callback.count, 2)
+
+    async def test_4_3_write_commands_fail_if_reauthentication_fails(self):
+        # Create an ``AsyncMongoClient`` whose OIDC callback returns one good token and then
+        # bad tokens after the first call.
+        get_token = self.get_token
+
+        class CustomCallback(OIDCCallback):
+            count = 0
+
+            def fetch(self, _):
+                self.count += 1
+                if self.count == 1:
+                    access_token = get_token()
+                else:
+                    access_token = "bad value"
+                return OIDCCallbackResult(access_token=access_token)
+
+        callback = CustomCallback()
+        client = await self.create_client(request_cb=callback)
+
+        # Perform an insert operation that succeeds.
+        await client.test.test.insert_one({})
+
+        # Set a fail point for the insert command.
+        async with self.fail_point(
+            {
+                "mode": {"times": 1},
+                "data": {"failCommands": ["insert"], "errorCode": 391},
+            }
+        ):
+            # Perform an ``insert`` operation that fails.
+            with self.assertRaises(OperationFailure):
+                await client.test.test.insert_one({})
+
+        # Verify that the callback was called 2 times.
+        self.assertEqual(callback.count, 2)
+
+    async def test_4_4_speculative_authentication_should_be_ignored_on_reauthentication(self):
+        # Create an OIDC configured client that can listen for `SaslStart` commands.
+        listener = EventListener()
+        client = await self.create_client(event_listeners=[listener])
+        await client.aconnect()
+
+        # Preload the *Client Cache* with a valid access token to enforce Speculative Authentication.
+        client2 = await self.create_client()
+        await client2.test.test.find_one()
+        client.options.pool_options._credentials.cache.data = (
+            client2.options.pool_options._credentials.cache.data
+        )
+        await client2.close()
+        self.request_called = 0
+
+        # Perform an `insert` operation that succeeds.
+        await client.test.test.insert_one({})
+
+        # Assert that the callback was not called.
+        self.assertEqual(self.request_called, 0)
+
+        # Assert there were no `SaslStart` commands executed.
+        assert not any(
+            event.command_name.lower() == "saslstart" for event in listener.started_events
+        )
+        listener.reset()
+
+        # Set a fail point for `insert` commands of the form:
+        async with self.fail_point(
+            {
+                "mode": {"times": 1},
+                "data": {"failCommands": ["insert"], "errorCode": 391},
+            }
+        ):
+            # Perform an `insert` operation that succeeds.
+            await client.test.test.insert_one({})
+
+        # Assert that the callback was called once.
+        self.assertEqual(self.request_called, 1)
+
+        # Assert there were `SaslStart` commands executed.
+        assert any(event.command_name.lower() == "saslstart" for event in listener.started_events)
+
+    async def test_4_5_reauthentication_succeeds_when_a_session_is_involved(self):
+        # Create an OIDC configured client.
+        client = await self.create_client()
+
+        # Set a fail point for `find` commands of the form:
+        async with self.fail_point(
+            {
+                "mode": {"times": 1},
+                "data": {"failCommands": ["find"], "errorCode": 391},
+            }
+        ):
+            # Start a new session.
+            async with client.start_session() as session:
+                # In the started session perform a `find` operation that succeeds.
+                await client.test.test.find_one({}, session=session)
+
+        # Assert that the callback was called 2 times (once during the connection handshake, and again during reauthentication).
+ self.assertEqual(self.request_called, 2) + + async def test_5_1_azure_with_no_username(self): + if ENVIRON != "azure": + raise unittest.SkipTest("Test is only supported on Azure") + opts = parse_uri(self.uri_single)["options"] + resource = opts["authMechanismProperties"]["TOKEN_RESOURCE"] + + props = dict(TOKEN_RESOURCE=resource, ENVIRONMENT="azure") + client = await self.create_client(authMechanismProperties=props) + await client.test.test.find_one() + + async def test_5_2_azure_with_bad_username(self): + if ENVIRON != "azure": + raise unittest.SkipTest("Test is only supported on Azure") + + opts = parse_uri(self.uri_single)["options"] + token_aud = opts["authMechanismProperties"]["TOKEN_RESOURCE"] + + props = dict(TOKEN_RESOURCE=token_aud, ENVIRONMENT="azure") + client = await self.create_client(username="bad", authmechanismproperties=props) + with self.assertRaises(ValueError): + await client.test.test.find_one() + + async def test_speculative_auth_success(self): + client1 = await self.create_client() + await client1.test.test.find_one() + client2 = await self.create_client() + await client2.aconnect() + + # Prime the cache of the second client. + client2.options.pool_options._credentials.cache.data = ( + client1.options.pool_options._credentials.cache.data + ) + + # Set a fail point for saslStart commands. + async with self.fail_point( + { + "mode": {"times": 2}, + "data": {"failCommands": ["saslStart"], "errorCode": 18}, + } + ): + # Perform a find operation. + await client2.test.test.find_one() + + async def test_reauthentication_succeeds_multiple_connections(self): + client1 = await self.create_client() + client2 = await self.create_client() + + # Perform an insert operation. + await client1.test.test.insert_many([{"a": 1}, {"a": 1}]) + await client2.test.test.find_one() + self.assertEqual(self.request_called, 2) + + # Use the same authenticator for both clients + # to simulate a race condition with separate connections. + # We should only see one extra callback despite both connections + # needing to reauthenticate. 
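+        # (After the next assignment both clients share a single token cache, so
+        # one client's reauthentication also refreshes the token for the other.)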
+ client2.options.pool_options._credentials.cache.data = ( + client1.options.pool_options._credentials.cache.data + ) + + await client1.test.test.find_one() + await client2.test.test.find_one() + + async with self.fail_point( + { + "mode": {"times": 1}, + "data": {"failCommands": ["find"], "errorCode": 391}, + } + ): + await client1.test.test.find_one() + + self.assertEqual(self.request_called, 3) + + async with self.fail_point( + { + "mode": {"times": 1}, + "data": {"failCommands": ["find"], "errorCode": 391}, + } + ): + await client2.test.test.find_one() + + self.assertEqual(self.request_called, 3) + + +if __name__ == "__main__": + unittest.main() diff --git a/test/asynchronous/test_auth_spec.py b/test/asynchronous/test_auth_spec.py index e9e43d5759..7c659c6d93 100644 --- a/test/asynchronous/test_auth_spec.py +++ b/test/asynchronous/test_auth_spec.py @@ -22,13 +22,17 @@ import warnings from test.asynchronous import AsyncPyMongoTestCase +import pytest + sys.path[0:0] = [""] from test import unittest from test.asynchronous.unified_format import generate_test_classes from pymongo import AsyncMongoClient -from pymongo.asynchronous.auth_oidc import OIDCCallback +from pymongo.auth_oidc_shared import OIDCCallback + +pytestmark = pytest.mark.auth _IS_SYNC = False diff --git a/test/asynchronous/test_bulk.py b/test/asynchronous/test_bulk.py index 7191a412c1..02958e6f0e 100644 --- a/test/asynchronous/test_bulk.py +++ b/test/asynchronous/test_bulk.py @@ -24,7 +24,7 @@ sys.path[0:0] = [""] from test.asynchronous import AsyncIntegrationTest, async_client_context, remove_all_users, unittest -from test.utils import async_wait_until +from test.utils_shared import async_wait_until from bson.binary import Binary, UuidRepresentation from bson.codec_options import CodecOptions @@ -94,7 +94,7 @@ def assertEqualUpsert(self, expected, actual): self.assertEqual(expected["index"], actual["index"]) if expected["_id"] == "...": # Unspecified value. - self.assertTrue("_id" in actual) + self.assertIn("_id", actual) else: self.assertEqual(expected["_id"], actual["_id"]) @@ -107,7 +107,7 @@ def assertEqualWriteError(self, expected, actual): self.assertEqual(expected["code"], actual["code"]) if expected["errmsg"] == "...": # Unspecified value. - self.assertTrue("errmsg" in actual) + self.assertIn("errmsg", actual) else: self.assertEqual(expected["errmsg"], actual["errmsg"]) @@ -115,7 +115,7 @@ def assertEqualWriteError(self, expected, actual): actual_op = actual["op"].copy() if expected_op.get("_id") == "...": # Unspecified _id. 
- self.assertTrue("_id" in actual_op) + self.assertIn("_id", actual_op) actual_op.pop("_id") expected_op.pop("_id") @@ -160,12 +160,12 @@ async def _test_update_many(self, update): result = await self.coll.bulk_write([UpdateMany({}, update)]) self.assertEqualResponse(expected, result.bulk_api_result) self.assertEqual(2, result.matched_count) - self.assertTrue(result.modified_count in (2, None)) + self.assertIn(result.modified_count, (2, None)) async def test_update_many(self): await self._test_update_many({"$set": {"foo": "bar"}}) - @async_client_context.require_version_min(4, 1, 11) + @async_client_context.require_version_min(4, 2, 0) async def test_update_many_pipeline(self): await self._test_update_many([{"$set": {"foo": "bar"}}]) @@ -201,12 +201,12 @@ async def _test_update_one(self, update): result = await self.coll.bulk_write([UpdateOne({}, update)]) self.assertEqualResponse(expected, result.bulk_api_result) self.assertEqual(1, result.matched_count) - self.assertTrue(result.modified_count in (1, None)) + self.assertIn(result.modified_count, (1, None)) async def test_update_one(self): await self._test_update_one({"$set": {"foo": "bar"}}) - @async_client_context.require_version_min(4, 1, 11) + @async_client_context.require_version_min(4, 2, 0) async def test_update_one_pipeline(self): await self._test_update_one([{"$set": {"foo": "bar"}}]) @@ -227,7 +227,7 @@ async def test_replace_one(self): result = await self.coll.bulk_write([ReplaceOne({}, {"foo": "bar"})]) self.assertEqualResponse(expected, result.bulk_api_result) self.assertEqual(1, result.matched_count) - self.assertTrue(result.modified_count in (1, None)) + self.assertIn(result.modified_count, (1, None)) async def test_remove(self): # Test removing all documents, ordered. @@ -281,7 +281,7 @@ async def test_upsert(self): self.assertEqual(1, result.upserted_count) assert result.upserted_ids is not None self.assertEqual(1, len(result.upserted_ids)) - self.assertTrue(isinstance(result.upserted_ids.get(0), ObjectId)) + self.assertIsInstance(result.upserted_ids.get(0), ObjectId) self.assertEqual(await self.coll.count_documents({"foo": "bar"}), 1) @@ -301,7 +301,7 @@ async def test_numerous_inserts(self): async def test_bulk_max_message_size(self): await self.coll.delete_many({}) - self.addCleanup(self.coll.delete_many, {}) + self.addAsyncCleanup(self.coll.delete_many, {}) _16_MB = 16 * 1000 * 1000 # Generate a list of documents such that the first batched OP_MSG is # as close as possible to the 48MB limit. 
@@ -505,7 +505,7 @@ async def test_single_ordered_batch(self): async def test_single_error_ordered_batch(self): await self.coll.create_index("a", unique=True) - self.addCleanup(self.coll.drop_index, [("a", 1)]) + self.addAsyncCleanup(self.coll.drop_index, [("a", 1)]) requests: list = [ InsertOne({"b": 1, "a": 1}), UpdateOne({"b": 2}, {"$set": {"a": 1}}, upsert=True), @@ -547,7 +547,7 @@ async def test_single_error_ordered_batch(self): async def test_multiple_error_ordered_batch(self): await self.coll.create_index("a", unique=True) - self.addCleanup(self.coll.drop_index, [("a", 1)]) + self.addAsyncCleanup(self.coll.drop_index, [("a", 1)]) requests: list = [ InsertOne({"b": 1, "a": 1}), UpdateOne({"b": 2}, {"$set": {"a": 1}}, upsert=True), @@ -616,7 +616,7 @@ async def test_single_unordered_batch(self): async def test_single_error_unordered_batch(self): await self.coll.create_index("a", unique=True) - self.addCleanup(self.coll.drop_index, [("a", 1)]) + self.addAsyncCleanup(self.coll.drop_index, [("a", 1)]) requests: list = [ InsertOne({"b": 1, "a": 1}), UpdateOne({"b": 2}, {"$set": {"a": 1}}, upsert=True), @@ -659,7 +659,7 @@ async def test_single_error_unordered_batch(self): async def test_multiple_error_unordered_batch(self): await self.coll.create_index("a", unique=True) - self.addCleanup(self.coll.drop_index, [("a", 1)]) + self.addAsyncCleanup(self.coll.drop_index, [("a", 1)]) requests: list = [ InsertOne({"b": 1, "a": 1}), UpdateOne({"b": 2}, {"$set": {"a": 3}}, upsert=True), @@ -961,7 +961,6 @@ async def cause_wtimeout(self, requests, ordered): @async_client_context.require_replica_set @async_client_context.require_secondaries_count(1) async def test_write_concern_failure_ordered(self): - self.skipTest("Skipping until PYTHON-4865 is resolved.") details = None # Ensure we don't raise on wnote. @@ -995,15 +994,15 @@ async def test_write_concern_failure_ordered(self): # When talking to legacy servers there will be a # write concern error for each operation. - self.assertTrue(len(details["writeConcernErrors"]) > 0) + self.assertGreater(len(details["writeConcernErrors"]), 0) failed = details["writeConcernErrors"][0] self.assertEqual(64, failed["code"]) - self.assertTrue(isinstance(failed["errmsg"], str)) + self.assertIsInstance(failed["errmsg"], str) await self.coll.delete_many({}) await self.coll.create_index("a", unique=True) - self.addCleanup(self.coll.drop_index, [("a", 1)]) + self.addAsyncCleanup(self.coll.drop_index, [("a", 1)]) # Fail due to write concern support as well # as duplicate key error on ordered batch. @@ -1036,9 +1035,9 @@ async def test_write_concern_failure_ordered(self): details, ) - self.assertTrue(len(details["writeConcernErrors"]) > 1) + self.assertGreater(len(details["writeConcernErrors"]), 1) failed = details["writeErrors"][0] - self.assertTrue("duplicate" in failed["errmsg"]) + self.assertIn("duplicate", failed["errmsg"]) @async_client_context.require_version_max(7, 1) # PYTHON-4560 @async_client_context.require_replica_set @@ -1074,11 +1073,11 @@ async def test_write_concern_failure_unordered(self): self.assertEqual(0, len(details["writeErrors"])) # When talking to legacy servers there will be a # write concern error for each operation. 
- self.assertTrue(len(details["writeConcernErrors"]) > 1) + self.assertGreater(len(details["writeConcernErrors"]), 1) await self.coll.delete_many({}) await self.coll.create_index("a", unique=True) - self.addCleanup(self.coll.drop_index, [("a", 1)]) + self.addAsyncCleanup(self.coll.drop_index, [("a", 1)]) # Fail due to write concern support as well # as duplicate key error on unordered batch. @@ -1101,17 +1100,17 @@ async def test_write_concern_failure_unordered(self): self.assertEqual(1, len(details["writeErrors"])) # When talking to legacy servers there will be a # write concern error for each operation. - self.assertTrue(len(details["writeConcernErrors"]) > 1) + self.assertGreater(len(details["writeConcernErrors"]), 1) failed = details["writeErrors"][0] self.assertEqual(2, failed["index"]) self.assertEqual(11000, failed["code"]) - self.assertTrue(isinstance(failed["errmsg"], str)) + self.assertIsInstance(failed["errmsg"], str) self.assertEqual(1, failed["op"]["a"]) failed = details["writeConcernErrors"][0] self.assertEqual(64, failed["code"]) - self.assertTrue(isinstance(failed["errmsg"], str)) + self.assertIsInstance(failed["errmsg"], str) upserts = details["upserted"] self.assertEqual(1, len(upserts)) diff --git a/test/asynchronous/test_change_stream.py b/test/asynchronous/test_change_stream.py index 08da00cc1e..3fb8b517f3 100644 --- a/test/asynchronous/test_change_stream.py +++ b/test/asynchronous/test_change_stream.py @@ -36,7 +36,7 @@ unittest, ) from test.asynchronous.unified_format import generate_test_classes -from test.utils import ( +from test.utils_shared import ( AllowListEventListener, EventListener, OvertCommandListener, @@ -48,7 +48,6 @@ from bson.raw_bson import DEFAULT_RAW_BSON_OPTIONS, RawBSONDocument from pymongo import AsyncMongoClient from pymongo.asynchronous.command_cursor import AsyncCommandCursor -from pymongo.asynchronous.helpers import anext from pymongo.errors import ( InvalidOperation, OperationFailure, @@ -267,7 +266,7 @@ async def test_batch_size_is_honored(self): # $changeStream.startAtOperationTime was added in 4.0.0. @no_type_check - @async_client_context.require_version_min(4, 0, 0) + @async_client_context.require_version_min(4, 2, 0) async def test_start_at_operation_time(self): optime = await self.get_start_at_operation_time() @@ -410,7 +409,14 @@ async def test_change_operations(self): expected_update_description = {"updatedFields": {"new": 1}, "removedFields": ["foo"]} if async_client_context.version.at_least(4, 5, 0): expected_update_description["truncatedArrays"] = [] - self.assertEqual(expected_update_description, change["updateDescription"]) + self.assertEqual( + expected_update_description, + { + k: v + for k, v in change["updateDescription"].items() + if k in expected_update_description + }, + ) # Replace. 
await self.watched_collection().replace_one({"new": 1}, {"foo": "bar"}) change = await change_stream.next() @@ -429,7 +435,7 @@ async def test_change_operations(self): await self._test_get_invalidate_event(change_stream) @no_type_check - @async_client_context.require_version_min(4, 1, 1) + @async_client_context.require_version_min(4, 2, 0) async def test_start_after(self): resume_token = await self.get_resume_token(invalidate=True) @@ -445,7 +451,7 @@ async def test_start_after(self): self.assertEqual(change["fullDocument"], {"_id": 2}) @no_type_check - @async_client_context.require_version_min(4, 1, 1) + @async_client_context.require_version_min(4, 2, 0) async def test_start_after_resume_process_with_changes(self): resume_token = await self.get_resume_token(invalidate=True) @@ -556,27 +562,16 @@ async def _test_update_resume_token(self, expected_rt_getter): ) # Prose test no. 1 - @async_client_context.require_version_min(4, 0, 7) + @async_client_context.require_version_min(4, 2, 0) async def test_update_resume_token(self): await self._test_update_resume_token(self._get_expected_resume_token) - # Prose test no. 1 - @async_client_context.require_version_max(4, 0, 7) - async def test_update_resume_token_legacy(self): - await self._test_update_resume_token(self._get_expected_resume_token_legacy) - # Prose test no. 2 - @async_client_context.require_version_min(4, 1, 8) + @async_client_context.require_version_min(4, 2, 0) async def test_raises_error_on_missing_id_418plus(self): # Server returns an error on 4.1.8+ await self._test_raises_error_on_missing_id(OperationFailure) - # Prose test no. 2 - @async_client_context.require_version_max(4, 1, 8) - async def test_raises_error_on_missing_id_418minus(self): - # PyMongo raises an error - await self._test_raises_error_on_missing_id(InvalidOperation) - # Prose test no. 3 @no_type_check async def test_resume_on_error(self): @@ -635,40 +630,12 @@ def raise_error(): cursor.close = raise_error await self.insert_one_and_check(change_stream, {"_id": 2}) - # Prose test no. 9 - @no_type_check - @async_client_context.require_version_min(4, 0, 0) - @async_client_context.require_version_max(4, 0, 7) - async def test_start_at_operation_time_caching(self): - # Case 1: change stream not started with startAtOperationTime - client, listener = self.client_with_listener("aggregate") - async with await self.change_stream_with_client(client) as cs: - await self.kill_change_stream_cursor(cs) - await cs.try_next() - cmd = listener.started_events[-1].command - self.assertIsNotNone(cmd["pipeline"][0]["$changeStream"].get("startAtOperationTime")) - - # Case 2: change stream started with startAtOperationTime - listener.reset() - optime = await self.get_start_at_operation_time() - async with await self.change_stream_with_client( - client, start_at_operation_time=optime - ) as cs: - await self.kill_change_stream_cursor(cs) - await cs.try_next() - cmd = listener.started_events[-1].command - self.assertEqual( - cmd["pipeline"][0]["$changeStream"].get("startAtOperationTime"), - optime, - str([k.command for k in listener.started_events]), - ) - # Prose test no. 10 - SKIPPED # This test is identical to prose test no. 3. # Prose test no. 
11 @no_type_check - @async_client_context.require_version_min(4, 0, 7) + @async_client_context.require_version_min(4, 2, 0) async def test_resumetoken_empty_batch(self): client, listener = await self._client_with_listener("getMore") async with await self.change_stream_with_client(client) as change_stream: @@ -680,7 +647,7 @@ async def test_resumetoken_empty_batch(self): # Prose test no. 11 @no_type_check - @async_client_context.require_version_min(4, 0, 7) + @async_client_context.require_version_min(4, 2, 0) async def test_resumetoken_exhausted_batch(self): client, listener = await self._client_with_listener("getMore") async with await self.change_stream_with_client(client) as change_stream: @@ -690,38 +657,6 @@ async def test_resumetoken_exhausted_batch(self): response = listener.succeeded_events[-1].reply self.assertEqual(resume_token, response["cursor"]["postBatchResumeToken"]) - # Prose test no. 12 - @no_type_check - @async_client_context.require_version_max(4, 0, 7) - async def test_resumetoken_empty_batch_legacy(self): - resume_point = await self.get_resume_token() - - # Empty resume token when neither resumeAfter or startAfter specified. - async with await self.change_stream() as change_stream: - await change_stream.try_next() - self.assertIsNone(change_stream.resume_token) - - # Resume token value is same as resumeAfter. - async with await self.change_stream(resume_after=resume_point) as change_stream: - await change_stream.try_next() - resume_token = change_stream.resume_token - self.assertEqual(resume_token, resume_point) - - # Prose test no. 12 - @no_type_check - @async_client_context.require_version_max(4, 0, 7) - async def test_resumetoken_exhausted_batch_legacy(self): - # Resume token is _id of last change. - async with await self.change_stream() as change_stream: - change = await self._populate_and_exhaust_change_stream(change_stream) - self.assertEqual(change_stream.resume_token, change["_id"]) - resume_point = change["_id"] - - # Resume token is _id of last change even if resumeAfter is specified. - async with await self.change_stream(resume_after=resume_point) as change_stream: - change = await self._populate_and_exhaust_change_stream(change_stream) - self.assertEqual(change_stream.resume_token, change["_id"]) - # Prose test no. 13 @no_type_check async def test_resumetoken_partially_iterated_batch(self): @@ -763,13 +698,13 @@ async def test_resumetoken_uniterated_nonempty_batch_resumeafter(self): # Prose test no. 14 @no_type_check @async_client_context.require_no_mongos - @async_client_context.require_version_min(4, 1, 1) + @async_client_context.require_version_min(4, 2, 0) async def test_resumetoken_uniterated_nonempty_batch_startafter(self): await self._test_resumetoken_uniterated_nonempty_batch("start_after") # Prose test no. 17 @no_type_check - @async_client_context.require_version_min(4, 1, 1) + @async_client_context.require_version_min(4, 2, 0) async def test_startafter_resume_uses_startafter_after_empty_getMore(self): # Resume should use startAfter after no changes have been returned. resume_point = await self.get_resume_token() @@ -789,7 +724,7 @@ async def test_startafter_resume_uses_startafter_after_empty_getMore(self): # Prose test no. 18 @no_type_check - @async_client_context.require_version_min(4, 1, 1) + @async_client_context.require_version_min(4, 2, 0) async def test_startafter_resume_uses_resumeafter_after_nonempty_getMore(self): # Resume should use resumeAfter after some changes have been returned. 
resume_point = await self.get_resume_token() @@ -836,7 +771,7 @@ async def test_split_large_change(self): class TestClusterAsyncChangeStream(TestAsyncChangeStreamBase, APITestsMixin): dbs: list - @async_client_context.require_version_min(4, 0, 0, -1) + @async_client_context.require_version_min(4, 2, 0) @async_client_context.require_change_streams async def asyncSetUp(self) -> None: await super().asyncSetUp() @@ -896,7 +831,7 @@ async def test_full_pipeline(self): class TestAsyncDatabaseAsyncChangeStream(TestAsyncChangeStreamBase, APITestsMixin): - @async_client_context.require_version_min(4, 0, 0, -1) + @async_client_context.require_version_min(4, 2, 0) @async_client_context.require_change_streams async def asyncSetUp(self) -> None: await super().asyncSetUp() diff --git a/test/asynchronous/test_client.py b/test/asynchronous/test_client.py index db232386ee..6794605339 100644 --- a/test/asynchronous/test_client.py +++ b/test/asynchronous/test_client.py @@ -34,7 +34,7 @@ import time import uuid from typing import Any, Iterable, Type, no_type_check -from unittest import mock +from unittest import mock, skipIf from unittest.mock import patch import pytest @@ -60,14 +60,16 @@ unittest, ) from test.asynchronous.pymongo_mocks import AsyncMockClient +from test.asynchronous.utils import ( + async_get_pool, + async_wait_until, + asyncAssertRaisesExactly, +) from test.test_binary import BinaryData -from test.utils import ( +from test.utils_shared import ( NTHREADS, CMAPListener, FunctionCallRecorder, - async_get_pool, - async_wait_until, - asyncAssertRaisesExactly, delay, gevent_monkey_patched, is_greenthread_patched, @@ -90,7 +92,6 @@ from pymongo.asynchronous.command_cursor import AsyncCommandCursor from pymongo.asynchronous.cursor import AsyncCursor, CursorType from pymongo.asynchronous.database import AsyncDatabase -from pymongo.asynchronous.helpers import anext from pymongo.asynchronous.mongo_client import AsyncMongoClient from pymongo.asynchronous.pool import ( AsyncConnection, @@ -111,6 +112,7 @@ NetworkTimeout, OperationFailure, ServerSelectionTimeoutError, + WaitQueueTimeoutError, WriteConcernError, ) from pymongo.monitoring import ServerHeartbeatListener, ServerHeartbeatStartedEvent @@ -211,7 +213,7 @@ def make_db(base, name): self.assertRaises(InvalidName, make_db, self.client, "te/t") self.assertRaises(InvalidName, make_db, self.client, "te st") - self.assertTrue(isinstance(self.client.test, AsyncDatabase)) + self.assertIsInstance(self.client.test, AsyncDatabase) self.assertEqual(self.client.test, self.client["test"]) self.assertEqual(self.client.test, AsyncDatabase(self.client, "test")) @@ -225,7 +227,7 @@ def test_get_database(self): self.assertEqual(write_concern, db.write_concern) def test_getattr(self): - self.assertTrue(isinstance(self.client["_does_not_exist"], AsyncDatabase)) + self.assertIsInstance(self.client["_does_not_exist"], AsyncDatabase) with self.assertRaises(AttributeError) as context: self.client._does_not_exist @@ -237,10 +239,7 @@ def test_getattr(self): def test_iteration(self): client = self.client - if "PyPy" in sys.version and sys.version_info < (3, 8, 15): - msg = "'NoneType' object is not callable" - else: - msg = "'AsyncMongoClient' object is not iterable" + msg = "'AsyncMongoClient' object is not iterable" # Iteration fails with self.assertRaisesRegex(TypeError, msg): for _ in client: # type: ignore[misc] # error: "None" not callable [misc] @@ -512,13 +511,13 @@ async def test_uri_option_precedence(self): async def 
test_connection_timeout_ms_propagates_to_DNS_resolver(self): # Patch the resolver. - from pymongo.srv_resolver import _resolve + from pymongo.asynchronous.srv_resolver import _resolve patched_resolver = FunctionCallRecorder(_resolve) - pymongo.srv_resolver._resolve = patched_resolver + pymongo.asynchronous.srv_resolver._resolve = patched_resolver def reset_resolver(): - pymongo.srv_resolver._resolve = _resolve + pymongo.asynchronous.srv_resolver._resolve = _resolve self.addCleanup(reset_resolver) @@ -607,7 +606,7 @@ def test_validate_suggestion(self): with self.assertRaisesRegex(ConfigurationError, expected): AsyncMongoClient(**{typo: "standard"}) # type: ignore[arg-type] - @patch("pymongo.srv_resolver._SrvResolver.get_hosts") + @patch("pymongo.asynchronous.srv_resolver._SrvResolver.get_hosts") def test_detected_environment_logging(self, mock_get_hosts): normal_hosts = [ "normal.host.com", @@ -629,7 +628,8 @@ def test_detected_environment_logging(self, mock_get_hosts): logs = [record.getMessage() for record in cm.records if record.name == "pymongo.client"] self.assertEqual(len(logs), 7) - @patch("pymongo.srv_resolver._SrvResolver.get_hosts") + @skipIf(os.environ.get("DEBUG_LOG"), "Enabling debug logs breaks this test") + @patch("pymongo.asynchronous.srv_resolver._SrvResolver.get_hosts") async def test_detected_environment_warning(self, mock_get_hosts): with self._caplog.at_level(logging.WARN): normal_hosts = [ @@ -673,7 +673,7 @@ async def test_max_idle_time_reaper_default(self): async with server._pool.checkout() as conn: pass self.assertEqual(1, len(server._pool.conns)) - self.assertTrue(conn in server._pool.conns) + self.assertIn(conn, server._pool.conns) async def test_max_idle_time_reaper_removes_stale_minPoolSize(self): with client_knobs(kill_cursor_frequency=0.1): @@ -746,12 +746,12 @@ async def test_min_pool_size(self): # Assert that if a socket is closed, a new one takes its place async with server._pool.checkout() as conn: - conn.close_conn(None) + await conn.close_conn(None) await async_wait_until( lambda: len(server._pool.conns) == 10, "a closed socket gets replaced from the pool", ) - self.assertFalse(conn in server._pool.conns) + self.assertNotIn(conn, server._pool.conns) async def test_max_idle_time_checkout(self): # Use high frequency to test _get_socket_no_auth. @@ -768,8 +768,8 @@ async def test_max_idle_time_checkout(self): async with server._pool.checkout() as new_con: self.assertNotEqual(conn, new_con) self.assertEqual(1, len(server._pool.conns)) - self.assertFalse(conn in server._pool.conns) - self.assertTrue(new_con in server._pool.conns) + self.assertNotIn(conn, server._pool.conns) + self.assertIn(new_con, server._pool.conns) # Test that connections are reused if maxIdleTimeMS is not set. 
client = await self.async_rs_or_single_client() @@ -849,6 +849,58 @@ async def test_init_disconnected_with_auth(self): with self.assertRaises(ConnectionFailure): await c.pymongo_test.test.find_one() + @async_client_context.require_replica_set + @async_client_context.require_no_load_balancer + @async_client_context.require_tls + async def test_init_disconnected_with_srv(self): + c = await self.async_rs_or_single_client( + "mongodb+srv://test1.test.build.10gen.cc", connect=False, tlsInsecure=True + ) + # nodes returns an empty set if not connected + self.assertEqual(c.nodes, frozenset()) + # topology_description returns the initial seed description if not connected + topology_description = c.topology_description + self.assertEqual(topology_description.topology_type, TOPOLOGY_TYPE.Unknown) + self.assertEqual( + { + ("test1.test.build.10gen.cc", None): ServerDescription( + ("test1.test.build.10gen.cc", None) + ) + }, + topology_description.server_descriptions(), + ) + + # address causes client to block until connected + self.assertIsNotNone(await c.address) + # Initial seed topology and connected topology have the same ID + self.assertEqual( + c._topology._topology_id, topology_description._topology_settings._topology_id + ) + await c.close() + + c = await self.async_rs_or_single_client( + "mongodb+srv://test1.test.build.10gen.cc", connect=False, tlsInsecure=True + ) + # primary causes client to block until connected + await c.primary + self.assertIsNotNone(c._topology) + await c.close() + + c = await self.async_rs_or_single_client( + "mongodb+srv://test1.test.build.10gen.cc", connect=False, tlsInsecure=True + ) + # secondaries causes client to block until connected + await c.secondaries + self.assertIsNotNone(c._topology) + await c.close() + + c = await self.async_rs_or_single_client( + "mongodb+srv://test1.test.build.10gen.cc", connect=False, tlsInsecure=True + ) + # arbiters causes client to block until connected + await c.arbiters + self.assertIsNotNone(c._topology) + async def test_equality(self): seed = "{}:{}".format(*list(self.client._topology_settings.seeds)[0]) c = await self.async_rs_or_single_client(seed, connect=False) @@ -933,6 +985,15 @@ async def test_repr(self): async with eval(the_repr) as client_two: self.assertEqual(client_two, client) + async def test_repr_srv_host(self): + client = AsyncMongoClient("mongodb+srv://test1.test.build.10gen.cc/", connect=False) + # before srv resolution + self.assertIn("host='mongodb+srv://test1.test.build.10gen.cc'", repr(client)) + await client.aconnect() + # after srv resolution + self.assertIn("host=['localhost.test.build.10gen.cc:", repr(client)) + await client.close() + async def test_getters(self): await async_wait_until( lambda: async_client_context.nodes == self.client.nodes, "find all nodes" @@ -943,7 +1004,7 @@ async def test_list_databases(self): cursor = await self.client.list_databases() self.assertIsInstance(cursor, AsyncCommandCursor) helper_docs = await cursor.to_list() - self.assertTrue(len(helper_docs) > 0) + self.assertGreater(len(helper_docs), 0) self.assertEqual(len(helper_docs), len(cmd_docs)) # PYTHON-3529 Some fields may change between calls, just compare names. 
for helper_doc, cmd_doc in zip(helper_docs, cmd_docs): @@ -970,8 +1031,8 @@ async def test_list_database_names(self): cmd_names = [doc["name"] for doc in cmd_docs] db_names = await self.client.list_database_names() - self.assertTrue("pymongo_test" in db_names) - self.assertTrue("pymongo_test_mike" in db_names) + self.assertIn("pymongo_test", db_names) + self.assertIn("pymongo_test_mike", db_names) self.assertEqual(db_names, cmd_names) async def test_drop_database(self): @@ -1195,9 +1256,9 @@ async def test_unix_socket(self): client = await self.async_rs_or_single_client(uri) await client.pymongo_test.test.insert_one({"dummy": "object"}) dbs = await client.list_database_names() - self.assertTrue("pymongo_test" in dbs) + self.assertIn("pymongo_test", dbs) - self.assertTrue(mongodb_socket in repr(client)) + self.assertIn(mongodb_socket, repr(client)) # Confirm it fails with a missing socket. with self.assertRaises(ConnectionFailure): @@ -1212,15 +1273,15 @@ async def test_document_class(self): await db.test.insert_one({"x": 1}) self.assertEqual(dict, c.codec_options.document_class) - self.assertTrue(isinstance(await db.test.find_one(), dict)) - self.assertFalse(isinstance(await db.test.find_one(), SON)) + self.assertIsInstance(await db.test.find_one(), dict) + self.assertNotIsInstance(await db.test.find_one(), SON) c = await self.async_rs_or_single_client(document_class=SON) db = c.pymongo_test self.assertEqual(SON, c.codec_options.document_class) - self.assertTrue(isinstance(await db.test.find_one(), SON)) + self.assertIsInstance(await db.test.find_one(), SON) async def test_timeouts(self): client = await self.async_rs_or_single_client( @@ -1261,7 +1322,6 @@ async def test_socket_timeout(self): no_timeout = self.client timeout_sec = 1 timeout = await self.async_rs_or_single_client(socketTimeoutMS=1000 * timeout_sec) - self.addAsyncCleanup(timeout.close) await no_timeout.pymongo_test.drop_collection("test") await no_timeout.pymongo_test.test.insert_one({"x": 1}) @@ -1314,13 +1374,21 @@ async def test_server_selection_timeout(self): self.assertAlmostEqual(30, client.options.server_selection_timeout) async def test_waitQueueTimeoutMS(self): - client = await self.async_rs_or_single_client(waitQueueTimeoutMS=2000) - self.assertEqual((await async_get_pool(client)).opts.wait_queue_timeout, 2) + listener = CMAPListener() + client = await self.async_rs_or_single_client( + waitQueueTimeoutMS=10, maxPoolSize=1, event_listeners=[listener] + ) + pool = await async_get_pool(client) + self.assertEqual(pool.opts.wait_queue_timeout, 0.01) + async with pool.checkout(): + with self.assertRaises(WaitQueueTimeoutError): + await client.test.command("ping") + self.assertFalse(listener.events_by_type(monitoring.PoolClearedEvent)) async def test_socketKeepAlive(self): pool = await async_get_pool(self.client) async with pool.checkout() as conn: - keepalive = conn.conn.getsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE) + keepalive = conn.conn.sock.getsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE) self.assertTrue(keepalive) @no_type_check @@ -1362,8 +1430,8 @@ async def test_ipv6(self): await client.pymongo_test_bernie.test.insert_one({"dummy": "object"}) dbs = await client.list_database_names() - self.assertTrue("pymongo_test" in dbs) - self.assertTrue("pymongo_test_bernie" in dbs) + self.assertIn("pymongo_test", dbs) + self.assertIn("pymongo_test_bernie", dbs) async def test_contextlib(self): client = await self.async_rs_or_single_client() @@ -1520,7 +1588,7 @@ async def test_exhaust_network_error(self): # Cause a 
network error. conn = one(pool.conns) - conn.conn.close() + await conn.conn.close() cursor = collection.find(cursor_type=CursorType.EXHAUST) with self.assertRaises(ConnectionFailure): await anext(cursor) @@ -1545,7 +1613,7 @@ async def test_auth_network_error(self): # Cause a network error on the actual socket. pool = await async_get_pool(c) conn = one(pool.conns) - conn.conn.close() + await conn.conn.close() # AsyncConnection.authenticate logs, but gets a socket.error. Should be # reraised as AutoReconnect. @@ -1793,6 +1861,29 @@ async def stall_connect(*args, **kwargs): # Each ping command should not take more than 2 seconds self.assertLess(total, 2) + async def test_background_connections_log_on_error(self): + with self.assertLogs("pymongo.client", level="ERROR") as cm: + client = await self.async_rs_or_single_client(minPoolSize=1) + # Create a single connection in the pool. + await client.admin.command("ping") + + # Cause new connections to fail. + pool = await async_get_pool(client) + + async def fail_connect(*args, **kwargs): + raise Exception("failed to connect") + + pool.connect = fail_connect + # Un-patch Pool.connect to break the cyclic reference. + self.addCleanup(delattr, pool, "connect") + + await pool.reset_without_pause() + + await async_wait_until( + lambda: "failed to connect" in "".join(cm.output), "start creating connections" + ) + self.assertIn("MongoClient background task encountered an error", "".join(cm.output)) + @async_client_context.require_replica_set async def test_direct_connection(self): # direct_connection=True should result in Single topology. @@ -1827,20 +1918,20 @@ def server_description_count(): return i gc.collect() - with client_knobs(min_heartbeat_interval=0.003): + with client_knobs(min_heartbeat_interval=0.002): client = self.simple_client( - "invalid:27017", heartbeatFrequencyMS=3, serverSelectionTimeoutMS=150 + "invalid:27017", heartbeatFrequencyMS=2, serverSelectionTimeoutMS=200 ) initial_count = server_description_count() with self.assertRaises(ServerSelectionTimeoutError): await client.test.test.find_one() gc.collect() final_count = server_description_count() + await client.close() # If a bug like PYTHON-2433 is reintroduced then too many # ServerDescriptions will be kept alive and this test will fail: - # AssertionError: 19 != 46 within 15 delta (27 difference) - # On Python 3.11 we seem to get more of a delta. 
- self.assertAlmostEqual(initial_count, final_count, delta=20) + # AssertionError: 11 != 47 within 20 delta (36 difference) + self.assertAlmostEqual(initial_count, final_count, delta=30) @async_client_context.require_failCommand_fail_point async def test_network_error_message(self): @@ -1880,32 +1971,41 @@ async def test_service_name_from_kwargs(self): srvServiceName="customname", connect=False, ) + await client.aconnect() self.assertEqual(client._topology_settings.srv_service_name, "customname") + await client.close() client = AsyncMongoClient( "mongodb+srv://user:password@test22.test.build.10gen.cc" "/?srvServiceName=shouldbeoverriden", srvServiceName="customname", connect=False, ) + await client.aconnect() self.assertEqual(client._topology_settings.srv_service_name, "customname") + await client.close() client = AsyncMongoClient( "mongodb+srv://user:password@test22.test.build.10gen.cc/?srvServiceName=customname", connect=False, ) + await client.aconnect() self.assertEqual(client._topology_settings.srv_service_name, "customname") + await client.close() async def test_srv_max_hosts_kwarg(self): client = self.simple_client("mongodb+srv://test1.test.build.10gen.cc/") + await client.aconnect() self.assertGreater(len(client.topology_description.server_descriptions()), 1) client = self.simple_client("mongodb+srv://test1.test.build.10gen.cc/", srvmaxhosts=1) + await client.aconnect() self.assertEqual(len(client.topology_description.server_descriptions()), 1) client = self.simple_client( "mongodb+srv://test1.test.build.10gen.cc/?srvMaxHosts=1", srvmaxhosts=2 ) + await client.aconnect() self.assertEqual(len(client.topology_description.server_descriptions()), 2) @unittest.skipIf( - async_client_context.load_balancer or async_client_context.serverless, + async_client_context.load_balancer, "loadBalanced clients do not run SDAM", ) @unittest.skipIf(sys.platform == "win32", "Windows does not support SIGSTOP") @@ -1958,7 +2058,7 @@ async def _test_handshake(self, env_vars, expected_env): async def test_handshake_01_aws(self): await self._test_handshake( { - "AWS_EXECUTION_ENV": "AWS_Lambda_python3.9", + "AWS_EXECUTION_ENV": "AWS_Lambda_python3.10", "AWS_REGION": "us-east-2", "AWS_LAMBDA_FUNCTION_MEMORY_SIZE": "1024", }, @@ -1996,7 +2096,7 @@ async def test_handshake_04_vercel(self): async def test_handshake_05_multiple(self): await self._test_handshake( - {"AWS_EXECUTION_ENV": "AWS_Lambda_python3.9", "FUNCTIONS_WORKER_RUNTIME": "python"}, + {"AWS_EXECUTION_ENV": "AWS_Lambda_python3.10", "FUNCTIONS_WORKER_RUNTIME": "python"}, None, ) # Extra cases for other combos. @@ -2008,13 +2108,16 @@ async def test_handshake_05_multiple(self): async def test_handshake_06_region_too_long(self): await self._test_handshake( - {"AWS_EXECUTION_ENV": "AWS_Lambda_python3.9", "AWS_REGION": "a" * 512}, + {"AWS_EXECUTION_ENV": "AWS_Lambda_python3.10", "AWS_REGION": "a" * 512}, {"name": "aws.lambda"}, ) async def test_handshake_07_memory_invalid_int(self): await self._test_handshake( - {"AWS_EXECUTION_ENV": "AWS_Lambda_python3.9", "AWS_LAMBDA_FUNCTION_MEMORY_SIZE": "big"}, + { + "AWS_EXECUTION_ENV": "AWS_Lambda_python3.10", + "AWS_LAMBDA_FUNCTION_MEMORY_SIZE": "big", + }, {"name": "aws.lambda"}, ) @@ -2205,7 +2308,7 @@ async def test_exhaust_query_network_error(self): # Cause a network error. 
conn = one(pool.conns) - conn.conn.close() + await conn.conn.close() cursor = collection.find(cursor_type=CursorType.EXHAUST) with self.assertRaises(ConnectionFailure): @@ -2233,7 +2336,7 @@ async def test_exhaust_getmore_network_error(self): # Cause a network error. conn = cursor._sock_mgr.conn - conn.conn.close() + await conn.conn.close() # A getmore fails. with self.assertRaises(ConnectionFailure): diff --git a/test/asynchronous/test_client_bulk_write.py b/test/asynchronous/test_client_bulk_write.py index a82629f495..49f969fa34 100644 --- a/test/asynchronous/test_client_bulk_write.py +++ b/test/asynchronous/test_client_bulk_write.py @@ -18,9 +18,6 @@ import os import sys -from bson import encode -from bson.raw_bson import RawBSONDocument - sys.path[0:0] = [""] from test.asynchronous import ( @@ -28,7 +25,8 @@ async_client_context, unittest, ) -from test.utils import ( +from test.asynchronous.utils import flaky +from test.utils_shared import ( OvertCommandListener, ) from unittest.mock import patch @@ -50,7 +48,6 @@ class TestClientBulkWrite(AsyncIntegrationTest): @async_client_context.require_version_min(8, 0, 0, -24) - @async_client_context.require_no_serverless async def test_returns_error_if_no_namespace_provided(self): models = [InsertOne(document={"a": "b"})] with self.assertRaises(InvalidOperation) as context: @@ -61,7 +58,6 @@ async def test_returns_error_if_no_namespace_provided(self): ) @async_client_context.require_version_min(8, 0, 0, -24) - @async_client_context.require_no_serverless async def test_handles_non_pymongo_error(self): with patch.object( _AsyncClientBulk, "write_command", return_value={"error": TypeError("mock type error")} @@ -73,7 +69,6 @@ async def test_handles_non_pymongo_error(self): self.assertFalse(hasattr(context.exception.error, "details")) @async_client_context.require_version_min(8, 0, 0, -24) - @async_client_context.require_no_serverless async def test_formats_write_error_correctly(self): models = [ InsertOne(namespace="db.coll", document={"_id": 1}), @@ -87,19 +82,9 @@ async def test_formats_write_error_correctly(self): self.assertEqual(write_error["idx"], 1) self.assertEqual(write_error["op"], {"insert": 0, "document": {"_id": 1}}) - @async_client_context.require_version_min(8, 0, 0, -24) - @async_client_context.require_no_serverless - async def test_raw_bson_not_inflated(self): - doc = RawBSONDocument(encode({"a": "b" * 100})) - models = [ - InsertOne(namespace="db.coll", document=doc), - ] - await self.client.bulk_write(models=models) - - self.assertIsNone(doc._RawBSONDocument__inflated_doc) - # https://github.com/mongodb/specifications/tree/master/source/crud/tests +# Note: tests 1 and 2 are in test_read_write_concern_spec.py class TestClientBulkWriteCRUD(AsyncIntegrationTest): async def asyncSetUp(self): await super().asyncSetUp() @@ -108,8 +93,7 @@ async def asyncSetUp(self): self.max_message_size_bytes = await async_client_context.max_message_size_bytes @async_client_context.require_version_min(8, 0, 0, -24) - @async_client_context.require_no_serverless - async def test_batch_splits_if_num_operations_too_large(self): + async def test_3_batch_splits_if_num_operations_too_large(self): listener = OvertCommandListener() client = await self.async_rs_or_single_client(event_listeners=[listener]) @@ -133,8 +117,7 @@ async def test_batch_splits_if_num_operations_too_large(self): self.assertEqual(first_event.operation_id, second_event.operation_id) @async_client_context.require_version_min(8, 0, 0, -24) - 
diff --git a/test/asynchronous/test_client_bulk_write.py b/test/asynchronous/test_client_bulk_write.py
index a82629f495..49f969fa34 100644
--- a/test/asynchronous/test_client_bulk_write.py
+++ b/test/asynchronous/test_client_bulk_write.py
@@ -18,9 +18,6 @@
 import os
 import sys

-from bson import encode
-from bson.raw_bson import RawBSONDocument
-
 sys.path[0:0] = [""]

 from test.asynchronous import (
@@ -28,7 +25,8 @@
     async_client_context,
     unittest,
 )
-from test.utils import (
+from test.asynchronous.utils import flaky
+from test.utils_shared import (
     OvertCommandListener,
 )
 from unittest.mock import patch
@@ -50,7 +48,6 @@ class TestClientBulkWrite(AsyncIntegrationTest):
     @async_client_context.require_version_min(8, 0, 0, -24)
-    @async_client_context.require_no_serverless
     async def test_returns_error_if_no_namespace_provided(self):
         models = [InsertOne(document={"a": "b"})]
         with self.assertRaises(InvalidOperation) as context:
@@ -61,7 +58,6 @@ async def test_returns_error_if_no_namespace_provided(self):
         )

     @async_client_context.require_version_min(8, 0, 0, -24)
-    @async_client_context.require_no_serverless
     async def test_handles_non_pymongo_error(self):
         with patch.object(
             _AsyncClientBulk, "write_command", return_value={"error": TypeError("mock type error")}
@@ -73,7 +69,6 @@ async def test_handles_non_pymongo_error(self):
         self.assertFalse(hasattr(context.exception.error, "details"))

     @async_client_context.require_version_min(8, 0, 0, -24)
-    @async_client_context.require_no_serverless
     async def test_formats_write_error_correctly(self):
         models = [
             InsertOne(namespace="db.coll", document={"_id": 1}),
@@ -87,19 +82,9 @@ async def test_formats_write_error_correctly(self):
         self.assertEqual(write_error["idx"], 1)
         self.assertEqual(write_error["op"], {"insert": 0, "document": {"_id": 1}})

-    @async_client_context.require_version_min(8, 0, 0, -24)
-    @async_client_context.require_no_serverless
-    async def test_raw_bson_not_inflated(self):
-        doc = RawBSONDocument(encode({"a": "b" * 100}))
-        models = [
-            InsertOne(namespace="db.coll", document=doc),
-        ]
-        await self.client.bulk_write(models=models)
-
-        self.assertIsNone(doc._RawBSONDocument__inflated_doc)
-

 # https://github.com/mongodb/specifications/tree/master/source/crud/tests
+# Note: tests 1 and 2 are in test_read_write_concern_spec.py
 class TestClientBulkWriteCRUD(AsyncIntegrationTest):
     async def asyncSetUp(self):
         await super().asyncSetUp()
@@ -108,8 +93,7 @@ async def asyncSetUp(self):
         self.max_message_size_bytes = await async_client_context.max_message_size_bytes

     @async_client_context.require_version_min(8, 0, 0, -24)
-    @async_client_context.require_no_serverless
-    async def test_batch_splits_if_num_operations_too_large(self):
+    async def test_3_batch_splits_if_num_operations_too_large(self):
         listener = OvertCommandListener()
         client = await self.async_rs_or_single_client(event_listeners=[listener])
@@ -133,8 +117,7 @@ async def test_batch_splits_if_num_operations_too_large(self):
         self.assertEqual(first_event.operation_id, second_event.operation_id)

     @async_client_context.require_version_min(8, 0, 0, -24)
-    @async_client_context.require_no_serverless
-    async def test_batch_splits_if_ops_payload_too_large(self):
+    async def test_4_batch_splits_if_ops_payload_too_large(self):
         listener = OvertCommandListener()
         client = await self.async_rs_or_single_client(event_listeners=[listener])
@@ -165,9 +148,8 @@ async def test_batch_splits_if_ops_payload_too_large(self):
         self.assertEqual(first_event.operation_id, second_event.operation_id)

     @async_client_context.require_version_min(8, 0, 0, -24)
-    @async_client_context.require_no_serverless
     @async_client_context.require_failCommand_fail_point
-    async def test_collects_write_concern_errors_across_batches(self):
+    async def test_5_collects_write_concern_errors_across_batches(self):
         listener = OvertCommandListener()
         client = await self.async_rs_or_single_client(
             event_listeners=[listener],
@@ -208,8 +190,7 @@ async def test_collects_write_concern_errors_across_batches(self):
         self.assertEqual(len(bulk_write_events), 2)

     @async_client_context.require_version_min(8, 0, 0, -24)
-    @async_client_context.require_no_serverless
-    async def test_collects_write_errors_across_batches_unordered(self):
+    async def test_6_collects_write_errors_across_batches_unordered(self):
         listener = OvertCommandListener()
         client = await self.async_rs_or_single_client(event_listeners=[listener])
@@ -238,8 +219,7 @@ async def test_collects_write_errors_across_batches_unordered(self):
         self.assertEqual(len(bulk_write_events), 2)

     @async_client_context.require_version_min(8, 0, 0, -24)
-    @async_client_context.require_no_serverless
-    async def test_collects_write_errors_across_batches_ordered(self):
+    async def test_6_collects_write_errors_across_batches_ordered(self):
         listener = OvertCommandListener()
         client = await self.async_rs_or_single_client(event_listeners=[listener])
@@ -268,8 +248,7 @@ async def test_collects_write_errors_across_batches_ordered(self):
         self.assertEqual(len(bulk_write_events), 1)

     @async_client_context.require_version_min(8, 0, 0, -24)
-    @async_client_context.require_no_serverless
-    async def test_handles_cursor_requiring_getMore(self):
+    async def test_7_handles_cursor_requiring_getMore(self):
         listener = OvertCommandListener()
         client = await self.async_rs_or_single_client(event_listeners=[listener])
@@ -308,9 +287,8 @@ async def test_handles_cursor_requiring_getMore(self):
         self.assertTrue(get_more_event)

     @async_client_context.require_version_min(8, 0, 0, -24)
-    @async_client_context.require_no_serverless
     @async_client_context.require_no_standalone
-    async def test_handles_cursor_requiring_getMore_within_transaction(self):
+    async def test_8_handles_cursor_requiring_getMore_within_transaction(self):
         listener = OvertCommandListener()
         client = await self.async_rs_or_single_client(event_listeners=[listener])
@@ -351,9 +329,8 @@ async def test_handles_cursor_requiring_getMore_within_transaction(self):
         self.assertTrue(get_more_event)

     @async_client_context.require_version_min(8, 0, 0, -24)
-    @async_client_context.require_no_serverless
     @async_client_context.require_failCommand_fail_point
-    async def test_handles_getMore_error(self):
+    async def test_9_handles_getMore_error(self):
         listener = OvertCommandListener()
         client = await self.async_rs_or_single_client(event_listeners=[listener])
@@ -406,8 +383,7 @@ async def test_handles_getMore_error(self):
         self.assertTrue(kill_cursors_event)

     @async_client_context.require_version_min(8, 0, 0, -24)
-    @async_client_context.require_no_serverless
-    async def test_returns_error_if_unacknowledged_too_large_insert(self):
+    async def test_10_returns_error_if_unacknowledged_too_large_insert(self):
         listener = OvertCommandListener()
         client = await self.async_rs_or_single_client(event_listeners=[listener])
@@ -466,8 +442,7 @@ async def _setup_namespace_test_models(self):
         return num_models, models

     @async_client_context.require_version_min(8, 0, 0, -24)
-    @async_client_context.require_no_serverless
-    async def test_no_batch_splits_if_new_namespace_is_not_too_large(self):
+    async def test_11_no_batch_splits_if_new_namespace_is_not_too_large(self):
         listener = OvertCommandListener()
         client = await self.async_rs_or_single_client(event_listeners=[listener])
@@ -497,8 +472,7 @@ async def test_no_batch_splits_if_new_namespace_is_not_too_large(self):
         self.assertEqual(event.command["nsInfo"][0]["ns"], "db.coll")

     @async_client_context.require_version_min(8, 0, 0, -24)
-    @async_client_context.require_no_serverless
-    async def test_batch_splits_if_new_namespace_is_too_large(self):
+    async def test_11_batch_splits_if_new_namespace_is_too_large(self):
         listener = OvertCommandListener()
         client = await self.async_rs_or_single_client(event_listeners=[listener])
@@ -535,27 +509,27 @@ async def test_batch_splits_if_new_namespace_is_too_large(self):
         self.assertEqual(second_event.command["nsInfo"][0]["ns"], namespace)

     @async_client_context.require_version_min(8, 0, 0, -24)
-    @async_client_context.require_no_serverless
-    async def test_returns_error_if_no_writes_can_be_added_to_ops(self):
+    async def test_12_returns_error_if_no_writes_can_be_added_to_ops(self):
         client = await self.async_rs_or_single_client()

         # Document too large.
         b_repeated = "b" * self.max_message_size_bytes
         models = [InsertOne(namespace="db.coll", document={"a": b_repeated})]
-        with self.assertRaises(DocumentTooLarge):
+        with self.assertRaises(DocumentTooLarge) as context:
             await client.bulk_write(models=models)
+        self.assertIsNone(context.exception.partial_result)

         # Namespace too large.
c_repeated = "c" * self.max_message_size_bytes namespace = f"db.{c_repeated}" models = [InsertOne(namespace=namespace, document={"a": "b"})] - with self.assertRaises(DocumentTooLarge): + with self.assertRaises(DocumentTooLarge) as context: await client.bulk_write(models=models) + self.assertIsNone(context.exception.partial_result) @async_client_context.require_version_min(8, 0, 0, -24) - @async_client_context.require_no_serverless @unittest.skipUnless(_HAVE_PYMONGOCRYPT, "pymongocrypt is not installed") - async def test_returns_error_if_auto_encryption_configured(self): + async def test_13_returns_error_if_auto_encryption_configured(self): opts = AutoEncryptionOpts( key_vault_namespace="db.coll", kms_providers={"aws": {"accessKeyId": "foo", "secretAccessKey": "bar"}}, @@ -565,12 +539,12 @@ async def test_returns_error_if_auto_encryption_configured(self): models = [InsertOne(namespace="db.coll", document={"a": "b"})] with self.assertRaises(InvalidOperation) as context: await client.bulk_write(models=models) + self.assertIsNone(context.exception.partial_result) self.assertIn( "bulk_write does not currently support automatic encryption", context.exception._message ) @async_client_context.require_version_min(8, 0, 0, -24) - @async_client_context.require_no_serverless async def test_upserted_result(self): client = await self.async_rs_or_single_client() @@ -609,8 +583,9 @@ async def test_upserted_result(self): self.assertEqual(result.update_results[1].did_upsert, True) self.assertEqual(result.update_results[2].did_upsert, False) + # Note: test 14 is optional and intentionally not implemented because we provide multiple APIs to specify explain. + @async_client_context.require_version_min(8, 0, 0, -24) - @async_client_context.require_no_serverless async def test_15_unacknowledged_write_across_batches(self): listener = OvertCommandListener() client = await self.async_rs_or_single_client(event_listeners=[listener]) @@ -651,21 +626,20 @@ async def test_15_unacknowledged_write_across_batches(self): # https://github.com/mongodb/specifications/blob/master/source/client-side-operations-timeout/tests/README.md#11-multi-batch-bulkwrites class TestClientBulkWriteCSOT(AsyncIntegrationTest): async def asyncSetUp(self): - if os.environ.get("SKIP_CSOT_TESTS", ""): - raise unittest.SkipTest("SKIP_CSOT_TESTS is set, skipping...") await super().asyncSetUp() self.max_write_batch_size = await async_client_context.max_write_batch_size self.max_bson_object_size = await async_client_context.max_bson_size self.max_message_size_bytes = await async_client_context.max_message_size_bytes @async_client_context.require_version_min(8, 0, 0, -24) - @async_client_context.require_no_serverless @async_client_context.require_failCommand_fail_point + @flaky(reason="PYTHON-5290", max_runs=3, affects_cpython_linux=True) async def test_timeout_in_multi_batch_bulk_write(self): + if sys.platform != "linux" and "CI" in os.environ: + self.skipTest("PYTHON-3522 CSOT test runs too slow on Windows and MacOS") _OVERHEAD = 500 internal_client = await self.async_rs_or_single_client(timeoutMS=None) - self.addAsyncCleanup(internal_client.close) collection = internal_client.db["coll"] self.addAsyncCleanup(collection.drop) diff --git a/test/asynchronous/test_client_context.py b/test/asynchronous/test_client_context.py index 6a195eb6b8..652b32e798 100644 --- a/test/asynchronous/test_client_context.py +++ b/test/asynchronous/test_client_context.py @@ -36,31 +36,15 @@ def test_must_connect(self): ), ) - def test_serverless(self): - if not 
os.environ.get("TEST_SERVERLESS"): - raise SkipTest("TEST_SERVERLESS is not set") - - self.assertTrue( - async_client_context.connected and async_client_context.serverless, - "client context must be connected to serverless when " - f"TEST_SERVERLESS is set. Failed attempts:\n{async_client_context.connection_attempt_info()}", - ) - def test_enableTestCommands_is_disabled(self): - if not os.environ.get("PYMONGO_DISABLE_TEST_COMMANDS"): - raise SkipTest("PYMONGO_DISABLE_TEST_COMMANDS is not set") + if not os.environ.get("DISABLE_TEST_COMMANDS"): + raise SkipTest("DISABLE_TEST_COMMANDS is not set") self.assertFalse( async_client_context.test_commands_enabled, - "enableTestCommands must be disabled when PYMONGO_DISABLE_TEST_COMMANDS is set.", + "enableTestCommands must be disabled when DISABLE_TEST_COMMANDS is set.", ) - def test_setdefaultencoding_worked(self): - if not os.environ.get("SETDEFAULTENCODING"): - raise SkipTest("SETDEFAULTENCODING is not set") - - self.assertEqual(sys.getdefaultencoding(), os.environ["SETDEFAULTENCODING"]) - def test_free_threading_is_enabled(self): if "free-threading build" not in sys.version: raise SkipTest("this test requires the Python free-threading build") diff --git a/test/asynchronous/test_client_metadata.py b/test/asynchronous/test_client_metadata.py new file mode 100644 index 0000000000..2f175cceed --- /dev/null +++ b/test/asynchronous/test_client_metadata.py @@ -0,0 +1,232 @@ +# Copyright 2013-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import annotations + +import asyncio +import os +import pathlib +import time +import unittest +from test.asynchronous import AsyncIntegrationTest +from test.asynchronous.unified_format import generate_test_classes +from test.utils_shared import CMAPListener +from typing import Any, Optional + +import pytest + +from pymongo import AsyncMongoClient +from pymongo.driver_info import DriverInfo +from pymongo.monitoring import ConnectionClosedEvent + +try: + from mockupdb import MockupDB, OpMsgReply + + _HAVE_MOCKUPDB = True +except ImportError: + _HAVE_MOCKUPDB = False + +pytestmark = pytest.mark.mockupdb + +_IS_SYNC = False + +# Location of JSON test specifications. +if _IS_SYNC: + _TEST_PATH = os.path.join(pathlib.Path(__file__).resolve().parent, "handshake", "unified") +else: + _TEST_PATH = os.path.join( + pathlib.Path(__file__).resolve().parent.parent, "handshake", "unified" + ) + +# Generate unified tests. 
diff --git a/test/asynchronous/test_client_metadata.py b/test/asynchronous/test_client_metadata.py
new file mode 100644
index 0000000000..2f175cceed
--- /dev/null
+++ b/test/asynchronous/test_client_metadata.py
@@ -0,0 +1,232 @@
+# Copyright 2013-present MongoDB, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import asyncio
+import os
+import pathlib
+import time
+import unittest
+from test.asynchronous import AsyncIntegrationTest
+from test.asynchronous.unified_format import generate_test_classes
+from test.utils_shared import CMAPListener
+from typing import Any, Optional
+
+import pytest
+
+from pymongo import AsyncMongoClient
+from pymongo.driver_info import DriverInfo
+from pymongo.monitoring import ConnectionClosedEvent
+
+try:
+    from mockupdb import MockupDB, OpMsgReply
+
+    _HAVE_MOCKUPDB = True
+except ImportError:
+    _HAVE_MOCKUPDB = False
+
+pytestmark = pytest.mark.mockupdb
+
+_IS_SYNC = False
+
+# Location of JSON test specifications.
+if _IS_SYNC:
+    _TEST_PATH = os.path.join(pathlib.Path(__file__).resolve().parent, "handshake", "unified")
+else:
+    _TEST_PATH = os.path.join(
+        pathlib.Path(__file__).resolve().parent.parent, "handshake", "unified"
+    )
+
+# Generate unified tests.
+globals().update(generate_test_classes(_TEST_PATH, module=__name__))
+
+
+def _get_handshake_driver_info(request):
+    assert "client" in request
+    return request["client"]
+
+
+class TestClientMetadataProse(AsyncIntegrationTest):
+    async def asyncSetUp(self):
+        await super().asyncSetUp()
+        self.server = MockupDB()
+        self.handshake_req = None
+
+        def respond(r):
+            if "ismaster" in r:
+                # then this is a handshake request
+                self.handshake_req = r
+            return r.reply(OpMsgReply(maxWireVersion=13))
+
+        self.server.autoresponds(respond)
+        self.server.run()
+        self.addAsyncCleanup(self.server.stop)
+
+    async def send_ping_and_get_metadata(
+        self, client: AsyncMongoClient, is_handshake: bool
+    ) -> tuple[str, Optional[str], Optional[str], dict[str, Any]]:
+        # reset if handshake request
+        if is_handshake:
+            self.handshake_req: Optional[dict] = None
+
+        await client.admin.command("ping")
+        metadata = _get_handshake_driver_info(self.handshake_req)
+        driver_metadata = metadata["driver"]
+        name, version, platform = (
+            driver_metadata["name"],
+            driver_metadata["version"],
+            metadata["platform"],
+        )
+        return name, version, platform, metadata
+
+    async def check_metadata_added(
+        self,
+        client: AsyncMongoClient,
+        add_name: str,
+        add_version: Optional[str],
+        add_platform: Optional[str],
+    ) -> None:
+        # send initial metadata
+        name, version, platform, metadata = await self.send_ping_and_get_metadata(client, True)
+        # wait for connection to become idle
+        await asyncio.sleep(0.005)
+
+        # add new metadata
+        client.append_metadata(DriverInfo(add_name, add_version, add_platform))
+        new_name, new_version, new_platform, new_metadata = await self.send_ping_and_get_metadata(
+            client, True
+        )
+        if add_name is not None and add_name.lower() in name.lower().split("|"):
+            self.assertEqual(name, new_name)
+            self.assertEqual(version, new_version)
+            self.assertEqual(platform, new_platform)
+        else:
+            self.assertEqual(new_name, f"{name}|{add_name}" if add_name is not None else name)
+            self.assertEqual(
+                new_version,
+                f"{version}|{add_version}" if add_version is not None else version,
+            )
+            self.assertEqual(
+                new_platform,
+                f"{platform}|{add_platform}" if add_platform is not None else platform,
+            )
+
+        metadata.pop("driver")
+        metadata.pop("platform")
+        new_metadata.pop("driver")
+        new_metadata.pop("platform")
+        self.assertEqual(metadata, new_metadata)
+
+    async def test_append_metadata(self):
+        client = await self.async_rs_or_single_client(
+            "mongodb://" + self.server.address_string,
+            maxIdleTimeMS=1,
+            driver=DriverInfo("library", "1.2", "Library Platform"),
+        )
+        await self.check_metadata_added(client, "framework", "2.0", "Framework Platform")
+
+    async def test_append_metadata_platform_none(self):
+        client = await self.async_rs_or_single_client(
+            "mongodb://" + self.server.address_string,
+            maxIdleTimeMS=1,
+            driver=DriverInfo("library", "1.2", "Library Platform"),
+        )
+        await self.check_metadata_added(client, "framework", "2.0", None)
+
+    async def test_append_metadata_version_none(self):
+        client = await self.async_rs_or_single_client(
+            "mongodb://" + self.server.address_string,
+            maxIdleTimeMS=1,
+            driver=DriverInfo("library", "1.2", "Library Platform"),
+        )
+        await self.check_metadata_added(client, "framework", None, "Framework Platform")
+
+    async def test_append_metadata_platform_version_none(self):
+        client = await self.async_rs_or_single_client(
+            "mongodb://" + self.server.address_string,
+            maxIdleTimeMS=1,
+            driver=DriverInfo("library", "1.2", "Library Platform"),
+        )
+        await self.check_metadata_added(client, "framework", None, None)
+
+    async def test_multiple_successive_metadata_updates(self):
+        client = await self.async_rs_or_single_client(
+            "mongodb://" + self.server.address_string, maxIdleTimeMS=1, connect=False
+        )
+        client.append_metadata(DriverInfo("library", "1.2", "Library Platform"))
+        await self.check_metadata_added(client, "framework", "2.0", "Framework Platform")
+
+    async def test_multiple_successive_metadata_updates_platform_none(self):
+        client = await self.async_rs_or_single_client(
+            "mongodb://" + self.server.address_string,
+            maxIdleTimeMS=1,
+        )
+        client.append_metadata(DriverInfo("library", "1.2", "Library Platform"))
+        await self.check_metadata_added(client, "framework", "2.0", None)
+
+    async def test_multiple_successive_metadata_updates_version_none(self):
+        client = await self.async_rs_or_single_client(
+            "mongodb://" + self.server.address_string,
+            maxIdleTimeMS=1,
+        )
+        client.append_metadata(DriverInfo("library", "1.2", "Library Platform"))
+        await self.check_metadata_added(client, "framework", None, "Framework Platform")
+
+    async def test_multiple_successive_metadata_updates_platform_version_none(self):
+        client = await self.async_rs_or_single_client(
+            "mongodb://" + self.server.address_string,
+            maxIdleTimeMS=1,
+        )
+        client.append_metadata(DriverInfo("library", "1.2", "Library Platform"))
+        await self.check_metadata_added(client, "framework", None, None)
+
+    async def test_doesnt_update_established_connections(self):
+        listener = CMAPListener()
+        client = await self.async_rs_or_single_client(
+            "mongodb://" + self.server.address_string,
+            maxIdleTimeMS=1,
+            driver=DriverInfo("library", "1.2", "Library Platform"),
+            event_listeners=[listener],
+        )
+
+        # send initial metadata
+        name, version, platform, metadata = await self.send_ping_and_get_metadata(client, True)
+        self.assertIsNotNone(name)
+        self.assertIsNotNone(version)
+        self.assertIsNotNone(platform)
+
+        # add data
+        add_name, add_version, add_platform = "framework", "2.0", "Framework Platform"
+        client.append_metadata(DriverInfo(add_name, add_version, add_platform))
+        # check new data isn't sent
+        self.handshake_req: Optional[dict] = None
+        await client.admin.command("ping")
+        self.assertIsNone(self.handshake_req)
+        self.assertEqual(listener.event_count(ConnectionClosedEvent), 0)
+
+    async def test_duplicate_driver_name_no_op(self):
+        client = await self.async_rs_or_single_client(
+            "mongodb://" + self.server.address_string,
+            maxIdleTimeMS=1,
+        )
+        client.append_metadata(DriverInfo("library", "1.2", "Library Platform"))
+        await self.check_metadata_added(client, "framework", None, None)
+        # wait for connection to become idle
+        await asyncio.sleep(0.005)
+        # add same metadata again
+        await self.check_metadata_added(client, "Framework", None, None)
+
+
+if __name__ == "__main__":
+    unittest.main()
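The prose tests above drive AsyncMongoClient.append_metadata, which lets a wrapping library or framework add its own DriverInfo entry after the client has been constructed; only connections created afterwards advertise it. A minimal usage sketch (the URI and names are illustrative):

from pymongo import AsyncMongoClient
from pymongo.driver_info import DriverInfo

client = AsyncMongoClient(
    "mongodb://localhost:27017",
    driver=DriverInfo("library", "1.2", "Library Platform"),
    connect=False,
)
# Later, e.g. from an integration layer. Established connections keep their
# original handshake metadata, as test_doesnt_update_established_connections shows.
client.append_metadata(DriverInfo("framework", "2.0", "Framework Platform"))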
diff --git a/test/asynchronous/test_collation.py b/test/asynchronous/test_collation.py
index d7fd85b168..da810a2a9f 100644
--- a/test/asynchronous/test_collation.py
+++ b/test/asynchronous/test_collation.py
@@ -18,10 +18,9 @@
 import functools
 import warnings
 from test.asynchronous import AsyncIntegrationTest, async_client_context, unittest
-from test.utils import EventListener, OvertCommandListener
+from test.utils_shared import EventListener, OvertCommandListener
 from typing import Any

-from pymongo.asynchronous.helpers import anext
 from pymongo.collation import (
     Collation,
     CollationAlternate,
diff --git a/test/asynchronous/test_collection.py b/test/asynchronous/test_collection.py
index 528919f63c..498563fe83 100644
--- a/test/asynchronous/test_collection.py
+++ b/test/asynchronous/test_collection.py
@@ -21,6 +21,7 @@
 import sys
 from codecs import utf_8_decode
 from collections import defaultdict
+from test.asynchronous.utils import async_get_pool, async_is_mongos
 from typing import Any, Iterable, no_type_check

 from pymongo.asynchronous.database import AsyncDatabase
@@ -33,14 +34,13 @@
     AsyncUnitTest,
     async_client_context,
 )
-from test.utils import (
+from test.utils_shared import (
     IMPOSSIBLE_WRITE_CONCERN,
     EventListener,
     OvertCommandListener,
-    async_get_pool,
-    async_is_mongos,
     async_wait_until,
 )
+from test.version import Version

 from bson import encode
 from bson.codec_options import CodecOptions
@@ -51,7 +51,6 @@
 from pymongo import ASCENDING, DESCENDING, GEO2D, GEOSPHERE, HASHED, TEXT
 from pymongo.asynchronous.collection import AsyncCollection, ReturnDocument
 from pymongo.asynchronous.command_cursor import AsyncCommandCursor
-from pymongo.asynchronous.helpers import anext
 from pymongo.asynchronous.mongo_client import AsyncMongoClient
 from pymongo.bulk_shared import BulkWriteError
 from pymongo.cursor_shared import CursorType
@@ -112,7 +111,7 @@ def make_col(base, name):

     def test_getattr(self):
         coll = self.db.test
-        self.assertTrue(isinstance(coll["_does_not_exist"], AsyncCollection))
+        self.assertIsInstance(coll["_does_not_exist"], AsyncCollection)

         with self.assertRaises(AttributeError) as context:
             coll._does_not_exist
@@ -133,13 +132,7 @@ def test_getattr(self):

     def test_iteration(self):
         coll = self.db.coll
-        if "PyPy" in sys.version and sys.version_info < (3, 8, 15):
-            msg = "'NoneType' object is not callable"
-        else:
-            if _IS_SYNC:
-                msg = "'Collection' object is not iterable"
-            else:
-                msg = "'AsyncCollection' object is not iterable"
+        msg = "'AsyncCollection' object is not iterable"
         # Iteration fails
         with self.assertRaisesRegex(TypeError, msg):
             for _ in coll:  # type: ignore[misc] # error: "None" not callable  [misc]
@@ -183,7 +176,7 @@ def write_concern_collection(self):
             yield self.db.test

     async def test_equality(self):
-        self.assertTrue(isinstance(self.db.test, AsyncCollection))
+        self.assertIsInstance(self.db.test, AsyncCollection)
         self.assertEqual(self.db.test, self.db["test"])
         self.assertEqual(self.db.test, AsyncCollection(self.db, "test"))
         self.assertEqual(self.db.test.mike, self.db["test.mike"])
@@ -219,7 +212,7 @@ async def lambda_test_2():

     async def test_drop_nonexistent_collection(self):
         await self.db.drop_collection("test")
-        self.assertFalse("test" in await self.db.list_collection_names())
+        self.assertNotIn("test", await self.db.list_collection_names())

         # No exception
         await self.db.drop_collection("test")
@@ -255,7 +248,7 @@ async def test_create_indexes(self):
         await db.test.drop_indexes()
         self.assertEqual(len(await db.test.index_information()), 1)
         await db.test.create_indexes([IndexModel("hello")])
-        self.assertTrue("hello_1" in await db.test.index_information())
+        self.assertIn("hello_1", await db.test.index_information())
         await db.test.drop_indexes()

         self.assertEqual(len(await db.test.index_information()), 1)
@@ -264,7 +257,7 @@ async def test_create_indexes(self):
         )
         info = await db.test.index_information()
         for name in names:
-            self.assertTrue(name in info)
+            self.assertIn(name, info)

         await db.test.drop()
         await db.test.insert_one({"a": 1})
@@ -318,16 +311,16 @@ async def test_create_index(self):
         await db.test.drop_indexes()
         self.assertEqual(len(await db.test.index_information()), 1)
db.test.create_index("hello") - self.assertTrue("hello_1" in await db.test.index_information()) + self.assertIn("hello_1", await db.test.index_information()) await db.test.drop_indexes() self.assertEqual(len(await db.test.index_information()), 1) await db.test.create_index([("hello", DESCENDING), ("world", ASCENDING)]) - self.assertTrue("hello_-1_world_1" in await db.test.index_information()) + self.assertIn("hello_-1_world_1", await db.test.index_information()) await db.test.drop_indexes() await db.test.create_index([("hello", DESCENDING), ("world", ASCENDING)], name=None) - self.assertTrue("hello_-1_world_1" in await db.test.index_information()) + self.assertIn("hello_-1_world_1", await db.test.index_information()) await db.test.drop() await db.test.insert_one({"a": 1}) @@ -353,10 +346,13 @@ async def test_drop_index(self): await db.test.drop_index(name) # Drop it again. - with self.assertRaises(OperationFailure): + if async_client_context.version < Version(8, 3, -1): + with self.assertRaises(OperationFailure): + await db.test.drop_index(name) + else: await db.test.drop_index(name) self.assertEqual(len(await db.test.index_information()), 2) - self.assertTrue("hello_1" in await db.test.index_information()) + self.assertIn("hello_1", await db.test.index_information()) await db.test.drop_indexes() await db.test.create_index("hello") @@ -366,7 +362,7 @@ async def test_drop_index(self): self.assertEqual(name, "goodbye_1") await db.test.drop_index([("goodbye", ASCENDING)]) self.assertEqual(len(await db.test.index_information()), 2) - self.assertTrue("hello_1" in await db.test.index_information()) + self.assertIn("hello_1", await db.test.index_information()) with self.write_concern_collection() as coll: await coll.drop_index("hello_1") @@ -402,7 +398,7 @@ def map_indexes(indexes): indexes = await (await db.test.list_indexes()).to_list() self.assertEqual(len(indexes), 1) - self.assertTrue("_id_" in map_indexes(indexes)) + self.assertIn("_id_", map_indexes(indexes)) await db.test.create_index("hello") indexes = await (await db.test.list_indexes()).to_list() @@ -431,7 +427,7 @@ async def test_index_info(self): await db.test.drop() await db.test.insert_one({}) # create collection self.assertEqual(len(await db.test.index_information()), 1) - self.assertTrue("_id_" in await db.test.index_information()) + self.assertIn("_id_", await db.test.index_information()) await db.test.create_index("hello") self.assertEqual(len(await db.test.index_information()), 2) @@ -495,7 +491,7 @@ async def test_index_text(self): await db.test.drop_indexes() self.assertEqual("t_text", await db.test.create_index([("t", TEXT)])) index_info = (await db.test.index_information())["t_text"] - self.assertTrue("weights" in index_info) + self.assertIn("weights", index_info) await db.test.insert_many( [{"t": "spam eggs and spam"}, {"t": "spam"}, {"t": "egg sausage and bacon"}] @@ -507,7 +503,7 @@ async def test_index_text(self): # Sort by 'score' field. 
         cursor.sort([("score", {"$meta": "textScore"})])
         results = await cursor.to_list()
-        self.assertTrue(results[0]["score"] >= results[1]["score"])
+        self.assertGreaterEqual(results[0]["score"], results[1]["score"])

         await db.test.drop_indexes()
@@ -556,7 +552,7 @@ async def test_index_background(self):
         await db.test.create_index([("keya", ASCENDING)])
         await db.test.create_index([("keyb", ASCENDING)], background=False)
         await db.test.create_index([("keyc", ASCENDING)], background=True)
-        self.assertFalse("background" in (await db.test.index_information())["keya_1"])
+        self.assertNotIn("background", (await db.test.index_information())["keya_1"])
         self.assertFalse((await db.test.index_information())["keyb_1"]["background"])
         self.assertTrue((await db.test.index_information())["keyc_1"]["background"])
@@ -709,7 +705,7 @@ async def test_field_selection(self):

         doc = await anext(db.test.find({}, {"_id": False}))
         l = list(doc)
-        self.assertFalse("_id" in l)
+        self.assertNotIn("_id", l)

     async def test_options(self):
         db = self.db
@@ -725,8 +721,8 @@ async def test_insert_one(self):

         document: dict[str, Any] = {"_id": 1000}
         result = await db.test.insert_one(document)
-        self.assertTrue(isinstance(result, InsertOneResult))
-        self.assertTrue(isinstance(result.inserted_id, int))
+        self.assertIsInstance(result, InsertOneResult)
+        self.assertIsInstance(result.inserted_id, int)
         self.assertEqual(document["_id"], result.inserted_id)
         self.assertTrue(result.acknowledged)
         self.assertIsNotNone(await db.test.find_one({"_id": document["_id"]}))

         document = {"foo": "bar"}
         result = await db.test.insert_one(document)
-        self.assertTrue(isinstance(result, InsertOneResult))
-        self.assertTrue(isinstance(result.inserted_id, ObjectId))
+        self.assertIsInstance(result, InsertOneResult)
+        self.assertIsInstance(result.inserted_id, ObjectId)
         self.assertEqual(document["_id"], result.inserted_id)
         self.assertTrue(result.acknowledged)
         self.assertIsNotNone(await db.test.find_one({"_id": document["_id"]}))

         db = db.client.get_database(db.name, write_concern=WriteConcern(w=0))
         result = await db.test.insert_one(document)
-        self.assertTrue(isinstance(result, InsertOneResult))
-        self.assertTrue(isinstance(result.inserted_id, ObjectId))
+        self.assertIsInstance(result, InsertOneResult)
+        self.assertIsInstance(result.inserted_id, ObjectId)
         self.assertEqual(document["_id"], result.inserted_id)
         self.assertFalse(result.acknowledged)
         # The insert failed duplicate key...
@@ -756,7 +752,7 @@ async def async_lambda():

         document = RawBSONDocument(encode({"_id": ObjectId(), "foo": "bar"}))
         result = await db.test.insert_one(document)
-        self.assertTrue(isinstance(result, InsertOneResult))
+        self.assertIsInstance(result, InsertOneResult)
         self.assertEqual(result.inserted_id, None)

     async def test_insert_many(self):
@@ -765,38 +761,38 @@ async def test_insert_many(self):

         docs: list = [{} for _ in range(5)]
         result = await db.test.insert_many(docs)
-        self.assertTrue(isinstance(result, InsertManyResult))
-        self.assertTrue(isinstance(result.inserted_ids, list))
+        self.assertIsInstance(result, InsertManyResult)
+        self.assertIsInstance(result.inserted_ids, list)
         self.assertEqual(5, len(result.inserted_ids))
         for doc in docs:
             _id = doc["_id"]
-            self.assertTrue(isinstance(_id, ObjectId))
-            self.assertTrue(_id in result.inserted_ids)
+            self.assertIsInstance(_id, ObjectId)
+            self.assertIn(_id, result.inserted_ids)
             self.assertEqual(1, await db.test.count_documents({"_id": _id}))
         self.assertTrue(result.acknowledged)

         docs = [{"_id": i} for i in range(5)]
         result = await db.test.insert_many(docs)
-        self.assertTrue(isinstance(result, InsertManyResult))
-        self.assertTrue(isinstance(result.inserted_ids, list))
+        self.assertIsInstance(result, InsertManyResult)
+        self.assertIsInstance(result.inserted_ids, list)
         self.assertEqual(5, len(result.inserted_ids))
         for doc in docs:
             _id = doc["_id"]
-            self.assertTrue(isinstance(_id, int))
-            self.assertTrue(_id in result.inserted_ids)
+            self.assertIsInstance(_id, int)
+            self.assertIn(_id, result.inserted_ids)
             self.assertEqual(1, await db.test.count_documents({"_id": _id}))
         self.assertTrue(result.acknowledged)

         docs = [RawBSONDocument(encode({"_id": i + 5})) for i in range(5)]
         result = await db.test.insert_many(docs)
-        self.assertTrue(isinstance(result, InsertManyResult))
-        self.assertTrue(isinstance(result.inserted_ids, list))
+        self.assertIsInstance(result, InsertManyResult)
+        self.assertIsInstance(result.inserted_ids, list)
         self.assertEqual([], result.inserted_ids)

         db = db.client.get_database(db.name, write_concern=WriteConcern(w=0))
         docs: list = [{} for _ in range(5)]
         result = await db.test.insert_many(docs)
-        self.assertTrue(isinstance(result, InsertManyResult))
+        self.assertIsInstance(result, InsertManyResult)
         self.assertFalse(result.acknowledged)
         self.assertEqual(20, await db.test.count_documents({}))
@@ -837,20 +833,20 @@ async def test_delete_one(self):
         await self.db.test.insert_one({"z": 1})

         result = await self.db.test.delete_one({"x": 1})
-        self.assertTrue(isinstance(result, DeleteResult))
+        self.assertIsInstance(result, DeleteResult)
         self.assertEqual(1, result.deleted_count)
         self.assertTrue(result.acknowledged)
         self.assertEqual(2, await self.db.test.count_documents({}))

         result = await self.db.test.delete_one({"y": 1})
-        self.assertTrue(isinstance(result, DeleteResult))
+        self.assertIsInstance(result, DeleteResult)
         self.assertEqual(1, result.deleted_count)
         self.assertTrue(result.acknowledged)
         self.assertEqual(1, await self.db.test.count_documents({}))

         db = self.db.client.get_database(self.db.name, write_concern=WriteConcern(w=0))
         result = await db.test.delete_one({"z": 1})
-        self.assertTrue(isinstance(result, DeleteResult))
+        self.assertIsInstance(result, DeleteResult)
         self.assertRaises(InvalidOperation, lambda: result.deleted_count)
         self.assertFalse(result.acknowledged)
@@ -868,14 +864,14 @@ async def test_delete_many(self):
         await self.db.test.insert_one({"y": 1})

         result = await self.db.test.delete_many({"x": 1})
-        self.assertTrue(isinstance(result, DeleteResult))
+        self.assertIsInstance(result, DeleteResult)
         self.assertEqual(2, result.deleted_count)
         self.assertTrue(result.acknowledged)
         self.assertEqual(0, await self.db.test.count_documents({"x": 1}))

         db = self.db.client.get_database(self.db.name, write_concern=WriteConcern(w=0))
         result = await db.test.delete_many({"y": 1})
-        self.assertTrue(isinstance(result, DeleteResult))
+        self.assertIsInstance(result, DeleteResult)
         self.assertRaises(InvalidOperation, lambda: result.deleted_count)
         self.assertFalse(result.acknowledged)
@@ -927,10 +923,10 @@ async def test_insert_bypass_document_validation(self):
         with self.assertRaises(OperationFailure):
             await db.test.insert_one({"_id": 1, "x": 100})
         result = await db.test.insert_one({"_id": 1, "x": 100}, bypass_document_validation=True)
-        self.assertTrue(isinstance(result, InsertOneResult))
+        self.assertIsInstance(result, InsertOneResult)
         self.assertEqual(1, result.inserted_id)
         result = await db.test.insert_one({"_id": 2, "a": 0})
-        self.assertTrue(isinstance(result, InsertOneResult))
+        self.assertIsInstance(result, InsertOneResult)
         self.assertEqual(2, result.inserted_id)

         await db_w0.test.insert_one({"y": 1}, bypass_document_validation=True)
@@ -945,22 +941,22 @@ async def async_lambda():
         with self.assertRaises(OperationFailure):
             await db.test.insert_many(docs)
         result = await db.test.insert_many(docs, bypass_document_validation=True)
-        self.assertTrue(isinstance(result, InsertManyResult))
+        self.assertIsInstance(result, InsertManyResult)
         self.assertTrue(97, len(result.inserted_ids))
         for doc in docs:
             _id = doc["_id"]
-            self.assertTrue(isinstance(_id, int))
-            self.assertTrue(_id in result.inserted_ids)
+            self.assertIsInstance(_id, int)
+            self.assertIn(_id, result.inserted_ids)
             self.assertEqual(1, await db.test.count_documents({"x": doc["x"]}))
         self.assertTrue(result.acknowledged)

         docs = [{"_id": i, "a": 200 - i} for i in range(100, 200)]
         result = await db.test.insert_many(docs)
-        self.assertTrue(isinstance(result, InsertManyResult))
+        self.assertIsInstance(result, InsertManyResult)
         self.assertTrue(97, len(result.inserted_ids))
         for doc in docs:
             _id = doc["_id"]
-            self.assertTrue(isinstance(_id, int))
-            self.assertTrue(_id in result.inserted_ids)
+            self.assertIsInstance(_id, int)
+            self.assertIn(_id, result.inserted_ids)
             self.assertEqual(1, await db.test.count_documents({"a": doc["a"]}))
         self.assertTrue(result.acknowledged)
@@ -1138,23 +1134,23 @@ async def test_find_w_fields(self):
         )
         self.assertEqual(1, await db.test.count_documents({}))
         doc = await anext(db.test.find({}))
-        self.assertTrue("x" in doc)
+        self.assertIn("x", doc)
         doc = await anext(db.test.find({}))
-        self.assertTrue("mike" in doc)
+        self.assertIn("mike", doc)
         doc = await anext(db.test.find({}))
-        self.assertTrue("extra thing" in doc)
+        self.assertIn("extra thing", doc)
         doc = await anext(db.test.find({}, ["x", "mike"]))
-        self.assertTrue("x" in doc)
+        self.assertIn("x", doc)
         doc = await anext(db.test.find({}, ["x", "mike"]))
-        self.assertTrue("mike" in doc)
+        self.assertIn("mike", doc)
         doc = await anext(db.test.find({}, ["x", "mike"]))
-        self.assertFalse("extra thing" in doc)
+        self.assertNotIn("extra thing", doc)
         doc = await anext(db.test.find({}, ["mike"]))
-        self.assertFalse("x" in doc)
+        self.assertNotIn("x", doc)
         doc = await anext(db.test.find({}, ["mike"]))
-        self.assertTrue("mike" in doc)
+        self.assertIn("mike", doc)
         doc = await anext(db.test.find({}, ["mike"]))
-        self.assertFalse("extra thing" in doc)
+        self.assertNotIn("extra thing", doc)
     @no_type_check
     async def test_fields_specifier_as_dict(self):
@@ -1165,8 +1161,8 @@ async def test_fields_specifier_as_dict(self):
         self.assertEqual([1, 2, 3], (await db.test.find_one())["x"])
         self.assertEqual([2, 3], (await db.test.find_one(projection={"x": {"$slice": -2}}))["x"])
-        self.assertTrue("x" not in await db.test.find_one(projection={"x": 0}))
-        self.assertTrue("mike" in await db.test.find_one(projection={"x": 0}))
+        self.assertNotIn("x", await db.test.find_one(projection={"x": 0}))
+        self.assertIn("mike", await db.test.find_one(projection={"x": 0}))

     async def test_find_w_regex(self):
         db = self.db
@@ -1189,7 +1185,7 @@ async def test_id_can_be_anything(self):
         await db.test.delete_many({})
         auto_id = {"hello": "world"}
         await db.test.insert_one(auto_id)
-        self.assertTrue(isinstance(auto_id["_id"], ObjectId))
+        self.assertIsInstance(auto_id["_id"], ObjectId)

         numeric = {"_id": 240, "hello": "world"}
         await db.test.insert_one(numeric)
@@ -1201,7 +1197,7 @@ async def test_id_can_be_anything(self):

         async for x in db.test.find():
             self.assertEqual(x["hello"], "world")
-            self.assertTrue("_id" in x)
+            self.assertIn("_id", x)

     async def test_unique_index(self):
         db = self.db
@@ -1321,10 +1317,10 @@ async def test_error_code(self):
         try:
             await self.db.test.update_many({}, {"$thismodifierdoesntexist": 1})
         except OperationFailure as exc:
-            self.assertTrue(exc.code in (9, 10147, 16840, 17009))
+            self.assertIn(exc.code, (9, 10147, 16840, 17009))
             # Just check that we set the error document. Fields
             # vary by MongoDB version.
-            self.assertTrue(exc.details is not None)
+            self.assertIsNotNone(exc.details)
         else:
             self.fail("OperationFailure was not raised")
@@ -1353,9 +1349,9 @@ async def test_replace_one(self):

         id1 = (await db.test.insert_one({"x": 1})).inserted_id
         result = await db.test.replace_one({"x": 1}, {"y": 1})
-        self.assertTrue(isinstance(result, UpdateResult))
+        self.assertIsInstance(result, UpdateResult)
         self.assertEqual(1, result.matched_count)
-        self.assertTrue(result.modified_count in (None, 1))
+        self.assertIn(result.modified_count, (None, 1))
         self.assertIsNone(result.upserted_id)
         self.assertTrue(result.acknowledged)
         self.assertEqual(1, await db.test.count_documents({"y": 1}))
@@ -1364,9 +1360,9 @@ async def test_replace_one(self):

         replacement = RawBSONDocument(encode({"_id": id1, "z": 1}))
         result = await db.test.replace_one({"y": 1}, replacement, True)
-        self.assertTrue(isinstance(result, UpdateResult))
+        self.assertIsInstance(result, UpdateResult)
         self.assertEqual(1, result.matched_count)
-        self.assertTrue(result.modified_count in (None, 1))
+        self.assertIn(result.modified_count, (None, 1))
         self.assertIsNone(result.upserted_id)
         self.assertTrue(result.acknowledged)
         self.assertEqual(1, await db.test.count_documents({"z": 1}))
@@ -1374,16 +1370,16 @@ async def test_replace_one(self):
         self.assertEqual((await db.test.find_one(id1))["z"], 1)  # type: ignore

         result = await db.test.replace_one({"x": 2}, {"y": 2}, True)
-        self.assertTrue(isinstance(result, UpdateResult))
+        self.assertIsInstance(result, UpdateResult)
         self.assertEqual(0, result.matched_count)
-        self.assertTrue(result.modified_count in (None, 0))
-        self.assertTrue(isinstance(result.upserted_id, ObjectId))
+        self.assertIn(result.modified_count, (None, 0))
+        self.assertIsInstance(result.upserted_id, ObjectId)
         self.assertTrue(result.acknowledged)
         self.assertEqual(1, await db.test.count_documents({"y": 2}))

         db = db.client.get_database(db.name, write_concern=WriteConcern(w=0))
         result = await db.test.replace_one({"x": 0}, {"y": 0})
-        self.assertTrue(isinstance(result, UpdateResult))
+        self.assertIsInstance(result, UpdateResult)
         self.assertRaises(InvalidOperation, lambda: result.matched_count)
         self.assertRaises(InvalidOperation, lambda: result.modified_count)
         self.assertRaises(InvalidOperation, lambda: result.upserted_id)
@@ -1398,33 +1394,33 @@ async def test_update_one(self):

         id1 = (await db.test.insert_one({"x": 5})).inserted_id
         result = await db.test.update_one({}, {"$inc": {"x": 1}})
-        self.assertTrue(isinstance(result, UpdateResult))
+        self.assertIsInstance(result, UpdateResult)
         self.assertEqual(1, result.matched_count)
-        self.assertTrue(result.modified_count in (None, 1))
+        self.assertIn(result.modified_count, (None, 1))
         self.assertIsNone(result.upserted_id)
         self.assertTrue(result.acknowledged)
         self.assertEqual((await db.test.find_one(id1))["x"], 6)  # type: ignore

         id2 = (await db.test.insert_one({"x": 1})).inserted_id
         result = await db.test.update_one({"x": 6}, {"$inc": {"x": 1}})
-        self.assertTrue(isinstance(result, UpdateResult))
+        self.assertIsInstance(result, UpdateResult)
         self.assertEqual(1, result.matched_count)
-        self.assertTrue(result.modified_count in (None, 1))
+        self.assertIn(result.modified_count, (None, 1))
         self.assertIsNone(result.upserted_id)
         self.assertTrue(result.acknowledged)
         self.assertEqual((await db.test.find_one(id1))["x"], 7)  # type: ignore
         self.assertEqual((await db.test.find_one(id2))["x"], 1)  # type: ignore

         result = await db.test.update_one({"x": 2}, {"$set": {"y": 1}}, True)
-        self.assertTrue(isinstance(result, UpdateResult))
+        self.assertIsInstance(result, UpdateResult)
         self.assertEqual(0, result.matched_count)
-        self.assertTrue(result.modified_count in (None, 0))
-        self.assertTrue(isinstance(result.upserted_id, ObjectId))
+        self.assertIn(result.modified_count, (None, 0))
+        self.assertIsInstance(result.upserted_id, ObjectId)
         self.assertTrue(result.acknowledged)

         db = db.client.get_database(db.name, write_concern=WriteConcern(w=0))
         result = await db.test.update_one({"x": 0}, {"$inc": {"x": 1}})
-        self.assertTrue(isinstance(result, UpdateResult))
+        self.assertIsInstance(result, UpdateResult)
         self.assertRaises(InvalidOperation, lambda: result.matched_count)
         self.assertRaises(InvalidOperation, lambda: result.modified_count)
         self.assertRaises(InvalidOperation, lambda: result.upserted_id)
@@ -1455,31 +1451,31 @@ async def test_update_many(self):
         await db.test.insert_one({"x": 4, "y": 4})

         result = await db.test.update_many({"x": 4}, {"$set": {"y": 5}})
-        self.assertTrue(isinstance(result, UpdateResult))
+        self.assertIsInstance(result, UpdateResult)
         self.assertEqual(2, result.matched_count)
-        self.assertTrue(result.modified_count in (None, 2))
+        self.assertIn(result.modified_count, (None, 2))
         self.assertIsNone(result.upserted_id)
         self.assertTrue(result.acknowledged)
         self.assertEqual(3, await db.test.count_documents({"y": 5}))

         result = await db.test.update_many({"x": 5}, {"$set": {"y": 6}})
-        self.assertTrue(isinstance(result, UpdateResult))
+        self.assertIsInstance(result, UpdateResult)
         self.assertEqual(1, result.matched_count)
-        self.assertTrue(result.modified_count in (None, 1))
+        self.assertIn(result.modified_count, (None, 1))
         self.assertIsNone(result.upserted_id)
         self.assertTrue(result.acknowledged)
         self.assertEqual(1, await db.test.count_documents({"y": 6}))

         result = await db.test.update_many({"x": 2}, {"$set": {"y": 1}}, True)
-        self.assertTrue(isinstance(result, UpdateResult))
+        self.assertIsInstance(result, UpdateResult)
         self.assertEqual(0, result.matched_count)
-        self.assertTrue(result.modified_count in (None, 0))
-        self.assertTrue(isinstance(result.upserted_id, ObjectId))
+        self.assertIn(result.modified_count, (None, 0))
+        self.assertIsInstance(result.upserted_id, ObjectId)
         self.assertTrue(result.acknowledged)

         db = db.client.get_database(db.name, write_concern=WriteConcern(w=0))
         result = await db.test.update_many({"x": 0}, {"$inc": {"x": 1}})
-        self.assertTrue(isinstance(result, UpdateResult))
+        self.assertIsInstance(result, UpdateResult)
         self.assertRaises(InvalidOperation, lambda: result.matched_count)
         self.assertRaises(InvalidOperation, lambda: result.modified_count)
         self.assertRaises(InvalidOperation, lambda: result.upserted_id)
@@ -1563,7 +1559,7 @@ async def test_aggregate(self):
         pipeline = {"$project": {"_id": False, "foo": True}}
         result = await db.test.aggregate([pipeline])
-        self.assertTrue(isinstance(result, AsyncCommandCursor))
+        self.assertIsInstance(result, AsyncCommandCursor)
         self.assertEqual([{"foo": [1, 2]}], await result.to_list())

         # Test write concern.
@@ -1581,7 +1577,7 @@ async def test_aggregate_raw_bson(self):
         pipeline = {"$project": {"_id": False, "foo": True}}
         coll = db.get_collection("test", codec_options=CodecOptions(document_class=RawBSONDocument))
         result = await coll.aggregate([pipeline])
-        self.assertTrue(isinstance(result, AsyncCommandCursor))
+        self.assertIsInstance(result, AsyncCommandCursor)
         first_result = await anext(result)
         self.assertIsInstance(first_result, RawBSONDocument)
         self.assertEqual([1, 2], list(first_result["foo"]))
@@ -1590,7 +1586,7 @@ async def test_aggregation_cursor_validation(self):
         db = self.db
         projection = {"$project": {"_id": "$_id"}}
         cursor = await db.test.aggregate([projection], cursor={})
-        self.assertTrue(isinstance(cursor, AsyncCommandCursor))
+        self.assertIsInstance(cursor, AsyncCommandCursor)

     async def test_aggregation_cursor(self):
         db = self.db
@@ -1732,21 +1728,21 @@ async def test_find_one(self):
         self.assertEqual(await db.test.find_one({}), await db.test.find_one())
         self.assertEqual(await db.test.find_one({"hello": "world"}), await db.test.find_one())

-        self.assertTrue("hello" in await db.test.find_one(projection=["hello"]))
-        self.assertTrue("hello" not in await db.test.find_one(projection=["foo"]))
+        self.assertIn("hello", await db.test.find_one(projection=["hello"]))
+        self.assertNotIn("hello", await db.test.find_one(projection=["foo"]))

-        self.assertTrue("hello" in await db.test.find_one(projection=("hello",)))
-        self.assertTrue("hello" not in await db.test.find_one(projection=("foo",)))
+        self.assertIn("hello", await db.test.find_one(projection=("hello",)))
+        self.assertNotIn("hello", await db.test.find_one(projection=("foo",)))

-        self.assertTrue("hello" in await db.test.find_one(projection={"hello"}))
-        self.assertTrue("hello" not in await db.test.find_one(projection={"foo"}))
+        self.assertIn("hello", await db.test.find_one(projection={"hello"}))
+        self.assertNotIn("hello", await db.test.find_one(projection={"foo"}))

-        self.assertTrue("hello" in await db.test.find_one(projection=frozenset(["hello"])))
-        self.assertTrue("hello" not in await db.test.find_one(projection=frozenset(["foo"])))
+        self.assertIn("hello", await db.test.find_one(projection=frozenset(["hello"])))
+        self.assertNotIn("hello", await db.test.find_one(projection=frozenset(["foo"])))

         self.assertEqual(["_id"], list(await db.test.find_one(projection={"_id": True})))
-        self.assertTrue("hello" in list(await db.test.find_one(projection={})))
-        self.assertTrue("hello" in list(await db.test.find_one(projection=[])))
+        self.assertIn("hello", list(await db.test.find_one(projection={})))
+        self.assertIn("hello", list(await db.test.find_one(projection=[])))

         self.assertEqual(None, await db.test.find_one({"hello": "foo"}))
         self.assertEqual(None, await db.test.find_one(ObjectId()))
@@ -2215,9 +2211,9 @@ async def test_find_regex(self):
         await c.drop()
         await c.insert_one({"r": re.compile(".*")})

-        self.assertTrue(isinstance((await c.find_one())["r"], Regex))  # type: ignore
+        self.assertIsInstance((await c.find_one())["r"], Regex)  # type: ignore
         async for doc in c.find():
-            self.assertTrue(isinstance(doc["r"], Regex))
+            self.assertIsInstance(doc["r"], Regex)

     def test_find_command_generation(self):
         cmd = _gen_find_command(
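Most of the churn in test_collection.py swaps assertTrue(isinstance(...)) and assertTrue(x in y) for assertIsInstance, assertIn, and friends, which fail with informative messages instead of a bare "False is not true". A small self-contained illustration of the difference (the class name is hypothetical):

import unittest


class AssertStyleDemo(unittest.TestCase):
    def test_rich_assertions(self):
        # On failure, assertIn reports "'x' not found in {...}" while
        # assertTrue("x" in {...}) only reports "False is not true".
        self.assertIn("x", {"x": 1})
        self.assertNotIn("y", {"x": 1})
        self.assertIsInstance(1, int)


if __name__ == "__main__":
    unittest.main()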
self.assertIn("hello", list(await db.test.find_one(projection={}))) + self.assertIn("hello", list(await db.test.find_one(projection=[]))) self.assertEqual(None, await db.test.find_one({"hello": "foo"})) self.assertEqual(None, await db.test.find_one(ObjectId())) @@ -2215,9 +2211,9 @@ async def test_find_regex(self): await c.drop() await c.insert_one({"r": re.compile(".*")}) - self.assertTrue(isinstance((await c.find_one())["r"], Regex)) # type: ignore + self.assertIsInstance((await c.find_one())["r"], Regex) # type: ignore async for doc in c.find(): - self.assertTrue(isinstance(doc["r"], Regex)) + self.assertIsInstance(doc["r"], Regex) def test_find_command_generation(self): cmd = _gen_find_command( diff --git a/test/asynchronous/test_comment.py b/test/asynchronous/test_comment.py index be3626a8b8..2d6d0f5f1e 100644 --- a/test/asynchronous/test_comment.py +++ b/test/asynchronous/test_comment.py @@ -20,9 +20,9 @@ import sys sys.path[0:0] = [""] -from asyncio import iscoroutinefunction +from inspect import iscoroutinefunction from test.asynchronous import AsyncIntegrationTest, async_client_context, unittest -from test.utils import OvertCommandListener +from test.utils_shared import OvertCommandListener from bson.dbref import DBRef from pymongo.asynchronous.command_cursor import AsyncCommandCursor diff --git a/test/asynchronous/test_concurrency.py b/test/asynchronous/test_concurrency.py index 1683b8413b..65ea90c03f 100644 --- a/test/asynchronous/test_concurrency.py +++ b/test/asynchronous/test_concurrency.py @@ -18,7 +18,7 @@ import asyncio import time from test.asynchronous import AsyncIntegrationTest, async_client_context -from test.utils import delay +from test.utils_shared import delay _IS_SYNC = False @@ -50,5 +50,5 @@ async def test_concurrency(self): concurrent_time = time.time() - start percent_faster = (sequential_time - concurrent_time) / concurrent_time * 100 - # We expect the concurrent tasks to be at least 75% faster on all platforms as a conservative benchmark - self.assertGreaterEqual(percent_faster, 75) + # We expect the concurrent tasks to be at least 50% faster on all platforms as a conservative benchmark + self.assertGreaterEqual(percent_faster, 50) diff --git a/test/asynchronous/test_connection_monitoring.py b/test/asynchronous/test_connection_monitoring.py new file mode 100644 index 0000000000..c6dc6f0a69 --- /dev/null +++ b/test/asynchronous/test_connection_monitoring.py @@ -0,0 +1,472 @@ +# Copyright 2019-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Execute Transactions Spec tests.""" +from __future__ import annotations + +import asyncio +import os +import sys +import time +from pathlib import Path +from test.asynchronous.utils import async_get_pool, async_get_pools + +sys.path[0:0] = [""] + +from test.asynchronous import AsyncIntegrationTest, async_client_context, client_knobs, unittest +from test.asynchronous.pymongo_mocks import DummyMonitor +from test.asynchronous.utils_spec_runner import AsyncSpecTestCreator, SpecRunnerTask +from test.utils_shared import ( + CMAPListener, + async_wait_until, + camel_to_snake, +) + +from bson.objectid import ObjectId +from bson.son import SON +from pymongo.asynchronous.pool import PoolState, _PoolClosedError +from pymongo.errors import ( + ConnectionFailure, + OperationFailure, + PyMongoError, + WaitQueueTimeoutError, +) +from pymongo.monitoring import ( + ConnectionCheckedInEvent, + ConnectionCheckedOutEvent, + ConnectionCheckOutFailedEvent, + ConnectionCheckOutFailedReason, + ConnectionCheckOutStartedEvent, + ConnectionClosedEvent, + ConnectionClosedReason, + ConnectionCreatedEvent, + ConnectionReadyEvent, + PoolClearedEvent, + PoolClosedEvent, + PoolCreatedEvent, + PoolReadyEvent, +) +from pymongo.read_preferences import ReadPreference +from pymongo.topology_description import updated_topology_description + +_IS_SYNC = False + +OBJECT_TYPES = { + # Event types. + "ConnectionCheckedIn": ConnectionCheckedInEvent, + "ConnectionCheckedOut": ConnectionCheckedOutEvent, + "ConnectionCheckOutFailed": ConnectionCheckOutFailedEvent, + "ConnectionClosed": ConnectionClosedEvent, + "ConnectionCreated": ConnectionCreatedEvent, + "ConnectionReady": ConnectionReadyEvent, + "ConnectionCheckOutStarted": ConnectionCheckOutStartedEvent, + "ConnectionPoolCreated": PoolCreatedEvent, + "ConnectionPoolReady": PoolReadyEvent, + "ConnectionPoolCleared": PoolClearedEvent, + "ConnectionPoolClosed": PoolClosedEvent, + # Error types. + "PoolClosedError": _PoolClosedError, + "WaitQueueTimeoutError": WaitQueueTimeoutError, +} + + +class AsyncTestCMAP(AsyncIntegrationTest): + # Location of JSON test specifications. + if _IS_SYNC: + TEST_PATH = os.path.join(Path(__file__).resolve().parent, "connection_monitoring") + else: + TEST_PATH = os.path.join(Path(__file__).resolve().parent.parent, "connection_monitoring") + + # Test operations: + + async def start(self, op): + """Run the 'start' thread operation.""" + target = op["target"] + thread = SpecRunnerTask(target) + await thread.start() + self.targets[target] = thread + + async def wait(self, op): + """Run the 'wait' operation.""" + await asyncio.sleep(op["ms"] / 1000.0) + + async def wait_for_thread(self, op): + """Run the 'waitForThread' operation.""" + target = op["target"] + thread = self.targets[target] + await thread.stop() + await thread.join() + if thread.exc: + raise thread.exc + self.assertFalse(thread.ops) + + async def wait_for_event(self, op): + """Run the 'waitForEvent' operation.""" + event = OBJECT_TYPES[op["event"]] + count = op["count"] + timeout = op.get("timeout", 10000) / 1000.0 + await async_wait_until( + lambda: self.listener.event_count(event) >= count, + f"find {count} {event} event(s)", + timeout=timeout, + ) + + async def check_out(self, op): + """Run the 'checkOut' operation.""" + label = op["label"] + async with self.pool.checkout() as conn: + # Call 'pin_cursor' so we can hold the socket. 
+            conn.pin_cursor()
+            if label:
+                self.labels[label] = conn
+            else:
+                self.addAsyncCleanup(conn.close_conn, None)
+
+    async def check_in(self, op):
+        """Run the 'checkIn' operation."""
+        label = op["connection"]
+        conn = self.labels[label]
+        await self.pool.checkin(conn)
+
+    async def ready(self, op):
+        """Run the 'ready' operation."""
+        await self.pool.ready()
+
+    async def clear(self, op):
+        """Run the 'clear' operation."""
+        if "interruptInUseConnections" in op:
+            await self.pool.reset(interrupt_connections=op["interruptInUseConnections"])
+        else:
+            await self.pool.reset()
+
+    async def close(self, op):
+        """Run the 'close' operation."""
+        await self.pool.close()
+
+    async def run_operation(self, op):
+        """Run a single operation in a test."""
+        op_name = camel_to_snake(op["name"])
+        thread = op["thread"]
+        meth = getattr(self, op_name)
+        if thread:
+            await self.targets[thread].schedule(lambda: meth(op))
+        else:
+            await meth(op)
+
+    async def run_operations(self, ops):
+        """Run a test's operations."""
+        for op in ops:
+            self._ops.append(op)
+            await self.run_operation(op)
+
+    def check_object(self, actual, expected):
+        """Assert that the actual object matches the expected object."""
+        self.assertEqual(type(actual), OBJECT_TYPES[expected["type"]])
+        for attr, expected_val in expected.items():
+            if attr == "type":
+                continue
+            c2s = camel_to_snake(attr)
+            if c2s == "interrupt_in_use_connections":
+                c2s = "interrupt_connections"
+            actual_val = getattr(actual, c2s)
+            if expected_val == 42:
+                self.assertIsNotNone(actual_val)
+            else:
+                self.assertEqual(actual_val, expected_val)
+
+    def check_event(self, actual, expected):
+        """Assert that the actual event matches the expected event."""
+        self.check_object(actual, expected)
+
+    def actual_events(self, ignore):
+        """Return all the non-ignored events."""
+        ignore = tuple(OBJECT_TYPES[name] for name in ignore)
+        return [event for event in self.listener.events if not isinstance(event, ignore)]
+
+    def check_events(self, events, ignore):
+        """Check the events of a test."""
+        actual_events = self.actual_events(ignore)
+        for actual, expected in zip(actual_events, events):
+            self.logs.append(f"Checking event actual: {actual!r} vs expected: {expected!r}")
+            self.check_event(actual, expected)
+
+        if len(events) > len(actual_events):
+            self.fail(f"missing events: {events[len(actual_events) :]!r}")
+
+    def check_error(self, actual, expected):
+        message = expected.pop("message")
+        self.check_object(actual, expected)
+        self.assertIn(message, str(actual))
+
+    async def set_fail_point(self, command_args):
+        if not async_client_context.supports_failCommand_fail_point:
+            self.skipTest("failCommand fail point must be supported")
+        await self.configure_fail_point(self.client, command_args)
+
+    async def run_scenario(self, scenario_def, test):
+        """Run a CMAP spec test."""
+        self.logs: list = []
+        self.assertEqual(scenario_def["version"], 1)
+        self.assertIn(scenario_def["style"], ["unit", "integration"])
+        self.listener = CMAPListener()
+        self._ops: list = []
+
+        # Configure the fail point before creating the client.
+        if "failPoint" in test:
+            fp = test["failPoint"]
+            await self.set_fail_point(fp)
+            self.addAsyncCleanup(
+                self.set_fail_point, {"configureFailPoint": fp["configureFailPoint"], "mode": "off"}
+            )
+
+        opts = test["poolOptions"].copy()
+        opts["event_listeners"] = [self.listener]
+        opts["_monitor_class"] = DummyMonitor
+        opts["connect"] = False
+        # Support backgroundThreadIntervalMS, default to 50ms.
+        interval = opts.pop("backgroundThreadIntervalMS", 50)
+        if interval < 0:
+            kill_cursor_frequency = 99999999
+        else:
+            kill_cursor_frequency = interval / 1000.0
+        with client_knobs(kill_cursor_frequency=kill_cursor_frequency, min_heartbeat_interval=0.05):
+            client = await self.async_single_client(**opts)
+            # Update the SD to a known type because the DummyMonitor will not.
+            # Note we cannot simply call topology.on_change because that would
+            # internally call pool.ready() which introduces unexpected
+            # PoolReadyEvents. Instead, update the initial state before
+            # opening the Topology.
+            td = async_client_context.client._topology.description
+            sd = td.server_descriptions()[
+                (await async_client_context.host, await async_client_context.port)
+            ]
+            client._topology._description = updated_topology_description(
+                client._topology._description, sd
+            )
+            # When backgroundThreadIntervalMS is negative we do not start the
+            # background thread to ensure it never runs.
+            if interval < 0:
+                await client._topology.open()
+            else:
+                await client._get_topology()
+        self.pool = list(client._topology._servers.values())[0].pool
+
+        # Map of target names to Thread objects.
+        self.targets: dict = {}
+        # Map of label names to AsyncConnection objects
+        self.labels: dict = {}
+
+        async def cleanup():
+            for t in self.targets.values():
+                await t.stop()
+            for t in self.targets.values():
+                await t.join(5)
+            for conn in self.labels.values():
+                await conn.close_conn(None)
+
+        self.addAsyncCleanup(cleanup)
+
+        try:
+            if test["error"]:
+                with self.assertRaises(PyMongoError) as ctx:
+                    await self.run_operations(test["operations"])
+                self.check_error(ctx.exception, test["error"])
+            else:
+                await self.run_operations(test["operations"])
+
+            self.check_events(test["events"], test["ignore"])
+        except Exception:
+            # Print the events after a test failure.
+            print("\nFailed test: {!r}".format(test["description"]))
+            print("Operations:")
+            for op in self._ops:
+                print(op)
+            print("Threads:")
+            print(self.targets)
+            print("AsyncConnections:")
+            print(self.labels)
+            print("Events:")
+            for event in self.listener.events:
+                print(event)
+            print("Log:")
+            for log in self.logs:
+                print(log)
+            raise
+
+    POOL_OPTIONS = {
+        "maxPoolSize": 50,
+        "minPoolSize": 1,
+        "maxIdleTimeMS": 10000,
+        "waitQueueTimeoutMS": 10000,
+    }
+
+    #
+    # Prose tests. Numbers correspond to the prose test number in the spec.
+    #
+    async def test_1_client_connection_pool_options(self):
+        client = await self.async_rs_or_single_client(**self.POOL_OPTIONS)
+        pool_opts = (await async_get_pool(client)).opts
+        self.assertEqual(pool_opts.non_default_options, self.POOL_OPTIONS)
+
+    async def test_2_all_client_pools_have_same_options(self):
+        client = await self.async_rs_or_single_client(**self.POOL_OPTIONS)
+        await client.admin.command("ping")
+        # Discover at least one secondary.
+ if await async_client_context.has_secondaries: + await client.admin.command("ping", read_preference=ReadPreference.SECONDARY) + pools = await async_get_pools(client) + pool_opts = pools[0].opts + + self.assertEqual(pool_opts.non_default_options, self.POOL_OPTIONS) + for pool in pools[1:]: + self.assertEqual(pool.opts, pool_opts) + + async def test_3_uri_connection_pool_options(self): + opts = "&".join([f"{k}={v}" for k, v in self.POOL_OPTIONS.items()]) + uri = f"mongodb://{await async_client_context.pair}/?{opts}" + client = await self.async_rs_or_single_client(uri) + pool_opts = (await async_get_pool(client)).opts + self.assertEqual(pool_opts.non_default_options, self.POOL_OPTIONS) + + async def test_4_subscribe_to_events(self): + listener = CMAPListener() + client = await self.async_single_client(event_listeners=[listener]) + self.assertEqual(listener.event_count(PoolCreatedEvent), 1) + + # Creates a new connection. + await client.admin.command("ping") + self.assertEqual(listener.event_count(ConnectionCheckOutStartedEvent), 1) + self.assertEqual(listener.event_count(ConnectionCreatedEvent), 1) + self.assertEqual(listener.event_count(ConnectionReadyEvent), 1) + self.assertEqual(listener.event_count(ConnectionCheckedOutEvent), 1) + self.assertEqual(listener.event_count(ConnectionCheckedInEvent), 1) + + # Uses the existing connection. + await client.admin.command("ping") + self.assertEqual(listener.event_count(ConnectionCheckOutStartedEvent), 2) + self.assertEqual(listener.event_count(ConnectionCheckedOutEvent), 2) + self.assertEqual(listener.event_count(ConnectionCheckedInEvent), 2) + + await client.close() + self.assertEqual(listener.event_count(PoolClosedEvent), 1) + self.assertEqual(listener.event_count(ConnectionClosedEvent), 1) + + async def test_5_check_out_fails_connection_error(self): + listener = CMAPListener() + client = await self.async_single_client(event_listeners=[listener]) + pool = await async_get_pool(client) + + def mock_connect(*args, **kwargs): + raise ConnectionFailure("connect failed") + + pool.connect = mock_connect + # Un-patch Pool.connect to break the cyclic reference. + self.addCleanup(delattr, pool, "connect") + + # Attempt to create a new connection. + with self.assertRaisesRegex(ConnectionFailure, "connect failed"): + await client.admin.command("ping") + + self.assertIsInstance(listener.events[0], PoolCreatedEvent) + self.assertIsInstance(listener.events[1], PoolReadyEvent) + self.assertIsInstance(listener.events[2], ConnectionCheckOutStartedEvent) + self.assertIsInstance(listener.events[3], ConnectionCheckOutFailedEvent) + self.assertIsInstance(listener.events[4], PoolClearedEvent) + + failed_event = listener.events[3] + self.assertEqual(failed_event.reason, ConnectionCheckOutFailedReason.CONN_ERROR) + + @async_client_context.require_no_fips + async def test_5_check_out_fails_auth_error(self): + listener = CMAPListener() + client = await self.async_single_client_noauth( + username="notauser", password="fail", event_listeners=[listener] + ) + + # Attempt to create a new connection. + with self.assertRaisesRegex(OperationFailure, "failed"): + await client.admin.command("ping") + + self.assertIsInstance(listener.events[0], PoolCreatedEvent) + self.assertIsInstance(listener.events[1], PoolReadyEvent) + self.assertIsInstance(listener.events[2], ConnectionCheckOutStartedEvent) + self.assertIsInstance(listener.events[3], ConnectionCreatedEvent) + # Error happens here. 
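+        # The connection is created successfully, but authentication fails,
+        # so the new connection is closed and the check-out is recorded as
+        # failed, in that order.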
+ self.assertIsInstance(listener.events[4], ConnectionClosedEvent) + self.assertIsInstance(listener.events[5], ConnectionCheckOutFailedEvent) + self.assertEqual(listener.events[5].reason, ConnectionCheckOutFailedReason.CONN_ERROR) + + # + # Extra non-spec tests + # + def assertRepr(self, obj): + new_obj = eval(repr(obj)) + self.assertEqual(type(new_obj), type(obj)) + self.assertEqual(repr(new_obj), repr(obj)) + + async def test_events_repr(self): + host = ("localhost", 27017) + self.assertRepr(ConnectionCheckedInEvent(host, 1)) + self.assertRepr(ConnectionCheckedOutEvent(host, 1, time.monotonic())) + self.assertRepr( + ConnectionCheckOutFailedEvent( + host, ConnectionCheckOutFailedReason.POOL_CLOSED, time.monotonic() + ) + ) + self.assertRepr(ConnectionClosedEvent(host, 1, ConnectionClosedReason.POOL_CLOSED)) + self.assertRepr(ConnectionCreatedEvent(host, 1)) + self.assertRepr(ConnectionReadyEvent(host, 1, time.monotonic())) + self.assertRepr(ConnectionCheckOutStartedEvent(host)) + self.assertRepr(PoolCreatedEvent(host, {})) + self.assertRepr(PoolClearedEvent(host)) + self.assertRepr(PoolClearedEvent(host, service_id=ObjectId())) + self.assertRepr(PoolClosedEvent(host)) + + async def test_close_leaves_pool_unpaused(self): + listener = CMAPListener() + client = await self.async_single_client(event_listeners=[listener]) + await client.admin.command("ping") + pool = await async_get_pool(client) + await client.close() + self.assertEqual(1, listener.event_count(PoolClosedEvent)) + self.assertEqual(PoolState.CLOSED, pool.state) + # Checking out a connection should fail + with self.assertRaises(_PoolClosedError): + async with pool.checkout(): + pass + + +def create_test(scenario_def, test, name): + async def run_scenario(self): + await self.run_scenario(scenario_def, test) + + return run_scenario + + +class CMAPSpecTestCreator(AsyncSpecTestCreator): + def tests(self, scenario_def): + """Extract the tests from a spec file. + + CMAP tests do not have a 'tests' field. The whole file represents + a single test case. 
+ """ + return [scenario_def] + + +test_creator = CMAPSpecTestCreator(create_test, AsyncTestCMAP, AsyncTestCMAP.TEST_PATH) +test_creator.create_tests() + + +if __name__ == "__main__": + unittest.main() diff --git a/test/asynchronous/test_connections_survive_primary_stepdown_spec.py b/test/asynchronous/test_connections_survive_primary_stepdown_spec.py index 4795d3937a..aed3c1ce7b 100644 --- a/test/asynchronous/test_connections_survive_primary_stepdown_spec.py +++ b/test/asynchronous/test_connections_survive_primary_stepdown_spec.py @@ -16,19 +16,18 @@ from __future__ import annotations import sys +from test.asynchronous.utils import async_ensure_all_connected sys.path[0:0] = [""] from test.asynchronous import ( AsyncIntegrationTest, async_client_context, - reset_client_context, unittest, ) from test.asynchronous.helpers import async_repl_set_step_down -from test.utils import ( +from test.utils_shared import ( CMAPListener, - async_ensure_all_connected, ) from bson import SON @@ -123,18 +122,12 @@ async def run_scenario(self, error_code, retry, pool_status_checker): async def test_not_primary_keep_connection_pool(self): await self.run_scenario(10107, True, self.verify_pool_not_cleared) - @async_client_context.require_version_min(4, 0, 0) - @async_client_context.require_version_max(4, 1, 0, -1) - @async_client_context.require_test_commands - async def test_not_primary_reset_connection_pool(self): - await self.run_scenario(10107, False, self.verify_pool_cleared) - - @async_client_context.require_version_min(4, 0, 0) + @async_client_context.require_version_min(4, 2, 0) @async_client_context.require_test_commands async def test_shutdown_in_progress(self): await self.run_scenario(91, False, self.verify_pool_cleared) - @async_client_context.require_version_min(4, 0, 0) + @async_client_context.require_version_min(4, 2, 0) @async_client_context.require_test_commands async def test_interrupted_at_shutdown(self): await self.run_scenario(11600, False, self.verify_pool_cleared) diff --git a/test/asynchronous/test_crud_unified.py b/test/asynchronous/test_crud_unified.py index e6f42d5bdf..8b1f9b8e38 100644 --- a/test/asynchronous/test_crud_unified.py +++ b/test/asynchronous/test_crud_unified.py @@ -33,7 +33,7 @@ _TEST_PATH = os.path.join(pathlib.Path(__file__).resolve().parent.parent, "crud", "unified") # Generate unified tests. -globals().update(generate_test_classes(_TEST_PATH, module=__name__, RUN_ON_SERVERLESS=True)) +globals().update(generate_test_classes(_TEST_PATH, module=__name__)) if __name__ == "__main__": unittest.main() diff --git a/test/asynchronous/test_csot.py b/test/asynchronous/test_csot.py new file mode 100644 index 0000000000..a978d1ccc0 --- /dev/null +++ b/test/asynchronous/test_csot.py @@ -0,0 +1,116 @@ +# Copyright 2022-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Test the CSOT unified spec tests.""" +from __future__ import annotations + +import os +import sys +from pathlib import Path + +sys.path[0:0] = [""] + +from test.asynchronous import AsyncIntegrationTest, async_client_context, unittest +from test.asynchronous.unified_format import generate_test_classes +from test.asynchronous.utils import flaky + +import pymongo +from pymongo import _csot +from pymongo.errors import PyMongoError + +_IS_SYNC = False + +# Location of JSON test specifications. +if _IS_SYNC: + TEST_PATH = os.path.join(Path(__file__).resolve().parent, "csot") +else: + TEST_PATH = os.path.join(Path(__file__).resolve().parent.parent, "csot") + +# Generate unified tests. +globals().update(generate_test_classes(TEST_PATH, module=__name__)) + + +class TestCSOT(AsyncIntegrationTest): + RUN_ON_LOAD_BALANCER = True + + @flaky(reason="PYTHON-3522") + async def test_timeout_nested(self): + coll = self.db.coll + self.assertEqual(_csot.get_timeout(), None) + self.assertEqual(_csot.get_deadline(), float("inf")) + self.assertEqual(_csot.get_rtt(), 0.0) + with pymongo.timeout(10): + await coll.find_one() + self.assertEqual(_csot.get_timeout(), 10) + deadline_10 = _csot.get_deadline() + + # Capped at the original 10 deadline. + with pymongo.timeout(15): + await coll.find_one() + self.assertEqual(_csot.get_timeout(), 15) + self.assertEqual(_csot.get_deadline(), deadline_10) + + # Should be reset to previous values + self.assertEqual(_csot.get_timeout(), 10) + self.assertEqual(_csot.get_deadline(), deadline_10) + await coll.find_one() + + with pymongo.timeout(5): + await coll.find_one() + self.assertEqual(_csot.get_timeout(), 5) + self.assertLess(_csot.get_deadline(), deadline_10) + + # Should be reset to previous values + self.assertEqual(_csot.get_timeout(), 10) + self.assertEqual(_csot.get_deadline(), deadline_10) + await coll.find_one() + + # Should be reset to previous values + self.assertEqual(_csot.get_timeout(), None) + self.assertEqual(_csot.get_deadline(), float("inf")) + self.assertEqual(_csot.get_rtt(), 0.0) + + @async_client_context.require_change_streams + @flaky(reason="PYTHON-3522") + async def test_change_stream_can_resume_after_timeouts(self): + coll = self.db.test + await coll.insert_one({}) + async with await coll.watch() as stream: + with pymongo.timeout(0.1): + with self.assertRaises(PyMongoError) as ctx: + await stream.next() + self.assertTrue(ctx.exception.timeout) + self.assertTrue(stream.alive) + with self.assertRaises(PyMongoError) as ctx: + await stream.try_next() + self.assertTrue(ctx.exception.timeout) + self.assertTrue(stream.alive) + # Resume before the insert on 3.6 because 4.0 is required to avoid skipping documents + if async_client_context.version < (4, 0): + await stream.try_next() + await coll.insert_one({}) + with pymongo.timeout(10): + self.assertTrue(await stream.next()) + self.assertTrue(stream.alive) + # Timeout applies to entire next() call, not only individual commands. 
+ with pymongo.timeout(0.5): + with self.assertRaises(PyMongoError) as ctx: + await stream.next() + self.assertTrue(ctx.exception.timeout) + self.assertTrue(stream.alive) + self.assertFalse(stream.alive) + + +if __name__ == "__main__": + unittest.main() diff --git a/test/asynchronous/test_cursor.py b/test/asynchronous/test_cursor.py index d843ffb4aa..906f78cc97 100644 --- a/test/asynchronous/test_cursor.py +++ b/test/asynchronous/test_cursor.py @@ -31,7 +31,8 @@ sys.path[0:0] = [""] from test.asynchronous import AsyncIntegrationTest, async_client_context, unittest -from test.utils import ( +from test.asynchronous.utils import flaky +from test.utils_shared import ( AllowListEventListener, EventListener, OvertCommandListener, @@ -42,9 +43,9 @@ from bson import decode_all from bson.code import Code +from bson.raw_bson import RawBSONDocument from pymongo import ASCENDING, DESCENDING from pymongo.asynchronous.cursor import AsyncCursor, CursorType -from pymongo.asynchronous.helpers import anext from pymongo.collation import Collation from pymongo.errors import ExecutionTimeout, InvalidOperation, OperationFailure, PyMongoError from pymongo.operations import _IndexList @@ -174,8 +175,8 @@ async def test_max_time_ms(self): cursor = coll.find().max_time_ms(999) c2 = cursor.clone() self.assertEqual(999, c2._max_time_ms) - self.assertTrue("$maxTimeMS" in cursor._query_spec()) - self.assertTrue("$maxTimeMS" in c2._query_spec()) + self.assertIn("$maxTimeMS", cursor._query_spec()) + self.assertIn("$maxTimeMS", c2._query_spec()) self.assertTrue(await coll.find_one(max_time_ms=1000)) @@ -198,6 +199,21 @@ async def test_max_time_ms(self): finally: await client.admin.command("configureFailPoint", "maxTimeAlwaysTimeOut", mode="off") + async def test_maxtime_ms_message(self): + db = self.db + await db.t.insert_one({"x": 1}) + with self.assertRaises(Exception) as error: + await db.t.find_one({"$where": delay(2)}, max_time_ms=1) + + self.assertIn("(configured timeouts: connectTimeoutMS: 20000.0ms", str(error.exception)) + + client = await self.async_rs_client(document_class=RawBSONDocument) + await client.db.t.insert_one({"x": 1}) + with self.assertRaises(Exception) as error: + await client.db.t.find_one({"$where": delay(2)}, max_time_ms=1) + + self.assertIn("(configured timeouts: connectTimeoutMS: 20000.0ms", str(error.exception)) + async def test_max_await_time_ms(self): db = self.db await db.pymongo_test.drop() @@ -240,19 +256,19 @@ async def test_max_await_time_ms(self): # Tailable_await defaults. await coll.find(cursor_type=CursorType.TAILABLE_AWAIT).to_list() # find - self.assertFalse("maxTimeMS" in listener.started_events[0].command) + self.assertNotIn("maxTimeMS", listener.started_events[0].command) # getMore - self.assertFalse("maxTimeMS" in listener.started_events[1].command) + self.assertNotIn("maxTimeMS", listener.started_events[1].command) listener.reset() # Tailable_await with max_await_time_ms set. 
await coll.find(cursor_type=CursorType.TAILABLE_AWAIT).max_await_time_ms(99).to_list() # find self.assertEqual("find", listener.started_events[0].command_name) - self.assertFalse("maxTimeMS" in listener.started_events[0].command) + self.assertNotIn("maxTimeMS", listener.started_events[0].command) # getMore self.assertEqual("getMore", listener.started_events[1].command_name) - self.assertTrue("maxTimeMS" in listener.started_events[1].command) + self.assertIn("maxTimeMS", listener.started_events[1].command) self.assertEqual(99, listener.started_events[1].command["maxTimeMS"]) listener.reset() @@ -263,11 +279,11 @@ async def test_max_await_time_ms(self): await coll.find(cursor_type=CursorType.TAILABLE_AWAIT).max_time_ms(99).to_list() # find self.assertEqual("find", listener.started_events[0].command_name) - self.assertTrue("maxTimeMS" in listener.started_events[0].command) + self.assertIn("maxTimeMS", listener.started_events[0].command) self.assertEqual(99, listener.started_events[0].command["maxTimeMS"]) # getMore self.assertEqual("getMore", listener.started_events[1].command_name) - self.assertFalse("maxTimeMS" in listener.started_events[1].command) + self.assertNotIn("maxTimeMS", listener.started_events[1].command) listener.reset() # Tailable_await with both max_time_ms and max_await_time_ms @@ -279,11 +295,11 @@ async def test_max_await_time_ms(self): ) # find self.assertEqual("find", listener.started_events[0].command_name) - self.assertTrue("maxTimeMS" in listener.started_events[0].command) + self.assertIn("maxTimeMS", listener.started_events[0].command) self.assertEqual(99, listener.started_events[0].command["maxTimeMS"]) # getMore self.assertEqual("getMore", listener.started_events[1].command_name) - self.assertTrue("maxTimeMS" in listener.started_events[1].command) + self.assertIn("maxTimeMS", listener.started_events[1].command) self.assertEqual(99, listener.started_events[1].command["maxTimeMS"]) listener.reset() @@ -291,31 +307,31 @@ async def test_max_await_time_ms(self): await coll.find(batch_size=1).max_await_time_ms(99).to_list() # find self.assertEqual("find", listener.started_events[0].command_name) - self.assertFalse("maxTimeMS" in listener.started_events[0].command) + self.assertNotIn("maxTimeMS", listener.started_events[0].command) # getMore self.assertEqual("getMore", listener.started_events[1].command_name) - self.assertFalse("maxTimeMS" in listener.started_events[1].command) + self.assertNotIn("maxTimeMS", listener.started_events[1].command) listener.reset() # Non tailable_await with max_time_ms await coll.find(batch_size=1).max_time_ms(99).to_list() # find self.assertEqual("find", listener.started_events[0].command_name) - self.assertTrue("maxTimeMS" in listener.started_events[0].command) + self.assertIn("maxTimeMS", listener.started_events[0].command) self.assertEqual(99, listener.started_events[0].command["maxTimeMS"]) # getMore self.assertEqual("getMore", listener.started_events[1].command_name) - self.assertFalse("maxTimeMS" in listener.started_events[1].command) + self.assertNotIn("maxTimeMS", listener.started_events[1].command) # Non tailable_await with both max_time_ms and max_await_time_ms await coll.find(batch_size=1).max_time_ms(99).max_await_time_ms(88).to_list() # find self.assertEqual("find", listener.started_events[0].command_name) - self.assertTrue("maxTimeMS" in listener.started_events[0].command) + self.assertIn("maxTimeMS", listener.started_events[0].command) self.assertEqual(99, listener.started_events[0].command["maxTimeMS"]) # getMore 
self.assertEqual("getMore", listener.started_events[1].command_name) - self.assertFalse("maxTimeMS" in listener.started_events[1].command) + self.assertNotIn("maxTimeMS", listener.started_events[1].command) @async_client_context.require_test_commands @async_client_context.require_no_mongos @@ -361,6 +377,29 @@ async def test_explain_with_read_concern(self): self.assertEqual(len(started), 1) self.assertNotIn("readConcern", started[0].command) + # https://github.com/mongodb/specifications/blob/master/source/crud/tests/README.md#14-explain-helpers-allow-users-to-specify-maxtimems + async def test_explain_csot(self): + # Create a MongoClient with command monitoring enabled (referred to as client). + listener = AllowListEventListener("explain") + client = await self.async_rs_or_single_client(event_listeners=[listener]) + + # Create a collection, referred to as collection, with the namespace explain-test.collection. + # Workaround for SERVER-108463 + names = await client["explain-test"].list_collection_names() + if "collection" not in names: + collection = await client["explain-test"].create_collection("collection") + else: + collection = client["explain-test"]["collection"] + + # Run an explained find on collection. The find will have the query predicate { name: 'john doe' }. Specify a maxTimeMS value of 2000ms for the explain. + with pymongo.timeout(2.0): + self.assertTrue(await collection.find({"name": "john doe"}).explain()) + + # Obtain the command started event for the explain. Confirm that the top-level explain command should has a maxTimeMS value of 2000. + started = listener.started_events + self.assertEqual(len(started), 1) + assert 1500 < started[0].command["maxTimeMS"] <= 2000 + async def test_hint(self): db = self.db with self.assertRaises(TypeError): @@ -933,16 +972,19 @@ async def test_clone(self): # Shallow copies can so can mutate cursor2 = copy.copy(cursor) cursor2._projection["cursor2"] = False - self.assertTrue(cursor._projection and "cursor2" in cursor._projection) + self.assertIsNotNone(cursor._projection) + self.assertIn("cursor2", cursor._projection.keys()) # Deepcopies and shouldn't mutate cursor3 = copy.deepcopy(cursor) cursor3._projection["cursor3"] = False - self.assertFalse(cursor._projection and "cursor3" in cursor._projection) + self.assertIsNotNone(cursor._projection) + self.assertNotIn("cursor3", cursor._projection.keys()) cursor4 = cursor.clone() cursor4._projection["cursor4"] = False - self.assertFalse(cursor._projection and "cursor4" in cursor._projection) + self.assertIsNotNone(cursor._projection) + self.assertNotIn("cursor4", cursor._projection.keys()) # Test memo when deepcopying queries query = {"hello": "world"} @@ -959,7 +1001,7 @@ async def test_clone(self): cursor = self.db.test.find().hint([("z", 1), ("a", 1)]) cursor2 = copy.deepcopy(cursor) # Internal types are now dict rather than SON by default - self.assertTrue(isinstance(cursor2._hint, dict)) + self.assertIsInstance(cursor2._hint, dict) self.assertEqual(cursor._hint, cursor2._hint) @async_client_context.require_sync @@ -1187,15 +1229,6 @@ async def test_distinct(self): self.assertEqual(["b", "c"], distinct) - @async_client_context.require_version_max(4, 1, 0, -1) - async def test_max_scan(self): - await self.db.drop_collection("test") - await self.db.test.insert_many([{} for _ in range(100)]) - - self.assertEqual(100, len(await self.db.test.find().to_list())) - self.assertEqual(50, len(await self.db.test.find().max_scan(50).to_list())) - self.assertEqual(50, len(await 
self.db.test.find().max_scan(90).max_scan(50).to_list())) - async def test_with_statement(self): await self.db.drop_collection("test") await self.db.test.insert_many([{} for _ in range(100)]) @@ -1401,7 +1434,7 @@ async def test_to_list_empty(self): async def test_to_list_length(self): coll = self.db.test await coll.insert_many([{} for _ in range(5)]) - self.addCleanup(coll.drop) + self.addAsyncCleanup(coll.drop) c = coll.find() docs = await c.to_list(3) self.assertEqual(len(docs), 3) @@ -1412,12 +1445,11 @@ async def test_to_list_length(self): docs = await c.to_list(3) self.assertEqual(len(docs), 2) + @flaky(reason="PYTHON-3522") async def test_to_list_csot_applied(self): - if os.environ.get("SKIP_CSOT_TESTS", ""): - raise unittest.SkipTest("SKIP_CSOT_TESTS is set, skipping...") client = await self.async_single_client(timeoutMS=500, w=1) coll = client.pymongo.test - # Initialize the client with a larger timeout to help make test less flakey + # Initialize the client with a larger timeout to help make test less flaky with pymongo.timeout(10): await coll.insert_many([{} for _ in range(5)]) cursor = coll.find({"$where": delay(1)}) @@ -1455,12 +1487,11 @@ async def test_command_cursor_to_list_length(self): self.assertEqual(len(await result.to_list(1)), 1) @async_client_context.require_failCommand_blockConnection + @flaky(reason="PYTHON-3522") async def test_command_cursor_to_list_csot_applied(self): - if os.environ.get("SKIP_CSOT_TESTS", ""): - raise unittest.SkipTest("SKIP_CSOT_TESTS is set, skipping...") client = await self.async_single_client(timeoutMS=500, w=1) coll = client.pymongo.test - # Initialize the client with a larger timeout to help make test less flakey + # Initialize the client with a larger timeout to help make test less flaky with pymongo.timeout(10): await coll.insert_many([{} for _ in range(5)]) fail_command = { @@ -1597,7 +1628,6 @@ async def test_get_item(self): async def test_collation(self): await anext(self.db.test.find_raw_batches(collation=Collation("en_US"))) - @async_client_context.require_no_mmap # MMAPv1 does not support read concern async def test_read_concern(self): await self.db.get_collection("test", write_concern=WriteConcern(w="majority")).insert_one( {} @@ -1812,6 +1842,7 @@ async def test_monitoring(self): @async_client_context.require_version_min(5, 0, -1) @async_client_context.require_no_mongos + @async_client_context.require_sync async def test_exhaust_cursor_db_set(self): listener = OvertCommandListener() client = await self.async_rs_or_single_client(event_listeners=[listener]) @@ -1821,7 +1852,7 @@ async def test_exhaust_cursor_db_set(self): listener.reset() - result = await c.find({}, cursor_type=pymongo.CursorType.EXHAUST, batch_size=1).to_list() + result = list(await c.find({}, cursor_type=pymongo.CursorType.EXHAUST, batch_size=1)) self.assertEqual(len(result), 3) diff --git a/test/asynchronous/test_custom_types.py b/test/asynchronous/test_custom_types.py new file mode 100644 index 0000000000..f8fa51ba76 --- /dev/null +++ b/test/asynchronous/test_custom_types.py @@ -0,0 +1,976 @@ +# Copyright 2019-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Test support for callbacks to encode/decode custom types.""" +from __future__ import annotations + +import datetime +import sys +import tempfile +from collections import OrderedDict +from decimal import Decimal +from random import random +from typing import Any, Tuple, Type, no_type_check + +from bson.decimal128 import DecimalDecoder, DecimalEncoder +from gridfs.asynchronous.grid_file import AsyncGridIn, AsyncGridOut + +sys.path[0:0] = [""] + +from test.asynchronous import AsyncIntegrationTest, async_client_context, unittest + +from bson import ( + _BUILT_IN_TYPES, + RE_TYPE, + Decimal128, + _bson_to_dict, + _dict_to_bson, + decode, + decode_all, + decode_file_iter, + decode_iter, + encode, +) +from bson.codec_options import ( + CodecOptions, + TypeCodec, + TypeDecoder, + TypeEncoder, + TypeRegistry, +) +from bson.errors import InvalidDocument +from bson.int64 import Int64 +from bson.raw_bson import RawBSONDocument +from pymongo.asynchronous.collection import ReturnDocument +from pymongo.errors import DuplicateKeyError +from pymongo.message import _CursorAddress + +_IS_SYNC = False + + +DECIMAL_CODECOPTS = CodecOptions(type_registry=TypeRegistry([DecimalEncoder(), DecimalDecoder()])) + + +class UndecipherableInt64Type: + def __init__(self, value): + self.value = value + + def __eq__(self, other): + if isinstance(other, type(self)): + return self.value == other.value + # Does not compare equal to integers. 
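+        # Returning False for any other type ensures the assertions below
+        # fail if a value round-trips as a plain int instead of this class.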
+ return False + + +class UndecipherableIntDecoder(TypeDecoder): + bson_type = Int64 + + def transform_bson(self, value): + return UndecipherableInt64Type(value) + + +class UndecipherableIntEncoder(TypeEncoder): + python_type = UndecipherableInt64Type + + def transform_python(self, value): + return Int64(value.value) + + +UNINT_DECODER_CODECOPTS = CodecOptions( + type_registry=TypeRegistry( + [ + UndecipherableIntDecoder(), + ] + ) +) + + +UNINT_CODECOPTS = CodecOptions( + type_registry=TypeRegistry([UndecipherableIntDecoder(), UndecipherableIntEncoder()]) +) + + +class UppercaseTextDecoder(TypeDecoder): + bson_type = str + + def transform_bson(self, value): + return value.upper() + + +UPPERSTR_DECODER_CODECOPTS = CodecOptions( + type_registry=TypeRegistry( + [ + UppercaseTextDecoder(), + ] + ) +) + + +def type_obfuscating_decoder_factory(rt_type): + class ResumeTokenToNanDecoder(TypeDecoder): + bson_type = rt_type + + def transform_bson(self, value): + return "NaN" + + return ResumeTokenToNanDecoder + + +class CustomBSONTypeTests: + @no_type_check + def roundtrip(self, doc): + bsonbytes = encode(doc, codec_options=self.codecopts) + rt_document = decode(bsonbytes, codec_options=self.codecopts) + self.assertEqual(doc, rt_document) + + def test_encode_decode_roundtrip(self): + self.roundtrip({"average": Decimal("56.47")}) + self.roundtrip({"average": {"b": Decimal("56.47")}}) + self.roundtrip({"average": [Decimal("56.47")]}) + self.roundtrip({"average": [[Decimal("56.47")]]}) + self.roundtrip({"average": [{"b": Decimal("56.47")}]}) + + @no_type_check + def test_decode_all(self): + documents = [] + for dec in range(3): + documents.append({"average": Decimal(f"56.4{dec}")}) + + bsonstream = b"" + for doc in documents: + bsonstream += encode(doc, codec_options=self.codecopts) + + self.assertEqual(decode_all(bsonstream, self.codecopts), documents) + + @no_type_check + def test__bson_to_dict(self): + document = {"average": Decimal("56.47")} + rawbytes = encode(document, codec_options=self.codecopts) + decoded_document = _bson_to_dict(rawbytes, self.codecopts) + self.assertEqual(document, decoded_document) + + @no_type_check + def test__dict_to_bson(self): + document = {"average": Decimal("56.47")} + rawbytes = encode(document, codec_options=self.codecopts) + encoded_document = _dict_to_bson(document, False, self.codecopts) + self.assertEqual(encoded_document, rawbytes) + + def _generate_multidocument_bson_stream(self): + inp_num = [str(random() * 100)[:4] for _ in range(10)] + docs = [{"n": Decimal128(dec)} for dec in inp_num] + edocs = [{"n": Decimal(dec)} for dec in inp_num] + bsonstream = b"" + for doc in docs: + bsonstream += encode(doc) + return edocs, bsonstream + + @no_type_check + def test_decode_iter(self): + expected, bson_data = self._generate_multidocument_bson_stream() + for expected_doc, decoded_doc in zip(expected, decode_iter(bson_data, self.codecopts)): + self.assertEqual(expected_doc, decoded_doc) + + @no_type_check + def test_decode_file_iter(self): + expected, bson_data = self._generate_multidocument_bson_stream() + fileobj = tempfile.TemporaryFile() + fileobj.write(bson_data) + fileobj.seek(0) + + for expected_doc, decoded_doc in zip(expected, decode_file_iter(fileobj, self.codecopts)): + self.assertEqual(expected_doc, decoded_doc) + + fileobj.close() + + +class TestCustomPythonBSONTypeToBSONMonolithicCodec(CustomBSONTypeTests, unittest.TestCase): + @classmethod + def setUpClass(cls): + cls.codecopts = DECIMAL_CODECOPTS + + +class 
TestCustomPythonBSONTypeToBSONMultiplexedCodec(CustomBSONTypeTests, unittest.TestCase): + @classmethod + def setUpClass(cls): + codec_options = CodecOptions( + type_registry=TypeRegistry((DecimalEncoder(), DecimalDecoder())) + ) + cls.codecopts = codec_options + + +class TestBSONFallbackEncoder(unittest.TestCase): + def _get_codec_options(self, fallback_encoder): + type_registry = TypeRegistry(fallback_encoder=fallback_encoder) + return CodecOptions(type_registry=type_registry) + + def test_simple(self): + codecopts = self._get_codec_options(lambda x: Decimal128(x)) + document = {"average": Decimal("56.47")} + bsonbytes = encode(document, codec_options=codecopts) + + exp_document = {"average": Decimal128("56.47")} + exp_bsonbytes = encode(exp_document) + self.assertEqual(bsonbytes, exp_bsonbytes) + + def test_erroring_fallback_encoder(self): + codecopts = self._get_codec_options(lambda _: 1 / 0) + + # fallback converter should not be invoked when encoding known types. + encode( + {"a": 1, "b": Decimal128("1.01"), "c": {"arr": ["abc", 3.678]}}, codec_options=codecopts + ) + + # expect an error when encoding a custom type. + document = {"average": Decimal("56.47")} + with self.assertRaises(ZeroDivisionError): + encode(document, codec_options=codecopts) + + def test_noop_fallback_encoder(self): + codecopts = self._get_codec_options(lambda x: x) + document = {"average": Decimal("56.47")} + with self.assertRaises(InvalidDocument): + encode(document, codec_options=codecopts) + + def test_type_unencodable_by_fallback_encoder(self): + def fallback_encoder(value): + try: + return Decimal128(value) + except: + raise TypeError("cannot encode type %s" % (type(value))) + + codecopts = self._get_codec_options(fallback_encoder) + document = {"average": Decimal} + with self.assertRaises(TypeError): + encode(document, codec_options=codecopts) + + def test_call_only_once_for_not_handled_big_integers(self): + called_with = [] + + def fallback_encoder(value): + called_with.append(value) + return value + + codecopts = self._get_codec_options(fallback_encoder) + document = {"a": {"b": {"c": 2 << 65}}} + + msg = "MongoDB can only handle up to 8-byte ints" + with self.assertRaises(OverflowError, msg=msg): + encode(document, codec_options=codecopts) + + self.assertEqual(called_with, [2 << 65]) + + +class TestBSONTypeEnDeCodecs(unittest.TestCase): + def test_instantiation(self): + msg = "Can't instantiate abstract class" + + def run_test(base, attrs, fail): + codec = type("testcodec", (base,), attrs) + if fail: + with self.assertRaisesRegex(TypeError, msg): + codec() + else: + codec() + + class MyType: + pass + + run_test( + TypeEncoder, + { + "python_type": MyType, + }, + fail=True, + ) + run_test(TypeEncoder, {"transform_python": lambda s, x: x}, fail=True) + run_test( + TypeEncoder, {"transform_python": lambda s, x: x, "python_type": MyType}, fail=False + ) + + run_test( + TypeDecoder, + { + "bson_type": Decimal128, + }, + fail=True, + ) + run_test(TypeDecoder, {"transform_bson": lambda s, x: x}, fail=True) + run_test( + TypeDecoder, {"transform_bson": lambda s, x: x, "bson_type": Decimal128}, fail=False + ) + + run_test(TypeCodec, {"bson_type": Decimal128, "python_type": MyType}, fail=True) + run_test( + TypeCodec, + {"transform_bson": lambda s, x: x, "transform_python": lambda s, x: x}, + fail=True, + ) + run_test( + TypeCodec, + { + "python_type": MyType, + "transform_python": lambda s, x: x, + "transform_bson": lambda s, x: x, + "bson_type": Decimal128, + }, + fail=False, + ) + + def test_type_checks(self): 
+ self.assertTrue(issubclass(TypeCodec, TypeEncoder)) + self.assertTrue(issubclass(TypeCodec, TypeDecoder)) + self.assertFalse(issubclass(TypeDecoder, TypeEncoder)) + self.assertFalse(issubclass(TypeEncoder, TypeDecoder)) + + +class TestBSONCustomTypeEncoderAndFallbackEncoderTandem(unittest.TestCase): + TypeA: Any + TypeB: Any + fallback_encoder_A2B: Any + fallback_encoder_A2BSON: Any + B2BSON: Type[TypeEncoder] + B2A: Type[TypeEncoder] + A2B: Type[TypeEncoder] + + @classmethod + def setUpClass(cls): + class TypeA: + def __init__(self, x): + self.value = x + + class TypeB: + def __init__(self, x): + self.value = x + + # transforms A, and only A into B + def fallback_encoder_A2B(value): + assert isinstance(value, TypeA) + return TypeB(value.value) + + # transforms A, and only A into something encodable + def fallback_encoder_A2BSON(value): + assert isinstance(value, TypeA) + return value.value + + # transforms B into something encodable + class B2BSON(TypeEncoder): + python_type = TypeB + + def transform_python(self, value): + return value.value + + # transforms A into B + # technically, this isn't a proper type encoder as the output is not + # BSON-encodable. + class A2B(TypeEncoder): + python_type = TypeA + + def transform_python(self, value): + return TypeB(value.value) + + # transforms B into A + # technically, this isn't a proper type encoder as the output is not + # BSON-encodable. + class B2A(TypeEncoder): + python_type = TypeB + + def transform_python(self, value): + return TypeA(value.value) + + cls.TypeA = TypeA + cls.TypeB = TypeB + cls.fallback_encoder_A2B = staticmethod(fallback_encoder_A2B) + cls.fallback_encoder_A2BSON = staticmethod(fallback_encoder_A2BSON) + cls.B2BSON = B2BSON + cls.B2A = B2A + cls.A2B = A2B + + def test_encode_fallback_then_custom(self): + codecopts = CodecOptions( + type_registry=TypeRegistry([self.B2BSON()], fallback_encoder=self.fallback_encoder_A2B) + ) + testdoc = {"x": self.TypeA(123)} + expected_bytes = encode({"x": 123}) + + self.assertEqual(encode(testdoc, codec_options=codecopts), expected_bytes) + + def test_encode_custom_then_fallback(self): + codecopts = CodecOptions( + type_registry=TypeRegistry([self.B2A()], fallback_encoder=self.fallback_encoder_A2BSON) + ) + testdoc = {"x": self.TypeB(123)} + expected_bytes = encode({"x": 123}) + + self.assertEqual(encode(testdoc, codec_options=codecopts), expected_bytes) + + def test_chaining_encoders_fails(self): + codecopts = CodecOptions(type_registry=TypeRegistry([self.A2B(), self.B2BSON()])) + + with self.assertRaises(InvalidDocument): + encode({"x": self.TypeA(123)}, codec_options=codecopts) + + def test_infinite_loop_exceeds_max_recursion_depth(self): + codecopts = CodecOptions( + type_registry=TypeRegistry([self.B2A()], fallback_encoder=self.fallback_encoder_A2B) + ) + + # Raises max recursion depth exceeded error + with self.assertRaises(RuntimeError): + encode({"x": self.TypeA(100)}, codec_options=codecopts) + + +class TestTypeRegistry(unittest.TestCase): + types: Tuple[object, object] + codecs: Tuple[Type[TypeCodec], Type[TypeCodec]] + fallback_encoder: Any + + @classmethod + def setUpClass(cls): + class MyIntType: + def __init__(self, x): + assert isinstance(x, int) + self.x = x + + class MyStrType: + def __init__(self, x): + assert isinstance(x, str) + self.x = x + + class MyIntCodec(TypeCodec): + @property + def python_type(self): + return MyIntType + + @property + def bson_type(self): + return int + + def transform_python(self, value): + return value.x + + def transform_bson(self, value): 
+ return MyIntType(value) + + class MyStrCodec(TypeCodec): + @property + def python_type(self): + return MyStrType + + @property + def bson_type(self): + return str + + def transform_python(self, value): + return value.x + + def transform_bson(self, value): + return MyStrType(value) + + def fallback_encoder(value): + return value + + cls.types = (MyIntType, MyStrType) + cls.codecs = (MyIntCodec, MyStrCodec) + cls.fallback_encoder = fallback_encoder + + def test_simple(self): + codec_instances = [codec() for codec in self.codecs] + + def assert_proper_initialization(type_registry, codec_instances): + self.assertEqual( + type_registry._encoder_map, + { + self.types[0]: codec_instances[0].transform_python, + self.types[1]: codec_instances[1].transform_python, + }, + ) + self.assertEqual( + type_registry._decoder_map, + {int: codec_instances[0].transform_bson, str: codec_instances[1].transform_bson}, + ) + self.assertEqual(type_registry._fallback_encoder, self.fallback_encoder) + + type_registry = TypeRegistry(codec_instances, self.fallback_encoder) + assert_proper_initialization(type_registry, codec_instances) + + type_registry = TypeRegistry( + fallback_encoder=self.fallback_encoder, type_codecs=codec_instances + ) + assert_proper_initialization(type_registry, codec_instances) + + # Ensure codec list held by the type registry doesn't change if we + # mutate the initial list. + codec_instances_copy = list(codec_instances) + codec_instances.pop(0) + self.assertListEqual(type_registry._TypeRegistry__type_codecs, codec_instances_copy) + + def test_simple_separate_codecs(self): + class MyIntEncoder(TypeEncoder): + python_type = self.types[0] + + def transform_python(self, value): + return value.x + + class MyIntDecoder(TypeDecoder): + bson_type = int + + def transform_bson(self, value): + return self.types[0](value) + + codec_instances: list = [MyIntDecoder(), MyIntEncoder()] + type_registry = TypeRegistry(codec_instances) + + self.assertEqual( + type_registry._encoder_map, + {MyIntEncoder.python_type: codec_instances[1].transform_python}, + ) + self.assertEqual( + type_registry._decoder_map, + {MyIntDecoder.bson_type: codec_instances[0].transform_bson}, + ) + + def test_initialize_fail(self): + err_msg = "Expected an instance of TypeEncoder, TypeDecoder, or TypeCodec, got .* instead" + with self.assertRaisesRegex(TypeError, err_msg): + TypeRegistry(self.codecs) # type: ignore[arg-type] + + with self.assertRaisesRegex(TypeError, err_msg): + TypeRegistry([type("AnyType", (object,), {})()]) + + err_msg = f"fallback_encoder {True!r} is not a callable" + with self.assertRaisesRegex(TypeError, err_msg): + TypeRegistry([], True) # type: ignore[arg-type] + + err_msg = "fallback_encoder {!r} is not a callable".format("hello") + with self.assertRaisesRegex(TypeError, err_msg): + TypeRegistry(fallback_encoder="hello") # type: ignore[arg-type] + + def test_type_registry_codecs(self): + codec_instances = [codec() for codec in self.codecs] + type_registry = TypeRegistry(codec_instances) + self.assertEqual(type_registry.codecs, codec_instances) + + def test_type_registry_fallback(self): + type_registry = TypeRegistry(fallback_encoder=self.fallback_encoder) + self.assertEqual(type_registry.fallback_encoder, self.fallback_encoder) + + def test_type_registry_repr(self): + codec_instances = [codec() for codec in self.codecs] + type_registry = TypeRegistry(codec_instances) + r = f"TypeRegistry(type_codecs={codec_instances!r}, fallback_encoder={None!r})" + self.assertEqual(r, repr(type_registry)) + + def 
test_type_registry_eq(self): + codec_instances = [codec() for codec in self.codecs] + self.assertEqual(TypeRegistry(codec_instances), TypeRegistry(codec_instances)) + + codec_instances_2 = [codec() for codec in self.codecs] + self.assertNotEqual(TypeRegistry(codec_instances), TypeRegistry(codec_instances_2)) + + def test_builtin_types_override_fails(self): + def run_test(base, attrs): + msg = ( + r"TypeEncoders cannot change how built-in types " + r"are encoded \(encoder .* transforms type .*\)" + ) + for pytype in _BUILT_IN_TYPES: + attrs.update({"python_type": pytype, "transform_python": lambda x: x}) + codec = type("testcodec", (base,), attrs) + codec_instance = codec() + with self.assertRaisesRegex(TypeError, msg): + TypeRegistry( + [ + codec_instance, + ] + ) + + # Test only some subtypes as not all can be subclassed. + if pytype in [ + bool, + type(None), + RE_TYPE, + ]: + continue + + class MyType(pytype): # type: ignore + pass + + attrs.update({"python_type": MyType, "transform_python": lambda x: x}) + codec = type("testcodec", (base,), attrs) + codec_instance = codec() + with self.assertRaisesRegex(TypeError, msg): + TypeRegistry( + [ + codec_instance, + ] + ) + + run_test(TypeEncoder, {}) + run_test(TypeCodec, {"bson_type": Decimal128, "transform_bson": lambda x: x}) + + +class TestCollectionWCustomType(AsyncIntegrationTest): + async def asyncSetUp(self): + await super().asyncSetUp() + await self.db.test.drop() + + async def asyncTearDown(self): + await self.db.test.drop() + + async def test_overflow_int_w_custom_decoder(self): + type_registry = TypeRegistry(fallback_encoder=lambda val: str(val)) + codec_options = CodecOptions(type_registry=type_registry) + collection = self.db.get_collection("test", codec_options=codec_options) + + await collection.insert_one({"_id": 1, "data": 2**520}) + ret = await collection.find_one() + self.assertEqual(ret["data"], str(2**520)) + + async def test_command_errors_w_custom_type_decoder(self): + db = self.db + test_doc = {"_id": 1, "data": "a"} + test = db.get_collection("test", codec_options=UNINT_DECODER_CODECOPTS) + + result = await test.insert_one(test_doc) + self.assertEqual(result.inserted_id, test_doc["_id"]) + with self.assertRaises(DuplicateKeyError): + await test.insert_one(test_doc) + + async def test_find_w_custom_type_decoder(self): + db = self.db + input_docs = [{"x": Int64(k)} for k in [1, 2, 3]] + for doc in input_docs: + await db.test.insert_one(doc) + + test = db.get_collection("test", codec_options=UNINT_DECODER_CODECOPTS) + async for doc in test.find({}, batch_size=1): + self.assertIsInstance(doc["x"], UndecipherableInt64Type) + + async def test_find_w_custom_type_decoder_and_document_class(self): + async def run_test(doc_cls): + db = self.db + input_docs = [{"x": Int64(k)} for k in [1, 2, 3]] + for doc in input_docs: + await db.test.insert_one(doc) + + test = db.get_collection( + "test", + codec_options=CodecOptions( + type_registry=TypeRegistry([UndecipherableIntDecoder()]), document_class=doc_cls + ), + ) + async for doc in test.find({}, batch_size=1): + self.assertIsInstance(doc, doc_cls) + self.assertIsInstance(doc["x"], UndecipherableInt64Type) + + for doc_cls in [RawBSONDocument, OrderedDict]: + await run_test(doc_cls) + + async def test_aggregate_w_custom_type_decoder(self): + db = self.db + await db.test.insert_many( + [ + {"status": "in progress", "qty": Int64(1)}, + {"status": "complete", "qty": Int64(10)}, + {"status": "in progress", "qty": Int64(1)}, + {"status": "complete", "qty": Int64(10)}, + {"status": "in 
progress", "qty": Int64(1)}, + ] + ) + test = db.get_collection("test", codec_options=UNINT_DECODER_CODECOPTS) + + pipeline: list = [ + {"$match": {"status": "complete"}}, + {"$group": {"_id": "$status", "total_qty": {"$sum": "$qty"}}}, + ] + result = await test.aggregate(pipeline) + + res = (await result.to_list())[0] + self.assertEqual(res["_id"], "complete") + self.assertIsInstance(res["total_qty"], UndecipherableInt64Type) + self.assertEqual(res["total_qty"].value, 20) + + async def test_distinct_w_custom_type(self): + await self.db.drop_collection("test") + + test = self.db.get_collection("test", codec_options=UNINT_CODECOPTS) + values = [ + UndecipherableInt64Type(1), + UndecipherableInt64Type(2), + UndecipherableInt64Type(3), + {"b": UndecipherableInt64Type(3)}, + ] + await test.insert_many({"a": val} for val in values) + + self.assertEqual(values, await test.distinct("a")) + + async def test_find_one_and__w_custom_type_decoder(self): + db = self.db + c = db.get_collection("test", codec_options=UNINT_DECODER_CODECOPTS) + await c.insert_one({"_id": 1, "x": Int64(1)}) + + doc = await c.find_one_and_update( + {"_id": 1}, {"$inc": {"x": 1}}, return_document=ReturnDocument.AFTER + ) + self.assertEqual(doc["_id"], 1) + self.assertIsInstance(doc["x"], UndecipherableInt64Type) + self.assertEqual(doc["x"].value, 2) + + doc = await c.find_one_and_replace( + {"_id": 1}, {"x": Int64(3), "y": True}, return_document=ReturnDocument.AFTER + ) + self.assertEqual(doc["_id"], 1) + self.assertIsInstance(doc["x"], UndecipherableInt64Type) + self.assertEqual(doc["x"].value, 3) + self.assertEqual(doc["y"], True) + + doc = await c.find_one_and_delete({"y": True}) + self.assertEqual(doc["_id"], 1) + self.assertIsInstance(doc["x"], UndecipherableInt64Type) + self.assertEqual(doc["x"].value, 3) + self.assertIsNone(await c.find_one()) + + +class TestGridFileCustomType(AsyncIntegrationTest): + async def asyncSetUp(self): + await super().asyncSetUp() + await self.db.drop_collection("fs.files") + await self.db.drop_collection("fs.chunks") + + async def test_grid_out_custom_opts(self): + db = self.db.with_options(codec_options=UPPERSTR_DECODER_CODECOPTS) + one = AsyncGridIn( + db.fs, + _id=5, + filename="my_file", + chunkSize=1000, + metadata={"foo": "red", "bar": "blue"}, + bar=3, + baz="hello", + ) + await one.write(b"hello world") + await one.close() + + two = AsyncGridOut(db.fs, 5) + await two.open() + + self.assertEqual("my_file", two.name) + self.assertEqual("my_file", two.filename) + self.assertEqual(5, two._id) + self.assertEqual(11, two.length) + self.assertEqual(1000, two.chunk_size) + self.assertIsInstance(two.upload_date, datetime.datetime) + self.assertEqual({"foo": "red", "bar": "blue"}, two.metadata) + self.assertEqual(3, two.bar) + + for attr in [ + "_id", + "name", + "content_type", + "length", + "chunk_size", + "upload_date", + "aliases", + "metadata", + "md5", + ]: + self.assertRaises(AttributeError, setattr, two, attr, 5) + + +class ChangeStreamsWCustomTypesTestMixin: + @no_type_check + async def change_stream(self, *args, **kwargs): + stream = await self.watched_target.watch(*args, max_await_time_ms=1, **kwargs) + self.addAsyncCleanup(stream.close) + return stream + + @no_type_check + async def insert_and_check(self, change_stream, insert_doc, expected_doc): + await self.input_target.insert_one(insert_doc) + change = await anext(change_stream) + self.assertEqual(change["fullDocument"], expected_doc) + + @no_type_check + async def kill_change_stream_cursor(self, change_stream): + # Cause a 
cursor not found error on the next getMore. + cursor = change_stream._cursor + address = _CursorAddress(cursor.address, cursor._ns) + client = self.input_target.database.client + await client._close_cursor_now(cursor.cursor_id, address) + + @no_type_check + async def test_simple(self): + codecopts = CodecOptions( + type_registry=TypeRegistry([UndecipherableIntEncoder(), UppercaseTextDecoder()]) + ) + await self.create_targets(codec_options=codecopts) + + input_docs = [ + {"_id": UndecipherableInt64Type(1), "data": "hello"}, + {"_id": 2, "data": "world"}, + {"_id": UndecipherableInt64Type(3), "data": "!"}, + ] + expected_docs = [ + {"_id": 1, "data": "HELLO"}, + {"_id": 2, "data": "WORLD"}, + {"_id": 3, "data": "!"}, + ] + + change_stream = await self.change_stream() + + await self.insert_and_check(change_stream, input_docs[0], expected_docs[0]) + await self.kill_change_stream_cursor(change_stream) + await self.insert_and_check(change_stream, input_docs[1], expected_docs[1]) + await self.kill_change_stream_cursor(change_stream) + await self.insert_and_check(change_stream, input_docs[2], expected_docs[2]) + + @no_type_check + async def test_custom_type_in_pipeline(self): + codecopts = CodecOptions( + type_registry=TypeRegistry([UndecipherableIntEncoder(), UppercaseTextDecoder()]) + ) + await self.create_targets(codec_options=codecopts) + + input_docs = [ + {"_id": UndecipherableInt64Type(1), "data": "hello"}, + {"_id": 2, "data": "world"}, + {"_id": UndecipherableInt64Type(3), "data": "!"}, + ] + expected_docs = [{"_id": 2, "data": "WORLD"}, {"_id": 3, "data": "!"}] + + # UndecipherableInt64Type should be encoded with the TypeRegistry. + change_stream = await self.change_stream( + [{"$match": {"documentKey._id": {"$gte": UndecipherableInt64Type(2)}}}] + ) + + await self.input_target.insert_one(input_docs[0]) + await self.insert_and_check(change_stream, input_docs[1], expected_docs[0]) + await self.kill_change_stream_cursor(change_stream) + await self.insert_and_check(change_stream, input_docs[2], expected_docs[1]) + + @no_type_check + async def test_break_resume_token(self): + # Get one document from a change stream to determine resumeToken type. + await self.create_targets() + change_stream = await self.change_stream() + await self.input_target.insert_one({"data": "test"}) + change = await anext(change_stream) + resume_token_decoder = type_obfuscating_decoder_factory(type(change["_id"]["_data"])) + + # Custom-decoding the resumeToken type breaks resume tokens. + codecopts = CodecOptions( + type_registry=TypeRegistry([resume_token_decoder(), UndecipherableIntEncoder()]) + ) + + # Re-create targets, change stream and proceed. 
+ await self.create_targets(codec_options=codecopts) + + docs = [{"_id": 1}, {"_id": 2}, {"_id": 3}] + + change_stream = await self.change_stream() + await self.insert_and_check(change_stream, docs[0], docs[0]) + await self.kill_change_stream_cursor(change_stream) + await self.insert_and_check(change_stream, docs[1], docs[1]) + await self.kill_change_stream_cursor(change_stream) + await self.insert_and_check(change_stream, docs[2], docs[2]) + + @no_type_check + async def test_document_class(self): + async def run_test(doc_cls): + codecopts = CodecOptions( + type_registry=TypeRegistry([UppercaseTextDecoder(), UndecipherableIntEncoder()]), + document_class=doc_cls, + ) + + await self.create_targets(codec_options=codecopts) + change_stream = await self.change_stream() + + doc = {"a": UndecipherableInt64Type(101), "b": "xyz"} + await self.input_target.insert_one(doc) + change = await anext(change_stream) + + self.assertIsInstance(change, doc_cls) + self.assertEqual(change["fullDocument"]["a"], 101) + self.assertEqual(change["fullDocument"]["b"], "XYZ") + + for doc_cls in [OrderedDict, RawBSONDocument]: + await run_test(doc_cls) + + +class TestCollectionChangeStreamsWCustomTypes( + AsyncIntegrationTest, ChangeStreamsWCustomTypesTestMixin +): + @async_client_context.require_change_streams + async def asyncSetUp(self): + await super().asyncSetUp() + await self.db.test.delete_many({}) + + async def asyncTearDown(self): + await self.input_target.drop() + + async def create_targets(self, *args, **kwargs): + self.watched_target = self.db.get_collection("test", *args, **kwargs) + self.input_target = self.watched_target + # Ensure the collection exists and is empty. + await self.input_target.insert_one({}) + await self.input_target.delete_many({}) + + +class TestDatabaseChangeStreamsWCustomTypes( + AsyncIntegrationTest, ChangeStreamsWCustomTypesTestMixin +): + @async_client_context.require_version_min(4, 2, 0) + @async_client_context.require_change_streams + async def asyncSetUp(self): + await super().asyncSetUp() + await self.db.test.delete_many({}) + + async def asyncTearDown(self): + await self.input_target.drop() + await self.client.drop_database(self.watched_target) + + async def create_targets(self, *args, **kwargs): + self.watched_target = self.client.get_database(self.db.name, *args, **kwargs) + self.input_target = self.watched_target.test + # Insert a record to ensure db, coll are created. + await self.input_target.insert_one({"data": "dummy"}) + + +class TestClusterChangeStreamsWCustomTypes( + AsyncIntegrationTest, ChangeStreamsWCustomTypesTestMixin +): + @async_client_context.require_version_min(4, 2, 0) + @async_client_context.require_change_streams + async def asyncSetUp(self): + await super().asyncSetUp() + await self.db.test.delete_many({}) + + async def asyncTearDown(self): + await self.input_target.drop() + await self.client.drop_database(self.db) + + async def create_targets(self, *args, **kwargs): + codec_options = kwargs.pop("codec_options", None) + if codec_options: + kwargs["type_registry"] = codec_options.type_registry + kwargs["document_class"] = codec_options.document_class + self.watched_target = await self.async_rs_client(*args, **kwargs) + self.input_target = self.watched_target[self.db.name].test + # Insert a record to ensure db, coll are created. 
+ await self.input_target.insert_one({"data": "dummy"}) + + +if __name__ == "__main__": + unittest.main() diff --git a/test/asynchronous/test_database.py b/test/asynchronous/test_database.py index b5a5960420..b49183a852 100644 --- a/test/asynchronous/test_database.py +++ b/test/asynchronous/test_database.py @@ -26,7 +26,7 @@ from test import unittest from test.asynchronous import AsyncIntegrationTest, async_client_context from test.test_custom_types import DECIMAL_CODECOPTS -from test.utils import ( +from test.utils_shared import ( IMPOSSIBLE_WRITE_CONCERN, OvertCommandListener, async_wait_until, @@ -42,7 +42,6 @@ from pymongo.asynchronous import auth from pymongo.asynchronous.collection import AsyncCollection from pymongo.asynchronous.database import AsyncDatabase -from pymongo.asynchronous.helpers import anext from pymongo.asynchronous.mongo_client import AsyncMongoClient from pymongo.errors import ( CollectionInvalid, @@ -91,7 +90,7 @@ def test_get_collection(self): def test_getattr(self): db = self.client.pymongo_test - self.assertTrue(isinstance(db["_does_not_exist"], AsyncCollection)) + self.assertIsInstance(db["_does_not_exist"], AsyncCollection) with self.assertRaises(AttributeError) as context: db._does_not_exist @@ -103,10 +102,7 @@ def test_getattr(self): def test_iteration(self): db = self.client.pymongo_test - if "PyPy" in sys.version and sys.version_info < (3, 8, 15): - msg = "'NoneType' object is not callable" - else: - msg = "'AsyncDatabase' object is not iterable" + msg = "'AsyncDatabase' object is not iterable" # Iteration fails with self.assertRaisesRegex(TypeError, msg): for _ in db: # type: ignore[misc] # error: "None" not callable [misc] @@ -166,13 +162,13 @@ async def test_create_collection(self): await db.create_collection("coll..ection") # type: ignore[arg-type] test = await db.create_collection("test") - self.assertTrue("test" in await db.list_collection_names()) + self.assertIn("test", await db.list_collection_names()) await test.insert_one({"hello": "world"}) self.assertEqual((await db.test.find_one())["hello"], "world") await db.drop_collection("test.foo") await db.create_collection("test.foo") - self.assertTrue("test.foo" in await db.list_collection_names()) + self.assertIn("test.foo", await db.list_collection_names()) with self.assertRaises(CollectionInvalid): await db.create_collection("test.foo") @@ -182,17 +178,17 @@ async def test_list_collection_names(self): await db.test.mike.insert_one({"dummy": "object"}) colls = await db.list_collection_names() - self.assertTrue("test" in colls) - self.assertTrue("test.mike" in colls) + self.assertIn("test", colls) + self.assertIn("test.mike", colls) for coll in colls: - self.assertTrue("$" not in coll) + self.assertNotIn("$", coll) await db.systemcoll.test.insert_one({}) no_system_collections = await db.list_collection_names( filter={"name": {"$regex": r"^(?!system\.)"}} ) for coll in no_system_collections: - self.assertTrue(not coll.startswith("system.")) + self.assertFalse(coll.startswith("system.")) self.assertIn("systemcoll.test", no_system_collections) # Force more than one batch. 
@@ -242,7 +238,7 @@ async def test_check_exists(self): listener.reset() await db.drop_collection("unique") await db.create_collection("unique", check_exists=False) - self.assertTrue(len(listener.started_events) > 0) + self.assertGreater(len(listener.started_events), 0) self.assertNotIn("listCollections", listener.started_command_names()) async def test_list_collections(self): @@ -255,12 +251,12 @@ async def test_list_collections(self): colls = [result["name"] async for result in results] # All the collections present. - self.assertTrue("test" in colls) - self.assertTrue("test.mike" in colls) + self.assertIn("test", colls) + self.assertIn("test.mike", colls) # No collection containing a '$'. for coll in colls: - self.assertTrue("$" not in coll) + self.assertNotIn("$", coll) # Duplicate check. coll_cnt: dict = {} @@ -268,19 +264,13 @@ async def test_list_collections(self): try: # Found duplicate. coll_cnt[coll] += 1 - self.assertTrue(False) + self.fail("Found duplicate") except KeyError: coll_cnt[coll] = 1 coll_cnt: dict = {} - # Checking if is there any collection which don't exists. - if ( - len(set(colls) - {"test", "test.mike"}) == 0 - or len(set(colls) - {"test", "test.mike", "system.indexes"}) == 0 - ): - self.assertTrue(True) - else: - self.assertTrue(False) + # Check if there are any collections which don't exist. + self.assertLessEqual(set(colls), {"test", "test.mike", "system.indexes"}) colls = await (await db.list_collections(filter={"name": {"$regex": "^test$"}})).to_list() self.assertEqual(1, len(colls)) @@ -297,12 +287,12 @@ async def test_list_collections(self): colls = [result["name"] async for result in results] # Checking only capped collections are present - self.assertTrue("test" in colls) - self.assertFalse("test.mike" in colls) + self.assertIn("test", colls) + self.assertNotIn("test.mike", colls) # No collection containing a '$'. for coll in colls: - self.assertTrue("$" not in coll) + self.assertNotIn("$", coll) # Duplicate check. coll_cnt = {} @@ -310,16 +300,13 @@ async def test_list_collections(self): try: # Found duplicate. coll_cnt[coll] += 1 - self.assertTrue(False) + self.fail("Found duplicate") except KeyError: coll_cnt[coll] = 1 coll_cnt = {} - # Checking if is there any collection which don't exists. - if len(set(colls) - {"test"}) == 0 or len(set(colls) - {"test", "system.indexes"}) == 0: - self.assertTrue(True) - else: - self.assertTrue(False) + # Check if there are any collections which don't exist. 
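+ # For sets, assertLessEqual asserts the subset relation (set.__le__),
+ # so this fails exactly when an unexpected collection name is present.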
+ self.assertLessEqual(set(colls), {"test", "system.indexes"}) await self.client.drop_database("pymongo_test") @@ -342,24 +329,24 @@ async def test_drop_collection(self): await db.drop_collection(None) # type: ignore[arg-type] await db.test.insert_one({"dummy": "object"}) - self.assertTrue("test" in await db.list_collection_names()) + self.assertIn("test", await db.list_collection_names()) await db.drop_collection("test") - self.assertFalse("test" in await db.list_collection_names()) + self.assertNotIn("test", await db.list_collection_names()) await db.test.insert_one({"dummy": "object"}) - self.assertTrue("test" in await db.list_collection_names()) + self.assertIn("test", await db.list_collection_names()) await db.drop_collection("test") - self.assertFalse("test" in await db.list_collection_names()) + self.assertNotIn("test", await db.list_collection_names()) await db.test.insert_one({"dummy": "object"}) - self.assertTrue("test" in await db.list_collection_names()) + self.assertIn("test", await db.list_collection_names()) await db.drop_collection(db.test) - self.assertFalse("test" in await db.list_collection_names()) + self.assertNotIn("test", await db.list_collection_names()) await db.test.insert_one({"dummy": "object"}) - self.assertTrue("test" in await db.list_collection_names()) + self.assertIn("test", await db.list_collection_names()) await db.test.drop() - self.assertFalse("test" in await db.list_collection_names()) + self.assertNotIn("test", await db.list_collection_names()) await db.test.drop() await db.drop_collection(db.test.doesnotexist) @@ -431,7 +418,22 @@ async def test_command_with_regex(self): result = await db.command("aggregate", "test", pipeline=[], cursor={}) for doc in result["cursor"]["firstBatch"]: - self.assertTrue(isinstance(doc["r"], Regex)) + self.assertIsInstance(doc["r"], Regex) + + async def test_command_bulkWrite(self): + # Ensure bulk write commands can be run directly via db.command(). 
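+ # The server-side bulkWrite command (new in MongoDB 8.0) runs against
+ # the admin database; each entry in "ops" names its target namespace by
+ # index into the "nsInfo" array, so {"insert": 0, ...} targets nsInfo[0].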
+ if async_client_context.version.at_least(8, 0): + await self.client.admin.command( + { + "bulkWrite": 1, + "nsInfo": [{"ns": self.db.test.full_name}], + "ops": [{"insert": 0, "document": {}}], + } + ) + await self.db.command({"insert": "test", "documents": [{}]}) + await self.db.command({"update": "test", "updates": [{"q": {}, "u": {"$set": {"x": 1}}}]}) + await self.db.command({"delete": "test", "deletes": [{"q": {}, "limit": 1}]}) + await self.db.test.drop() async def test_cursor_command(self): db = self.client.pymongo_test @@ -460,7 +462,7 @@ def test_password_digest(self): with self.assertRaises(TypeError): auth._password_digest(None) # type: ignore[arg-type, call-arg] - self.assertTrue(isinstance(auth._password_digest("mike", "password"), str)) + self.assertIsInstance(auth._password_digest("mike", "password"), str) self.assertEqual( auth._password_digest("mike", "password"), "cd7e45b3b2767dc2fa9b6b548457ed00" ) @@ -531,7 +533,7 @@ async def test_insert_find_one(self): a_doc = SON({"hello": "world"}) a_key = (await db.test.insert_one(a_doc)).inserted_id - self.assertTrue(isinstance(a_doc["_id"], ObjectId)) + self.assertIsInstance(a_doc["_id"], ObjectId) self.assertEqual(a_doc["_id"], a_key) self.assertEqual(a_doc, await db.test.find_one({"_id": a_doc["_id"]})) self.assertEqual(a_doc, await db.test.find_one(a_key)) diff --git a/test/asynchronous/test_discovery_and_monitoring.py b/test/asynchronous/test_discovery_and_monitoring.py new file mode 100644 index 0000000000..5820d00c48 --- /dev/null +++ b/test/asynchronous/test_discovery_and_monitoring.py @@ -0,0 +1,587 @@ +# Copyright 2014-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
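+# These tests are generated from the SDAM JSON spec files: each scenario
+# phase feeds hello responses and application errors into a Topology built
+# around DummyMonitor, and the resulting TopologyDescription is checked
+# against the expected outcome (see create_test/create_tests below).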
+ +"""Test the topology module.""" +from __future__ import annotations + +import asyncio +import os +import socketserver +import sys +import threading +import time +from asyncio import StreamReader, StreamWriter +from pathlib import Path +from test.asynchronous.helpers import ConcurrentRunner +from test.asynchronous.utils import flaky + +from pymongo.asynchronous.pool import AsyncConnection +from pymongo.operations import _Op +from pymongo.server_selectors import writable_server_selector + +sys.path[0:0] = [""] + +from test.asynchronous import ( + AsyncIntegrationTest, + AsyncPyMongoTestCase, + AsyncUnitTest, + async_client_context, + unittest, +) +from test.asynchronous.pymongo_mocks import DummyMonitor +from test.asynchronous.unified_format import generate_test_classes +from test.asynchronous.utils import ( + async_get_pool, +) +from test.utils_shared import ( + CMAPListener, + HeartbeatEventListener, + HeartbeatEventsListListener, + assertion_context, + async_barrier_wait, + async_create_barrier, + async_wait_until, + server_name_to_type, +) +from unittest.mock import patch + +from bson import Timestamp, json_util +from pymongo import common, monitoring +from pymongo.asynchronous.settings import TopologySettings +from pymongo.asynchronous.topology import Topology, _ErrorContext +from pymongo.asynchronous.uri_parser import parse_uri +from pymongo.errors import ( + AutoReconnect, + ConfigurationError, + NetworkTimeout, + NotPrimaryError, + OperationFailure, +) +from pymongo.hello import Hello, HelloCompat +from pymongo.helpers_shared import _check_command_response, _check_write_command_response +from pymongo.monitoring import ServerHeartbeatFailedEvent, ServerHeartbeatStartedEvent +from pymongo.server_description import SERVER_TYPE, ServerDescription +from pymongo.topology_description import TOPOLOGY_TYPE + +_IS_SYNC = False + +# Location of JSON test specifications. +if _IS_SYNC: + SDAM_PATH = os.path.join(Path(__file__).resolve().parent, "discovery_and_monitoring") +else: + SDAM_PATH = os.path.join( + Path(__file__).resolve().parent.parent, + "discovery_and_monitoring", + ) + + +async def create_mock_topology(uri, monitor_class=DummyMonitor): + parsed_uri = await parse_uri(uri) + replica_set_name = None + direct_connection = None + load_balanced = None + if "replicaSet" in parsed_uri["options"]: + replica_set_name = parsed_uri["options"]["replicaSet"] + if "directConnection" in parsed_uri["options"]: + direct_connection = parsed_uri["options"]["directConnection"] + if "loadBalanced" in parsed_uri["options"]: + load_balanced = parsed_uri["options"]["loadBalanced"] + + topology_settings = TopologySettings( + parsed_uri["nodelist"], + replica_set_name=replica_set_name, + monitor_class=monitor_class, + direct_connection=direct_connection, + load_balanced=load_balanced, + ) + + c = Topology(topology_settings) + await c.open() + return c + + +async def got_hello(topology, server_address, hello_response): + server_description = ServerDescription(server_address, Hello(hello_response), 0) + await topology.on_change(server_description) + + +async def got_app_error(topology, app_error): + server_address = common.partition_node(app_error["address"]) + server = topology.get_server_by_address(server_address) + error_type = app_error["type"] + generation = app_error.get("generation", server.pool.gen.get_overall()) + when = app_error["when"] + max_wire_version = app_error["maxWireVersion"] + # XXX: We could get better test coverage by mocking the errors on the + # Pool/AsyncConnection. 
+ try: + if error_type == "command": + _check_command_response(app_error["response"], max_wire_version) + _check_write_command_response(app_error["response"]) + elif error_type == "network": + raise AutoReconnect("mock non-timeout network error") + elif error_type == "timeout": + raise NetworkTimeout("mock network timeout error") + else: + raise AssertionError(f"unknown error type: {error_type}") + raise AssertionError + except (AutoReconnect, NotPrimaryError, OperationFailure) as e: + if when == "beforeHandshakeCompletes": + completed_handshake = False + elif when == "afterHandshakeCompletes": + completed_handshake = True + else: + raise AssertionError(f"Unknown when field {when}") + + await topology.handle_error( + server_address, + _ErrorContext(e, max_wire_version, generation, completed_handshake, None), + ) + + +def get_type(topology, hostname): + description = topology.get_server_by_address((hostname, 27017)).description + return description.server_type + + +class TestAllScenarios(AsyncUnitTest): + pass + + +def topology_type_name(topology_type): + return TOPOLOGY_TYPE._fields[topology_type] + + +def server_type_name(server_type): + return SERVER_TYPE._fields[server_type] + + +def check_outcome(self, topology, outcome): + expected_servers = outcome["servers"] + + # Check weak equality before proceeding. + self.assertEqual(len(topology.description.server_descriptions()), len(expected_servers)) + + if outcome.get("compatible") is False: + with self.assertRaises(ConfigurationError): + topology.description.check_compatible() + else: + # No error. + topology.description.check_compatible() + + # Since lengths are equal, every actual server must have a corresponding + # expected server. + for expected_server_address, expected_server in expected_servers.items(): + node = common.partition_node(expected_server_address) + self.assertTrue(topology.has_server(node)) + actual_server = topology.get_server_by_address(node) + actual_server_description = actual_server.description + expected_server_type = server_name_to_type(expected_server["type"]) + + self.assertEqual( + server_type_name(expected_server_type), + server_type_name(actual_server_description.server_type), + ) + expected_error = expected_server.get("error") + if expected_error: + self.assertIn(expected_error, str(actual_server_description.error)) + + self.assertEqual(expected_server.get("setName"), actual_server_description.replica_set_name) + + self.assertEqual(expected_server.get("setVersion"), actual_server_description.set_version) + + self.assertEqual(expected_server.get("electionId"), actual_server_description.election_id) + + self.assertEqual( + expected_server.get("topologyVersion"), actual_server_description.topology_version + ) + + expected_pool = expected_server.get("pool") + if expected_pool: + self.assertEqual(expected_pool.get("generation"), actual_server.pool.gen.get_overall()) + + self.assertEqual(outcome["setName"], topology.description.replica_set_name) + self.assertEqual( + outcome.get("logicalSessionTimeoutMinutes"), + topology.description.logical_session_timeout_minutes, + ) + + expected_topology_type = getattr(TOPOLOGY_TYPE, outcome["topologyType"]) + self.assertEqual( + topology_type_name(expected_topology_type), + topology_type_name(topology.description.topology_type), + ) + + self.assertEqual(outcome.get("maxSetVersion"), topology.description.max_set_version) + self.assertEqual(outcome.get("maxElectionId"), topology.description.max_election_id) + + +def create_test(scenario_def): + async def run_scenario(self): + c 
= await create_mock_topology(scenario_def["uri"]) + + for i, phase in enumerate(scenario_def["phases"]): + # Including the phase description makes failures easier to debug. + description = phase.get("description", str(i)) + with assertion_context(f"phase: {description}"): + for response in phase.get("responses", []): + await got_hello(c, common.partition_node(response[0]), response[1]) + + for app_error in phase.get("applicationErrors", []): + await got_app_error(c, app_error) + + check_outcome(self, c, phase["outcome"]) + + return run_scenario + + +def create_tests(): + for dirpath, _, filenames in os.walk(SDAM_PATH): + dirname = os.path.split(dirpath)[-1] + # SDAM unified tests are handled separately. + if dirname == "unified": + continue + + for filename in filenames: + if os.path.splitext(filename)[1] != ".json": + continue + with open(os.path.join(dirpath, filename)) as scenario_stream: + scenario_def = json_util.loads(scenario_stream.read()) + + # Construct test from scenario. + new_test = create_test(scenario_def) + test_name = f"test_{dirname}_{os.path.splitext(filename)[0]}" + + new_test.__name__ = test_name + setattr(TestAllScenarios, new_test.__name__, new_test) + + +create_tests() + + +class TestClusterTimeComparison(AsyncPyMongoTestCase): + async def test_cluster_time_comparison(self): + t = await create_mock_topology("mongodb://host") + + async def send_cluster_time(time, inc): + old = t.max_cluster_time() + new = {"clusterTime": Timestamp(time, inc)} + await got_hello( + t, + ("host", 27017), + { + "ok": 1, + "minWireVersion": 0, + "maxWireVersion": common.MIN_SUPPORTED_WIRE_VERSION, + "$clusterTime": new, + }, + ) + + actual = t.max_cluster_time() + # We never update $clusterTime from monitoring connections. + self.assertEqual(actual, old) + + await send_cluster_time(0, 1) + await send_cluster_time(2, 2) + await send_cluster_time(2, 1) + await send_cluster_time(1, 3) + await send_cluster_time(2, 3) + + +class TestIgnoreStaleErrors(AsyncIntegrationTest): + async def test_ignore_stale_connection_errors(self): + if not _IS_SYNC and sys.version_info < (3, 11): + self.skipTest("Test requires asyncio.Barrier (added in Python 3.11)") + N_TASKS = 5 + barrier = async_create_barrier(N_TASKS) + client = await self.async_rs_or_single_client(minPoolSize=N_TASKS) + + # Wait for initial discovery. + await client.admin.command("ping") + pool = await async_get_pool(client) + starting_generation = pool.gen.get_overall() + await async_wait_until(lambda: len(pool.conns) == N_TASKS, "created conns") + + async def mock_command(*args, **kwargs): + # Synchronize all tasks to ensure they use the same generation. + await async_barrier_wait(barrier, timeout=30) + raise AutoReconnect("mock AsyncConnection.command error") + + for conn in pool.conns: + conn.command = mock_command + + async def insert_command(i): + try: + await client.test.command("insert", "test", documents=[{"i": i}]) + except AutoReconnect: + pass + + tasks = [] + for i in range(N_TASKS): + tasks.append(ConcurrentRunner(target=insert_command, args=(i,))) + for t in tasks: + await t.start() + for t in tasks: + await t.join() + + # Expect a single pool reset for the network error + self.assertEqual(starting_generation + 1, pool.gen.get_overall()) + + # Server should be selectable. 
+ await client.admin.command("ping") + + +class CMAPHeartbeatListener(HeartbeatEventListener, CMAPListener): + pass + + +class TestPoolManagement(AsyncIntegrationTest): + @async_client_context.require_failCommand_appName + async def test_pool_unpause(self): + # This test implements the prose test "AsyncConnection Pool Management" + listener = CMAPHeartbeatListener() + _ = await self.async_single_client( + appName="SDAMPoolManagementTest", heartbeatFrequencyMS=500, event_listeners=[listener] + ) + # Assert that AsyncConnectionPoolReadyEvent occurs after the first + # ServerHeartbeatSucceededEvent. + await listener.async_wait_for_event(monitoring.PoolReadyEvent, 1) + pool_ready = listener.events_by_type(monitoring.PoolReadyEvent)[0] + hb_succeeded = listener.events_by_type(monitoring.ServerHeartbeatSucceededEvent)[0] + self.assertGreater(listener.events.index(pool_ready), listener.events.index(hb_succeeded)) + + listener.reset() + fail_hello = { + "mode": {"times": 2}, + "data": { + "failCommands": [HelloCompat.LEGACY_CMD, "hello"], + "errorCode": 1234, + "appName": "SDAMPoolManagementTest", + }, + } + async with self.fail_point(fail_hello): + await listener.async_wait_for_event(monitoring.ServerHeartbeatFailedEvent, 1) + await listener.async_wait_for_event(monitoring.PoolClearedEvent, 1) + await listener.async_wait_for_event(monitoring.ServerHeartbeatSucceededEvent, 1) + await listener.async_wait_for_event(monitoring.PoolReadyEvent, 1) + + @async_client_context.require_failCommand_appName + @async_client_context.require_test_commands + @async_client_context.require_async + @flaky(reason="PYTHON-5428") + async def test_connection_close_does_not_block_other_operations(self): + listener = CMAPHeartbeatListener() + client = await self.async_single_client( + appName="SDAMConnectionCloseTest", + event_listeners=[listener], + heartbeatFrequencyMS=500, + minPoolSize=10, + ) + server = await (await client._get_topology()).select_server( + writable_server_selector, _Op.TEST + ) + await async_wait_until( + lambda: len(server._pool.conns) == 10, + "pool initialized with 10 connections", + ) + + await client.db.test.insert_one({"x": 1}) + close_delay = 0.1 + latencies = [] + should_exit = [] + + async def run_task(): + while True: + start_time = time.monotonic() + await client.db.test.find_one({}) + elapsed = time.monotonic() - start_time + latencies.append(elapsed) + if should_exit: + break + await asyncio.sleep(0.001) + + task = ConcurrentRunner(target=run_task) + await task.start() + original_close = AsyncConnection.close_conn + try: + # Artificially delay the close operation to simulate a slow close + async def mock_close(self, reason): + await asyncio.sleep(close_delay) + await original_close(self, reason) + + AsyncConnection.close_conn = mock_close + + fail_hello = { + "mode": {"times": 4}, + "data": { + "failCommands": [HelloCompat.LEGACY_CMD, "hello"], + "errorCode": 91, + "appName": "SDAMConnectionCloseTest", + }, + } + async with self.fail_point(fail_hello): + # Wait for server heartbeat to fail + await listener.async_wait_for_event(monitoring.ServerHeartbeatFailedEvent, 1) + # Wait until all idle connections are closed to simulate real-world conditions + await listener.async_wait_for_event(monitoring.ConnectionClosedEvent, 10) + # Wait for one more find to complete after the pool has been reset, then shutdown the task + n = len(latencies) + await async_wait_until(lambda: len(latencies) >= n + 1, "run one more find") + should_exit.append(True) + await task.join() + # No operation latency 
should significantly exceed close_delay + self.assertLessEqual(max(latencies), close_delay * 5.0) + finally: + AsyncConnection.close_conn = original_close + + +class TestServerMonitoringMode(AsyncIntegrationTest): + @async_client_context.require_no_load_balancer + async def asyncSetUp(self): + await super().asyncSetUp() + + async def test_rtt_connection_is_enabled_stream(self): + client = await self.async_rs_or_single_client(serverMonitoringMode="stream") + await client.admin.command("ping") + + def predicate(): + for _, server in client._topology._servers.items(): + monitor = server._monitor + if not monitor._stream: + return False + if async_client_context.version >= (4, 4): + if _IS_SYNC: + if monitor._rtt_monitor._executor._thread is None: + return False + else: + if monitor._rtt_monitor._executor._task is None: + return False + else: + if _IS_SYNC: + if monitor._rtt_monitor._executor._thread is not None: + return False + else: + if monitor._rtt_monitor._executor._task is not None: + return False + return True + + await async_wait_until(predicate, "find all RTT monitors") + + async def test_rtt_connection_is_disabled_poll(self): + client = await self.async_rs_or_single_client(serverMonitoringMode="poll") + + await self.assert_rtt_connection_is_disabled(client) + + async def test_rtt_connection_is_disabled_auto(self): + envs = [ + {"AWS_EXECUTION_ENV": "AWS_Lambda_python3.10"}, + {"FUNCTIONS_WORKER_RUNTIME": "python"}, + {"K_SERVICE": "gcpservicename"}, + {"FUNCTION_NAME": "gcpfunctionname"}, + {"VERCEL": "1"}, + ] + for env in envs: + with patch.dict("os.environ", env): + client = await self.async_rs_or_single_client(serverMonitoringMode="auto") + await self.assert_rtt_connection_is_disabled(client) + + async def assert_rtt_connection_is_disabled(self, client): + await client.admin.command("ping") + for _, server in client._topology._servers.items(): + monitor = server._monitor + self.assertFalse(monitor._stream) + if _IS_SYNC: + self.assertIsNone(monitor._rtt_monitor._executor._thread) + else: + self.assertIsNone(monitor._rtt_monitor._executor._task) + + +class MockTCPHandler(socketserver.BaseRequestHandler): + def handle(self): + self.server.events.append("client connected") + if self.request.recv(1024).strip(): + self.server.events.append("client hello received") + self.request.close() + + +class TCPServer(socketserver.TCPServer): + allow_reuse_address = True + + def handle_request_and_shutdown(self): + self.handle_request() + self.server_close() + + +class TestHeartbeatStartOrdering(AsyncPyMongoTestCase): + async def test_heartbeat_start_ordering(self): + events = [] + listener = HeartbeatEventsListListener(events) + + if _IS_SYNC: + server = TCPServer(("localhost", 9999), MockTCPHandler) + server.events = events + server_thread = ConcurrentRunner(target=server.handle_request_and_shutdown) + await server_thread.start() + _c = await self.simple_client( + "mongodb://localhost:9999", + serverSelectionTimeoutMS=500, + event_listeners=(listener,), + ) + await server_thread.join() + listener.wait_for_event(ServerHeartbeatStartedEvent, 1) + listener.wait_for_event(ServerHeartbeatFailedEvent, 1) + + else: + + async def handle_client(reader: StreamReader, writer: StreamWriter): + events.append("client connected") + if (await reader.read(1024)).strip(): + events.append("client hello received") + writer.close() + await writer.wait_closed() + + server = await asyncio.start_server(handle_client, "localhost", 9999) + server.events = events + await server.start_serving() + _c = 
self.simple_client( + "mongodb://localhost:9999", + serverSelectionTimeoutMS=500, + event_listeners=(listener,), + ) + await _c.aconnect() + + await listener.async_wait_for_event(ServerHeartbeatStartedEvent, 1) + await listener.async_wait_for_event(ServerHeartbeatFailedEvent, 1) + + server.close() + await server.wait_closed() + await _c.close() + + self.assertEqual( + events, + [ + "serverHeartbeatStartedEvent", + "client connected", + "client hello received", + "serverHeartbeatFailedEvent", + ], + ) + + +# Generate unified tests. +globals().update(generate_test_classes(os.path.join(SDAM_PATH, "unified"), module=__name__)) + + +if __name__ == "__main__": + unittest.main() diff --git a/test/asynchronous/test_dns.py b/test/asynchronous/test_dns.py new file mode 100644 index 0000000000..5666612218 --- /dev/null +++ b/test/asynchronous/test_dns.py @@ -0,0 +1,308 @@ +# Copyright 2017 MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Run the SRV support tests.""" +from __future__ import annotations + +import glob +import json +import os +import pathlib +import sys + +sys.path[0:0] = [""] + +from test.asynchronous import ( + AsyncIntegrationTest, + AsyncPyMongoTestCase, + async_client_context, + unittest, +) +from test.utils_shared import async_wait_until +from unittest.mock import MagicMock, patch + +from pymongo.asynchronous.uri_parser import parse_uri +from pymongo.common import validate_read_preference_tags +from pymongo.errors import ConfigurationError +from pymongo.uri_parser_shared import split_hosts + +_IS_SYNC = False + + +class TestDNSRepl(AsyncPyMongoTestCase): + if _IS_SYNC: + TEST_PATH = os.path.join( + pathlib.Path(__file__).resolve().parent, "srv_seedlist", "replica-set" + ) + else: + TEST_PATH = os.path.join( + pathlib.Path(__file__).resolve().parent.parent, "srv_seedlist", "replica-set" + ) + load_balanced = False + + @async_client_context.require_replica_set + def asyncSetUp(self): + pass + + +class TestDNSLoadBalanced(AsyncPyMongoTestCase): + if _IS_SYNC: + TEST_PATH = os.path.join( + pathlib.Path(__file__).resolve().parent, "srv_seedlist", "load-balanced" + ) + else: + TEST_PATH = os.path.join( + pathlib.Path(__file__).resolve().parent.parent, "srv_seedlist", "load-balanced" + ) + load_balanced = True + + @async_client_context.require_load_balancer + def asyncSetUp(self): + pass + + +class TestDNSSharded(AsyncPyMongoTestCase): + if _IS_SYNC: + TEST_PATH = os.path.join(pathlib.Path(__file__).resolve().parent, "srv_seedlist", "sharded") + else: + TEST_PATH = os.path.join( + pathlib.Path(__file__).resolve().parent.parent, "srv_seedlist", "sharded" + ) + load_balanced = False + + @async_client_context.require_mongos + def asyncSetUp(self): + pass + + +def create_test(test_case): + async def run_test(self): + uri = test_case["uri"] + seeds = test_case.get("seeds") + num_seeds = test_case.get("numSeeds", len(seeds or [])) + hosts = test_case.get("hosts") + num_hosts = test_case.get("numHosts", len(hosts or [])) + + options = test_case.get("options", {}) + if "ssl" in 
options: + options["tls"] = options.pop("ssl") + parsed_options = test_case.get("parsed_options") + # See DRIVERS-1324, unless tls is explicitly set to False we need TLS. + needs_tls = not (options and (options.get("ssl") is False or options.get("tls") is False)) + if needs_tls and not async_client_context.tls: + self.skipTest("this test requires a TLS cluster") + if not needs_tls and async_client_context.tls: + self.skipTest("this test requires a non-TLS cluster") + + if seeds: + seeds = split_hosts(",".join(seeds)) + if hosts: + hosts = frozenset(split_hosts(",".join(hosts))) + + if seeds or num_seeds: + result = await parse_uri(uri, validate=True) + if seeds is not None: + self.assertEqual(sorted(result["nodelist"]), sorted(seeds)) + if num_seeds is not None: + self.assertEqual(len(result["nodelist"]), num_seeds) + if options: + opts = result["options"] + if "readpreferencetags" in opts: + rpts = validate_read_preference_tags( + "readPreferenceTags", opts.pop("readpreferencetags") + ) + opts["readPreferenceTags"] = rpts + self.assertEqual(result["options"], options) + if parsed_options: + for opt, expected in parsed_options.items(): + if opt == "user": + self.assertEqual(result["username"], expected) + elif opt == "password": + self.assertEqual(result["password"], expected) + elif opt == "auth_database" or opt == "db": + self.assertEqual(result["database"], expected) + + hostname = next(iter(async_client_context.client.nodes))[0] + # The replica set members must be configured as 'localhost'. + if hostname == "localhost": + copts = async_client_context.default_client_options.copy() + # Remove tls since SRV parsing should add it automatically. + copts.pop("tls", None) + if async_client_context.tls: + # Our test certs don't support the SRV hosts used in these + # tests. + copts["tlsAllowInvalidHostnames"] = True + + client = self.simple_client(uri, **copts) + if client._options.connect: + await client.aconnect() + if num_seeds is not None: + self.assertEqual(len(client._topology_settings.seeds), num_seeds) + if hosts is not None: + await async_wait_until( + lambda: hosts == client.nodes, "match test hosts to client nodes" + ) + if num_hosts is not None: + await async_wait_until( + lambda: num_hosts == len(client.nodes), "wait to connect to num_hosts" + ) + if test_case.get("ping", True): + await client.admin.command("ping") + # XXX: we should block until SRV poller runs at least once + # and re-run these assertions. 
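+ # One possible (hypothetical) sketch, relying on the topology's
+ # internal _srv_monitor attribute, would be:
+ #   await async_wait_until(
+ #       lambda: client._topology._srv_monitor is not None,
+ #       "SRV poller created",
+ #   )
+ # followed by a second pass over the host assertions above.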
+ else: + try: + await parse_uri(uri) + except (ConfigurationError, ValueError): + pass + else: + self.fail("failed to raise an exception") + + return run_test + + +def create_tests(cls): + for filename in glob.glob(os.path.join(cls.TEST_PATH, "*.json")): + test_suffix, _ = os.path.splitext(os.path.basename(filename)) + with open(filename) as dns_test_file: + test_method = create_test(json.load(dns_test_file)) + setattr(cls, "test_" + test_suffix, test_method) + + +create_tests(TestDNSRepl) +create_tests(TestDNSLoadBalanced) +create_tests(TestDNSSharded) + + +class TestParsingErrors(AsyncPyMongoTestCase): + async def test_invalid_host(self): + with self.assertRaisesRegex(ConfigurationError, "Invalid URI host: an IP address is not"): + client = self.simple_client("mongodb+srv://127.0.0.1") + await client.aconnect() + with self.assertRaisesRegex(ConfigurationError, "Invalid URI host: an IP address is not"): + client = self.simple_client("mongodb+srv://[::1]") + await client.aconnect() + + +class IsolatedAsyncioTestCaseInsensitive(AsyncIntegrationTest): + async def test_connect_case_insensitive(self): + client = self.simple_client("mongodb+srv://TEST1.TEST.BUILD.10GEN.cc/") + await client.aconnect() + self.assertGreater(len(client.topology_description.server_descriptions()), 1) + + +class TestInitialDnsSeedlistDiscovery(AsyncPyMongoTestCase): + """ + Initial DNS Seedlist Discovery prose tests + https://github.com/mongodb/specifications/blob/0a7a8b5/source/initial-dns-seedlist-discovery/tests/README.md#prose-tests + """ + + async def run_initial_dns_seedlist_discovery_prose_tests(self, test_cases): + for case in test_cases: + with patch("dns.asyncresolver.resolve") as mock_resolver: + + async def mock_resolve(query, record_type, *args, **kwargs): + mock_srv = MagicMock() + mock_srv.target.to_text.return_value = case["mock_target"] + return [mock_srv] + + mock_resolver.side_effect = mock_resolve + domain = case["query"].split("._tcp.")[1] + connection_string = f"mongodb+srv://{domain}" + if "expected_error" not in case: + await parse_uri(connection_string) + else: + try: + await parse_uri(connection_string) + except ConfigurationError as e: + self.assertIn(case["expected_error"], str(e)) + else: + self.fail(f"ConfigurationError was not raised for query: {case['query']}") + + async def test_1_allow_srv_hosts_with_fewer_than_three_dot_separated_parts(self): + with patch("dns.asyncresolver.resolve"): + await parse_uri("mongodb+srv://localhost/") + await parse_uri("mongodb+srv://mongo.local/") + + async def test_2_throw_when_return_address_does_not_end_with_srv_domain(self): + test_cases = [ + { + "query": "_mongodb._tcp.localhost", + "mock_target": "localhost.mongodb", + "expected_error": "Invalid SRV host", + }, + { + "query": "_mongodb._tcp.blogs.mongodb.com", + "mock_target": "blogs.evil.com", + "expected_error": "Invalid SRV host", + }, + { + "query": "_mongodb._tcp.blogs.mongo.local", + "mock_target": "test_1.evil.com", + "expected_error": "Invalid SRV host", + }, + ] + await self.run_initial_dns_seedlist_discovery_prose_tests(test_cases) + + async def test_3_throw_when_return_address_is_identical_to_srv_hostname(self): + test_cases = [ + { + "query": "_mongodb._tcp.localhost", + "mock_target": "localhost", + "expected_error": "Invalid SRV host", + }, + { + "query": "_mongodb._tcp.mongo.local", + "mock_target": "mongo.local", + "expected_error": "Invalid SRV host", + }, + ] + await self.run_initial_dns_seedlist_discovery_prose_tests(test_cases) + + async def 
test_4_throw_when_return_address_does_not_contain_dot_separating_shared_part_of_domain( + self + ): + test_cases = [ + { + "query": "_mongodb._tcp.localhost", + "mock_target": "test_1.cluster_1localhost", + "expected_error": "Invalid SRV host", + }, + { + "query": "_mongodb._tcp.mongo.local", + "mock_target": "test_1.my_hostmongo.local", + "expected_error": "Invalid SRV host", + }, + { + "query": "_mongodb._tcp.blogs.mongodb.com", + "mock_target": "cluster.testmongodb.com", + "expected_error": "Invalid SRV host", + }, + ] + await self.run_initial_dns_seedlist_discovery_prose_tests(test_cases) + + async def test_5_when_srv_hostname_has_two_dot_separated_parts_it_is_valid_for_the_returned_hostname_to_be_identical( + self + ): + test_cases = [ + { + "query": "_mongodb._tcp.blogs.mongodb.com", + "mock_target": "blogs.mongodb.com", + }, + ] + await self.run_initial_dns_seedlist_discovery_prose_tests(test_cases) + + +if __name__ == "__main__": + unittest.main() diff --git a/test/asynchronous/test_encryption.py b/test/asynchronous/test_encryption.py index 48f791ac16..74c0136ad0 100644 --- a/test/asynchronous/test_encryption.py +++ b/test/asynchronous/test_encryption.py @@ -32,6 +32,7 @@ import warnings from test.asynchronous import AsyncIntegrationTest, AsyncPyMongoTestCase, async_client_context from test.asynchronous.test_bulk import AsyncBulkTestBase +from test.asynchronous.utils import flaky from test.asynchronous.utils_spec_runner import AsyncSpecRunner, AsyncSpecTestCreator from threading import Thread from typing import Any, Dict, Mapping, Optional @@ -39,8 +40,8 @@ import pytest from pymongo.asynchronous.collection import AsyncCollection -from pymongo.asynchronous.helpers import anext from pymongo.daemon import _spawn_daemon +from pymongo.uri_parser_shared import _parse_kms_tls_options try: from pymongo.pyopenssl_context import IS_PYOPENSSL @@ -55,16 +56,19 @@ from test.asynchronous.test_bulk import AsyncBulkTestBase from test.asynchronous.unified_format import generate_test_classes from test.asynchronous.utils_spec_runner import AsyncSpecRunner -from test.helpers import ( +from test.helpers_shared import ( + ALL_KMS_PROVIDERS, AWS_CREDS, + AWS_TEMP_CREDS, AZURE_CREDS, CA_PEM, CLIENT_PEM, + DEFAULT_KMS_TLS, GCP_CREDS, KMIP_CREDS, LOCAL_MASTER_KEY, ) -from test.utils import ( +from test.utils_shared import ( AllowListEventListener, OvertCommandListener, TopologyEventListener, @@ -73,7 +77,7 @@ is_greenthread_patched, ) -from bson import DatetimeMS, Decimal128, encode, json_util +from bson import BSON, DatetimeMS, Decimal128, encode, json_util from bson.binary import UUID_SUBTYPE, Binary, UuidRepresentation from bson.codec_options import CodecOptions from bson.errors import BSONError @@ -84,7 +88,7 @@ from pymongo.asynchronous.encryption import Algorithm, AsyncClientEncryption, QueryType from pymongo.asynchronous.mongo_client import AsyncMongoClient from pymongo.cursor_shared import CursorType -from pymongo.encryption_options import _HAVE_PYMONGOCRYPT, AutoEncryptionOpts, RangeOpts +from pymongo.encryption_options import _HAVE_PYMONGOCRYPT, AutoEncryptionOpts, RangeOpts, TextOpts from pymongo.errors import ( AutoReconnect, BulkWriteError, @@ -94,11 +98,11 @@ EncryptionError, InvalidOperation, OperationFailure, + PyMongoError, ServerSelectionTimeoutError, WriteError, ) from pymongo.operations import InsertOne, ReplaceOne, UpdateOne -from pymongo.ssl_support import get_ssl_context from pymongo.write_concern import WriteConcern _IS_SYNC = False @@ -141,7 +145,7 @@ def test_init(self): 
self.assertEqual(opts._mongocryptd_bypass_spawn, False) self.assertEqual(opts._mongocryptd_spawn_path, "mongocryptd") self.assertEqual(opts._mongocryptd_spawn_args, ["--idleShutdownTimeoutSecs=60"]) - self.assertEqual(opts._kms_ssl_contexts, {}) + self.assertEqual(opts._kms_tls_options, None) @unittest.skipUnless(_HAVE_PYMONGOCRYPT, "pymongocrypt is not installed") def test_init_spawn_args(self): @@ -165,38 +169,47 @@ def test_init_spawn_args(self): ) @unittest.skipUnless(_HAVE_PYMONGOCRYPT, "pymongocrypt is not installed") - def test_init_kms_tls_options(self): + async def test_init_kms_tls_options(self): # Error cases: + opts = AutoEncryptionOpts({}, "k.d", kms_tls_options={"kmip": 1}) with self.assertRaisesRegex(TypeError, r'kms_tls_options\["kmip"\] must be a dict'): - AutoEncryptionOpts({}, "k.d", kms_tls_options={"kmip": 1}) + AsyncMongoClient(auto_encryption_opts=opts) + tls_opts: Any for tls_opts in [ {"kmip": {"tls": True, "tlsInsecure": True}}, {"kmip": {"tls": True, "tlsAllowInvalidCertificates": True}}, {"kmip": {"tls": True, "tlsAllowInvalidHostnames": True}}, ]: + opts = AutoEncryptionOpts({}, "k.d", kms_tls_options=tls_opts) with self.assertRaisesRegex(ConfigurationError, "Insecure TLS options prohibited"): - opts = AutoEncryptionOpts({}, "k.d", kms_tls_options=tls_opts) + AsyncMongoClient(auto_encryption_opts=opts) + opts = AutoEncryptionOpts( + {}, "k.d", kms_tls_options={"kmip": {"tlsCAFile": "does-not-exist"}} + ) with self.assertRaises(FileNotFoundError): - AutoEncryptionOpts({}, "k.d", kms_tls_options={"kmip": {"tlsCAFile": "does-not-exist"}}) + AsyncMongoClient(auto_encryption_opts=opts) # Success cases: tls_opts: Any for tls_opts in [None, {}]: opts = AutoEncryptionOpts({}, "k.d", kms_tls_options=tls_opts) - self.assertEqual(opts._kms_ssl_contexts, {}) + kms_tls_contexts = _parse_kms_tls_options(opts._kms_tls_options, _IS_SYNC) + self.assertEqual(kms_tls_contexts, {}) opts = AutoEncryptionOpts({}, "k.d", kms_tls_options={"kmip": {"tls": True}, "aws": {}}) - ctx = opts._kms_ssl_contexts["kmip"] + _kms_ssl_contexts = _parse_kms_tls_options(opts._kms_tls_options, _IS_SYNC) + ctx = _kms_ssl_contexts["kmip"] self.assertEqual(ctx.check_hostname, True) self.assertEqual(ctx.verify_mode, ssl.CERT_REQUIRED) - ctx = opts._kms_ssl_contexts["aws"] + ctx = _kms_ssl_contexts["aws"] self.assertEqual(ctx.check_hostname, True) self.assertEqual(ctx.verify_mode, ssl.CERT_REQUIRED) opts = AutoEncryptionOpts( {}, "k.d", - kms_tls_options={"kmip": {"tlsCAFile": CA_PEM, "tlsCertificateKeyFile": CLIENT_PEM}}, + kms_tls_options=DEFAULT_KMS_TLS, ) - ctx = opts._kms_ssl_contexts["kmip"] + _kms_ssl_contexts = _parse_kms_tls_options(opts._kms_tls_options, _IS_SYNC) + ctx = _kms_ssl_contexts["kmip"] self.assertEqual(ctx.check_hostname, True) self.assertEqual(ctx.verify_mode, ssl.CERT_REQUIRED) @@ -385,7 +398,7 @@ async def test_use_after_close(self): ) @unittest.skipIf( is_greenthread_patched(), - "gevent and eventlet do not support POSIX-style forking.", + "gevent does not support POSIX-style forking.", ) @async_client_context.require_sync async def test_fork(self): @@ -441,20 +454,6 @@ class TestClientMaxWireVersion(AsyncIntegrationTest): async def asyncSetUp(self): await super().asyncSetUp() - @async_client_context.require_version_max(4, 0, 99) - async def test_raise_max_wire_version_error(self): - opts = AutoEncryptionOpts(KMS_PROVIDERS, "keyvault.datakeys") - client = await self.async_rs_or_single_client(auto_encryption_opts=opts) - msg = "Auto-encryption requires a minimum MongoDB version of 
4.2" - with self.assertRaisesRegex(ConfigurationError, msg): - await client.test.test.insert_one({}) - with self.assertRaisesRegex(ConfigurationError, msg): - await client.admin.command("ping") - with self.assertRaisesRegex(ConfigurationError, msg): - await client.test.test.find_one({}) - with self.assertRaisesRegex(ConfigurationError, msg): - await client.test.test.bulk_write([InsertOne({})]) - async def test_raise_unsupported_error(self): opts = AutoEncryptionOpts(KMS_PROVIDERS, "keyvault.datakeys") client = await self.async_rs_or_single_client(auto_encryption_opts=opts) @@ -619,17 +618,10 @@ async def test_with_statement(self): # Spec tests -AWS_TEMP_CREDS = { - "accessKeyId": os.environ.get("CSFLE_AWS_TEMP_ACCESS_KEY_ID", ""), - "secretAccessKey": os.environ.get("CSFLE_AWS_TEMP_SECRET_ACCESS_KEY", ""), - "sessionToken": os.environ.get("CSFLE_AWS_TEMP_SESSION_TOKEN", ""), -} - AWS_TEMP_NO_SESSION_CREDS = { "accessKeyId": os.environ.get("CSFLE_AWS_TEMP_ACCESS_KEY_ID", ""), "secretAccessKey": os.environ.get("CSFLE_AWS_TEMP_SECRET_ACCESS_KEY", ""), } -KMS_TLS_OPTS = {"kmip": {"tlsCAFile": CA_PEM, "tlsCertificateKeyFile": CLIENT_PEM}} class AsyncTestSpec(AsyncSpecRunner): @@ -666,7 +658,7 @@ def parse_auto_encrypt_opts(self, opts): self.skipTest("GCP environment credentials are not set") if "kmip" in kms_providers: kms_providers["kmip"] = KMIP_CREDS - opts["kms_tls_options"] = KMS_TLS_OPTS + opts["kms_tls_options"] = DEFAULT_KMS_TLS if "key_vault_namespace" not in opts: opts["key_vault_namespace"] = "keyvault.datakeys" if "extra_options" in opts: @@ -740,7 +732,7 @@ def allowable_errors(self, op): return errors -async def create_test(scenario_def, test, name): +def create_test(scenario_def, test, name): @async_client_context.require_test_commands async def run_scenario(self): await self.run_scenario(scenario_def, test) @@ -754,20 +746,11 @@ async def run_scenario(self): if _HAVE_PYMONGOCRYPT: globals().update( generate_test_classes( - os.path.join(SPEC_PATH, "unified"), - module=__name__, + os.path.join(SPEC_PATH, "unified"), module=__name__, expected_failures=["mapReduce .*"] ) ) # Prose Tests -ALL_KMS_PROVIDERS = { - "aws": AWS_CREDS, - "azure": AZURE_CREDS, - "gcp": GCP_CREDS, - "kmip": KMIP_CREDS, - "local": {"key": LOCAL_MASTER_KEY}, -} - LOCAL_KEY_ID = Binary(base64.b64decode(b"LOCALAAAAAAAAAAAAAAAAA=="), UUID_SUBTYPE) AWS_KEY_ID = Binary(base64.b64decode(b"AWSAAAAAAAAAAAAAAAAAAA=="), UUID_SUBTYPE) AZURE_KEY_ID = Binary(base64.b64decode(b"AZUREAAAAAAAAAAAAAAAAA=="), UUID_SUBTYPE) @@ -854,13 +837,17 @@ async def asyncSetUp(self): self.KMS_PROVIDERS, "keyvault.datakeys", schema_map=schemas, - kms_tls_options=KMS_TLS_OPTS, + kms_tls_options=DEFAULT_KMS_TLS, ) self.client_encrypted = await self.async_rs_or_single_client( auto_encryption_opts=opts, uuidRepresentation="standard" ) self.client_encryption = self.create_client_encryption( - self.KMS_PROVIDERS, "keyvault.datakeys", self.client, OPTS, kms_tls_options=KMS_TLS_OPTS + self.KMS_PROVIDERS, + "keyvault.datakeys", + self.client, + OPTS, + kms_tls_options=DEFAULT_KMS_TLS, ) self.listener.reset() @@ -1069,7 +1056,7 @@ async def _test_corpus(self, opts): "keyvault.datakeys", async_client_context.client, OPTS, - kms_tls_options=KMS_TLS_OPTS, + kms_tls_options=DEFAULT_KMS_TLS, ) corpus = self.fix_up_curpus(json_data("corpus", "corpus.json")) @@ -1161,7 +1148,7 @@ async def _test_corpus(self, opts): async def test_corpus(self): opts = AutoEncryptionOpts( - self.kms_providers(), "keyvault.datakeys", kms_tls_options=KMS_TLS_OPTS + 
self.kms_providers(), "keyvault.datakeys", kms_tls_options=DEFAULT_KMS_TLS ) await self._test_corpus(opts) @@ -1172,7 +1159,7 @@ async def test_corpus_local_schema(self): self.kms_providers(), "keyvault.datakeys", schema_map=schemas, - kms_tls_options=KMS_TLS_OPTS, + kms_tls_options=DEFAULT_KMS_TLS, ) await self._test_corpus(opts) @@ -1279,7 +1266,7 @@ async def test_06_insert_fails_over_16MiB(self): with self.assertRaises(BulkWriteError) as ctx: await self.coll_encrypted.bulk_write([InsertOne(doc)]) err = ctx.exception.details["writeErrors"][0] - self.assertEqual(2, err["code"]) + self.assertIn(err["code"], [2, 10334]) self.assertIn("object to insert too large", err["errmsg"]) @@ -1303,19 +1290,19 @@ async def asyncSetUp(self): key_vault_namespace="keyvault.datakeys", key_vault_client=async_client_context.client, codec_options=OPTS, - kms_tls_options=KMS_TLS_OPTS, + kms_tls_options=DEFAULT_KMS_TLS, ) kms_providers_invalid = copy.deepcopy(kms_providers) kms_providers_invalid["azure"]["identityPlatformEndpoint"] = "doesnotexist.invalid:443" kms_providers_invalid["gcp"]["endpoint"] = "doesnotexist.invalid:443" - kms_providers_invalid["kmip"]["endpoint"] = "doesnotexist.local:5698" + kms_providers_invalid["kmip"]["endpoint"] = "doesnotexist.invalid:5698" self.client_encryption_invalid = self.create_client_encryption( kms_providers=kms_providers_invalid, key_vault_namespace="keyvault.datakeys", key_vault_client=async_client_context.client, codec_options=OPTS, - kms_tls_options=KMS_TLS_OPTS, + kms_tls_options=DEFAULT_KMS_TLS, ) self._kmip_host_error = None self._invalid_host_error = None @@ -1367,15 +1354,10 @@ async def test_03_aws_region_key_endpoint_port(self): }, ) - @unittest.skipUnless(any(AWS_CREDS.values()), "AWS environment credentials are not set") - async def test_04_aws_endpoint_invalid_port(self): - master_key = { - "region": "us-east-1", - "key": ("arn:aws:kms:us-east-1:579766882180:key/89fcc2c4-08b0-4bd9-9f25-e30687b580d0"), - "endpoint": "kms.us-east-1.amazonaws.com:12345", - } - with self.assertRaisesRegex(EncryptionError, "kms.us-east-1.amazonaws.com:12345"): - await self.client_encryption.create_data_key("aws", master_key=master_key) + async def test_04_kmip_endpoint_invalid_port(self): + master_key = {"keyId": "1", "endpoint": "localhost:12345"} + with self.assertRaisesRegex(EncryptionError, "localhost:12345"): + await self.client_encryption.create_data_key("kmip", master_key=master_key) @unittest.skipUnless(any(AWS_CREDS.values()), "AWS environment credentials are not set") async def test_05_aws_endpoint_wrong_region(self): @@ -1481,7 +1463,7 @@ async def test_11_kmip_master_key_endpoint(self): self.assertEqual("test", await self.client_encryption_invalid.decrypt(encrypted)) async def test_12_kmip_master_key_invalid_endpoint(self): - key = {"keyId": "1", "endpoint": "doesnotexist.local:5698"} + key = {"keyId": "1", "endpoint": "doesnotexist.invalid:5698"} with self.assertRaisesRegex(EncryptionError, self.kmip_host_error): await self.client_encryption.create_data_key("kmip", key) @@ -2163,12 +2145,13 @@ async def test_01_aws(self): # 127.0.0.1:9001: ('Certificate does not contain any `subjectAltName`s.',) key["endpoint"] = "127.0.0.1:9001" with self.assertRaisesRegex( - EncryptionError, "IP address mismatch|wronghost|IPAddressMismatch|Certificate" + EncryptionError, + "IP address mismatch|wronghost|IPAddressMismatch|Certificate|SSL handshake failed", ): await self.client_encryption_invalid_hostname.create_data_key("aws", key) async def test_02_azure(self): - key = 
{"keyVaultEndpoint": "doesnotexist.local", "keyName": "foo"} + key = {"keyVaultEndpoint": "doesnotexist.invalid", "keyName": "foo"} # Missing client cert error. with self.assertRaisesRegex(EncryptionError, self.cert_error): await self.client_encryption_no_client_cert.create_data_key("azure", key) @@ -2180,7 +2163,8 @@ async def test_02_azure(self): await self.client_encryption_expired.create_data_key("azure", key) # Invalid cert hostname error. with self.assertRaisesRegex( - EncryptionError, "IP address mismatch|wronghost|IPAddressMismatch|Certificate" + EncryptionError, + "IP address mismatch|wronghost|IPAddressMismatch|Certificate|SSL handshake failed", ): await self.client_encryption_invalid_hostname.create_data_key("azure", key) @@ -2197,7 +2181,8 @@ async def test_03_gcp(self): await self.client_encryption_expired.create_data_key("gcp", key) # Invalid cert hostname error. with self.assertRaisesRegex( - EncryptionError, "IP address mismatch|wronghost|IPAddressMismatch|Certificate" + EncryptionError, + "IP address mismatch|wronghost|IPAddressMismatch|Certificate|SSL handshake failed", ): await self.client_encryption_invalid_hostname.create_data_key("gcp", key) @@ -2211,7 +2196,8 @@ async def test_04_kmip(self): await self.client_encryption_expired.create_data_key("kmip") # Invalid cert hostname error. with self.assertRaisesRegex( - EncryptionError, "IP address mismatch|wronghost|IPAddressMismatch|Certificate" + EncryptionError, + "IP address mismatch|wronghost|IPAddressMismatch|Certificate|SSL handshake failed", ): await self.client_encryption_invalid_hostname.create_data_key("kmip") @@ -2221,7 +2207,7 @@ async def test_05_tlsDisableOCSPEndpointCheck_is_permitted(self): encryption = self.create_client_encryption( providers, "keyvault.datakeys", self.client, OPTS, kms_tls_options=options ) - ctx = encryption._io_callbacks.opts._kms_ssl_contexts["aws"] + ctx = encryption._io_callbacks._kms_ssl_contexts["aws"] if not hasattr(ctx, "check_ocsp_endpoint"): raise self.skipTest("OCSP not enabled") self.assertFalse(ctx.check_ocsp_endpoint) @@ -2240,7 +2226,7 @@ async def test_06_named_kms_providers_apply_tls_options_aws(self): await self.client_encryption_with_names.create_data_key("aws:with_tls", key) async def test_06_named_kms_providers_apply_tls_options_azure(self): - key = {"keyVaultEndpoint": "doesnotexist.local", "keyName": "foo"} + key = {"keyVaultEndpoint": "doesnotexist.invalid", "keyName": "foo"} # Missing client cert error. 
with self.assertRaisesRegex(EncryptionError, self.cert_error): await self.client_encryption_with_names.create_data_key("azure:no_client_cert", key) @@ -2416,6 +2402,310 @@ async def test_05_roundtrip_encrypted_unindexed(self): self.assertEqual(decrypted, val) +# https://github.com/mongodb/specifications/blob/527e22d5090ec48bf1e144c45fc831de0f1935f6/source/client-side-encryption/tests/README.md#25-test-lookup +class TestLookupProse(AsyncEncryptionIntegrationTest): + @async_client_context.require_no_standalone + @async_client_context.require_version_min(7, 0, -1) + async def asyncSetUp(self): + await super().asyncSetUp() + encrypted_client = await self.async_rs_or_single_client( + auto_encryption_opts=AutoEncryptionOpts( + key_vault_namespace="db.keyvault", + kms_providers={"local": {"key": LOCAL_MASTER_KEY}}, + ) + ) + await encrypted_client.drop_database("db") + + key_doc = json_data("etc", "data", "lookup", "key-doc.json") + await create_key_vault(encrypted_client.db.keyvault, key_doc) + self.addAsyncCleanup(async_client_context.client.drop_database, "db") + + await encrypted_client.db.create_collection( + "csfle", + validator={"$jsonSchema": json_data("etc", "data", "lookup", "schema-csfle.json")}, + ) + await encrypted_client.db.create_collection( + "csfle2", + validator={"$jsonSchema": json_data("etc", "data", "lookup", "schema-csfle2.json")}, + ) + await encrypted_client.db.create_collection( + "qe", encryptedFields=json_data("etc", "data", "lookup", "schema-qe.json") + ) + await encrypted_client.db.create_collection( + "qe2", encryptedFields=json_data("etc", "data", "lookup", "schema-qe2.json") + ) + await encrypted_client.db.create_collection("no_schema") + await encrypted_client.db.create_collection("no_schema2") + + unencrypted_client = await self.async_rs_or_single_client() + + await encrypted_client.db.csfle.insert_one({"csfle": "csfle"}) + doc = await unencrypted_client.db.csfle.find_one() + self.assertIsInstance(doc["csfle"], Binary) + await encrypted_client.db.csfle2.insert_one({"csfle2": "csfle2"}) + doc = await unencrypted_client.db.csfle2.find_one() + self.assertIsInstance(doc["csfle2"], Binary) + await encrypted_client.db.qe.insert_one({"qe": "qe"}) + doc = await unencrypted_client.db.qe.find_one() + self.assertIsInstance(doc["qe"], Binary) + await encrypted_client.db.qe2.insert_one({"qe2": "qe2"}) + doc = await unencrypted_client.db.qe2.find_one() + self.assertIsInstance(doc["qe2"], Binary) + await encrypted_client.db.no_schema.insert_one({"no_schema": "no_schema"}) + await encrypted_client.db.no_schema2.insert_one({"no_schema2": "no_schema2"}) + + await encrypted_client.close() + await unencrypted_client.close() + + @async_client_context.require_version_min(8, 1, -1) + async def test_1_csfle_joins_no_schema(self): + encrypted_client = await self.async_rs_or_single_client( + auto_encryption_opts=AutoEncryptionOpts( + key_vault_namespace="db.keyvault", + kms_providers={"local": {"key": LOCAL_MASTER_KEY}}, + ) + ) + doc = await anext( + await encrypted_client.db.csfle.aggregate( + [ + {"$match": {"csfle": "csfle"}}, + { + "$lookup": { + "from": "no_schema", + "as": "matched", + "pipeline": [ + {"$match": {"no_schema": "no_schema"}}, + {"$project": {"_id": 0}}, + ], + } + }, + {"$project": {"_id": 0}}, + ] + ) + ) + self.assertEqual(doc, {"csfle": "csfle", "matched": [{"no_schema": "no_schema"}]}) + + @async_client_context.require_version_min(8, 1, -1) + async def test_2_qe_joins_no_schema(self): + encrypted_client = await self.async_rs_or_single_client( + 
auto_encryption_opts=AutoEncryptionOpts( + key_vault_namespace="db.keyvault", + kms_providers={"local": {"key": LOCAL_MASTER_KEY}}, + ) + ) + doc = await anext( + await encrypted_client.db.qe.aggregate( + [ + {"$match": {"qe": "qe"}}, + { + "$lookup": { + "from": "no_schema", + "as": "matched", + "pipeline": [ + {"$match": {"no_schema": "no_schema"}}, + {"$project": {"_id": 0, "__safeContent__": 0}}, + ], + } + }, + {"$project": {"_id": 0, "__safeContent__": 0}}, + ] + ) + ) + self.assertEqual(doc, {"qe": "qe", "matched": [{"no_schema": "no_schema"}]}) + + @async_client_context.require_version_min(8, 1, -1) + async def test_3_no_schema_joins_csfle(self): + encrypted_client = await self.async_rs_or_single_client( + auto_encryption_opts=AutoEncryptionOpts( + key_vault_namespace="db.keyvault", + kms_providers={"local": {"key": LOCAL_MASTER_KEY}}, + ) + ) + doc = await anext( + await encrypted_client.db.no_schema.aggregate( + [ + {"$match": {"no_schema": "no_schema"}}, + { + "$lookup": { + "from": "csfle", + "as": "matched", + "pipeline": [{"$match": {"csfle": "csfle"}}, {"$project": {"_id": 0}}], + } + }, + {"$project": {"_id": 0}}, + ] + ) + ) + self.assertEqual(doc, {"no_schema": "no_schema", "matched": [{"csfle": "csfle"}]}) + + @async_client_context.require_version_min(8, 1, -1) + async def test_4_no_schema_joins_qe(self): + encrypted_client = await self.async_rs_or_single_client( + auto_encryption_opts=AutoEncryptionOpts( + key_vault_namespace="db.keyvault", + kms_providers={"local": {"key": LOCAL_MASTER_KEY}}, + ) + ) + doc = await anext( + await encrypted_client.db.no_schema.aggregate( + [ + {"$match": {"no_schema": "no_schema"}}, + { + "$lookup": { + "from": "qe", + "as": "matched", + "pipeline": [ + {"$match": {"qe": "qe"}}, + {"$project": {"_id": 0, "__safeContent__": 0}}, + ], + } + }, + {"$project": {"_id": 0}}, + ] + ) + ) + self.assertEqual(doc, {"no_schema": "no_schema", "matched": [{"qe": "qe"}]}) + + @async_client_context.require_version_min(8, 1, -1) + async def test_5_csfle_joins_csfle2(self): + encrypted_client = await self.async_rs_or_single_client( + auto_encryption_opts=AutoEncryptionOpts( + key_vault_namespace="db.keyvault", + kms_providers={"local": {"key": LOCAL_MASTER_KEY}}, + ) + ) + doc = await anext( + await encrypted_client.db.csfle.aggregate( + [ + {"$match": {"csfle": "csfle"}}, + { + "$lookup": { + "from": "csfle2", + "as": "matched", + "pipeline": [ + {"$match": {"csfle2": "csfle2"}}, + {"$project": {"_id": 0}}, + ], + } + }, + {"$project": {"_id": 0}}, + ] + ) + ) + self.assertEqual(doc, {"csfle": "csfle", "matched": [{"csfle2": "csfle2"}]}) + + @async_client_context.require_version_min(8, 1, -1) + async def test_6_qe_joins_qe2(self): + encrypted_client = await self.async_rs_or_single_client( + auto_encryption_opts=AutoEncryptionOpts( + key_vault_namespace="db.keyvault", + kms_providers={"local": {"key": LOCAL_MASTER_KEY}}, + ) + ) + doc = await anext( + await encrypted_client.db.qe.aggregate( + [ + {"$match": {"qe": "qe"}}, + { + "$lookup": { + "from": "qe2", + "as": "matched", + "pipeline": [ + {"$match": {"qe2": "qe2"}}, + {"$project": {"_id": 0, "__safeContent__": 0}}, + ], + } + }, + {"$project": {"_id": 0, "__safeContent__": 0}}, + ] + ) + ) + self.assertEqual(doc, {"qe": "qe", "matched": [{"qe2": "qe2"}]}) + + @async_client_context.require_version_min(8, 1, -1) + async def test_7_no_schema_joins_no_schema2(self): + encrypted_client = await self.async_rs_or_single_client( + auto_encryption_opts=AutoEncryptionOpts( + key_vault_namespace="db.keyvault", 
+                kms_providers={"local": {"key": LOCAL_MASTER_KEY}},
+            )
+        )
+        doc = await anext(
+            await encrypted_client.db.no_schema.aggregate(
+                [
+                    {"$match": {"no_schema": "no_schema"}},
+                    {
+                        "$lookup": {
+                            "from": "no_schema2",
+                            "as": "matched",
+                            "pipeline": [
+                                {"$match": {"no_schema2": "no_schema2"}},
+                                {"$project": {"_id": 0}},
+                            ],
+                        }
+                    },
+                    {"$project": {"_id": 0}},
+                ]
+            )
+        )
+        self.assertEqual(doc, {"no_schema": "no_schema", "matched": [{"no_schema2": "no_schema2"}]})
+
+    @async_client_context.require_version_min(8, 1, -1)
+    async def test_8_csfle_joins_qe(self):
+        encrypted_client = await self.async_rs_or_single_client(
+            auto_encryption_opts=AutoEncryptionOpts(
+                key_vault_namespace="db.keyvault",
+                kms_providers={"local": {"key": LOCAL_MASTER_KEY}},
+            )
+        )
+        with self.assertRaises(PyMongoError) as exc:
+            _ = await anext(
+                await encrypted_client.db.csfle.aggregate(
+                    [
+                        {"$match": {"csfle": "qe"}},
+                        {
+                            "$lookup": {
+                                "from": "qe",
+                                "as": "matched",
+                                "pipeline": [{"$match": {"qe": "qe"}}, {"$project": {"_id": 0}}],
+                            }
+                        },
+                        {"$project": {"_id": 0}},
+                    ]
+                )
+            )
+        self.assertIn("not supported", str(exc.exception))
+
+    @async_client_context.require_version_max(8, 1, -1)
+    async def test_9_error(self):
+        encrypted_client = await self.async_rs_or_single_client(
+            auto_encryption_opts=AutoEncryptionOpts(
+                key_vault_namespace="db.keyvault",
+                kms_providers={"local": {"key": LOCAL_MASTER_KEY}},
+            )
+        )
+        with self.assertRaises(PyMongoError) as exc:
+            _ = await anext(
+                await encrypted_client.db.csfle.aggregate(
+                    [
+                        {"$match": {"csfle": "csfle"}},
+                        {
+                            "$lookup": {
+                                "from": "no_schema",
+                                "as": "matched",
+                                "pipeline": [
+                                    {"$match": {"no_schema": "no_schema"}},
+                                    {"$project": {"_id": 0}},
+                                ],
+                            }
+                        },
+                        {"$project": {"_id": 0}},
+                    ]
+                )
+            )
+        self.assertIn("Upgrade", str(exc.exception))
+
+
 # https://github.com/mongodb/specifications/blob/072601/source/client-side-encryption/tests/README.md#rewrap
 class TestRewrapWithSeparateClientEncryption(AsyncEncryptionIntegrationTest):
     MASTER_KEYS: Mapping[str, Mapping[str, Any]] = {
@@ -2452,7 +2742,7 @@ async def run_test(self, src_provider, dst_provider):
             key_vault_client=self.client,
             key_vault_namespace="keyvault.datakeys",
             kms_providers=ALL_KMS_PROVIDERS,
-            kms_tls_options=KMS_TLS_OPTS,
+            kms_tls_options=DEFAULT_KMS_TLS,
             codec_options=OPTS,
         )
@@ -2472,7 +2762,7 @@ async def run_test(self, src_provider, dst_provider):
             key_vault_client=client2,
             key_vault_namespace="keyvault.datakeys",
             kms_providers=ALL_KMS_PROVIDERS,
-            kms_tls_options=KMS_TLS_OPTS,
+            kms_tls_options=DEFAULT_KMS_TLS,
             codec_options=OPTS,
         )
@@ -2879,15 +3169,10 @@ async def asyncSetUp(self):
     async def http_post(self, path, data=None):
         # Note, the connection to the mock server needs to be closed after
         # each request because the server is single threaded.
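+        # The rewritten context below trusts the test CA, presents the client certificate, and skips
+        # hostname and certificate verification, which is acceptable only against the local mock KMS server.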
- ctx: ssl.SSLContext = get_ssl_context( - CLIENT_PEM, # certfile - None, # passphrase - CA_PEM, # ca_certs - None, # crlfile - False, # allow_invalid_certificates - False, # allow_invalid_hostnames - False, # disable_ocsp_endpoint_check - ) + ctx = ssl.create_default_context(cafile=CA_PEM) + ctx.load_cert_chain(CLIENT_PEM) + ctx.check_hostname = False + ctx.verify_mode = ssl.CERT_NONE conn = http.client.HTTPSConnection("127.0.0.1:9003", context=ctx) try: if data is not None: @@ -2948,6 +3233,7 @@ async def test_kms_retry(self): class TestAutomaticDecryptionKeys(AsyncEncryptionIntegrationTest): @async_client_context.require_no_standalone @async_client_context.require_version_min(7, 0, -1) + @flaky(reason="PYTHON-4982") async def asyncSetUp(self): await super().asyncSetUp() self.key1_document = json_data("etc", "data", "keys", "key1-document.json") @@ -2984,9 +3270,10 @@ async def test_02_no_fields(self): ) async def test_03_invalid_keyid(self): + # checkAuthForCreateCollection can be removed when SERVER-102101 is fixed. with self.assertRaisesRegex( EncryptedCollectionError, - "create.encryptedFields.fields.keyId' is the wrong type 'bool', expected type 'binData", + "(create|checkAuthForCreateCollection).encryptedFields.fields.keyId' is the wrong type 'bool', expected type 'binData", ): await self.client_encryption.create_encrypted_collection( database=self.db, @@ -3155,6 +3442,262 @@ async def test_collection_name_collision(self): self.assertIsInstance(exc.exception.encrypted_fields["fields"][0]["keyId"], Binary) +# https://github.com/mongodb/specifications/blob/master/source/client-side-encryption/tests/README.md#27-text-explicit-encryption +class TestExplicitTextEncryptionProse(AsyncEncryptionIntegrationTest): + @async_client_context.require_no_standalone + @async_client_context.require_version_min(8, 2, -1) + @async_client_context.require_libmongocrypt_min(1, 15, 1) + @async_client_context.require_pymongocrypt_min(1, 16, 0) + async def asyncSetUp(self): + await super().asyncSetUp() + # Load the file key1-document.json as key1Document. + self.key1_document = json_data("etc", "data", "keys", "key1-document.json") + # Read the "_id" field of key1Document as key1ID. + self.key1_id = self.key1_document["_id"] + # Drop and create the collection keyvault.datakeys. + # Insert key1Document in keyvault.datakeys with majority write concern. + self.key_vault = await create_key_vault(self.client.keyvault.datakeys, self.key1_document) + self.addAsyncCleanup(self.key_vault.drop) + # Create a ClientEncryption object named clientEncryption with these options. + self.kms_providers = {"local": {"key": LOCAL_MASTER_KEY}} + self.client_encryption = self.create_client_encryption( + self.kms_providers, + self.key_vault.full_name, + self.client, + OPTS, + ) + # Create a MongoClient named encryptedClient with these AutoEncryptionOpts. + opts = AutoEncryptionOpts( + self.kms_providers, + "keyvault.datakeys", + bypass_query_analysis=True, + ) + self.client_encrypted = await self.async_rs_or_single_client(auto_encryption_opts=opts) + + # Using QE CreateCollection() and Collection.Drop(), drop and create the following collections with majority write concern: + # db.prefix-suffix using the encryptedFields option set to the contents of encryptedFields-prefix-suffix.json. 
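+        # (The encryptedFields-*.json fixtures come from the specification repo; they configure the
+        # "encryptedText" field for prefix/suffix and substring queries respectively.)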
+ db = self.client_encrypted.db + await db.drop_collection("prefix-suffix") + encrypted_fields = json_data("etc", "data", "encryptedFields-prefix-suffix.json") + await self.client_encryption.create_encrypted_collection( + db, "prefix-suffix", kms_provider="local", encrypted_fields=encrypted_fields + ) + # db.substring using the encryptedFields option set to the contents of encryptedFields-substring.json. + await db.drop_collection("substring") + encrypted_fields = json_data("etc", "data", "encryptedFields-substring.json") + await self.client_encryption.create_encrypted_collection( + db, "substring", kms_provider="local", encrypted_fields=encrypted_fields + ) + + # Use clientEncryption to encrypt the string "foobarbaz" with the following EncryptOpts. + text_opts = TextOpts( + case_sensitive=True, + diacritic_sensitive=True, + prefix=dict(strMaxQueryLength=10, strMinQueryLength=2), + suffix=dict(strMaxQueryLength=10, strMinQueryLength=2), + ) + encrypted_value = await self.client_encryption.encrypt( + "foobarbaz", + key_id=self.key1_id, + algorithm=Algorithm.TEXTPREVIEW, + contention_factor=0, + text_opts=text_opts, + ) + # Use encryptedClient to insert the following document into db.prefix-suffix with majority write concern. + coll = self.client_encrypted.db["prefix-suffix"].with_options( + write_concern=WriteConcern(w="majority") + ) + await coll.insert_one({"_id": 0, "encryptedText": encrypted_value}) + + # Use clientEncryption to encrypt the string "foobarbaz" with the following EncryptOpts. + text_opts = TextOpts( + case_sensitive=True, + diacritic_sensitive=True, + substring=dict(strMaxLength=10, strMaxQueryLength=10, strMinQueryLength=2), + ) + encrypted_value = await self.client_encryption.encrypt( + "foobarbaz", + key_id=self.key1_id, + algorithm=Algorithm.TEXTPREVIEW, + contention_factor=0, + text_opts=text_opts, + ) + # Use encryptedClient to insert the following document into db.substring with majority write concern. + coll = self.client_encrypted.db["substring"].with_options( + write_concern=WriteConcern(w="majority") + ) + await coll.insert_one({"_id": 0, "encryptedText": encrypted_value}) + + async def test_01_can_find_a_document_by_prefix(self): + # Use clientEncryption.encrypt() to encrypt the string "foo" with the following EncryptOpts. + text_opts = TextOpts( + case_sensitive=True, + diacritic_sensitive=True, + prefix=dict(strMaxQueryLength=10, strMinQueryLength=2), + ) + encrypted_value = await self.client_encryption.encrypt( + "foo", + key_id=self.key1_id, + algorithm=Algorithm.TEXTPREVIEW, + query_type=QueryType.PREFIXPREVIEW, + contention_factor=0, + text_opts=text_opts, + ) + # Use encryptedClient to run a "find" operation on the db.prefix-suffix collection with the following filter. + value = await self.client_encrypted.db["prefix-suffix"].find_one( + {"$expr": {"$encStrStartsWith": {"input": "$encryptedText", "prefix": encrypted_value}}} + ) + # Assert the following document is returned. 
+ expected = {"_id": 0, "encryptedText": "foobarbaz"} + value.pop("__safeContent__", None) + self.assertEqual(value, expected) + + async def test_02_can_find_a_document_by_suffix(self): + # Use clientEncryption.encrypt() to encrypt the string "baz" with the following EncryptOpts: + text_opts = TextOpts( + case_sensitive=True, + diacritic_sensitive=True, + suffix=dict(strMaxQueryLength=10, strMinQueryLength=2), + ) + encrypted_value = await self.client_encryption.encrypt( + "baz", + key_id=self.key1_id, + algorithm=Algorithm.TEXTPREVIEW, + query_type=QueryType.SUFFIXPREVIEW, + contention_factor=0, + text_opts=text_opts, + ) + # Use encryptedClient to run a "find" operation on the db.prefix-suffix collection with the following filter: + value = await self.client_encrypted.db["prefix-suffix"].find_one( + {"$expr": {"$encStrEndsWith": {"input": "$encryptedText", "suffix": encrypted_value}}} + ) + # Assert the following document is returned. + expected = {"_id": 0, "encryptedText": "foobarbaz"} + value.pop("__safeContent__", None) + self.assertEqual(value, expected) + + async def test_03_no_document_found_by_prefix(self): + # Use clientEncryption.encrypt() to encrypt the string "baz" with the following EncryptOpts: + text_opts = TextOpts( + case_sensitive=True, + diacritic_sensitive=True, + prefix=dict(strMaxQueryLength=10, strMinQueryLength=2), + ) + encrypted_value = await self.client_encryption.encrypt( + "baz", + key_id=self.key1_id, + algorithm=Algorithm.TEXTPREVIEW, + query_type=QueryType.PREFIXPREVIEW, + contention_factor=0, + text_opts=text_opts, + ) + # Use encryptedClient to run a "find" operation on the db.prefix-suffix collection with the following filter: + value = await self.client_encrypted.db["prefix-suffix"].find_one( + {"$expr": {"$encStrStartsWith": {"input": "$encryptedText", "prefix": encrypted_value}}} + ) + # Assert that no documents are returned. + self.assertIsNone(value) + + async def test_04_no_document_found_by_suffix(self): + # Use clientEncryption.encrypt() to encrypt the string "foo" with the following EncryptOpts: + text_opts = TextOpts( + case_sensitive=True, + diacritic_sensitive=True, + suffix=dict(strMaxQueryLength=10, strMinQueryLength=2), + ) + encrypted_value = await self.client_encryption.encrypt( + "foo", + key_id=self.key1_id, + algorithm=Algorithm.TEXTPREVIEW, + query_type=QueryType.SUFFIXPREVIEW, + contention_factor=0, + text_opts=text_opts, + ) + # Use encryptedClient to run a "find" operation on the db.prefix-suffix collection with the following filter: + value = await self.client_encrypted.db["prefix-suffix"].find_one( + {"$expr": {"$encStrEndsWith": {"input": "$encryptedText", "suffix": encrypted_value}}} + ) + # Assert that no documents are returned. 
+ self.assertIsNone(value) + + async def test_05_can_find_a_document_by_substring(self): + # Use clientEncryption.encrypt() to encrypt the string "bar" with the following EncryptOpts: + text_opts = TextOpts( + case_sensitive=True, + diacritic_sensitive=True, + substring=dict(strMaxLength=10, strMaxQueryLength=10, strMinQueryLength=2), + ) + encrypted_value = await self.client_encryption.encrypt( + "bar", + key_id=self.key1_id, + algorithm=Algorithm.TEXTPREVIEW, + query_type=QueryType.SUBSTRINGPREVIEW, + contention_factor=0, + text_opts=text_opts, + ) + # Use encryptedClient to run a "find" operation on the db.substring collection with the following filter: + value = await self.client_encrypted.db["substring"].find_one( + { + "$expr": { + "$encStrContains": {"input": "$encryptedText", "substring": encrypted_value} + } + } + ) + # Assert the following document is returned: + expected = {"_id": 0, "encryptedText": "foobarbaz"} + value.pop("__safeContent__", None) + self.assertEqual(value, expected) + + async def test_06_no_document_found_by_substring(self): + # Use clientEncryption.encrypt() to encrypt the string "qux" with the following EncryptOpts: + text_opts = TextOpts( + case_sensitive=True, + diacritic_sensitive=True, + substring=dict(strMaxLength=10, strMaxQueryLength=10, strMinQueryLength=2), + ) + encrypted_value = await self.client_encryption.encrypt( + "qux", + key_id=self.key1_id, + algorithm=Algorithm.TEXTPREVIEW, + query_type=QueryType.SUBSTRINGPREVIEW, + contention_factor=0, + text_opts=text_opts, + ) + # Use encryptedClient to run a "find" operation on the db.substring collection with the following filter: + value = await self.client_encrypted.db["substring"].find_one( + { + "$expr": { + "$encStrContains": {"input": "$encryptedText", "substring": encrypted_value} + } + } + ) + # Assert that no documents are returned. + self.assertIsNone(value) + + async def test_07_contentionFactor_is_required(self): + from pymongocrypt.errors import MongoCryptError + + # Use clientEncryption.encrypt() to encrypt the string "foo" with the following EncryptOpts: + text_opts = TextOpts( + case_sensitive=True, + diacritic_sensitive=True, + prefix=dict(strMaxQueryLength=10, strMinQueryLength=2), + ) + with self.assertRaises(EncryptionError) as ctx: + await self.client_encryption.encrypt( + "foo", + key_id=self.key1_id, + algorithm=Algorithm.TEXTPREVIEW, + query_type=QueryType.PREFIXPREVIEW, + text_opts=text_opts, + ) + # Expect an error from libmongocrypt with a message containing the string: "contention factor is required for textPreview algorithm". 
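+        # The libmongocrypt failure surfaces as EncryptionError with the original MongoCryptError attached as .cause.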
+ self.assertIsInstance(ctx.exception.cause, MongoCryptError) + self.assertEqual( + str(ctx.exception), "contention factor is required for textPreview algorithm" + ) + + def start_mongocryptd(port) -> None: args = ["mongocryptd", f"--port={port}", "--idleShutdownTimeoutSecs=60"] _spawn_daemon(args) @@ -3189,6 +3732,8 @@ async def test_implicit_session_ignored_when_unsupported(self): self.assertNotIn("lsid", self.listener.started_events[1].command) + await self.mongocryptd_client.close() + async def test_explicit_session_errors_when_unsupported(self): self.listener.reset() async with self.mongocryptd_client.start_session() as s: @@ -3201,6 +3746,8 @@ async def test_explicit_session_errors_when_unsupported(self): ): await self.mongocryptd_client.db.test.insert_one({"x": 1}, session=s) + await self.mongocryptd_client.close() + if __name__ == "__main__": unittest.main() diff --git a/test/asynchronous/test_examples.py b/test/asynchronous/test_examples.py new file mode 100644 index 0000000000..21770f490c --- /dev/null +++ b/test/asynchronous/test_examples.py @@ -0,0 +1,1446 @@ +# Copyright 2017 MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""MongoDB documentation examples in Python.""" +from __future__ import annotations + +import asyncio +import datetime +import functools +import sys +import threading +import time +from test.asynchronous.helpers import ConcurrentRunner + +sys.path[0:0] = [""] + +from test.asynchronous import AsyncIntegrationTest, async_client_context, unittest +from test.utils_shared import async_wait_until + +import pymongo +from pymongo.errors import ConnectionFailure, OperationFailure +from pymongo.read_concern import ReadConcern +from pymongo.read_preferences import ReadPreference +from pymongo.server_api import ServerApi +from pymongo.write_concern import WriteConcern + +_IS_SYNC = False + + +class TestSampleShellCommands(AsyncIntegrationTest): + async def asyncSetUp(self): + await super().asyncSetUp() + await self.db.inventory.drop() + + async def asyncTearDown(self): + # Run after every test. 
+ await self.db.inventory.drop() + await self.client.drop_database("pymongo_test") + + async def test_first_three_examples(self): + db = self.db + + # Start Example 1 + await db.inventory.insert_one( + { + "item": "canvas", + "qty": 100, + "tags": ["cotton"], + "size": {"h": 28, "w": 35.5, "uom": "cm"}, + } + ) + # End Example 1 + + self.assertEqual(await db.inventory.count_documents({}), 1) + + # Start Example 2 + cursor = db.inventory.find({"item": "canvas"}) + # End Example 2 + + self.assertEqual(len(await cursor.to_list()), 1) + + # Start Example 3 + await db.inventory.insert_many( + [ + { + "item": "journal", + "qty": 25, + "tags": ["blank", "red"], + "size": {"h": 14, "w": 21, "uom": "cm"}, + }, + { + "item": "mat", + "qty": 85, + "tags": ["gray"], + "size": {"h": 27.9, "w": 35.5, "uom": "cm"}, + }, + { + "item": "mousepad", + "qty": 25, + "tags": ["gel", "blue"], + "size": {"h": 19, "w": 22.85, "uom": "cm"}, + }, + ] + ) + # End Example 3 + + self.assertEqual(await db.inventory.count_documents({}), 4) + + async def test_query_top_level_fields(self): + db = self.db + + # Start Example 6 + await db.inventory.insert_many( + [ + { + "item": "journal", + "qty": 25, + "size": {"h": 14, "w": 21, "uom": "cm"}, + "status": "A", + }, + { + "item": "notebook", + "qty": 50, + "size": {"h": 8.5, "w": 11, "uom": "in"}, + "status": "A", + }, + { + "item": "paper", + "qty": 100, + "size": {"h": 8.5, "w": 11, "uom": "in"}, + "status": "D", + }, + { + "item": "planner", + "qty": 75, + "size": {"h": 22.85, "w": 30, "uom": "cm"}, + "status": "D", + }, + { + "item": "postcard", + "qty": 45, + "size": {"h": 10, "w": 15.25, "uom": "cm"}, + "status": "A", + }, + ] + ) + # End Example 6 + + self.assertEqual(await db.inventory.count_documents({}), 5) + + # Start Example 7 + cursor = db.inventory.find({}) + # End Example 7 + + self.assertEqual(len(await cursor.to_list()), 5) + + # Start Example 9 + cursor = db.inventory.find({"status": "D"}) + # End Example 9 + + self.assertEqual(len(await cursor.to_list()), 2) + + # Start Example 10 + cursor = db.inventory.find({"status": {"$in": ["A", "D"]}}) + # End Example 10 + + self.assertEqual(len(await cursor.to_list()), 5) + + # Start Example 11 + cursor = db.inventory.find({"status": "A", "qty": {"$lt": 30}}) + # End Example 11 + + self.assertEqual(len(await cursor.to_list()), 1) + + # Start Example 12 + cursor = db.inventory.find({"$or": [{"status": "A"}, {"qty": {"$lt": 30}}]}) + # End Example 12 + + self.assertEqual(len(await cursor.to_list()), 3) + + # Start Example 13 + cursor = db.inventory.find( + {"status": "A", "$or": [{"qty": {"$lt": 30}}, {"item": {"$regex": "^p"}}]} + ) + # End Example 13 + + self.assertEqual(len(await cursor.to_list()), 2) + + async def test_query_embedded_documents(self): + db = self.db + + # Start Example 14 + await db.inventory.insert_many( + [ + { + "item": "journal", + "qty": 25, + "size": {"h": 14, "w": 21, "uom": "cm"}, + "status": "A", + }, + { + "item": "notebook", + "qty": 50, + "size": {"h": 8.5, "w": 11, "uom": "in"}, + "status": "A", + }, + { + "item": "paper", + "qty": 100, + "size": {"h": 8.5, "w": 11, "uom": "in"}, + "status": "D", + }, + { + "item": "planner", + "qty": 75, + "size": {"h": 22.85, "w": 30, "uom": "cm"}, + "status": "D", + }, + { + "item": "postcard", + "qty": 45, + "size": {"h": 10, "w": 15.25, "uom": "cm"}, + "status": "A", + }, + ] + ) + # End Example 14 + + # Start Example 15 + cursor = db.inventory.find({"size": {"h": 14, "w": 21, "uom": "cm"}}) + # End Example 15 + + self.assertEqual(len(await 
cursor.to_list()), 1) + + # Start Example 16 + cursor = db.inventory.find({"size": {"w": 21, "h": 14, "uom": "cm"}}) + # End Example 16 + + self.assertEqual(len(await cursor.to_list()), 0) + + # Start Example 17 + cursor = db.inventory.find({"size.uom": "in"}) + # End Example 17 + + self.assertEqual(len(await cursor.to_list()), 2) + + # Start Example 18 + cursor = db.inventory.find({"size.h": {"$lt": 15}}) + # End Example 18 + + self.assertEqual(len(await cursor.to_list()), 4) + + # Start Example 19 + cursor = db.inventory.find({"size.h": {"$lt": 15}, "size.uom": "in", "status": "D"}) + # End Example 19 + + self.assertEqual(len(await cursor.to_list()), 1) + + async def test_query_arrays(self): + db = self.db + + # Start Example 20 + await db.inventory.insert_many( + [ + {"item": "journal", "qty": 25, "tags": ["blank", "red"], "dim_cm": [14, 21]}, + {"item": "notebook", "qty": 50, "tags": ["red", "blank"], "dim_cm": [14, 21]}, + { + "item": "paper", + "qty": 100, + "tags": ["red", "blank", "plain"], + "dim_cm": [14, 21], + }, + {"item": "planner", "qty": 75, "tags": ["blank", "red"], "dim_cm": [22.85, 30]}, + {"item": "postcard", "qty": 45, "tags": ["blue"], "dim_cm": [10, 15.25]}, + ] + ) + # End Example 20 + + # Start Example 21 + cursor = db.inventory.find({"tags": ["red", "blank"]}) + # End Example 21 + + self.assertEqual(len(await cursor.to_list()), 1) + + # Start Example 22 + cursor = db.inventory.find({"tags": {"$all": ["red", "blank"]}}) + # End Example 22 + + self.assertEqual(len(await cursor.to_list()), 4) + + # Start Example 23 + cursor = db.inventory.find({"tags": "red"}) + # End Example 23 + + self.assertEqual(len(await cursor.to_list()), 4) + + # Start Example 24 + cursor = db.inventory.find({"dim_cm": {"$gt": 25}}) + # End Example 24 + + self.assertEqual(len(await cursor.to_list()), 1) + + # Start Example 25 + cursor = db.inventory.find({"dim_cm": {"$gt": 15, "$lt": 20}}) + # End Example 25 + + self.assertEqual(len(await cursor.to_list()), 4) + + # Start Example 26 + cursor = db.inventory.find({"dim_cm": {"$elemMatch": {"$gt": 22, "$lt": 30}}}) + # End Example 26 + + self.assertEqual(len(await cursor.to_list()), 1) + + # Start Example 27 + cursor = db.inventory.find({"dim_cm.1": {"$gt": 25}}) + # End Example 27 + + self.assertEqual(len(await cursor.to_list()), 1) + + # Start Example 28 + cursor = db.inventory.find({"tags": {"$size": 3}}) + # End Example 28 + + self.assertEqual(len(await cursor.to_list()), 1) + + async def test_query_array_of_documents(self): + db = self.db + + # Start Example 29 + await db.inventory.insert_many( + [ + { + "item": "journal", + "instock": [ + {"warehouse": "A", "qty": 5}, + {"warehouse": "C", "qty": 15}, + ], + }, + {"item": "notebook", "instock": [{"warehouse": "C", "qty": 5}]}, + { + "item": "paper", + "instock": [ + {"warehouse": "A", "qty": 60}, + {"warehouse": "B", "qty": 15}, + ], + }, + { + "item": "planner", + "instock": [ + {"warehouse": "A", "qty": 40}, + {"warehouse": "B", "qty": 5}, + ], + }, + { + "item": "postcard", + "instock": [ + {"warehouse": "B", "qty": 15}, + {"warehouse": "C", "qty": 35}, + ], + }, + ] + ) + # End Example 29 + + # Start Example 30 + cursor = db.inventory.find({"instock": {"warehouse": "A", "qty": 5}}) + # End Example 30 + + self.assertEqual(len(await cursor.to_list()), 1) + + # Start Example 31 + cursor = db.inventory.find({"instock": {"qty": 5, "warehouse": "A"}}) + # End Example 31 + + self.assertEqual(len(await cursor.to_list()), 0) + + # Start Example 32 + cursor = db.inventory.find({"instock.0.qty": 
{"$lte": 20}}) + # End Example 32 + + self.assertEqual(len(await cursor.to_list()), 3) + + # Start Example 33 + cursor = db.inventory.find({"instock.qty": {"$lte": 20}}) + # End Example 33 + + self.assertEqual(len(await cursor.to_list()), 5) + + # Start Example 34 + cursor = db.inventory.find({"instock": {"$elemMatch": {"qty": 5, "warehouse": "A"}}}) + # End Example 34 + + self.assertEqual(len(await cursor.to_list()), 1) + + # Start Example 35 + cursor = db.inventory.find({"instock": {"$elemMatch": {"qty": {"$gt": 10, "$lte": 20}}}}) + # End Example 35 + + self.assertEqual(len(await cursor.to_list()), 3) + + # Start Example 36 + cursor = db.inventory.find({"instock.qty": {"$gt": 10, "$lte": 20}}) + # End Example 36 + + self.assertEqual(len(await cursor.to_list()), 4) + + # Start Example 37 + cursor = db.inventory.find({"instock.qty": 5, "instock.warehouse": "A"}) + # End Example 37 + + self.assertEqual(len(await cursor.to_list()), 2) + + async def test_query_null(self): + db = self.db + + # Start Example 38 + await db.inventory.insert_many([{"_id": 1, "item": None}, {"_id": 2}]) + # End Example 38 + + # Start Example 39 + cursor = db.inventory.find({"item": None}) + # End Example 39 + + self.assertEqual(len(await cursor.to_list()), 2) + + # Start Example 40 + cursor = db.inventory.find({"item": {"$type": 10}}) + # End Example 40 + + self.assertEqual(len(await cursor.to_list()), 1) + + # Start Example 41 + cursor = db.inventory.find({"item": {"$exists": False}}) + # End Example 41 + + self.assertEqual(len(await cursor.to_list()), 1) + + async def test_projection(self): + db = self.db + + # Start Example 42 + await db.inventory.insert_many( + [ + { + "item": "journal", + "status": "A", + "size": {"h": 14, "w": 21, "uom": "cm"}, + "instock": [{"warehouse": "A", "qty": 5}], + }, + { + "item": "notebook", + "status": "A", + "size": {"h": 8.5, "w": 11, "uom": "in"}, + "instock": [{"warehouse": "C", "qty": 5}], + }, + { + "item": "paper", + "status": "D", + "size": {"h": 8.5, "w": 11, "uom": "in"}, + "instock": [{"warehouse": "A", "qty": 60}], + }, + { + "item": "planner", + "status": "D", + "size": {"h": 22.85, "w": 30, "uom": "cm"}, + "instock": [{"warehouse": "A", "qty": 40}], + }, + { + "item": "postcard", + "status": "A", + "size": {"h": 10, "w": 15.25, "uom": "cm"}, + "instock": [{"warehouse": "B", "qty": 15}, {"warehouse": "C", "qty": 35}], + }, + ] + ) + # End Example 42 + + # Start Example 43 + cursor = db.inventory.find({"status": "A"}) + # End Example 43 + + self.assertEqual(len(await cursor.to_list()), 3) + + # Start Example 44 + cursor = db.inventory.find({"status": "A"}, {"item": 1, "status": 1}) + # End Example 44 + + async for doc in cursor: + self.assertIn("_id", doc) + self.assertIn("item", doc) + self.assertIn("status", doc) + self.assertNotIn("size", doc) + self.assertNotIn("instock", doc) + + # Start Example 45 + cursor = db.inventory.find({"status": "A"}, {"item": 1, "status": 1, "_id": 0}) + # End Example 45 + + async for doc in cursor: + self.assertNotIn("_id", doc) + self.assertIn("item", doc) + self.assertIn("status", doc) + self.assertNotIn("size", doc) + self.assertNotIn("instock", doc) + + # Start Example 46 + cursor = db.inventory.find({"status": "A"}, {"status": 0, "instock": 0}) + # End Example 46 + + async for doc in cursor: + self.assertIn("_id", doc) + self.assertIn("item", doc) + self.assertNotIn("status", doc) + self.assertIn("size", doc) + self.assertNotIn("instock", doc) + + # Start Example 47 + cursor = db.inventory.find({"status": "A"}, {"item": 1, 
"status": 1, "size.uom": 1}) + # End Example 47 + + async for doc in cursor: + self.assertIn("_id", doc) + self.assertIn("item", doc) + self.assertIn("status", doc) + self.assertIn("size", doc) + self.assertNotIn("instock", doc) + size = doc["size"] + self.assertIn("uom", size) + self.assertNotIn("h", size) + self.assertNotIn("w", size) + + # Start Example 48 + cursor = db.inventory.find({"status": "A"}, {"size.uom": 0}) + # End Example 48 + + async for doc in cursor: + self.assertIn("_id", doc) + self.assertIn("item", doc) + self.assertIn("status", doc) + self.assertIn("size", doc) + self.assertIn("instock", doc) + size = doc["size"] + self.assertNotIn("uom", size) + self.assertIn("h", size) + self.assertIn("w", size) + + # Start Example 49 + cursor = db.inventory.find({"status": "A"}, {"item": 1, "status": 1, "instock.qty": 1}) + # End Example 49 + + async for doc in cursor: + self.assertIn("_id", doc) + self.assertIn("item", doc) + self.assertIn("status", doc) + self.assertNotIn("size", doc) + self.assertIn("instock", doc) + for subdoc in doc["instock"]: + self.assertNotIn("warehouse", subdoc) + self.assertIn("qty", subdoc) + + # Start Example 50 + cursor = db.inventory.find( + {"status": "A"}, {"item": 1, "status": 1, "instock": {"$slice": -1}} + ) + # End Example 50 + + async for doc in cursor: + self.assertIn("_id", doc) + self.assertIn("item", doc) + self.assertIn("status", doc) + self.assertNotIn("size", doc) + self.assertIn("instock", doc) + self.assertEqual(len(doc["instock"]), 1) + + async def test_update_and_replace(self): + db = self.db + + # Start Example 51 + await db.inventory.insert_many( + [ + { + "item": "canvas", + "qty": 100, + "size": {"h": 28, "w": 35.5, "uom": "cm"}, + "status": "A", + }, + { + "item": "journal", + "qty": 25, + "size": {"h": 14, "w": 21, "uom": "cm"}, + "status": "A", + }, + { + "item": "mat", + "qty": 85, + "size": {"h": 27.9, "w": 35.5, "uom": "cm"}, + "status": "A", + }, + { + "item": "mousepad", + "qty": 25, + "size": {"h": 19, "w": 22.85, "uom": "cm"}, + "status": "P", + }, + { + "item": "notebook", + "qty": 50, + "size": {"h": 8.5, "w": 11, "uom": "in"}, + "status": "P", + }, + { + "item": "paper", + "qty": 100, + "size": {"h": 8.5, "w": 11, "uom": "in"}, + "status": "D", + }, + { + "item": "planner", + "qty": 75, + "size": {"h": 22.85, "w": 30, "uom": "cm"}, + "status": "D", + }, + { + "item": "postcard", + "qty": 45, + "size": {"h": 10, "w": 15.25, "uom": "cm"}, + "status": "A", + }, + { + "item": "sketchbook", + "qty": 80, + "size": {"h": 14, "w": 21, "uom": "cm"}, + "status": "A", + }, + { + "item": "sketch pad", + "qty": 95, + "size": {"h": 22.85, "w": 30.5, "uom": "cm"}, + "status": "A", + }, + ] + ) + # End Example 51 + + # Start Example 52 + await db.inventory.update_one( + {"item": "paper"}, + {"$set": {"size.uom": "cm", "status": "P"}, "$currentDate": {"lastModified": True}}, + ) + # End Example 52 + + async for doc in db.inventory.find({"item": "paper"}): + self.assertEqual(doc["size"]["uom"], "cm") + self.assertEqual(doc["status"], "P") + self.assertIn("lastModified", doc) + + # Start Example 53 + await db.inventory.update_many( + {"qty": {"$lt": 50}}, + {"$set": {"size.uom": "in", "status": "P"}, "$currentDate": {"lastModified": True}}, + ) + # End Example 53 + + async for doc in db.inventory.find({"qty": {"$lt": 50}}): + self.assertEqual(doc["size"]["uom"], "in") + self.assertEqual(doc["status"], "P") + self.assertIn("lastModified", doc) + + # Start Example 54 + await db.inventory.replace_one( + {"item": "paper"}, + { + "item": 
"paper", + "instock": [{"warehouse": "A", "qty": 60}, {"warehouse": "B", "qty": 40}], + }, + ) + # End Example 54 + + async for doc in db.inventory.find({"item": "paper"}, {"_id": 0}): + self.assertEqual(len(doc.keys()), 2) + self.assertIn("item", doc) + self.assertIn("instock", doc) + self.assertEqual(len(doc["instock"]), 2) + + async def test_delete(self): + db = self.db + + # Start Example 55 + await db.inventory.insert_many( + [ + { + "item": "journal", + "qty": 25, + "size": {"h": 14, "w": 21, "uom": "cm"}, + "status": "A", + }, + { + "item": "notebook", + "qty": 50, + "size": {"h": 8.5, "w": 11, "uom": "in"}, + "status": "P", + }, + { + "item": "paper", + "qty": 100, + "size": {"h": 8.5, "w": 11, "uom": "in"}, + "status": "D", + }, + { + "item": "planner", + "qty": 75, + "size": {"h": 22.85, "w": 30, "uom": "cm"}, + "status": "D", + }, + { + "item": "postcard", + "qty": 45, + "size": {"h": 10, "w": 15.25, "uom": "cm"}, + "status": "A", + }, + ] + ) + # End Example 55 + + self.assertEqual(await db.inventory.count_documents({}), 5) + + # Start Example 57 + await db.inventory.delete_many({"status": "A"}) + # End Example 57 + + self.assertEqual(await db.inventory.count_documents({}), 3) + + # Start Example 58 + await db.inventory.delete_one({"status": "D"}) + # End Example 58 + + self.assertEqual(await db.inventory.count_documents({}), 2) + + # Start Example 56 + await db.inventory.delete_many({}) + # End Example 56 + + self.assertEqual(await db.inventory.count_documents({}), 0) + + @async_client_context.require_change_streams + async def test_change_streams(self): + db = self.db + done = False + + async def insert_docs(): + nonlocal done + while not done: + await db.inventory.insert_one({"username": "alice"}) + await db.inventory.delete_one({"username": "alice"}) + await asyncio.sleep(0.005) + + t = ConcurrentRunner(target=insert_docs) + await t.start() + + try: + # 1. 
The database for reactive, real-time applications + # Start Changestream Example 1 + cursor = await db.inventory.watch() + await anext(cursor) + # End Changestream Example 1 + await cursor.close() + + # Start Changestream Example 2 + cursor = await db.inventory.watch(full_document="updateLookup") + await anext(cursor) + # End Changestream Example 2 + await cursor.close() + + # Start Changestream Example 3 + resume_token = cursor.resume_token + cursor = await db.inventory.watch(resume_after=resume_token) + await anext(cursor) + # End Changestream Example 3 + await cursor.close() + + # Start Changestream Example 4 + pipeline = [ + {"$match": {"fullDocument.username": "alice"}}, + {"$addFields": {"newField": "this is an added field!"}}, + ] + cursor = await db.inventory.watch(pipeline=pipeline) + await anext(cursor) + # End Changestream Example 4 + await cursor.close() + finally: + done = True + await t.join() + + async def test_aggregate_examples(self): + db = self.db + + # Start Aggregation Example 1 + await db.sales.aggregate([{"$match": {"items.fruit": "banana"}}, {"$sort": {"date": 1}}]) + # End Aggregation Example 1 + + # Start Aggregation Example 2 + await db.sales.aggregate( + [ + {"$unwind": "$items"}, + {"$match": {"items.fruit": "banana"}}, + { + "$group": { + "_id": {"day": {"$dayOfWeek": "$date"}}, + "count": {"$sum": "$items.quantity"}, + } + }, + {"$project": {"dayOfWeek": "$_id.day", "numberSold": "$count", "_id": 0}}, + {"$sort": {"numberSold": 1}}, + ] + ) + # End Aggregation Example 2 + + # Start Aggregation Example 3 + await db.sales.aggregate( + [ + {"$unwind": "$items"}, + { + "$group": { + "_id": {"day": {"$dayOfWeek": "$date"}}, + "items_sold": {"$sum": "$items.quantity"}, + "revenue": {"$sum": {"$multiply": ["$items.quantity", "$items.price"]}}, + } + }, + { + "$project": { + "day": "$_id.day", + "revenue": 1, + "items_sold": 1, + "discount": { + "$cond": {"if": {"$lte": ["$revenue", 250]}, "then": 25, "else": 0} + }, + } + }, + ] + ) + # End Aggregation Example 3 + + # Start Aggregation Example 4 + await db.air_alliances.aggregate( + [ + { + "$lookup": { + "from": "air_airlines", + "let": {"constituents": "$airlines"}, + "pipeline": [{"$match": {"$expr": {"$in": ["$name", "$$constituents"]}}}], + "as": "airlines", + } + }, + { + "$project": { + "_id": 0, + "name": 1, + "airlines": { + "$filter": { + "input": "$airlines", + "as": "airline", + "cond": {"$eq": ["$$airline.country", "Canada"]}, + } + }, + } + }, + ] + ) + # End Aggregation Example 4 + + @async_client_context.require_version_min(4, 4) + async def test_aggregate_projection_example(self): + db = self.db + + # Start Aggregation Projection Example 1 + db.inventory.find( + {}, + { + "_id": 0, + "item": 1, + "status": { + "$switch": { + "branches": [ + {"case": {"$eq": ["$status", "A"]}, "then": "Available"}, + {"case": {"$eq": ["$status", "D"]}, "then": "Discontinued"}, + ], + "default": "No status found", + } + }, + "area": { + "$concat": [ + {"$toString": {"$multiply": ["$size.h", "$size.w"]}}, + " ", + "$size.uom", + ] + }, + "reportNumber": {"$literal": 1}, + }, + ) + + # End Aggregation Projection Example 1 + + async def test_commands(self): + db = self.db + await db.restaurants.insert_one({}) + + # Start runCommand Example 1 + await db.command("buildInfo") + # End runCommand Example 1 + + # Start runCommand Example 2 + await db.command("count", "restaurants") + # End runCommand Example 2 + + async def test_index_management(self): + db = self.db + + # Start Index Example 1 + await 
db.records.create_index("score")
+        # End Index Example 1
+
+        # Start Index Example 2
+        await db.restaurants.create_index(
+            [("cuisine", pymongo.ASCENDING), ("name", pymongo.ASCENDING)],
+            partialFilterExpression={"rating": {"$gt": 5}},
+        )
+        # End Index Example 2
+
+    @async_client_context.require_replica_set
+    async def test_misc(self):
+        # Marketing examples
+        client = self.client
+        self.addAsyncCleanup(client.drop_database, "test")
+        self.addAsyncCleanup(client.drop_database, "my_database")
+
+        # 2. Tunable consistency controls
+        collection = client.my_database.my_collection
+        async with client.start_session() as session:
+            await collection.insert_one({"_id": 1}, session=session)
+            await collection.update_one({"_id": 1}, {"$set": {"a": 1}}, session=session)
+            async for _doc in collection.find({}, session=session):
+                pass
+
+        # 3. Exploiting the power of arrays
+        collection = client.test.array_updates_test
+        await collection.update_one(
+            {"_id": 1}, {"$set": {"a.$[i].b": 2}}, array_filters=[{"i.b": 0}]
+        )
+
+
+class TestTransactionExamples(AsyncIntegrationTest):
+    @async_client_context.require_transactions
+    async def test_transactions(self):
+        # Transaction examples
+        client = self.client
+        self.addAsyncCleanup(client.drop_database, "hr")
+        self.addAsyncCleanup(client.drop_database, "reporting")
+
+        employees = client.hr.employees
+        events = client.reporting.events
+        await employees.insert_one({"employee": 3, "status": "Active"})
+        await events.insert_one({"employee": 3, "status": {"new": "Active", "old": None}})
+
+        # Start Transactions Intro Example 1
+
+        async def update_employee_info(session):
+            employees_coll = session.client.hr.employees
+            events_coll = session.client.reporting.events
+
+            async with await session.start_transaction(
+                read_concern=ReadConcern("snapshot"), write_concern=WriteConcern(w="majority")
+            ):
+                await employees_coll.update_one(
+                    {"employee": 3}, {"$set": {"status": "Inactive"}}, session=session
+                )
+                await events_coll.insert_one(
+                    {"employee": 3, "status": {"new": "Inactive", "old": "Active"}}, session=session
+                )
+
+                while True:
+                    try:
+                        # Commit uses write concern set at transaction start.
+                        await session.commit_transaction()
+                        print("Transaction committed.")
+                        break
+                    except (ConnectionFailure, OperationFailure) as exc:
+                        # Can retry commit
+                        if exc.has_error_label("UnknownTransactionCommitResult"):
+                            print("UnknownTransactionCommitResult, retrying commit operation ...")
+                            continue
+                        else:
+                            print("Error during commit ...")
+                            raise
+
+        # End Transactions Intro Example 1
+
+        async with client.start_session() as session:
+            await update_employee_info(session)
+
+        employee = await employees.find_one({"employee": 3})
+        assert employee is not None
+        self.assertIsNotNone(employee)
+        self.assertEqual(employee["status"], "Inactive")
+
+        # Start Transactions Retry Example 1
+        async def run_transaction_with_retry(txn_func, session):
+            while True:
+                try:
+                    await txn_func(session)  # performs transaction
+                    break
+                except (ConnectionFailure, OperationFailure) as exc:
+                    print("Transaction aborted. Caught exception during transaction.")
+
+                    # If transient error, retry the whole transaction
+                    if exc.has_error_label("TransientTransactionError"):
+                        print("TransientTransactionError, retrying transaction ...")
+                        continue
+                    else:
+                        raise
+
+        # End Transactions Retry Example 1
+
+        async with client.start_session() as session:
+            await run_transaction_with_retry(update_employee_info, session)
+
+        employee = await employees.find_one({"employee": 3})
+        assert employee is not None
+        self.assertIsNotNone(employee)
+        self.assertEqual(employee["status"], "Inactive")
+
+        # Start Transactions Retry Example 2
+        async def commit_with_retry(session):
+            while True:
+                try:
+                    # Commit uses write concern set at transaction start.
+                    await session.commit_transaction()
+                    print("Transaction committed.")
+                    break
+                except (ConnectionFailure, OperationFailure) as exc:
+                    # Can retry commit
+                    if exc.has_error_label("UnknownTransactionCommitResult"):
+                        print("UnknownTransactionCommitResult, retrying commit operation ...")
+                        continue
+                    else:
+                        print("Error during commit ...")
+                        raise
+
+        # End Transactions Retry Example 2
+
+        # Test commit_with_retry from the previous examples
+        async def _insert_employee_retry_commit(session):
+            async with await session.start_transaction():
+                await employees.insert_one({"employee": 4, "status": "Active"}, session=session)
+                await events.insert_one(
+                    {"employee": 4, "status": {"new": "Active", "old": None}}, session=session
+                )
+
+                await commit_with_retry(session)
+
+        async with client.start_session() as session:
+            await run_transaction_with_retry(_insert_employee_retry_commit, session)
+
+        employee = await employees.find_one({"employee": 4})
+        assert employee is not None
+        self.assertIsNotNone(employee)
+        self.assertEqual(employee["status"], "Active")
+
+        # Start Transactions Retry Example 3
+
+        async def run_transaction_with_retry(txn_func, session):
+            while True:
+                try:
+                    await txn_func(session)  # performs transaction
+                    break
+                except (ConnectionFailure, OperationFailure) as exc:
+                    # If transient error, retry the whole transaction
+                    if exc.has_error_label("TransientTransactionError"):
+                        print("TransientTransactionError, retrying transaction ...")
+                        continue
+                    else:
+                        raise
+
+        async def commit_with_retry(session):
+            while True:
+                try:
+                    # Commit uses write concern set at transaction start.
+                    await session.commit_transaction()
+                    print("Transaction committed.")
+                    break
+                except (ConnectionFailure, OperationFailure) as exc:
+                    # Can retry commit
+                    if exc.has_error_label("UnknownTransactionCommitResult"):
+                        print("UnknownTransactionCommitResult, retrying commit operation ...")
+                        continue
+                    else:
+                        print("Error during commit ...")
+                        raise
+
+        # Updates two collections in a transaction
+
+        async def update_employee_info(session):
+            employees_coll = session.client.hr.employees
+            events_coll = session.client.reporting.events
+
+            async with await session.start_transaction(
+                read_concern=ReadConcern("snapshot"),
+                write_concern=WriteConcern(w="majority"),
+                read_preference=ReadPreference.PRIMARY,
+            ):
+                await employees_coll.update_one(
+                    {"employee": 3}, {"$set": {"status": "Inactive"}}, session=session
+                )
+                await events_coll.insert_one(
+                    {"employee": 3, "status": {"new": "Inactive", "old": "Active"}}, session=session
+                )
+
+                await commit_with_retry(session)
+
+        # Start a session.
+        async with client.start_session() as session:
+            try:
+                await run_transaction_with_retry(update_employee_info, session)
+            except Exception:
+                # Do something with error.
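+                # This example simply re-raises; a real application would log the failure first.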
+                raise
+
+        # End Transactions Retry Example 3
+
+        employee = await employees.find_one({"employee": 3})
+        assert employee is not None
+        self.assertIsNotNone(employee)
+        self.assertEqual(employee["status"], "Inactive")
+
+        async def MongoClient(_):
+            return await self.async_rs_client()
+
+        uriString = None
+
+        # Start Transactions withTxn API Example 1
+
+        # For a replica set, include the replica set name and a seedlist of the members in the URI string; e.g.
+        # uriString = 'mongodb://mongodb0.example.com:27017,mongodb1.example.com:27017/?replicaSet=myRepl'
+        # For a sharded cluster, connect to the mongos instances; e.g.
+        # uriString = 'mongodb://mongos0.example.com:27017,mongos1.example.com:27017/'
+
+        client = await MongoClient(uriString)
+        wc_majority = WriteConcern("majority", wtimeout=1000)
+
+        # Prereq: Create collections.
+        await client.get_database("mydb1", write_concern=wc_majority).foo.insert_one({"abc": 0})
+        await client.get_database("mydb2", write_concern=wc_majority).bar.insert_one({"xyz": 0})
+
+        # Step 1: Define the callback that specifies the sequence of operations to perform inside the transaction.
+        async def callback(session):
+            collection_one = session.client.mydb1.foo
+            collection_two = session.client.mydb2.bar
+
+            # Important: You must pass the session to the operations.
+            await collection_one.insert_one({"abc": 1}, session=session)
+            await collection_two.insert_one({"xyz": 999}, session=session)
+
+        # Step 2: Start a client session.
+        async with client.start_session() as session:
+            # Step 3: Use with_transaction to start a transaction, execute the callback, and commit (or abort on error).
+            await session.with_transaction(callback)
+
+        # End Transactions withTxn API Example 1
+
+
+class TestCausalConsistencyExamples(AsyncIntegrationTest):
+    @async_client_context.require_secondaries_count(1)
+    async def test_causal_consistency(self):
+        # Causal consistency examples
+        client = self.client
+        self.addAsyncCleanup(client.drop_database, "test")
+        await client.test.drop_collection("items")
+        await client.test.items.insert_one(
+            {"sku": "111", "name": "Peanuts", "start": datetime.datetime.today()}
+        )
+
+        # Start Causal Consistency Example 1
+        async with client.start_session(causal_consistency=True) as s1:
+            current_date = datetime.datetime.today()
+            items = client.get_database(
+                "test",
+                read_concern=ReadConcern("majority"),
+                write_concern=WriteConcern("majority", wtimeout=1000),
+            ).items
+            await items.update_one(
+                {"sku": "111", "end": None}, {"$set": {"end": current_date}}, session=s1
+            )
+            await items.insert_one(
+                {"sku": "nuts-111", "name": "Pecans", "start": current_date}, session=s1
+            )
+        # End Causal Consistency Example 1
+
+        assert s1.cluster_time is not None
+        assert s1.operation_time is not None
+
+        # Start Causal Consistency Example 2
+        async with client.start_session(causal_consistency=True) as s2:
+            s2.advance_cluster_time(s1.cluster_time)
+            s2.advance_operation_time(s1.operation_time)
+
+            items = client.get_database(
+                "test",
+                read_preference=ReadPreference.SECONDARY,
+                read_concern=ReadConcern("majority"),
+                write_concern=WriteConcern("majority", wtimeout=1000),
+            ).items
+            async for item in items.find({"end": None}, session=s2):
+                print(item)
+        # End Causal Consistency Example 2
+
+
+class TestVersionedApiExamples(AsyncIntegrationTest):
+    @async_client_context.require_version_min(4, 7)
+    async def test_versioned_api(self):
+        # Versioned API examples
+        async def MongoClient(_, server_api):
+            return await self.async_rs_client(server_api=server_api,
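+                # connect=False defers any real connection; these examples only need client construction.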
connect=False) + + uri = None + + # Start Versioned API Example 1 + from pymongo.server_api import ServerApi + + await MongoClient(uri, server_api=ServerApi("1")) + # End Versioned API Example 1 + + # Start Versioned API Example 2 + await MongoClient(uri, server_api=ServerApi("1", strict=True)) + # End Versioned API Example 2 + + # Start Versioned API Example 3 + await MongoClient(uri, server_api=ServerApi("1", strict=False)) + # End Versioned API Example 3 + + # Start Versioned API Example 4 + await MongoClient(uri, server_api=ServerApi("1", deprecation_errors=True)) + # End Versioned API Example 4 + + @unittest.skip("PYTHON-3167 count has been added to API version 1") + @async_client_context.require_version_min(4, 7) + async def test_versioned_api_migration(self): + # SERVER-58785 + if await async_client_context.is_topology_type( + ["sharded"] + ) and not async_client_context.version.at_least(5, 0, 2): + self.skipTest("This test needs MongoDB 5.0.2 or newer") + + client = await self.async_rs_client(server_api=ServerApi("1", strict=True)) + await client.db.sales.drop() + + # Start Versioned API Example 5 + def strptime(s): + return datetime.datetime.strptime(s, "%Y-%m-%dT%H:%M:%SZ") + + await client.db.sales.insert_many( + [ + { + "_id": 1, + "item": "abc", + "price": 10, + "quantity": 2, + "date": strptime("2021-01-01T08:00:00Z"), + }, + { + "_id": 2, + "item": "jkl", + "price": 20, + "quantity": 1, + "date": strptime("2021-02-03T09:00:00Z"), + }, + { + "_id": 3, + "item": "xyz", + "price": 5, + "quantity": 5, + "date": strptime("2021-02-03T09:05:00Z"), + }, + { + "_id": 4, + "item": "abc", + "price": 10, + "quantity": 10, + "date": strptime("2021-02-15T08:00:00Z"), + }, + { + "_id": 5, + "item": "xyz", + "price": 5, + "quantity": 10, + "date": strptime("2021-02-15T09:05:00Z"), + }, + { + "_id": 6, + "item": "xyz", + "price": 5, + "quantity": 5, + "date": strptime("2021-02-15T12:05:10Z"), + }, + { + "_id": 7, + "item": "xyz", + "price": 5, + "quantity": 10, + "date": strptime("2021-02-15T14:12:12Z"), + }, + { + "_id": 8, + "item": "abc", + "price": 10, + "quantity": 5, + "date": strptime("2021-03-16T20:20:13Z"), + }, + ] + ) + # End Versioned API Example 5 + + with self.assertRaisesRegex( + OperationFailure, + "Provided apiStrict:true, but the command count is not in API Version 1", + ): + await client.db.command("count", "sales", query={}) + # Start Versioned API Example 6 + # pymongo.errors.OperationFailure: Provided apiStrict:true, but the command count is not in API Version 1, full error: {'ok': 0.0, 'errmsg': 'Provided apiStrict:true, but the command count is not in API Version 1', 'code': 323, 'codeName': 'APIStrictError'} + # End Versioned API Example 6 + + # Start Versioned API Example 7 + await client.db.sales.count_documents({}) + # End Versioned API Example 7 + + # Start Versioned API Example 8 + # 8 + # End Versioned API Example 8 + + +class TestSnapshotQueryExamples(AsyncIntegrationTest): + @async_client_context.require_version_min(5, 0) + async def test_snapshot_query(self): + client = self.client + + if not await async_client_context.is_topology_type(["replicaset", "sharded"]): + self.skipTest("Must be a sharded or replicaset") + + self.addAsyncCleanup(client.drop_database, "pets") + db = client.pets + await db.drop_collection("cats") + await db.drop_collection("dogs") + await db.cats.insert_one( + {"name": "Whiskers", "color": "white", "age": 10, "adoptable": True} + ) + await db.dogs.insert_one( + {"name": "Pebbles", "color": "Brown", "age": 10, "adoptable": True} + ) + 
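+        # Snapshot reads can lag behind these inserts; poll until both collections are readable
+        # at a snapshot timestamp (see check_for_snapshot below).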
+ async def predicate_one(): + return await self.check_for_snapshot(db.cats) + + async def predicate_two(): + return await self.check_for_snapshot(db.dogs) + + await async_wait_until(predicate_two, "success") + await async_wait_until(predicate_one, "success") + + # Start Snapshot Query Example 1 + + db = client.pets + async with client.start_session(snapshot=True) as s: + adoptablePetsCount = ( + await ( + await db.cats.aggregate( + [{"$match": {"adoptable": True}}, {"$count": "adoptableCatsCount"}], + session=s, + ) + ).next() + )["adoptableCatsCount"] + + adoptablePetsCount += ( + await ( + await db.dogs.aggregate( + [{"$match": {"adoptable": True}}, {"$count": "adoptableDogsCount"}], + session=s, + ) + ).next() + )["adoptableDogsCount"] + + print(adoptablePetsCount) + + # End Snapshot Query Example 1 + db = client.retail + self.addAsyncCleanup(client.drop_database, "retail") + await db.drop_collection("sales") + + saleDate = datetime.datetime.now() + await db.sales.insert_one({"shoeType": "boot", "price": 30, "saleDate": saleDate}) + + async def predicate_three(): + return await self.check_for_snapshot(db.sales) + + await async_wait_until(predicate_three, "success") + + # Start Snapshot Query Example 2 + db = client.retail + async with client.start_session(snapshot=True) as s: + _ = ( + await ( + await db.sales.aggregate( + [ + { + "$match": { + "$expr": { + "$gt": [ + "$saleDate", + { + "$dateSubtract": { + "startDate": "$$NOW", + "unit": "day", + "amount": 1, + } + }, + ] + } + } + }, + {"$count": "totalDailySales"}, + ], + session=s, + ) + ).next() + )["totalDailySales"] + + # End Snapshot Query Example 2 + + async def check_for_snapshot(self, collection): + """Wait for snapshot reads to become available to prevent this error: + [246:SnapshotUnavailable]: Unable to read from a snapshot due to pending collection catalog changes; please retry the operation. Snapshot timestamp is Timestamp(1646666892, 4). Collection minimum is Timestamp(1646666892, 5) (on localhost:27017, modern retry, attempt 1) + From https://github.com/mongodb/mongo-ruby-driver/commit/7c4117b58e3d12e237f7536f7521e18fc15f79ac + """ + async with self.client.start_session(snapshot=True) as s: + try: + if await collection.find_one(session=s): + return True + return False + except OperationFailure as e: + # Retry them as the server demands... 
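+                # Error code 246 is SnapshotUnavailable; returning False lets async_wait_until retry the read.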
+ if e.code == 246: # SnapshotUnavailable + return False + raise + + +if __name__ == "__main__": + unittest.main() diff --git a/test/asynchronous/test_grid_file.py b/test/asynchronous/test_grid_file.py index affdacde91..2a7e9e1f9d 100644 --- a/test/asynchronous/test_grid_file.py +++ b/test/asynchronous/test_grid_file.py @@ -33,7 +33,7 @@ sys.path[0:0] = [""] -from test.utils import OvertCommandListener +from test.utils_shared import OvertCommandListener from bson.objectid import ObjectId from gridfs.asynchronous.grid_file import ( @@ -47,7 +47,6 @@ ) from gridfs.errors import NoFile from pymongo import AsyncMongoClient -from pymongo.asynchronous.helpers import aiter, anext from pymongo.errors import ConfigurationError, ServerSelectionTimeoutError from pymongo.message import _CursorAddress @@ -150,7 +149,7 @@ async def test_grid_in_default_opts(self): a = AsyncGridIn(self.db.fs) - self.assertTrue(isinstance(a._id, ObjectId)) + self.assertIsInstance(a._id, ObjectId) self.assertRaises(AttributeError, setattr, a, "_id", 5) self.assertEqual(None, a.filename) @@ -195,7 +194,7 @@ async def test_grid_in_default_opts(self): self.assertEqual(42, a.forty_two) - self.assertTrue(isinstance(a._id, ObjectId)) + self.assertIsInstance(a._id, ObjectId) self.assertRaises(AttributeError, setattr, a, "_id", 5) self.assertEqual("my_file", a.filename) @@ -209,7 +208,7 @@ async def test_grid_in_default_opts(self): self.assertEqual(255 * 1024, a.chunk_size) self.assertRaises(AttributeError, setattr, a, "chunk_size", 5) - self.assertTrue(isinstance(a.upload_date, datetime.datetime)) + self.assertIsInstance(a.upload_date, datetime.datetime) self.assertRaises(AttributeError, setattr, a, "upload_date", 5) self.assertEqual(["foo"], a.aliases) @@ -248,7 +247,7 @@ async def test_grid_out_default_opts(self): self.assertEqual(None, b.name) self.assertEqual(None, b.filename) self.assertEqual(255 * 1024, b.chunk_size) - self.assertTrue(isinstance(b.upload_date, datetime.datetime)) + self.assertIsInstance(b.upload_date, datetime.datetime) self.assertEqual(None, b.aliases) self.assertEqual(None, b.metadata) self.assertEqual(None, b.md5) @@ -309,7 +308,7 @@ async def test_grid_out_custom_opts(self): self.assertEqual(11, two.length) self.assertEqual("text/html", two.content_type) self.assertEqual(1000, two.chunk_size) - self.assertTrue(isinstance(two.upload_date, datetime.datetime)) + self.assertIsInstance(two.upload_date, datetime.datetime) self.assertEqual(["foo"], two.aliases) self.assertEqual({"foo": 1, "bar": 2}, two.metadata) self.assertEqual(3, two.bar) diff --git a/test/asynchronous/test_gridfs.py b/test/asynchronous/test_gridfs.py new file mode 100644 index 0000000000..f60352f3cb --- /dev/null +++ b/test/asynchronous/test_gridfs.py @@ -0,0 +1,603 @@ +# +# Copyright 2009-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Tests for the gridfs package.""" +from __future__ import annotations + +import asyncio +import datetime +import sys +import threading +import time +from io import BytesIO +from test.asynchronous.helpers import ConcurrentRunner +from unittest.mock import patch + +sys.path[0:0] = [""] + +from test.asynchronous import AsyncIntegrationTest, async_client_context, unittest +from test.asynchronous.utils import async_joinall +from test.utils_shared import one + +import gridfs +from bson.binary import Binary +from gridfs.asynchronous.grid_file import DEFAULT_CHUNK_SIZE, AsyncGridOutCursor +from gridfs.errors import CorruptGridFile, FileExists, NoFile +from pymongo.asynchronous.database import AsyncDatabase +from pymongo.asynchronous.mongo_client import AsyncMongoClient +from pymongo.errors import ( + ConfigurationError, + NotPrimaryError, + ServerSelectionTimeoutError, +) +from pymongo.read_preferences import ReadPreference + +_IS_SYNC = False + + +class JustWrite(ConcurrentRunner): + def __init__(self, fs, n): + super().__init__() + self.fs = fs + self.n = n + self.daemon = True + + async def run(self): + for _ in range(self.n): + file = self.fs.new_file(filename="test") + await file.write(b"hello") + await file.close() + + +class JustRead(ConcurrentRunner): + def __init__(self, fs, n, results): + super().__init__() + self.fs = fs + self.n = n + self.results = results + self.daemon = True + + async def run(self): + for _ in range(self.n): + file = await self.fs.get("test") + data = await file.read() + self.results.append(data) + assert data == b"hello" + + +class TestGridfsNoConnect(unittest.IsolatedAsyncioTestCase): + db: AsyncDatabase + + async def asyncSetUp(self): + await super().asyncSetUp() + self.db = AsyncMongoClient(connect=False).pymongo_test + + async def test_gridfs(self): + self.assertRaises(TypeError, gridfs.AsyncGridFS, "foo") + self.assertRaises(TypeError, gridfs.AsyncGridFS, self.db, 5) + + +class TestGridfs(AsyncIntegrationTest): + fs: gridfs.AsyncGridFS + alt: gridfs.AsyncGridFS + + async def asyncSetUp(self): + await super().asyncSetUp() + self.fs = gridfs.AsyncGridFS(self.db) + self.alt = gridfs.AsyncGridFS(self.db, "alt") + await self.cleanup_colls( + self.db.fs.files, self.db.fs.chunks, self.db.alt.files, self.db.alt.chunks + ) + + async def test_basic(self): + oid = await self.fs.put(b"hello world") + self.assertEqual(b"hello world", await (await self.fs.get(oid)).read()) + self.assertEqual(1, await self.db.fs.files.count_documents({})) + self.assertEqual(1, await self.db.fs.chunks.count_documents({})) + + await self.fs.delete(oid) + with self.assertRaises(NoFile): + await self.fs.get(oid) + self.assertEqual(0, await self.db.fs.files.count_documents({})) + self.assertEqual(0, await self.db.fs.chunks.count_documents({})) + + with self.assertRaises(NoFile): + await self.fs.get("foo") + oid = await self.fs.put(b"hello world", _id="foo") + self.assertEqual("foo", oid) + self.assertEqual(b"hello world", await (await self.fs.get("foo")).read()) + + async def test_multi_chunk_delete(self): + await self.db.fs.drop() + self.assertEqual(0, await self.db.fs.files.count_documents({})) + self.assertEqual(0, await self.db.fs.chunks.count_documents({})) + gfs = gridfs.AsyncGridFS(self.db) + oid = await gfs.put(b"hello", chunkSize=1) + self.assertEqual(1, await self.db.fs.files.count_documents({})) + self.assertEqual(5, await self.db.fs.chunks.count_documents({})) + await gfs.delete(oid) + self.assertEqual(0, await self.db.fs.files.count_documents({})) + self.assertEqual(0, await 
self.db.fs.chunks.count_documents({})) + + async def test_list(self): + self.assertEqual([], await self.fs.list()) + await self.fs.put(b"hello world") + self.assertEqual([], await self.fs.list()) + + # PYTHON-598: in server versions before 2.5.x, creating an index on + # filename, uploadDate causes list() to include None. + await self.fs.get_last_version() + self.assertEqual([], await self.fs.list()) + + await self.fs.put(b"", filename="mike") + await self.fs.put(b"foo", filename="test") + await self.fs.put(b"", filename="hello world") + + self.assertEqual({"mike", "test", "hello world"}, set(await self.fs.list())) + + async def test_empty_file(self): + oid = await self.fs.put(b"") + self.assertEqual(b"", await (await self.fs.get(oid)).read()) + self.assertEqual(1, await self.db.fs.files.count_documents({})) + self.assertEqual(0, await self.db.fs.chunks.count_documents({})) + + raw = await self.db.fs.files.find_one() + assert raw is not None + self.assertEqual(0, raw["length"]) + self.assertEqual(oid, raw["_id"]) + self.assertIsInstance(raw["uploadDate"], datetime.datetime) + self.assertEqual(255 * 1024, raw["chunkSize"]) + self.assertNotIn("md5", raw) + + async def test_corrupt_chunk(self): + files_id = await self.fs.put(b"foobar") + await self.db.fs.chunks.update_one( + {"files_id": files_id}, {"$set": {"data": Binary(b"foo", 0)}} + ) + try: + out = await self.fs.get(files_id) + with self.assertRaises(CorruptGridFile): + await out.read() + + out = await self.fs.get(files_id) + with self.assertRaises(CorruptGridFile): + await out.readline() + finally: + await self.fs.delete(files_id) + + async def test_put_ensures_index(self): + chunks = self.db.fs.chunks + files = self.db.fs.files + # Ensure the collections are removed. + await chunks.drop() + await files.drop() + await self.fs.put(b"junk") + + self.assertTrue( + any( + info.get("key") == [("files_id", 1), ("n", 1)] + for info in (await chunks.index_information()).values() + ) + ) + self.assertTrue( + any( + info.get("key") == [("filename", 1), ("uploadDate", 1)] + for info in (await files.index_information()).values() + ) + ) + + async def test_alt_collection(self): + oid = await self.alt.put(b"hello world") + self.assertEqual(b"hello world", await (await self.alt.get(oid)).read()) + self.assertEqual(1, await self.db.alt.files.count_documents({})) + self.assertEqual(1, await self.db.alt.chunks.count_documents({})) + + await self.alt.delete(oid) + with self.assertRaises(NoFile): + await self.alt.get(oid) + self.assertEqual(0, await self.db.alt.files.count_documents({})) + self.assertEqual(0, await self.db.alt.chunks.count_documents({})) + + with self.assertRaises(NoFile): + await self.alt.get("foo") + oid = await self.alt.put(b"hello world", _id="foo") + self.assertEqual("foo", oid) + self.assertEqual(b"hello world", await (await self.alt.get("foo")).read()) + + await self.alt.put(b"", filename="mike") + await self.alt.put(b"foo", filename="test") + await self.alt.put(b"", filename="hello world") + + self.assertEqual({"mike", "test", "hello world"}, set(await self.alt.list())) + + async def test_threaded_reads(self): + await self.fs.put(b"hello", _id="test") + + tasks = [] + results: list = [] + for i in range(10): + tasks.append(JustRead(self.fs, 10, results)) + await tasks[i].start() + + await async_joinall(tasks) + + self.assertEqual(100 * [b"hello"], results) + + async def test_threaded_writes(self): + tasks = [] + for i in range(10): + tasks.append(JustWrite(self.fs, 10)) + await tasks[i].start() + + await async_joinall(tasks) + + f 
= await self.fs.get_last_version("test") + self.assertEqual(await f.read(), b"hello") + + # Should have created 100 versions of 'test' file + self.assertEqual(100, await self.db.fs.files.count_documents({"filename": "test"})) + + async def test_get_last_version(self): + one = await self.fs.put(b"foo", filename="test") + await asyncio.sleep(0.01) + two = self.fs.new_file(filename="test") + await two.write(b"bar") + await two.close() + await asyncio.sleep(0.01) + two = two._id + three = await self.fs.put(b"baz", filename="test") + + self.assertEqual(b"baz", await (await self.fs.get_last_version("test")).read()) + await self.fs.delete(three) + self.assertEqual(b"bar", await (await self.fs.get_last_version("test")).read()) + await self.fs.delete(two) + self.assertEqual(b"foo", await (await self.fs.get_last_version("test")).read()) + await self.fs.delete(one) + with self.assertRaises(NoFile): + await self.fs.get_last_version("test") + + async def test_get_last_version_with_metadata(self): + one = await self.fs.put(b"foo", filename="test", author="author") + await asyncio.sleep(0.01) + two = await self.fs.put(b"bar", filename="test", author="author") + + self.assertEqual(b"bar", await (await self.fs.get_last_version(author="author")).read()) + await self.fs.delete(two) + self.assertEqual(b"foo", await (await self.fs.get_last_version(author="author")).read()) + await self.fs.delete(one) + + one = await self.fs.put(b"foo", filename="test", author="author1") + await asyncio.sleep(0.01) + two = await self.fs.put(b"bar", filename="test", author="author2") + + self.assertEqual(b"foo", await (await self.fs.get_last_version(author="author1")).read()) + self.assertEqual(b"bar", await (await self.fs.get_last_version(author="author2")).read()) + self.assertEqual(b"bar", await (await self.fs.get_last_version(filename="test")).read()) + + with self.assertRaises(NoFile): + await self.fs.get_last_version(author="author3") + with self.assertRaises(NoFile): + await self.fs.get_last_version(filename="nottest", author="author1") + + await self.fs.delete(one) + await self.fs.delete(two) + + async def test_get_version(self): + await self.fs.put(b"foo", filename="test") + await asyncio.sleep(0.01) + await self.fs.put(b"bar", filename="test") + await asyncio.sleep(0.01) + await self.fs.put(b"baz", filename="test") + await asyncio.sleep(0.01) + + self.assertEqual(b"foo", await (await self.fs.get_version("test", 0)).read()) + self.assertEqual(b"bar", await (await self.fs.get_version("test", 1)).read()) + self.assertEqual(b"baz", await (await self.fs.get_version("test", 2)).read()) + + self.assertEqual(b"baz", await (await self.fs.get_version("test", -1)).read()) + self.assertEqual(b"bar", await (await self.fs.get_version("test", -2)).read()) + self.assertEqual(b"foo", await (await self.fs.get_version("test", -3)).read()) + + with self.assertRaises(NoFile): + await self.fs.get_version("test", 3) + with self.assertRaises(NoFile): + await self.fs.get_version("test", -4) + + async def test_get_version_with_metadata(self): + one = await self.fs.put(b"foo", filename="test", author="author1") + await asyncio.sleep(0.01) + two = await self.fs.put(b"bar", filename="test", author="author1") + await asyncio.sleep(0.01) + three = await self.fs.put(b"baz", filename="test", author="author2") + + self.assertEqual( + b"foo", + await (await self.fs.get_version(filename="test", author="author1", version=-2)).read(), + ) + self.assertEqual( + b"bar", + await (await self.fs.get_version(filename="test", author="author1", 
version=-1)).read(), + ) + self.assertEqual( + b"foo", + await (await self.fs.get_version(filename="test", author="author1", version=0)).read(), + ) + self.assertEqual( + b"bar", + await (await self.fs.get_version(filename="test", author="author1", version=1)).read(), + ) + self.assertEqual( + b"baz", + await (await self.fs.get_version(filename="test", author="author2", version=0)).read(), + ) + self.assertEqual( + b"baz", await (await self.fs.get_version(filename="test", version=-1)).read() + ) + self.assertEqual( + b"baz", await (await self.fs.get_version(filename="test", version=2)).read() + ) + + with self.assertRaises(NoFile): + await self.fs.get_version(filename="test", author="author3") + with self.assertRaises(NoFile): + await self.fs.get_version(filename="test", author="author1", version=2) + + await self.fs.delete(one) + await self.fs.delete(two) + await self.fs.delete(three) + + async def test_put_filelike(self): + oid = await self.fs.put(BytesIO(b"hello world"), chunk_size=1) + self.assertEqual(11, await self.db.fs.chunks.count_documents({})) + self.assertEqual(b"hello world", await (await self.fs.get(oid)).read()) + + async def test_file_exists(self): + oid = await self.fs.put(b"hello") + with self.assertRaises(FileExists): + await self.fs.put(b"world", _id=oid) + + one = self.fs.new_file(_id=123) + await one.write(b"some content") + await one.close() + + # Attempt to upload a file with more chunks to the same _id. + with patch("gridfs.asynchronous.grid_file._UPLOAD_BUFFER_SIZE", DEFAULT_CHUNK_SIZE): + two = self.fs.new_file(_id=123) + with self.assertRaises(FileExists): + await two.write(b"x" * DEFAULT_CHUNK_SIZE * 3) + # Original file is still readable (no extra chunks were uploaded). + self.assertEqual(await (await self.fs.get(123)).read(), b"some content") + + two = self.fs.new_file(_id=123) + await two.write(b"some content") + with self.assertRaises(FileExists): + await two.close() + # Original file is still readable. 
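+        # (Clarifying note, grounded in the assertion below: the failed
+        # close() aborted the second upload without replacing the stored
+        # file, so _id 123 still maps to the original b"some content".)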
+ self.assertEqual(await (await self.fs.get(123)).read(), b"some content") + + async def test_exists(self): + oid = await self.fs.put(b"hello") + self.assertTrue(await self.fs.exists(oid)) + self.assertTrue(await self.fs.exists({"_id": oid})) + self.assertTrue(await self.fs.exists(_id=oid)) + + self.assertFalse(await self.fs.exists(filename="mike")) + self.assertFalse(await self.fs.exists("mike")) + + oid = await self.fs.put(b"hello", filename="mike", foo=12) + self.assertTrue(await self.fs.exists(oid)) + self.assertTrue(await self.fs.exists({"_id": oid})) + self.assertTrue(await self.fs.exists(_id=oid)) + self.assertTrue(await self.fs.exists(filename="mike")) + self.assertTrue(await self.fs.exists({"filename": "mike"})) + self.assertTrue(await self.fs.exists(foo=12)) + self.assertTrue(await self.fs.exists({"foo": 12})) + self.assertTrue(await self.fs.exists(foo={"$gt": 11})) + self.assertTrue(await self.fs.exists({"foo": {"$gt": 11}})) + + self.assertFalse(await self.fs.exists(foo=13)) + self.assertFalse(await self.fs.exists({"foo": 13})) + self.assertFalse(await self.fs.exists(foo={"$gt": 12})) + self.assertFalse(await self.fs.exists({"foo": {"$gt": 12}})) + + async def test_put_unicode(self): + with self.assertRaises(TypeError): + await self.fs.put("hello") + + oid = await self.fs.put("hello", encoding="utf-8") + self.assertEqual(b"hello", await (await self.fs.get(oid)).read()) + self.assertEqual("utf-8", (await self.fs.get(oid)).encoding) + + oid = await self.fs.put("aé", encoding="iso-8859-1") + self.assertEqual("aé".encode("iso-8859-1"), await (await self.fs.get(oid)).read()) + self.assertEqual("iso-8859-1", (await self.fs.get(oid)).encoding) + + async def test_missing_length_iter(self): + # Test fix that guards against PHP-237 + await self.fs.put(b"", filename="empty") + doc = await self.db.fs.files.find_one({"filename": "empty"}) + assert doc is not None + doc.pop("length") + await self.db.fs.files.replace_one({"_id": doc["_id"]}, doc) + f = await self.fs.get_last_version(filename="empty") + + async def iterate_file(grid_file): + async for _chunk in grid_file: + pass + return True + + self.assertTrue(await iterate_file(f)) + + async def test_gridfs_lazy_connect(self): + client = await self.async_single_client( + "badhost", connect=False, serverSelectionTimeoutMS=10 + ) + db = client.db + gfs = gridfs.AsyncGridFS(db) + with self.assertRaises(ServerSelectionTimeoutError): + await gfs.list() + + fs = gridfs.AsyncGridFS(db) + f = fs.new_file() + with self.assertRaises(ServerSelectionTimeoutError): + await f.close() + + async def test_gridfs_find(self): + await self.fs.put(b"test2", filename="two") + await asyncio.sleep(0.01) + await self.fs.put(b"test2+", filename="two") + await asyncio.sleep(0.01) + await self.fs.put(b"test1", filename="one") + await asyncio.sleep(0.01) + await self.fs.put(b"test2++", filename="two") + files = self.db.fs.files + self.assertEqual(3, await files.count_documents({"filename": "two"})) + self.assertEqual(4, await files.count_documents({})) + cursor = self.fs.find(no_cursor_timeout=False).sort("uploadDate", -1).skip(1).limit(2) + gout = await cursor.next() + self.assertEqual(b"test1", await gout.read()) + await cursor.rewind() + gout = await cursor.next() + self.assertEqual(b"test1", await gout.read()) + gout = await cursor.next() + self.assertEqual(b"test2+", await gout.read()) + with self.assertRaises(StopAsyncIteration): + await cursor.__anext__() + await cursor.rewind() + items = await cursor.to_list() + self.assertEqual(len(items), 2) + await 
cursor.rewind() + items = await cursor.to_list(1) + self.assertEqual(len(items), 1) + await cursor.close() + self.assertRaises(TypeError, self.fs.find, {}, {"_id": True}) + + async def test_delete_not_initialized(self): + # Creating a cursor with invalid arguments will not run __init__ + # but will still call __del__. + cursor = AsyncGridOutCursor.__new__(AsyncGridOutCursor) # Skip calling __init__ + with self.assertRaises(TypeError): + cursor.__init__(self.db.fs.files, {}, {"_id": True}) # type: ignore + cursor.__del__() # no error + + async def test_gridfs_find_one(self): + self.assertEqual(None, await self.fs.find_one()) + + id1 = await self.fs.put(b"test1", filename="file1") + res = await self.fs.find_one() + assert res is not None + self.assertEqual(b"test1", await res.read()) + + id2 = await self.fs.put(b"test2", filename="file2", meta="data") + res1 = await self.fs.find_one(id1) + assert res1 is not None + self.assertEqual(b"test1", await res1.read()) + res2 = await self.fs.find_one(id2) + assert res2 is not None + self.assertEqual(b"test2", await res2.read()) + + res3 = await self.fs.find_one({"filename": "file1"}) + assert res3 is not None + self.assertEqual(b"test1", await res3.read()) + + res4 = await self.fs.find_one(id2) + assert res4 is not None + self.assertEqual("data", res4.meta) + + async def test_grid_in_non_int_chunksize(self): + # Lua, and perhaps other buggy AsyncGridFS clients, store size as a float. + data = b"data" + await self.fs.put(data, filename="f") + await self.db.fs.files.update_one({"filename": "f"}, {"$set": {"chunkSize": 100.0}}) + + self.assertEqual(data, await (await self.fs.get_version("f")).read()) + + async def test_unacknowledged(self): + # w=0 is prohibited. + with self.assertRaises(ConfigurationError): + gridfs.AsyncGridFS((await self.async_rs_or_single_client(w=0)).pymongo_test) + + async def test_md5(self): + gin = self.fs.new_file() + await gin.write(b"no md5 sum") + await gin.close() + self.assertIsNone(gin.md5) + + gout = await self.fs.get(gin._id) + self.assertIsNone(gout.md5) + + _id = await self.fs.put(b"still no md5 sum") + gout = await self.fs.get(_id) + self.assertIsNone(gout.md5) + + +class TestGridfsReplicaSet(AsyncIntegrationTest): + @async_client_context.require_secondaries_count(1) + async def asyncSetUp(self): + await super().asyncSetUp() + + @classmethod + @async_client_context.require_connection + async def asyncTearDownClass(cls): + await async_client_context.client.drop_database("gfsreplica") + + async def test_gridfs_replica_set(self): + rsc = await self.async_rs_client( + w=async_client_context.w, read_preference=ReadPreference.SECONDARY + ) + + fs = gridfs.AsyncGridFS(rsc.gfsreplica, "gfsreplicatest") + + gin = fs.new_file() + self.assertEqual(gin._coll.read_preference, ReadPreference.PRIMARY) + + oid = await fs.put(b"foo") + content = await (await fs.get(oid)).read() + self.assertEqual(b"foo", content) + + async def test_gridfs_secondary(self): + secondary_host, secondary_port = one(await self.client.secondaries) + secondary_connection = await self.async_single_client( + secondary_host, secondary_port, read_preference=ReadPreference.SECONDARY + ) + + # Should detect it's connected to secondary and not attempt to + # create index + fs = gridfs.AsyncGridFS(secondary_connection.gfsreplica, "gfssecondarytest") + + # This won't detect secondary, raises error + with self.assertRaises(NotPrimaryError): + await fs.put(b"foo") + + async def test_gridfs_secondary_lazy(self): + # Should detect it's connected to secondary and not 
attempt to + # create index. + secondary_host, secondary_port = one(await self.client.secondaries) + client = await self.async_single_client( + secondary_host, secondary_port, read_preference=ReadPreference.SECONDARY, connect=False + ) + + # Still no connection. + fs = gridfs.AsyncGridFS(client.gfsreplica, "gfssecondarylazytest") + + # Connects, doesn't create index. + with self.assertRaises(NoFile): + await fs.get_last_version() + with self.assertRaises(NotPrimaryError): + await fs.put("data", encoding="utf-8") + + +if __name__ == "__main__": + unittest.main() diff --git a/test/asynchronous/test_gridfs_bucket.py b/test/asynchronous/test_gridfs_bucket.py new file mode 100644 index 0000000000..fd9b9883bf --- /dev/null +++ b/test/asynchronous/test_gridfs_bucket.py @@ -0,0 +1,597 @@ +# +# Copyright 2015-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Tests for the gridfs package.""" +from __future__ import annotations + +import asyncio +import datetime +import itertools +import sys +import threading +import time +from io import BytesIO +from test.asynchronous.helpers import ConcurrentRunner +from unittest.mock import patch + +sys.path[0:0] = [""] + +from test.asynchronous import AsyncIntegrationTest, async_client_context, unittest +from test.asynchronous.utils import async_joinall +from test.utils_shared import one + +import gridfs +from bson.binary import Binary +from bson.int64 import Int64 +from bson.objectid import ObjectId +from bson.son import SON +from gridfs.errors import CorruptGridFile, NoFile +from pymongo.asynchronous.mongo_client import AsyncMongoClient +from pymongo.errors import ( + ConfigurationError, + NotPrimaryError, + ServerSelectionTimeoutError, + WriteConcernError, +) +from pymongo.read_preferences import ReadPreference + +_IS_SYNC = False + + +class JustWrite(ConcurrentRunner): + def __init__(self, gfs, num): + super().__init__() + self.gfs = gfs + self.num = num + self.daemon = True + + async def run(self): + for _ in range(self.num): + file = self.gfs.open_upload_stream("test") + await file.write(b"hello") + await file.close() + + +class JustRead(ConcurrentRunner): + def __init__(self, gfs, num, results): + super().__init__() + self.gfs = gfs + self.num = num + self.results = results + self.daemon = True + + async def run(self): + for _ in range(self.num): + file = await self.gfs.open_download_stream_by_name("test") + data = await file.read() + self.results.append(data) + assert data == b"hello" + + +class TestGridfs(AsyncIntegrationTest): + fs: gridfs.AsyncGridFSBucket + alt: gridfs.AsyncGridFSBucket + + async def asyncSetUp(self): + await super().asyncSetUp() + self.fs = gridfs.AsyncGridFSBucket(self.db) + self.alt = gridfs.AsyncGridFSBucket(self.db, bucket_name="alt") + await self.cleanup_colls( + self.db.fs.files, self.db.fs.chunks, self.db.alt.files, self.db.alt.chunks + ) + + async def test_basic(self): + oid = await self.fs.upload_from_stream("test_filename", b"hello world") + self.assertEqual(b"hello world", await (await 
self.fs.open_download_stream(oid)).read()) + self.assertEqual(1, await self.db.fs.files.count_documents({})) + self.assertEqual(1, await self.db.fs.chunks.count_documents({})) + + await self.fs.delete(oid) + with self.assertRaises(NoFile): + await self.fs.open_download_stream(oid) + self.assertEqual(0, await self.db.fs.files.count_documents({})) + self.assertEqual(0, await self.db.fs.chunks.count_documents({})) + + async def test_multi_chunk_delete(self): + self.assertEqual(0, await self.db.fs.files.count_documents({})) + self.assertEqual(0, await self.db.fs.chunks.count_documents({})) + gfs = gridfs.AsyncGridFSBucket(self.db) + oid = await gfs.upload_from_stream("test_filename", b"hello", chunk_size_bytes=1) + self.assertEqual(1, await self.db.fs.files.count_documents({})) + self.assertEqual(5, await self.db.fs.chunks.count_documents({})) + await gfs.delete(oid) + self.assertEqual(0, await self.db.fs.files.count_documents({})) + self.assertEqual(0, await self.db.fs.chunks.count_documents({})) + + async def test_delete_by_name(self): + self.assertEqual(0, await self.db.fs.files.count_documents({})) + self.assertEqual(0, await self.db.fs.chunks.count_documents({})) + gfs = gridfs.AsyncGridFSBucket(self.db) + await gfs.upload_from_stream("test_filename", b"hello", chunk_size_bytes=1) + self.assertEqual(1, await self.db.fs.files.count_documents({})) + self.assertEqual(5, await self.db.fs.chunks.count_documents({})) + await gfs.delete_by_name("test_filename") + self.assertEqual(0, await self.db.fs.files.count_documents({})) + self.assertEqual(0, await self.db.fs.chunks.count_documents({})) + + async def test_empty_file(self): + oid = await self.fs.upload_from_stream("test_filename", b"") + self.assertEqual(b"", await (await self.fs.open_download_stream(oid)).read()) + self.assertEqual(1, await self.db.fs.files.count_documents({})) + self.assertEqual(0, await self.db.fs.chunks.count_documents({})) + + raw = await self.db.fs.files.find_one() + assert raw is not None + self.assertEqual(0, raw["length"]) + self.assertEqual(oid, raw["_id"]) + self.assertIsInstance(raw["uploadDate"], datetime.datetime) + self.assertEqual(255 * 1024, raw["chunkSize"]) + self.assertNotIn("md5", raw) + + async def test_corrupt_chunk(self): + files_id = await self.fs.upload_from_stream("test_filename", b"foobar") + await self.db.fs.chunks.update_one( + {"files_id": files_id}, {"$set": {"data": Binary(b"foo", 0)}} + ) + try: + out = await self.fs.open_download_stream(files_id) + with self.assertRaises(CorruptGridFile): + await out.read() + + out = await self.fs.open_download_stream(files_id) + with self.assertRaises(CorruptGridFile): + await out.readline() + finally: + await self.fs.delete(files_id) + + async def test_upload_ensures_index(self): + chunks = self.db.fs.chunks + files = self.db.fs.files + # Ensure the collections are removed. 
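+        # Dropping both collections forces the bucket to recreate the
+        # required GridFS indexes ({files_id: 1, n: 1} on chunks and
+        # {filename: 1, uploadDate: 1} on files) on the next upload; the
+        # assertions below verify both.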
+ await chunks.drop() + await files.drop() + await self.fs.upload_from_stream("filename", b"junk") + + self.assertIn( + [("files_id", 1), ("n", 1)], + [info.get("key") for info in (await chunks.index_information()).values()], + "Missing required index on chunks collection: {files_id: 1, n: 1}", + ) + + self.assertIn( + [("filename", 1), ("uploadDate", 1)], + [info.get("key") for info in (await files.index_information()).values()], + "Missing required index on files collection: {filename: 1, uploadDate: 1}", + ) + + async def test_ensure_index_shell_compat(self): + files = self.db.fs.files + for i, j in itertools.combinations_with_replacement([1, 1.0, Int64(1)], 2): + # Create the index with different numeric types (as might be done + # from the mongo shell). + shell_index = [("filename", i), ("uploadDate", j)] + await self.db.command( + "createIndexes", + files.name, + indexes=[{"key": SON(shell_index), "name": "filename_1.0_uploadDate_1.0"}], + ) + + # No error. + await self.fs.upload_from_stream("filename", b"data") + + self.assertIn( + [("filename", 1), ("uploadDate", 1)], + [info.get("key") for info in (await files.index_information()).values()], + "Missing required index on files collection: {filename: 1, uploadDate: 1}", + ) + await files.drop() + + async def test_alt_collection(self): + oid = await self.alt.upload_from_stream("test_filename", b"hello world") + self.assertEqual(b"hello world", await (await self.alt.open_download_stream(oid)).read()) + self.assertEqual(1, await self.db.alt.files.count_documents({})) + self.assertEqual(1, await self.db.alt.chunks.count_documents({})) + + await self.alt.delete(oid) + with self.assertRaises(NoFile): + await self.alt.open_download_stream(oid) + self.assertEqual(0, await self.db.alt.files.count_documents({})) + self.assertEqual(0, await self.db.alt.chunks.count_documents({})) + + with self.assertRaises(NoFile): + await self.alt.open_download_stream("foo") + await self.alt.upload_from_stream("foo", b"hello world") + self.assertEqual( + b"hello world", await (await self.alt.open_download_stream_by_name("foo")).read() + ) + + await self.alt.upload_from_stream("mike", b"") + await self.alt.upload_from_stream("test", b"foo") + await self.alt.upload_from_stream("hello world", b"") + + self.assertEqual( + {"mike", "test", "hello world", "foo"}, + {k["filename"] for k in await self.db.alt.files.find().to_list()}, + ) + + async def test_threaded_reads(self): + await self.fs.upload_from_stream("test", b"hello") + + threads = [] + results: list = [] + for i in range(10): + threads.append(JustRead(self.fs, 10, results)) + await threads[i].start() + + await async_joinall(threads) + + self.assertEqual(100 * [b"hello"], results) + + async def test_threaded_writes(self): + threads = [] + for i in range(10): + threads.append(JustWrite(self.fs, 10)) + await threads[i].start() + + await async_joinall(threads) + + fstr = await self.fs.open_download_stream_by_name("test") + self.assertEqual(await fstr.read(), b"hello") + + # Should have created 100 versions of 'test' file + self.assertEqual(100, await self.db.fs.files.count_documents({"filename": "test"})) + + async def test_get_last_version(self): + one = await self.fs.upload_from_stream("test", b"foo") + await asyncio.sleep(0.01) + two = self.fs.open_upload_stream("test") + await two.write(b"bar") + await two.close() + await asyncio.sleep(0.01) + two = two._id + three = await self.fs.upload_from_stream("test", b"baz") + + self.assertEqual(b"baz", await (await 
self.fs.open_download_stream_by_name("test")).read()) + await self.fs.delete(three) + self.assertEqual(b"bar", await (await self.fs.open_download_stream_by_name("test")).read()) + await self.fs.delete(two) + self.assertEqual(b"foo", await (await self.fs.open_download_stream_by_name("test")).read()) + await self.fs.delete(one) + with self.assertRaises(NoFile): + await self.fs.open_download_stream_by_name("test") + + async def test_get_version(self): + await self.fs.upload_from_stream("test", b"foo") + await asyncio.sleep(0.01) + await self.fs.upload_from_stream("test", b"bar") + await asyncio.sleep(0.01) + await self.fs.upload_from_stream("test", b"baz") + await asyncio.sleep(0.01) + + self.assertEqual( + b"foo", await (await self.fs.open_download_stream_by_name("test", revision=0)).read() + ) + self.assertEqual( + b"bar", await (await self.fs.open_download_stream_by_name("test", revision=1)).read() + ) + self.assertEqual( + b"baz", await (await self.fs.open_download_stream_by_name("test", revision=2)).read() + ) + + self.assertEqual( + b"baz", await (await self.fs.open_download_stream_by_name("test", revision=-1)).read() + ) + self.assertEqual( + b"bar", await (await self.fs.open_download_stream_by_name("test", revision=-2)).read() + ) + self.assertEqual( + b"foo", await (await self.fs.open_download_stream_by_name("test", revision=-3)).read() + ) + + with self.assertRaises(NoFile): + await self.fs.open_download_stream_by_name("test", revision=3) + with self.assertRaises(NoFile): + await self.fs.open_download_stream_by_name("test", revision=-4) + + async def test_upload_from_stream(self): + oid = await self.fs.upload_from_stream( + "test_file", BytesIO(b"hello world"), chunk_size_bytes=1 + ) + self.assertEqual(11, await self.db.fs.chunks.count_documents({})) + self.assertEqual(b"hello world", await (await self.fs.open_download_stream(oid)).read()) + + async def test_upload_from_stream_with_id(self): + oid = ObjectId() + await self.fs.upload_from_stream_with_id( + oid, "test_file_custom_id", BytesIO(b"custom id"), chunk_size_bytes=1 + ) + self.assertEqual(b"custom id", await (await self.fs.open_download_stream(oid)).read()) + + @patch("gridfs.asynchronous.grid_file._UPLOAD_BUFFER_CHUNKS", 3) + @async_client_context.require_failCommand_fail_point + async def test_upload_bulk_write_error(self): + # Test BulkWriteError from insert_many is converted to an insert_one style error. + expected_wce = { + "code": 100, + "codeName": "UnsatisfiableWriteConcern", + "errmsg": "Not enough data-bearing nodes", + } + cause_wce = { + "configureFailPoint": "failCommand", + "mode": {"times": 2}, + "data": {"failCommands": ["insert"], "writeConcernError": expected_wce}, + } + gin = self.fs.open_upload_stream("test_file", chunk_size_bytes=1) + async with self.fail_point(cause_wce): + # Assert we raise WriteConcernError, not BulkWriteError. + with self.assertRaises(WriteConcernError): + await gin.write(b"hello world") + # 3 chunks were uploaded. + self.assertEqual(3, await self.db.fs.chunks.count_documents({"files_id": gin._id})) + await gin.abort() + + @patch("gridfs.asynchronous.grid_file._UPLOAD_BUFFER_CHUNKS", 10) + async def test_upload_batching(self): + async with self.fs.open_upload_stream("test_file", chunk_size_bytes=1) as gin: + await gin.write(b"s" * (10 - 1)) + # No chunks were uploaded yet. + self.assertEqual(0, await self.db.fs.chunks.count_documents({"files_id": gin._id})) + await gin.write(b"s") + # All chunks were uploaded since we hit the _UPLOAD_BUFFER_CHUNKS limit. 
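+            # With chunk_size_bytes=1 every byte becomes its own chunk, so
+            # this single extra byte is the 10th buffered chunk, reaching the
+            # patched _UPLOAD_BUFFER_CHUNKS limit and flushing the whole
+            # buffer in one insert_many batch.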
+ self.assertEqual(10, await self.db.fs.chunks.count_documents({"files_id": gin._id})) + + async def test_open_upload_stream(self): + gin = self.fs.open_upload_stream("from_stream") + await gin.write(b"from stream") + await gin.close() + self.assertEqual(b"from stream", await (await self.fs.open_download_stream(gin._id)).read()) + + async def test_open_upload_stream_with_id(self): + oid = ObjectId() + gin = self.fs.open_upload_stream_with_id(oid, "from_stream_custom_id") + await gin.write(b"from stream with custom id") + await gin.close() + self.assertEqual( + b"from stream with custom id", await (await self.fs.open_download_stream(oid)).read() + ) + + async def test_missing_length_iter(self): + # Test fix that guards against PHP-237 + await self.fs.upload_from_stream("empty", b"") + doc = await self.db.fs.files.find_one({"filename": "empty"}) + assert doc is not None + doc.pop("length") + await self.db.fs.files.replace_one({"_id": doc["_id"]}, doc) + fstr = await self.fs.open_download_stream_by_name("empty") + + async def iterate_file(grid_file): + async for _ in grid_file: + pass + return True + + self.assertTrue(await iterate_file(fstr)) + + async def test_gridfs_lazy_connect(self): + client = await self.async_single_client( + "badhost", connect=False, serverSelectionTimeoutMS=0 + ) + cdb = client.db + gfs = gridfs.AsyncGridFSBucket(cdb) + with self.assertRaises(ServerSelectionTimeoutError): + await gfs.delete(0) + + gfs = gridfs.AsyncGridFSBucket(cdb) + with self.assertRaises(ServerSelectionTimeoutError): + await gfs.upload_from_stream("test", b"") # Still no connection. + + async def test_gridfs_find(self): + await self.fs.upload_from_stream("two", b"test2") + await asyncio.sleep(0.01) + await self.fs.upload_from_stream("two", b"test2+") + await asyncio.sleep(0.01) + await self.fs.upload_from_stream("one", b"test1") + await asyncio.sleep(0.01) + await self.fs.upload_from_stream("two", b"test2++") + files = self.db.fs.files + self.assertEqual(3, await files.count_documents({"filename": "two"})) + self.assertEqual(4, await files.count_documents({})) + cursor = self.fs.find( + {}, no_cursor_timeout=False, sort=[("uploadDate", -1)], skip=1, limit=2 + ) + gout = await cursor.next() + self.assertEqual(b"test1", await gout.read()) + await cursor.rewind() + gout = await cursor.next() + self.assertEqual(b"test1", await gout.read()) + gout = await cursor.next() + self.assertEqual(b"test2+", await gout.read()) + with self.assertRaises(StopAsyncIteration): + await cursor.next() + await cursor.close() + self.assertRaises(TypeError, self.fs.find, {}, {"_id": True}) + + async def test_grid_in_non_int_chunksize(self): + # Lua, and perhaps other buggy AsyncGridFS clients, store size as a float. + data = b"data" + await self.fs.upload_from_stream("f", data) + await self.db.fs.files.update_one({"filename": "f"}, {"$set": {"chunkSize": 100.0}}) + + self.assertEqual(data, await (await self.fs.open_download_stream_by_name("f")).read()) + + async def test_unacknowledged(self): + # w=0 is prohibited. 
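+        # GridFS relies on acknowledged writes to detect failures while
+        # uploading chunks, so constructing a bucket over a w=0 client must
+        # raise ConfigurationError.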
+ with self.assertRaises(ConfigurationError): + gridfs.AsyncGridFSBucket((await self.async_rs_or_single_client(w=0)).pymongo_test) + + async def test_rename(self): + _id = await self.fs.upload_from_stream("first_name", b"testing") + self.assertEqual( + b"testing", await (await self.fs.open_download_stream_by_name("first_name")).read() + ) + + await self.fs.rename(_id, "second_name") + with self.assertRaises(NoFile): + await self.fs.open_download_stream_by_name("first_name") + self.assertEqual( + b"testing", await (await self.fs.open_download_stream_by_name("second_name")).read() + ) + + async def test_rename_by_name(self): + _id = await self.fs.upload_from_stream("first_name", b"testing") + self.assertEqual( + b"testing", await (await self.fs.open_download_stream_by_name("first_name")).read() + ) + + await self.fs.rename_by_name("first_name", "second_name") + with self.assertRaises(NoFile): + await self.fs.open_download_stream_by_name("first_name") + self.assertEqual( + b"testing", await (await self.fs.open_download_stream_by_name("second_name")).read() + ) + + @patch("gridfs.asynchronous.grid_file._UPLOAD_BUFFER_SIZE", 5) + async def test_abort(self): + gin = self.fs.open_upload_stream("test_filename", chunk_size_bytes=5) + await gin.write(b"test1") + await gin.write(b"test2") + await gin.write(b"test3") + self.assertEqual(3, await self.db.fs.chunks.count_documents({"files_id": gin._id})) + await gin.abort() + self.assertTrue(gin.closed) + with self.assertRaises(ValueError): + await gin.write(b"test4") + self.assertEqual(0, await self.db.fs.chunks.count_documents({"files_id": gin._id})) + + async def test_download_to_stream(self): + file1 = BytesIO(b"hello world") + # Test with one chunk. + oid = await self.fs.upload_from_stream("one_chunk", file1) + self.assertEqual(1, await self.db.fs.chunks.count_documents({})) + file2 = BytesIO() + await self.fs.download_to_stream(oid, file2) + file1.seek(0) + file2.seek(0) + self.assertEqual(file1.read(), file2.read()) + + # Test with many chunks. + await self.db.drop_collection("fs.files") + await self.db.drop_collection("fs.chunks") + file1.seek(0) + oid = await self.fs.upload_from_stream("many_chunks", file1, chunk_size_bytes=1) + self.assertEqual(11, await self.db.fs.chunks.count_documents({})) + file2 = BytesIO() + await self.fs.download_to_stream(oid, file2) + file1.seek(0) + file2.seek(0) + self.assertEqual(file1.read(), file2.read()) + + async def test_download_to_stream_by_name(self): + file1 = BytesIO(b"hello world") + # Test with one chunk. + _ = await self.fs.upload_from_stream("one_chunk", file1) + self.assertEqual(1, await self.db.fs.chunks.count_documents({})) + file2 = BytesIO() + await self.fs.download_to_stream_by_name("one_chunk", file2) + file1.seek(0) + file2.seek(0) + self.assertEqual(file1.read(), file2.read()) + + # Test with many chunks. 
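+        # chunk_size_bytes=1 splits the 11-byte payload into 11 one-byte
+        # chunks; the count_documents assertion below confirms the chunking.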
+ await self.db.drop_collection("fs.files") + await self.db.drop_collection("fs.chunks") + file1.seek(0) + await self.fs.upload_from_stream("many_chunks", file1, chunk_size_bytes=1) + self.assertEqual(11, await self.db.fs.chunks.count_documents({})) + + file2 = BytesIO() + await self.fs.download_to_stream_by_name("many_chunks", file2) + file1.seek(0) + file2.seek(0) + self.assertEqual(file1.read(), file2.read()) + + async def test_md5(self): + gin = self.fs.open_upload_stream("no md5") + await gin.write(b"no md5 sum") + await gin.close() + self.assertIsNone(gin.md5) + + gout = await self.fs.open_download_stream(gin._id) + self.assertIsNone(gout.md5) + + gin = self.fs.open_upload_stream_with_id(ObjectId(), "also no md5") + await gin.write(b"also no md5 sum") + await gin.close() + self.assertIsNone(gin.md5) + + gout = await self.fs.open_download_stream(gin._id) + self.assertIsNone(gout.md5) + + +class TestGridfsBucketReplicaSet(AsyncIntegrationTest): + @async_client_context.require_secondaries_count(1) + async def asyncSetUp(self): + await super().asyncSetUp() + + @classmethod + @async_client_context.require_connection + async def asyncTearDownClass(cls): + await async_client_context.client.drop_database("gfsbucketreplica") + + async def test_gridfs_replica_set(self): + rsc = await self.async_rs_client( + w=async_client_context.w, read_preference=ReadPreference.SECONDARY + ) + + gfs = gridfs.AsyncGridFSBucket(rsc.gfsbucketreplica, "gfsbucketreplicatest") + oid = await gfs.upload_from_stream("test_filename", b"foo") + content = await (await gfs.open_download_stream(oid)).read() + self.assertEqual(b"foo", content) + + async def test_gridfs_secondary(self): + secondary_host, secondary_port = one(await self.client.secondaries) + secondary_connection = await self.async_single_client( + secondary_host, secondary_port, read_preference=ReadPreference.SECONDARY + ) + + # Should detect it's connected to secondary and not attempt to + # create index + gfs = gridfs.AsyncGridFSBucket( + secondary_connection.gfsbucketreplica, "gfsbucketsecondarytest" + ) + + # This won't detect secondary, raises error + with self.assertRaises(NotPrimaryError): + await gfs.upload_from_stream("test_filename", b"foo") + + async def test_gridfs_secondary_lazy(self): + # Should detect it's connected to secondary and not attempt to + # create index. + secondary_host, secondary_port = one(await self.client.secondaries) + client = await self.async_single_client( + secondary_host, secondary_port, read_preference=ReadPreference.SECONDARY, connect=False + ) + + # Still no connection. + gfs = gridfs.AsyncGridFSBucket(client.gfsbucketreplica, "gfsbucketsecondarylazytest") + + # Connects, doesn't create index. + with self.assertRaises(NoFile): + await gfs.open_download_stream_by_name("test_filename") + with self.assertRaises(NotPrimaryError): + await gfs.upload_from_stream("test_filename", b"data") + + +if __name__ == "__main__": + unittest.main() diff --git a/test/asynchronous/test_gridfs_spec.py b/test/asynchronous/test_gridfs_spec.py new file mode 100644 index 0000000000..f3dc14fbdc --- /dev/null +++ b/test/asynchronous/test_gridfs_spec.py @@ -0,0 +1,39 @@ +# Copyright 2015-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Test the AsyncGridFS unified spec tests.""" +from __future__ import annotations + +import os +import sys +from pathlib import Path + +sys.path[0:0] = [""] + +from test import unittest +from test.asynchronous.unified_format import generate_test_classes + +_IS_SYNC = False + +# Location of JSON test specifications. +if _IS_SYNC: + TEST_PATH = os.path.join(Path(__file__).resolve().parent, "gridfs") +else: + TEST_PATH = os.path.join(Path(__file__).resolve().parent.parent, "gridfs") + +# Generate unified tests. +globals().update(generate_test_classes(TEST_PATH, module=__name__)) + +if __name__ == "__main__": + unittest.main() diff --git a/test/asynchronous/test_heartbeat_monitoring.py b/test/asynchronous/test_heartbeat_monitoring.py new file mode 100644 index 0000000000..aa8a205021 --- /dev/null +++ b/test/asynchronous/test_heartbeat_monitoring.py @@ -0,0 +1,98 @@ +# Copyright 2016-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Test the monitoring of the server heartbeats.""" +from __future__ import annotations + +import sys +from test.asynchronous.utils import AsyncMockPool + +sys.path[0:0] = [""] + +from test.asynchronous import AsyncIntegrationTest, client_knobs, unittest +from test.utils_shared import HeartbeatEventListener, async_wait_until + +from pymongo.asynchronous.monitor import Monitor +from pymongo.errors import ConnectionFailure +from pymongo.hello import Hello, HelloCompat + +_IS_SYNC = False + + +class TestHeartbeatMonitoring(AsyncIntegrationTest): + async def create_mock_monitor(self, responses, uri, expected_results): + listener = HeartbeatEventListener() + with client_knobs( + heartbeat_frequency=0.1, min_heartbeat_interval=0.1, events_queue_frequency=0.1 + ): + + class MockMonitor(Monitor): + async def _check_with_socket(self, *args, **kwargs): + if isinstance(responses[1], Exception): + raise responses[1] + return Hello(responses[1]), 99 + + _ = await self.async_single_client( + h=uri, + event_listeners=(listener,), + _monitor_class=MockMonitor, + _pool_class=AsyncMockPool, + connect=True, + ) + + expected_len = len(expected_results) + # Wait for *at least* expected_len number of results. The + # monitor thread may run multiple times during the execution + # of this test. + await async_wait_until( + lambda: len(listener.events) >= expected_len, "publish all events" + ) + + # zip gives us len(expected_results) pairs. 
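+        # zip() truncates to the shorter input, e.g.
+        # list(zip([1, 2], "abc")) == [(1, "a"), (2, "b")], so any extra
+        # heartbeat events past expected_len are simply ignored.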
+        for expected, actual in zip(expected_results, listener.events):
+            self.assertEqual(expected, actual.__class__.__name__)
+            self.assertEqual(actual.connection_id, responses[0])
+            if expected != "ServerHeartbeatStartedEvent":
+                if isinstance(actual.reply, Hello):
+                    self.assertEqual(actual.duration, 99)
+                    self.assertEqual(actual.reply._doc, responses[1])
+                else:
+                    self.assertEqual(actual.reply, responses[1])
+
+    async def test_standalone(self):
+        responses = (
+            ("a", 27017),
+            {HelloCompat.LEGACY_CMD: True, "maxWireVersion": 4, "minWireVersion": 0, "ok": 1},
+        )
+        uri = "mongodb://a:27017"
+        expected_results = ["ServerHeartbeatStartedEvent", "ServerHeartbeatSucceededEvent"]
+
+        await self.create_mock_monitor(responses, uri, expected_results)
+
+    async def test_standalone_error(self):
+        responses = (("a", 27017), ConnectionFailure("SPECIAL MESSAGE"))
+        uri = "mongodb://a:27017"
+        # _check_with_socket failing results in a second attempt.
+        expected_results = [
+            "ServerHeartbeatStartedEvent",
+            "ServerHeartbeatFailedEvent",
+            "ServerHeartbeatStartedEvent",
+            "ServerHeartbeatFailedEvent",
+        ]
+
+        await self.create_mock_monitor(responses, uri, expected_results)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/test/asynchronous/test_index_management.py b/test/asynchronous/test_index_management.py
new file mode 100644
index 0000000000..890788fc56
--- /dev/null
+++ b/test/asynchronous/test_index_management.py
@@ -0,0 +1,379 @@
+# Copyright 2023-present MongoDB, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Run the index management spec tests."""
+from __future__ import annotations
+
+import asyncio
+import os
+import pathlib
+import sys
+import time
+import uuid
+from typing import Any, Mapping
+
+import pytest
+
+sys.path[0:0] = [""]
+
+from test.asynchronous import AsyncIntegrationTest, AsyncPyMongoTestCase, unittest
+from test.asynchronous.unified_format import generate_test_classes
+from test.utils_shared import AllowListEventListener, OvertCommandListener
+
+from pymongo.errors import OperationFailure
+from pymongo.operations import SearchIndexModel
+from pymongo.read_concern import ReadConcern
+from pymongo.write_concern import WriteConcern
+
+_IS_SYNC = False
+
+pytestmark = pytest.mark.search_index
+
+# Location of JSON test specifications.
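+# The asynchronous tests live one directory below the synchronous ones, so
+# when _IS_SYNC is False the shared JSON specs are resolved from the parent
+# directory.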
+if _IS_SYNC: + _TEST_PATH = os.path.join(pathlib.Path(__file__).resolve().parent, "index_management") +else: + _TEST_PATH = os.path.join(pathlib.Path(__file__).resolve().parent.parent, "index_management") + +_NAME = "test-search-index" + + +class TestCreateSearchIndex(AsyncIntegrationTest): + async def test_inputs(self): + listener = AllowListEventListener("createSearchIndexes") + client = self.simple_client(event_listeners=[listener]) + coll = client.test.test + await coll.drop() + definition = dict(mappings=dict(dynamic=True)) + model_kwarg_list: list[Mapping[str, Any]] = [ + dict(definition=definition, name=None), + dict(definition=definition, name="test"), + ] + for model_kwargs in model_kwarg_list: + model = SearchIndexModel(**model_kwargs) + with self.assertRaises(OperationFailure): + await coll.create_search_index(model) + with self.assertRaises(OperationFailure): + await coll.create_search_index(model_kwargs) + + listener.reset() + with self.assertRaises(OperationFailure): + await coll.create_search_index({"definition": definition, "arbitraryOption": 1}) + self.assertEqual( + {"definition": definition, "arbitraryOption": 1}, + listener.events[0].command["indexes"][0], + ) + + listener.reset() + with self.assertRaises(OperationFailure): + await coll.create_search_index({"definition": definition, "type": "search"}) + self.assertEqual( + {"definition": definition, "type": "search"}, listener.events[0].command["indexes"][0] + ) + + +class SearchIndexIntegrationBase(AsyncPyMongoTestCase): + db_name = "test_search_index_base" + + @classmethod + def setUpClass(cls) -> None: + cls.url = os.environ.get("MONGODB_URI") + cls.username = os.environ["DB_USER"] + cls.password = os.environ["DB_PASSWORD"] + cls.listener = OvertCommandListener() + + async def asyncSetUp(self) -> None: + self.client = self.simple_client( + self.url, + username=self.username, + password=self.password, + event_listeners=[self.listener], + ) + await self.client.drop_database(_NAME) + self.db = self.client[self.db_name] + + async def asyncTearDown(self): + await self.client.drop_database(_NAME) + + async def wait_for_ready(self, coll, name=_NAME, predicate=None): + """Wait for a search index to be ready.""" + indices: list[Mapping[str, Any]] = [] + if predicate is None: + predicate = lambda index: index.get("queryable") is True + + while True: + indices = await (await coll.list_search_indexes(name)).to_list() + if len(indices) and predicate(indices[0]): + return indices[0] + await asyncio.sleep(5) + + +class TestSearchIndexIntegration(SearchIndexIntegrationBase): + db_name = "test_search_index" + + async def test_comment_field(self): + # Create a collection with the "create" command using a randomly generated name (referred to as ``coll0``). + coll0 = self.db[f"col{uuid.uuid4()}"] + await coll0.insert_one({}) + + # Create a new search index on ``coll0`` that implicitly passes its type. + search_definition = {"mappings": {"dynamic": False}} + self.listener.reset() + implicit_search_resp = await coll0.create_search_index( + model={"name": _NAME + "-implicit", "definition": search_definition}, comment="foo" + ) + event = self.listener.events[0] + self.assertEqual(event.command["comment"], "foo") + + # Get the index definition. 
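+        # list_search_indexes should forward the same comment, which the
+        # command event assertion below verifies.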
+        self.listener.reset()
+        await (await coll0.list_search_indexes(name=implicit_search_resp, comment="foo")).next()
+        event = self.listener.events[0]
+        self.assertEqual(event.command["comment"], "foo")
+
+
+class TestSearchIndexProse(SearchIndexIntegrationBase):
+    db_name = "test_search_index_prose"
+
+    async def test_case_1(self):
+        """Driver can successfully create and list search indexes."""
+
+        # Create a collection with the "create" command using a randomly generated name (referred to as ``coll0``).
+        coll0 = self.db[f"col{uuid.uuid4()}"]
+
+        # Create a new search index on ``coll0`` with the ``createSearchIndex`` helper. Use the following definition:
+        model = {"name": _NAME, "definition": {"mappings": {"dynamic": False}}}
+        await coll0.insert_one({})
+        resp = await coll0.create_search_index(model)
+
+        # Assert that the command returns the name of the index: ``"test-search-index"``.
+        self.assertEqual(resp, _NAME)
+
+        # Run ``coll0.listSearchIndexes()`` repeatedly every 5 seconds until the following condition is satisfied and store the value in a variable ``index``:
+        # An index with the ``name`` of ``test-search-index`` is present and the index has a field ``queryable`` with a value of ``true``.
+        index = await self.wait_for_ready(coll0)
+
+        # Assert that ``index`` has a property ``latestDefinition`` whose value is ``{ 'mappings': { 'dynamic': false } }``
+        self.assertIn("latestDefinition", index)
+        self.assertEqual(index["latestDefinition"], model["definition"])
+
+    async def test_case_2(self):
+        """Driver can successfully create multiple indexes in batch."""
+
+        # Create a collection with the "create" command using a randomly generated name (referred to as ``coll0``).
+        coll0 = self.db[f"col{uuid.uuid4()}"]
+        await coll0.insert_one({})
+
+        # Create two new search indexes on ``coll0`` with the ``createSearchIndexes`` helper.
+        name1 = "test-search-index-1"
+        name2 = "test-search-index-2"
+        definition = {"mappings": {"dynamic": False}}
+        index_definitions: list[dict[str, Any]] = [
+            {"name": name1, "definition": definition},
+            {"name": name2, "definition": definition},
+        ]
+        await coll0.create_search_indexes(
+            [SearchIndexModel(i["definition"], i["name"]) for i in index_definitions]
+        )
+
+        # Assert that the command returns an array containing the new indexes' names: ``["test-search-index-1", "test-search-index-2"]``.
+        indices = await (await coll0.list_search_indexes()).to_list()
+        names = [i["name"] for i in indices]
+        self.assertIn(name1, names)
+        self.assertIn(name2, names)
+
+        # Run ``coll0.listSearchIndexes()`` repeatedly every 5 seconds until the following condition is satisfied.
+        # An index with the ``name`` of ``test-search-index-1`` is present and index has a field ``queryable`` with the value of ``true``. Store result in ``index1``.
+        # An index with the ``name`` of ``test-search-index-2`` is present and index has a field ``queryable`` with the value of ``true``. Store result in ``index2``.
+        index1 = await self.wait_for_ready(coll0, name1)
+        index2 = await self.wait_for_ready(coll0, name2)
+
+        # Assert that ``index1`` and ``index2`` have the property ``latestDefinition`` whose value is ``{ "mappings" : { "dynamic" : false } }``
+        for index in [index1, index2]:
+            self.assertIn("latestDefinition", index)
+            self.assertEqual(index["latestDefinition"], definition)
+
+    async def test_case_3(self):
+        """Driver can successfully drop search indexes."""
+
+        # Create a collection with the "create" command using a randomly generated name (referred to as ``coll0``).
+ coll0 = self.db[f"col{uuid.uuid4()}"] + await coll0.insert_one({}) + + # Create a new search index on ``coll0``. + model = {"name": _NAME, "definition": {"mappings": {"dynamic": False}}} + resp = await coll0.create_search_index(model) + + # Assert that the command returns the name of the index: ``"test-search-index"``. + self.assertEqual(resp, "test-search-index") + + # Run ``coll0.listSearchIndexes()`` repeatedly every 5 seconds until the following condition is satisfied: + # An index with the ``name`` of ``test-search-index`` is present and index has a field ``queryable`` with the value of ``true``. + await self.wait_for_ready(coll0) + + # Run a ``dropSearchIndex`` on ``coll0``, using ``test-search-index`` for the name. + await coll0.drop_search_index(_NAME) + + # Run ``coll0.listSearchIndexes()`` repeatedly every 5 seconds until ``listSearchIndexes`` returns an empty array. + t0 = time.time() + while True: + indices = await (await coll0.list_search_indexes()).to_list() + if indices: + break + if (time.time() - t0) / 60 > 5: + raise TimeoutError("Timed out waiting for index deletion") + await asyncio.sleep(5) + + async def test_case_4(self): + """Driver can update a search index.""" + # Create a collection with the "create" command using a randomly generated name (referred to as ``coll0``). + coll0 = self.db[f"col{uuid.uuid4()}"] + await coll0.insert_one({}) + + # Create a new search index on ``coll0``. + model = {"name": _NAME, "definition": {"mappings": {"dynamic": False}}} + resp = await coll0.create_search_index(model) + + # Assert that the command returns the name of the index: ``"test-search-index"``. + self.assertEqual(resp, _NAME) + + # Run ``coll0.listSearchIndexes()`` repeatedly every 5 seconds until the following condition is satisfied: + # An index with the ``name`` of ``test-search-index`` is present and index has a field ``queryable`` with the value of ``true``. + await self.wait_for_ready(coll0) + + # Run a ``updateSearchIndex`` on ``coll0``. + # Assert that the command does not error and the server responds with a success. + model2: dict[str, Any] = {"name": _NAME, "definition": {"mappings": {"dynamic": True}}} + await coll0.update_search_index(_NAME, model2["definition"]) + + # Run ``coll0.listSearchIndexes()`` repeatedly every 5 seconds until the following condition is satisfied: + # An index with the ``name`` of ``test-search-index`` is present. This index is referred to as ``index``. + # The index has a field ``queryable`` with a value of ``true`` and has a field ``status`` with the value of ``READY``. + predicate = lambda index: index.get("queryable") is True and index.get("status") == "READY" + await self.wait_for_ready(coll0, predicate=predicate) + + # Assert that an index is present with the name ``test-search-index`` and the definition has a property ``latestDefinition`` whose value is ``{ 'mappings': { 'dynamic': true } }``. + index = (await (await coll0.list_search_indexes(_NAME)).to_list())[0] + self.assertIn("latestDefinition", index) + self.assertEqual(index["latestDefinition"], model2["definition"]) + + async def test_case_5(self): + """``dropSearchIndex`` suppresses namespace not found errors.""" + # Create a driver-side collection object for a randomly generated collection name. Do not create this collection on the server. + coll0 = self.db[f"col{uuid.uuid4()}"] + + # Run a ``dropSearchIndex`` command and assert that no error is thrown. 
+ await coll0.drop_search_index("foo") + + async def test_case_6(self): + """Driver can successfully create and list search indexes with non-default readConcern and writeConcern.""" + # Create a collection with the "create" command using a randomly generated name (referred to as ``coll0``). + coll0 = self.db[f"col{uuid.uuid4()}"] + await coll0.insert_one({}) + + # Apply a write concern ``WriteConcern(w=1)`` and a read concern with ``ReadConcern(level="majority")`` to ``coll0``. + coll0 = coll0.with_options( + write_concern=WriteConcern(w="1"), read_concern=ReadConcern(level="majority") + ) + + # Create a new search index on ``coll0`` with the ``createSearchIndex`` helper. + name = "test-search-index-case6" + model = {"name": name, "definition": {"mappings": {"dynamic": False}}} + resp = await coll0.create_search_index(model) + + # Assert that the command returns the name of the index: ``"test-search-index-case6"``. + self.assertEqual(resp, name) + + # Run ``coll0.listSearchIndexes()`` repeatedly every 5 seconds until the following condition is satisfied and store the value in a variable ``index``: + # - An index with the ``name`` of ``test-search-index-case6`` is present and the index has a field ``queryable`` with a value of ``true``. + index = await self.wait_for_ready(coll0, name) + + # Assert that ``index`` has a property ``latestDefinition`` whose value is ``{ 'mappings': { 'dynamic': false } }`` + self.assertIn("latestDefinition", index) + self.assertEqual(index["latestDefinition"], model["definition"]) + + async def test_case_7(self): + """Driver handles index types.""" + + # Create a collection with the "create" command using a randomly generated name (referred to as ``coll0``). + coll0 = self.db[f"col{uuid.uuid4()}"] + await coll0.insert_one({}) + + # Use these search and vector search definitions for indexes. + search_definition = {"mappings": {"dynamic": False}} + vector_search_definition = { + "fields": [ + { + "type": "vector", + "path": "plot_embedding", + "numDimensions": 1536, + "similarity": "euclidean", + }, + ] + } + + # Create a new search index on ``coll0`` that implicitly passes its type. + implicit_search_resp = await coll0.create_search_index( + model={"name": _NAME + "-implicit", "definition": search_definition} + ) + + # Get the index definition. + resp = await (await coll0.list_search_indexes(name=implicit_search_resp)).next() + + # Assert that the index model contains the correct index type: ``"search"``. + self.assertEqual(resp["type"], "search") + + # Create a new search index on ``coll0`` that explicitly passes its type. + explicit_search_resp = await coll0.create_search_index( + model={"name": _NAME + "-explicit", "type": "search", "definition": search_definition} + ) + + # Get the index definition. + resp = await (await coll0.list_search_indexes(name=explicit_search_resp)).next() + + # Assert that the index model contains the correct index type: ``"search"``. + self.assertEqual(resp["type"], "search") + + # Create a new vector search index on ``coll0`` that explicitly passes its type. + explicit_vector_resp = await coll0.create_search_index( + model={ + "name": _NAME + "-vector", + "type": "vectorSearch", + "definition": vector_search_definition, + } + ) + + # Get the index definition. + resp = await (await coll0.list_search_indexes(name=explicit_vector_resp)).next() + + # Assert that the index model contains the correct index type: ``"vectorSearch"``. 
+ self.assertEqual(resp["type"], "vectorSearch") + + # Catch the error raised when trying to create a vector search index without specifying the type + with self.assertRaises(OperationFailure) as e: + await coll0.create_search_index( + model={"name": _NAME + "-error", "definition": vector_search_definition} + ) + self.assertIn("Attribute mappings missing.", e.exception.details["errmsg"]) + + +globals().update( + generate_test_classes( + _TEST_PATH, + module=__name__, + ) +) + +if __name__ == "__main__": + unittest.main() diff --git a/test/asynchronous/test_json_util_integration.py b/test/asynchronous/test_json_util_integration.py new file mode 100644 index 0000000000..32312cb9d3 --- /dev/null +++ b/test/asynchronous/test_json_util_integration.py @@ -0,0 +1,28 @@ +from __future__ import annotations + +from test.asynchronous import AsyncIntegrationTest +from typing import Any, List, MutableMapping + +from bson import Binary, Code, DBRef, ObjectId, json_util +from bson.binary import USER_DEFINED_SUBTYPE + +_IS_SYNC = False + + +class TestJsonUtilRoundtrip(AsyncIntegrationTest): + async def test_cursor(self): + db = self.db + + await db.drop_collection("test") + docs: List[MutableMapping[str, Any]] = [ + {"foo": [1, 2]}, + {"bar": {"hello": "world"}}, + {"code": Code("function x() { return 1; }")}, + {"bin": Binary(b"\x00\x01\x02\x03\x04", USER_DEFINED_SUBTYPE)}, + {"dbref": {"_ref": DBRef("simple", ObjectId("509b8db456c02c5ab7e63c34"))}}, + ] + + await db.test.insert_many(docs) + reloaded_docs = json_util.loads(json_util.dumps(await (db.test.find()).to_list())) + for doc in docs: + self.assertIn(doc, reloaded_docs) diff --git a/test/asynchronous/test_load_balancer.py b/test/asynchronous/test_load_balancer.py new file mode 100644 index 0000000000..17d85841f9 --- /dev/null +++ b/test/asynchronous/test_load_balancer.py @@ -0,0 +1,194 @@ +# Copyright 2021-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Test the Load Balancer unified spec tests.""" +from __future__ import annotations + +import asyncio +import gc +import os +import pathlib +import sys +import threading +from asyncio import Event +from test.asynchronous.helpers import ConcurrentRunner, ExceptionCatchingTask +from test.asynchronous.utils import async_get_pool + +import pytest + +sys.path[0:0] = [""] + +from test.asynchronous import AsyncIntegrationTest, async_client_context, unittest +from test.asynchronous.unified_format import generate_test_classes +from test.utils_shared import ( + async_wait_until, + create_async_event, +) + +_IS_SYNC = False + +pytestmark = pytest.mark.load_balancer + +# Location of JSON test specifications. +if _IS_SYNC: + _TEST_PATH = os.path.join(pathlib.Path(__file__).resolve().parent, "load_balancer") +else: + _TEST_PATH = os.path.join(pathlib.Path(__file__).resolve().parent.parent, "load_balancer") + +# Generate unified tests. 
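+# generate_test_classes() scans _TEST_PATH for JSON spec files and builds one
+# unittest.TestCase subclass per file; merging the returned dict into globals()
+# is what lets unittest discovery pick the generated classes up, roughly
+# equivalent to:
+#     for name, cls in generate_test_classes(_TEST_PATH, module=__name__).items():
+#         setattr(sys.modules[__name__], name, cls)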
+globals().update(generate_test_classes(_TEST_PATH, module=__name__)) + + +class TestLB(AsyncIntegrationTest): + RUN_ON_LOAD_BALANCER = True + + async def test_connections_are_only_returned_once(self): + if "PyPy" in sys.version: + # Tracked in PYTHON-3011 + self.skipTest("Test is flaky on PyPy") + pool = await async_get_pool(self.client) + n_conns = len(pool.conns) + await self.db.test.find_one({}) + self.assertEqual(len(pool.conns), n_conns) + await (await self.db.test.aggregate([{"$limit": 1}])).to_list() + self.assertEqual(len(pool.conns), n_conns) + + @async_client_context.require_load_balancer + async def test_unpin_committed_transaction(self): + client = await self.async_rs_client() + pool = await async_get_pool(client) + coll = client[self.db.name].test + async with client.start_session() as session: + async with await session.start_transaction(): + self.assertEqual(pool.active_sockets, 0) + await coll.insert_one({}, session=session) + self.assertEqual(pool.active_sockets, 1) # Pinned. + self.assertEqual(pool.active_sockets, 1) # Still pinned. + self.assertEqual(pool.active_sockets, 0) # Unpinned. + + @async_client_context.require_failCommand_fail_point + async def test_cursor_gc(self): + async def create_resource(coll): + cursor = coll.find({}, batch_size=3) + await anext(cursor) + return cursor + + await self._test_no_gc_deadlock(create_resource) + + @async_client_context.require_failCommand_fail_point + async def test_command_cursor_gc(self): + async def create_resource(coll): + cursor = await coll.aggregate([], batchSize=3) + await anext(cursor) + return cursor + + await self._test_no_gc_deadlock(create_resource) + + async def _test_no_gc_deadlock(self, create_resource): + client = await self.async_rs_client() + pool = await async_get_pool(client) + coll = client[self.db.name].test + await coll.insert_many([{} for _ in range(10)]) + self.assertEqual(pool.active_sockets, 0) + # Cause the initial find attempt to fail to induce a reference cycle. + args = { + "mode": {"times": 1}, + "data": { + "failCommands": ["find", "aggregate"], + "closeConnection": True, + }, + } + async with self.fail_point(args): + resource = await create_resource(coll) + if async_client_context.load_balancer: + self.assertEqual(pool.active_sockets, 1) # Pinned. + + task = PoolLocker(pool) + await task.start() + self.assertTrue(await task.wait(task.locked, 5), "timed out") + # Garbage collect the resource while the pool is locked to ensure we + # don't deadlock. + del resource + # On PyPy it can take a few rounds to collect the cursor. + for _ in range(3): + gc.collect() + task.unlock.set() + await task.join(5) + self.assertFalse(task.is_alive()) + self.assertIsNone(task.exc) + + await async_wait_until(lambda: pool.active_sockets == 0, "return socket") + # Run another operation to ensure the socket still works. + await coll.delete_many({}) + + @async_client_context.require_transactions + async def test_session_gc(self): + client = await self.async_rs_client() + pool = await async_get_pool(client) + session = client.start_session() + await session.start_transaction() + await client.test_session_gc.test.find_one({}, session=session) + # Cleanup the transaction left open on the server + self.addAsyncCleanup(self.client.admin.command, "killSessions", [session.session_id]) + if async_client_context.load_balancer: + self.assertEqual(pool.active_sockets, 1) # Pinned. 
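+
+        # PoolLocker (defined at the bottom of this file) holds the pool's lock
+        # from a separate task; garbage-collecting the pinned session while the
+        # lock is held is exactly the scenario that could deadlock if the
+        # session finalizer tried to acquire the same lock synchronously.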
+ + task = PoolLocker(pool) + await task.start() + self.assertTrue(await task.wait(task.locked, 5), "timed out") + # Garbage collect the session while the pool is locked to ensure we + # don't deadlock. + del session + # On PyPy it can take a few rounds to collect the session. + for _ in range(3): + gc.collect() + task.unlock.set() + await task.join(5) + self.assertFalse(task.is_alive()) + self.assertIsNone(task.exc) + + await async_wait_until(lambda: pool.active_sockets == 0, "return socket") + # Run another operation to ensure the socket still works. + await client[self.db.name].test.delete_many({}) + + +class PoolLocker(ExceptionCatchingTask): + def __init__(self, pool): + super().__init__(target=self.lock_pool) + self.pool = pool + self.daemon = True + self.locked = create_async_event() + self.unlock = create_async_event() + + async def lock_pool(self): + async with self.pool.lock: + self.locked.set() + # Wait for the unlock flag. + unlock_pool = await self.wait(self.unlock, 10) + if not unlock_pool: + raise Exception("timed out waiting for unlock signal: deadlock?") + + async def wait(self, event: Event, timeout: int): + if _IS_SYNC: + return event.wait(timeout) # type: ignore[call-arg] + else: + try: + await asyncio.wait_for(event.wait(), timeout=timeout) + except asyncio.TimeoutError: + return False + return True + + +if __name__ == "__main__": + unittest.main() diff --git a/test/asynchronous/test_logger.py b/test/asynchronous/test_logger.py index a2e8b35c5f..d024735fd8 100644 --- a/test/asynchronous/test_logger.py +++ b/test/asynchronous/test_logger.py @@ -15,7 +15,7 @@ import os from test import unittest -from test.asynchronous import AsyncIntegrationTest +from test.asynchronous import AsyncIntegrationTest, async_client_context from unittest.mock import patch from bson import json_util @@ -97,6 +97,49 @@ async def test_logging_without_listeners(self): await c.db.test.insert_one({"x": "1"}) self.assertGreater(len(cm.records), 0) + @async_client_context.require_failCommand_fail_point + async def test_logging_retry_read_attempts(self): + await self.db.test.insert_one({"x": "1"}) + + async with self.fail_point( + { + "mode": {"times": 1}, + "data": { + "failCommands": ["find"], + "errorCode": 10107, + "errorLabels": ["RetryableWriteError"], + }, + } + ): + with self.assertLogs("pymongo.command", level="DEBUG") as cm: + await self.db.test.find_one({"x": "1"}) + + retry_messages = [ + r.getMessage() for r in cm.records if "Retrying read attempt" in r.getMessage() + ] + self.assertEqual(len(retry_messages), 1) + + @async_client_context.require_failCommand_fail_point + @async_client_context.require_retryable_writes + async def test_logging_retry_write_attempts(self): + async with self.fail_point( + { + "mode": {"times": 1}, + "data": { + "errorCode": 10107, + "errorLabels": ["RetryableWriteError"], + "failCommands": ["insert"], + }, + } + ): + with self.assertLogs("pymongo.command", level="DEBUG") as cm: + await self.db.test.insert_one({"x": "1"}) + + retry_messages = [ + r.getMessage() for r in cm.records if "Retrying write attempt" in r.getMessage() + ] + self.assertEqual(len(retry_messages), 1) + if __name__ == "__main__": unittest.main() diff --git a/test/asynchronous/test_max_staleness.py b/test/asynchronous/test_max_staleness.py new file mode 100644 index 0000000000..b6e15f9158 --- /dev/null +++ b/test/asynchronous/test_max_staleness.py @@ -0,0 +1,149 @@ +# Copyright 2016 MongoDB, Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Test maxStalenessSeconds support.""" +from __future__ import annotations + +import asyncio +import os +import sys +import time +import warnings +from pathlib import Path + +from pymongo import AsyncMongoClient +from pymongo.operations import _Op + +sys.path[0:0] = [""] + +from test.asynchronous import AsyncPyMongoTestCase, async_client_context, unittest +from test.asynchronous.utils_selection_tests import create_selection_tests + +from pymongo.errors import ConfigurationError +from pymongo.server_selectors import writable_server_selector + +_IS_SYNC = False + +# Location of JSON test specifications. +if _IS_SYNC: + TEST_PATH = os.path.join(Path(__file__).resolve().parent, "max_staleness") +else: + TEST_PATH = os.path.join(Path(__file__).resolve().parent.parent, "max_staleness") + + +class TestAllScenarios(create_selection_tests(TEST_PATH)): # type: ignore + pass + + +class TestMaxStaleness(AsyncPyMongoTestCase): + async def test_max_staleness(self): + client = self.simple_client() + self.assertEqual(-1, client.read_preference.max_staleness) + + client = self.simple_client("mongodb://a/?readPreference=secondary") + self.assertEqual(-1, client.read_preference.max_staleness) + + # These tests are specified in max-staleness-tests.rst. + with self.assertRaises(ConfigurationError): + # Default read pref "primary" can't be used with max staleness. + self.simple_client("mongodb://a/?maxStalenessSeconds=120") + + with self.assertRaises(ConfigurationError): + # Read pref "primary" can't be used with max staleness. + self.simple_client("mongodb://a/?readPreference=primary&maxStalenessSeconds=120") + + client = self.simple_client("mongodb://host/?maxStalenessSeconds=-1") + self.assertEqual(-1, client.read_preference.max_staleness) + + client = self.simple_client("mongodb://host/?readPreference=primary&maxStalenessSeconds=-1") + self.assertEqual(-1, client.read_preference.max_staleness) + + client = self.simple_client( + "mongodb://host/?readPreference=secondary&maxStalenessSeconds=120" + ) + self.assertEqual(120, client.read_preference.max_staleness) + + client = self.simple_client("mongodb://a/?readPreference=secondary&maxStalenessSeconds=1") + self.assertEqual(1, client.read_preference.max_staleness) + + client = self.simple_client("mongodb://a/?readPreference=secondary&maxStalenessSeconds=-1") + self.assertEqual(-1, client.read_preference.max_staleness) + + client = self.simple_client(maxStalenessSeconds=-1, readPreference="nearest") + self.assertEqual(-1, client.read_preference.max_staleness) + + with self.assertRaises(TypeError): + # Prohibit None. 
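+            # maxStalenessSeconds must be -1 (meaning "no maximum") or a
+            # positive integer; None is rejected client-side at construction
+            # time, before any connection is attempted.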
+            self.simple_client(maxStalenessSeconds=None, readPreference="nearest")
+
+    async def test_max_staleness_float(self):
+        with self.assertRaises(TypeError) as ctx:
+            await self.async_rs_or_single_client(maxStalenessSeconds=1.5, readPreference="nearest")
+
+        self.assertIn("must be an integer", str(ctx.exception))
+
+        with warnings.catch_warnings(record=True) as ctx:
+            warnings.simplefilter("always")
+            client = self.simple_client(
+                "mongodb://host/?maxStalenessSeconds=1.5&readPreference=nearest"
+            )
+
+            # Option was ignored.
+            self.assertEqual(-1, client.read_preference.max_staleness)
+            self.assertIn("must be an integer", str(ctx[0]))
+
+    async def test_max_staleness_zero(self):
+        # Zero is too small.
+        with self.assertRaises(ValueError) as ctx:
+            await self.async_rs_or_single_client(maxStalenessSeconds=0, readPreference="nearest")
+
+        self.assertIn("must be a positive integer", str(ctx.exception))
+
+        with warnings.catch_warnings(record=True) as ctx:
+            warnings.simplefilter("always")
+            client = self.simple_client(
+                "mongodb://host/?maxStalenessSeconds=0&readPreference=nearest"
+            )
+
+            # Option was ignored.
+            self.assertEqual(-1, client.read_preference.max_staleness)
+            self.assertIn("must be a positive integer", str(ctx[0]))
+
+    @async_client_context.require_replica_set
+    async def test_last_write_date(self):
+        # From max-staleness-tests.rst, "Parse lastWriteDate".
+        client = await self.async_rs_or_single_client(heartbeatFrequencyMS=500)
+        await client.pymongo_test.test.insert_one({})
+        # Wait for the server description to be updated.
+        await asyncio.sleep(1)
+        server = await client._topology.select_server(writable_server_selector, _Op.TEST)
+        first = server.description.last_write_date
+        self.assertTrue(first)
+        # The first last_write_date may correspond to an internal server write,
+        # so sleep so that the next write does not occur within the same second.
+        await asyncio.sleep(1)
+        await client.pymongo_test.test.insert_one({})
+        # Wait for the server description to be updated.
+        await asyncio.sleep(1)
+        server = await client._topology.select_server(writable_server_selector, _Op.TEST)
+        second = server.description.last_write_date
+        assert first is not None
+
+        assert second is not None
+        self.assertGreater(second, first)
+        self.assertLess(second, first + 10)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/test/asynchronous/test_mongos_load_balancing.py b/test/asynchronous/test_mongos_load_balancing.py
new file mode 100644
index 0000000000..97170aa9e0
--- /dev/null
+++ b/test/asynchronous/test_mongos_load_balancing.py
@@ -0,0 +1,199 @@
+# Copyright 2015-present MongoDB, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+ +"""Test AsyncMongoClient's mongos load balancing using a mock.""" +from __future__ import annotations + +import asyncio +import sys +import threading +from test.asynchronous.helpers import ConcurrentRunner + +from pymongo.operations import _Op + +sys.path[0:0] = [""] + +from test.asynchronous import AsyncMockClientTest, async_client_context, connected, unittest +from test.asynchronous.pymongo_mocks import AsyncMockClient +from test.utils_shared import async_wait_until + +from pymongo.errors import AutoReconnect, InvalidOperation +from pymongo.server_selectors import writable_server_selector +from pymongo.topology_description import TOPOLOGY_TYPE + +_IS_SYNC = False + + +class SimpleOp(ConcurrentRunner): + def __init__(self, client): + super().__init__() + self.client = client + self.passed = False + + async def run(self): + await self.client.db.command("ping") + self.passed = True # No exception raised. + + +async def do_simple_op(client, ntasks): + tasks = [SimpleOp(client) for _ in range(ntasks)] + for t in tasks: + await t.start() + + for t in tasks: + await t.join() + + for t in tasks: + assert t.passed + + +async def writable_addresses(topology): + return { + server.description.address + for server in await topology.select_servers(writable_server_selector, _Op.TEST) + } + + +class TestMongosLoadBalancing(AsyncMockClientTest): + @async_client_context.require_connection + @async_client_context.require_no_load_balancer + async def asyncSetUp(self): + await super().asyncSetUp() + + def mock_client(self, **kwargs): + mock_client = AsyncMockClient( + standalones=[], + members=[], + mongoses=["a:1", "b:2", "c:3"], + host="a:1,b:2,c:3", + connect=False, + **kwargs, + ) + self.addAsyncCleanup(mock_client.aclose) + + # Latencies in seconds. + mock_client.mock_rtts["a:1"] = 0.020 + mock_client.mock_rtts["b:2"] = 0.025 + mock_client.mock_rtts["c:3"] = 0.045 + return mock_client + + async def test_lazy_connect(self): + # While connected() ensures we can trigger connection from the main + # thread and wait for the monitors, this test triggers connection from + # several threads at once to check for data races. + nthreads = 10 + client = self.mock_client() + self.assertEqual(0, len(client.nodes)) + + # Trigger initial connection. + await do_simple_op(client, nthreads) + await async_wait_until(lambda: len(client.nodes) == 3, "connect to all mongoses") + + async def test_failover(self): + ntasks = 10 + client = await connected(self.mock_client(localThresholdMS=0.001)) + await async_wait_until(lambda: len(client.nodes) == 3, "connect to all mongoses") + + # Our chosen mongos goes down. + client.kill_host("a:1") + + # Trigger failover to higher-latency nodes. AutoReconnect should be + # raised at most once in each thread. + passed = [] + + async def f(): + try: + await client.db.command("ping") + except AutoReconnect: + # Second attempt succeeds. + await client.db.command("ping") + + passed.append(True) + + tasks = [ConcurrentRunner(target=f) for _ in range(ntasks)] + for t in tasks: + await t.start() + + for t in tasks: + await t.join() + + self.assertEqual(ntasks, len(passed)) + + # Down host removed from list. + self.assertEqual(2, len(client.nodes)) + + async def test_local_threshold(self): + client = await connected(self.mock_client(localThresholdMS=30)) + self.assertEqual(30, client.options.local_threshold_ms) + await async_wait_until(lambda: len(client.nodes) == 3, "connect to all mongoses") + topology = client._topology + + # All are within a 30-ms latency window, see self.mock_client(). 
+ self.assertEqual({("a", 1), ("b", 2), ("c", 3)}, await writable_addresses(topology)) + + # No error + await client.admin.command("ping") + + client = await connected(self.mock_client(localThresholdMS=0)) + self.assertEqual(0, client.options.local_threshold_ms) + # No error + await client.db.command("ping") + # Our chosen mongos goes down. + client.kill_host("{}:{}".format(*next(iter(client.nodes)))) + try: + await client.db.command("ping") + except: + pass + + # We eventually connect to a new mongos. + async def connect_to_new_mongos(): + try: + return await client.db.command("ping") + except AutoReconnect: + pass + + await async_wait_until(connect_to_new_mongos, "connect to a new mongos") + + async def test_load_balancing(self): + # Although the server selection JSON tests already prove that + # select_servers works for sharded topologies, here we do an end-to-end + # test of discovering servers' round trip times and configuring + # localThresholdMS. + client = await connected(self.mock_client()) + await async_wait_until(lambda: len(client.nodes) == 3, "connect to all mongoses") + + # Prohibited for topology type Sharded. + with self.assertRaises(InvalidOperation): + await client.address + + topology = client._topology + self.assertEqual(TOPOLOGY_TYPE.Sharded, topology.description.topology_type) + + # a and b are within the 15-ms latency window, see self.mock_client(). + self.assertEqual({("a", 1), ("b", 2)}, await writable_addresses(topology)) + + client.mock_rtts["a:1"] = 0.045 + + # Discover only b is within latency window. + async def predicate(): + return {("b", 2)} == await writable_addresses(topology) + + await async_wait_until( + predicate, + 'discover server "a" is too far', + ) + + +if __name__ == "__main__": + unittest.main() diff --git a/test/asynchronous/test_monitor.py b/test/asynchronous/test_monitor.py new file mode 100644 index 0000000000..dde8976c06 --- /dev/null +++ b/test/asynchronous/test_monitor.py @@ -0,0 +1,125 @@ +# Copyright 2014-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Test the monitor module.""" +from __future__ import annotations + +import asyncio +import gc +import subprocess +import sys +import warnings +from functools import partial + +sys.path[0:0] = [""] + +from test.asynchronous import AsyncIntegrationTest, async_client_context, connected, unittest +from test.asynchronous.utils import ( + async_wait_until, +) +from test.utils_shared import ServerAndTopologyEventListener, gevent_monkey_patched + +from pymongo.periodic_executor import _EXECUTORS + +_IS_SYNC = False + + +def unregistered(ref): + gc.collect() + return ref not in _EXECUTORS + + +def get_executors(client): + executors = [] + for server in client._topology._servers.values(): + executors.append(server._monitor._executor) + executors.append(server._monitor._rtt_monitor._executor) + executors.append(client._kill_cursors_executor) + executors.append(client._topology._Topology__events_executor) + return [e for e in executors if e is not None] + + +class TestMonitor(AsyncIntegrationTest): + async def create_client(self): + listener = ServerAndTopologyEventListener() + client = await self.unmanaged_async_single_client(event_listeners=[listener]) + await connected(client) + return client + + @unittest.skipIf("PyPy" in sys.version, "PYTHON-5283 fails often on PyPy") + @unittest.skipIf( + gevent_monkey_patched(), "PYTHON-5516 Resources are not cleared when using gevent" + ) + async def test_cleanup_executors_on_client_del(self): + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("always") + client = await self.create_client() + executors = get_executors(client) + self.assertEqual(len(executors), 4) + + # Each executor stores a weakref to itself in _EXECUTORS. + executor_refs = [(r, r()._name) for r in _EXECUTORS.copy() if r() in executors] + + del executors + del client + + for ref, name in executor_refs: + await async_wait_until( + partial(unregistered, ref), f"unregister executor: {name}", timeout=5 + ) + + def resource_warning_caught(): + gc.collect() + for warning in w: + if ( + issubclass(warning.category, ResourceWarning) + and "Call AsyncMongoClient.close() to safely shut down your client and free up resources." 
+ in str(warning.message) + ): + return True + return False + + await async_wait_until(resource_warning_caught, "catch resource warning") + + async def test_cleanup_executors_on_client_close(self): + client = await self.create_client() + executors = get_executors(client) + self.assertEqual(len(executors), 4) + + await client.close() + + for executor in executors: + await async_wait_until( + lambda: executor._stopped, f"closed executor: {executor._name}", timeout=5 + ) + + @async_client_context.require_sync + def test_no_thread_start_runtime_err_on_shutdown(self): + """Test we silence noisy runtime errors fired when the AsyncMongoClient spawns a new thread + on process shutdown.""" + command = [ + sys.executable, + "-c", + "from pymongo import AsyncMongoClient; c = AsyncMongoClient()", + ] + completed_process: subprocess.CompletedProcess = subprocess.run( + command, capture_output=True + ) + + self.assertFalse(completed_process.stderr) + self.assertFalse(completed_process.stdout) + + +if __name__ == "__main__": + unittest.main() diff --git a/test/asynchronous/test_monitoring.py b/test/asynchronous/test_monitoring.py index eaad60beac..6a9a5b8da7 100644 --- a/test/asynchronous/test_monitoring.py +++ b/test/asynchronous/test_monitoring.py @@ -29,7 +29,7 @@ sanitize_cmd, unittest, ) -from test.utils import ( +from test.utils_shared import ( EventListener, OvertCommandListener, async_wait_until, @@ -40,7 +40,6 @@ from bson.son import SON from pymongo import CursorType, DeleteOne, InsertOne, UpdateOne, monitoring from pymongo.asynchronous.command_cursor import AsyncCommandCursor -from pymongo.asynchronous.helpers import anext from pymongo.errors import AutoReconnect, NotPrimaryError, OperationFailure from pymongo.read_preferences import ReadPreference from pymongo.write_concern import WriteConcern @@ -68,26 +67,26 @@ async def test_started_simple(self): started = self.listener.started_events[0] succeeded = self.listener.succeeded_events[0] self.assertEqual(0, len(self.listener.failed_events)) - self.assertTrue(isinstance(succeeded, monitoring.CommandSucceededEvent)) - self.assertTrue(isinstance(started, monitoring.CommandStartedEvent)) + self.assertIsInstance(succeeded, monitoring.CommandSucceededEvent) + self.assertIsInstance(started, monitoring.CommandStartedEvent) self.assertEqualCommand(SON([("ping", 1)]), started.command) self.assertEqual("ping", started.command_name) self.assertEqual(await self.client.address, started.connection_id) self.assertEqual("pymongo_test", started.database_name) - self.assertTrue(isinstance(started.request_id, int)) + self.assertIsInstance(started.request_id, int) async def test_succeeded_simple(self): await self.client.pymongo_test.command("ping") started = self.listener.started_events[0] succeeded = self.listener.succeeded_events[0] self.assertEqual(0, len(self.listener.failed_events)) - self.assertTrue(isinstance(started, monitoring.CommandStartedEvent)) - self.assertTrue(isinstance(succeeded, monitoring.CommandSucceededEvent)) + self.assertIsInstance(started, monitoring.CommandStartedEvent) + self.assertIsInstance(succeeded, monitoring.CommandSucceededEvent) self.assertEqual("ping", succeeded.command_name) self.assertEqual(await self.client.address, succeeded.connection_id) self.assertEqual(1, succeeded.reply.get("ok")) - self.assertTrue(isinstance(succeeded.request_id, int)) - self.assertTrue(isinstance(succeeded.duration_micros, int)) + self.assertIsInstance(succeeded.request_id, int) + self.assertIsInstance(succeeded.duration_micros, int) async def 
test_failed_simple(self): try: @@ -97,21 +96,21 @@ async def test_failed_simple(self): started = self.listener.started_events[0] failed = self.listener.failed_events[0] self.assertEqual(0, len(self.listener.succeeded_events)) - self.assertTrue(isinstance(started, monitoring.CommandStartedEvent)) - self.assertTrue(isinstance(failed, monitoring.CommandFailedEvent)) + self.assertIsInstance(started, monitoring.CommandStartedEvent) + self.assertIsInstance(failed, monitoring.CommandFailedEvent) self.assertEqual("oops!", failed.command_name) self.assertEqual(await self.client.address, failed.connection_id) self.assertEqual(0, failed.failure.get("ok")) - self.assertTrue(isinstance(failed.request_id, int)) - self.assertTrue(isinstance(failed.duration_micros, int)) + self.assertIsInstance(failed.request_id, int) + self.assertIsInstance(failed.duration_micros, int) async def test_find_one(self): await self.client.pymongo_test.test.find_one() started = self.listener.started_events[0] succeeded = self.listener.succeeded_events[0] self.assertEqual(0, len(self.listener.failed_events)) - self.assertTrue(isinstance(succeeded, monitoring.CommandSucceededEvent)) - self.assertTrue(isinstance(started, monitoring.CommandStartedEvent)) + self.assertIsInstance(succeeded, monitoring.CommandSucceededEvent) + self.assertIsInstance(started, monitoring.CommandStartedEvent) self.assertEqualCommand( SON([("find", "test"), ("filter", {}), ("limit", 1), ("singleBatch", True)]), started.command, @@ -119,7 +118,7 @@ async def test_find_one(self): self.assertEqual("find", started.command_name) self.assertEqual(await self.client.address, started.connection_id) self.assertEqual("pymongo_test", started.database_name) - self.assertTrue(isinstance(started.request_id, int)) + self.assertIsInstance(started.request_id, int) async def test_find_and_get_more(self): await self.client.pymongo_test.test.drop() @@ -132,7 +131,7 @@ async def test_find_and_get_more(self): started = self.listener.started_events[0] succeeded = self.listener.succeeded_events[0] self.assertEqual(0, len(self.listener.failed_events)) - self.assertTrue(isinstance(started, monitoring.CommandStartedEvent)) + self.assertIsInstance(started, monitoring.CommandStartedEvent) self.assertEqualCommand( SON( [("find", "test"), ("filter", {}), ("projection", {"_id": False}), ("batchSize", 4)] @@ -142,11 +141,11 @@ async def test_find_and_get_more(self): self.assertEqual("find", started.command_name) self.assertEqual(await self.client.address, started.connection_id) self.assertEqual("pymongo_test", started.database_name) - self.assertTrue(isinstance(started.request_id, int)) - self.assertTrue(isinstance(succeeded, monitoring.CommandSucceededEvent)) - self.assertTrue(isinstance(succeeded.duration_micros, int)) + self.assertIsInstance(started.request_id, int) + self.assertIsInstance(succeeded, monitoring.CommandSucceededEvent) + self.assertIsInstance(succeeded.duration_micros, int) self.assertEqual("find", succeeded.command_name) - self.assertTrue(isinstance(succeeded.request_id, int)) + self.assertIsInstance(succeeded.request_id, int) self.assertEqual(cursor.address, succeeded.connection_id) csr = succeeded.reply["cursor"] self.assertEqual(csr["id"], cursor_id) @@ -161,7 +160,7 @@ async def test_find_and_get_more(self): started = self.listener.started_events[0] succeeded = self.listener.succeeded_events[0] self.assertEqual(0, len(self.listener.failed_events)) - self.assertTrue(isinstance(started, monitoring.CommandStartedEvent)) + self.assertIsInstance(started, 
monitoring.CommandStartedEvent) self.assertEqualCommand( SON([("getMore", cursor_id), ("collection", "test"), ("batchSize", 4)]), started.command, @@ -169,11 +168,11 @@ async def test_find_and_get_more(self): self.assertEqual("getMore", started.command_name) self.assertEqual(await self.client.address, started.connection_id) self.assertEqual("pymongo_test", started.database_name) - self.assertTrue(isinstance(started.request_id, int)) - self.assertTrue(isinstance(succeeded, monitoring.CommandSucceededEvent)) - self.assertTrue(isinstance(succeeded.duration_micros, int)) + self.assertIsInstance(started.request_id, int) + self.assertIsInstance(succeeded, monitoring.CommandSucceededEvent) + self.assertIsInstance(succeeded.duration_micros, int) self.assertEqual("getMore", succeeded.command_name) - self.assertTrue(isinstance(succeeded.request_id, int)) + self.assertIsInstance(succeeded.request_id, int) self.assertEqual(cursor.address, succeeded.connection_id) csr = succeeded.reply["cursor"] self.assertEqual(csr["id"], cursor_id) @@ -196,16 +195,16 @@ async def test_find_with_explain(self): started = self.listener.started_events[0] succeeded = self.listener.succeeded_events[0] self.assertEqual(0, len(self.listener.failed_events)) - self.assertTrue(isinstance(started, monitoring.CommandStartedEvent)) + self.assertIsInstance(started, monitoring.CommandStartedEvent) self.assertEqualCommand(cmd, started.command) self.assertEqual("explain", started.command_name) self.assertEqual(await self.client.address, started.connection_id) self.assertEqual("pymongo_test", started.database_name) - self.assertTrue(isinstance(started.request_id, int)) - self.assertTrue(isinstance(succeeded, monitoring.CommandSucceededEvent)) - self.assertTrue(isinstance(succeeded.duration_micros, int)) + self.assertIsInstance(started.request_id, int) + self.assertIsInstance(succeeded, monitoring.CommandSucceededEvent) + self.assertIsInstance(succeeded.duration_micros, int) self.assertEqual("explain", succeeded.command_name) - self.assertTrue(isinstance(succeeded.request_id, int)) + self.assertIsInstance(succeeded.request_id, int) self.assertEqual(await self.client.address, succeeded.connection_id) self.assertEqual(res, succeeded.reply) @@ -227,16 +226,16 @@ async def _test_find_options(self, query, expected_cmd): started = self.listener.started_events[0] succeeded = self.listener.succeeded_events[0] self.assertEqual(0, len(self.listener.failed_events)) - self.assertTrue(isinstance(started, monitoring.CommandStartedEvent)) + self.assertIsInstance(started, monitoring.CommandStartedEvent) self.assertEqualCommand(expected_cmd, started.command) self.assertEqual("find", started.command_name) self.assertEqual(await self.client.address, started.connection_id) self.assertEqual("pymongo_test", started.database_name) - self.assertTrue(isinstance(started.request_id, int)) - self.assertTrue(isinstance(succeeded, monitoring.CommandSucceededEvent)) - self.assertTrue(isinstance(succeeded.duration_micros, int)) + self.assertIsInstance(started.request_id, int) + self.assertIsInstance(succeeded, monitoring.CommandSucceededEvent) + self.assertIsInstance(succeeded.duration_micros, int) self.assertEqual("find", succeeded.command_name) - self.assertTrue(isinstance(succeeded.request_id, int)) + self.assertIsInstance(succeeded.request_id, int) self.assertEqual(await self.client.address, succeeded.connection_id) finally: # Exhaust the cursor to avoid kill cursors. 
@@ -308,7 +307,7 @@ async def test_command_and_get_more(self): started = self.listener.started_events[0] succeeded = self.listener.succeeded_events[0] self.assertEqual(0, len(self.listener.failed_events)) - self.assertTrue(isinstance(started, monitoring.CommandStartedEvent)) + self.assertIsInstance(started, monitoring.CommandStartedEvent) self.assertEqualCommand( SON( [ @@ -322,11 +321,11 @@ async def test_command_and_get_more(self): self.assertEqual("aggregate", started.command_name) self.assertEqual(await self.client.address, started.connection_id) self.assertEqual("pymongo_test", started.database_name) - self.assertTrue(isinstance(started.request_id, int)) - self.assertTrue(isinstance(succeeded, monitoring.CommandSucceededEvent)) - self.assertTrue(isinstance(succeeded.duration_micros, int)) + self.assertIsInstance(started.request_id, int) + self.assertIsInstance(succeeded, monitoring.CommandSucceededEvent) + self.assertIsInstance(succeeded.duration_micros, int) self.assertEqual("aggregate", succeeded.command_name) - self.assertTrue(isinstance(succeeded.request_id, int)) + self.assertIsInstance(succeeded.request_id, int) self.assertEqual(cursor.address, succeeded.connection_id) expected_cursor = { "id": cursor_id, @@ -341,7 +340,7 @@ async def test_command_and_get_more(self): started = self.listener.started_events[0] succeeded = self.listener.succeeded_events[0] self.assertEqual(0, len(self.listener.failed_events)) - self.assertTrue(isinstance(started, monitoring.CommandStartedEvent)) + self.assertIsInstance(started, monitoring.CommandStartedEvent) self.assertEqualCommand( SON([("getMore", cursor_id), ("collection", "test"), ("batchSize", 4)]), started.command, @@ -349,11 +348,11 @@ async def test_command_and_get_more(self): self.assertEqual("getMore", started.command_name) self.assertEqual(await self.client.address, started.connection_id) self.assertEqual("pymongo_test", started.database_name) - self.assertTrue(isinstance(started.request_id, int)) - self.assertTrue(isinstance(succeeded, monitoring.CommandSucceededEvent)) - self.assertTrue(isinstance(succeeded.duration_micros, int)) + self.assertIsInstance(started.request_id, int) + self.assertIsInstance(succeeded, monitoring.CommandSucceededEvent) + self.assertIsInstance(succeeded.duration_micros, int) self.assertEqual("getMore", succeeded.command_name) - self.assertTrue(isinstance(succeeded.request_id, int)) + self.assertIsInstance(succeeded.request_id, int) self.assertEqual(cursor.address, succeeded.connection_id) expected_result = { "cursor": { @@ -381,18 +380,18 @@ async def test_get_more_failure(self): started = self.listener.started_events[0] self.assertEqual(0, len(self.listener.succeeded_events)) failed = self.listener.failed_events[0] - self.assertTrue(isinstance(started, monitoring.CommandStartedEvent)) + self.assertIsInstance(started, monitoring.CommandStartedEvent) self.assertEqualCommand( SON([("getMore", cursor_id), ("collection", "test")]), started.command ) self.assertEqual("getMore", started.command_name) self.assertEqual(await self.client.address, started.connection_id) self.assertEqual("pymongo_test", started.database_name) - self.assertTrue(isinstance(started.request_id, int)) - self.assertTrue(isinstance(failed, monitoring.CommandFailedEvent)) - self.assertTrue(isinstance(failed.duration_micros, int)) + self.assertIsInstance(started.request_id, int) + self.assertIsInstance(failed, monitoring.CommandFailedEvent) + self.assertIsInstance(failed.duration_micros, int) self.assertEqual("getMore", failed.command_name) - 
self.assertTrue(isinstance(failed.request_id, int)) + self.assertIsInstance(failed.request_id, int) self.assertEqual(cursor.address, failed.connection_id) self.assertEqual(0, failed.failure.get("ok")) @@ -412,13 +411,13 @@ async def test_not_primary_error(self): started = self.listener.started_events[0] failed = self.listener.failed_events[0] self.assertEqual(0, len(self.listener.succeeded_events)) - self.assertTrue(isinstance(started, monitoring.CommandStartedEvent)) - self.assertTrue(isinstance(failed, monitoring.CommandFailedEvent)) + self.assertIsInstance(started, monitoring.CommandStartedEvent) + self.assertIsInstance(failed, monitoring.CommandFailedEvent) self.assertEqual("findAndModify", failed.command_name) self.assertEqual(address, failed.connection_id) self.assertEqual(0, failed.failure.get("ok")) - self.assertTrue(isinstance(failed.request_id, int)) - self.assertTrue(isinstance(failed.duration_micros, int)) + self.assertIsInstance(failed.request_id, int) + self.assertIsInstance(failed.duration_micros, int) self.assertEqual(error, failed.failure) @async_client_context.require_no_mongos @@ -434,7 +433,7 @@ async def test_exhaust(self): started = self.listener.started_events[0] succeeded = self.listener.succeeded_events[0] self.assertEqual(0, len(self.listener.failed_events)) - self.assertTrue(isinstance(started, monitoring.CommandStartedEvent)) + self.assertIsInstance(started, monitoring.CommandStartedEvent) self.assertEqualCommand( SON( [("find", "test"), ("filter", {}), ("projection", {"_id": False}), ("batchSize", 5)] @@ -444,11 +443,11 @@ async def test_exhaust(self): self.assertEqual("find", started.command_name) self.assertEqual(cursor.address, started.connection_id) self.assertEqual("pymongo_test", started.database_name) - self.assertTrue(isinstance(started.request_id, int)) - self.assertTrue(isinstance(succeeded, monitoring.CommandSucceededEvent)) - self.assertTrue(isinstance(succeeded.duration_micros, int)) + self.assertIsInstance(started.request_id, int) + self.assertIsInstance(succeeded, monitoring.CommandSucceededEvent) + self.assertIsInstance(succeeded.duration_micros, int) self.assertEqual("find", succeeded.command_name) - self.assertTrue(isinstance(succeeded.request_id, int)) + self.assertIsInstance(succeeded.request_id, int) self.assertEqual(cursor.address, succeeded.connection_id) expected_result = { "cursor": { @@ -464,7 +463,7 @@ async def test_exhaust(self): tuple(await cursor.to_list()) self.assertEqual(0, len(self.listener.failed_events)) for event in self.listener.started_events: - self.assertTrue(isinstance(event, monitoring.CommandStartedEvent)) + self.assertIsInstance(event, monitoring.CommandStartedEvent) self.assertEqualCommand( SON([("getMore", cursor_id), ("collection", "test"), ("batchSize", 5)]), event.command, @@ -472,12 +471,12 @@ async def test_exhaust(self): self.assertEqual("getMore", event.command_name) self.assertEqual(cursor.address, event.connection_id) self.assertEqual("pymongo_test", event.database_name) - self.assertTrue(isinstance(event.request_id, int)) + self.assertIsInstance(event.request_id, int) for event in self.listener.succeeded_events: - self.assertTrue(isinstance(event, monitoring.CommandSucceededEvent)) - self.assertTrue(isinstance(event.duration_micros, int)) + self.assertIsInstance(event, monitoring.CommandSucceededEvent) + self.assertIsInstance(event.duration_micros, int) self.assertEqual("getMore", event.command_name) - self.assertTrue(isinstance(event.request_id, int)) + self.assertIsInstance(event.request_id, int) 
self.assertEqual(cursor.address, event.connection_id) # Last getMore receives a response with cursor id 0. self.assertEqual(0, self.listener.succeeded_events[-1].reply["cursor"]["id"]) @@ -495,7 +494,7 @@ async def test_kill_cursors(self): started = self.listener.started_events[0] succeeded = self.listener.succeeded_events[0] self.assertEqual(0, len(self.listener.failed_events)) - self.assertTrue(isinstance(started, monitoring.CommandStartedEvent)) + self.assertIsInstance(started, monitoring.CommandStartedEvent) # There could be more than one cursor_id here depending on # when the thread last ran. self.assertIn(cursor_id, started.command["cursors"]) @@ -503,18 +502,17 @@ async def test_kill_cursors(self): self.assertIs(type(started.connection_id), tuple) self.assertEqual(cursor.address, started.connection_id) self.assertEqual("pymongo_test", started.database_name) - self.assertTrue(isinstance(started.request_id, int)) - self.assertTrue(isinstance(succeeded, monitoring.CommandSucceededEvent)) - self.assertTrue(isinstance(succeeded.duration_micros, int)) + self.assertIsInstance(started.request_id, int) + self.assertIsInstance(succeeded, monitoring.CommandSucceededEvent) + self.assertIsInstance(succeeded.duration_micros, int) self.assertEqual("killCursors", succeeded.command_name) - self.assertTrue(isinstance(succeeded.request_id, int)) + self.assertIsInstance(succeeded.request_id, int) self.assertIs(type(succeeded.connection_id), tuple) self.assertEqual(cursor.address, succeeded.connection_id) # There could be more than one cursor_id here depending on # when the thread last ran. - self.assertTrue( - cursor_id in succeeded.reply["cursorsUnknown"] - or cursor_id in succeeded.reply["cursorsKilled"] + self.assertIn( + cursor_id, succeeded.reply["cursorsUnknown"] + succeeded.reply["cursorsKilled"] ) async def test_non_bulk_writes(self): @@ -1066,7 +1064,7 @@ async def test_write_errors(self): self.assertEqual(2, len(errors)) fields = {"index", "code", "errmsg"} for error in errors: - self.assertTrue(fields.issubset(set(error))) + self.assertLessEqual(fields, set(error)) async def test_first_batch_helper(self): # Regardless of server version and use of helpers._first_batch @@ -1088,8 +1086,8 @@ async def test_first_batch_helper(self): self.assertEqual(started.command_name, succeeded.command_name) self.assertEqual(started.request_id, succeeded.request_id) self.assertEqual(started.connection_id, succeeded.connection_id) - self.assertTrue("cursor" in succeeded.reply) - self.assertTrue("ok" in succeeded.reply) + self.assertIn("cursor", succeeded.reply) + self.assertIn("ok", succeeded.reply) self.listener.reset() @@ -1157,13 +1155,13 @@ async def test_simple(self): started = self.listener.started_events[0] succeeded = self.listener.succeeded_events[0] self.assertEqual(0, len(self.listener.failed_events)) - self.assertTrue(isinstance(succeeded, monitoring.CommandSucceededEvent)) - self.assertTrue(isinstance(started, monitoring.CommandStartedEvent)) + self.assertIsInstance(succeeded, monitoring.CommandSucceededEvent) + self.assertIsInstance(started, monitoring.CommandStartedEvent) self.assertEqualCommand(SON([("ping", 1)]), started.command) self.assertEqual("ping", started.command_name) self.assertEqual(await self.client.address, started.connection_id) self.assertEqual("pymongo_test", started.database_name) - self.assertTrue(isinstance(started.request_id, int)) + self.assertIsInstance(started.request_id, int) class AsyncTestEventClasses(unittest.IsolatedAsyncioTestCase): diff --git 
a/test/asynchronous/test_on_demand_csfle.py b/test/asynchronous/test_on_demand_csfle.py new file mode 100644 index 0000000000..55394ddeb8 --- /dev/null +++ b/test/asynchronous/test_on_demand_csfle.py @@ -0,0 +1,115 @@ +# Copyright 2022-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Test client side encryption with on demand credentials.""" +from __future__ import annotations + +import os +import sys +import unittest + +import pytest + +sys.path[0:0] = [""] + +from test.asynchronous import AsyncIntegrationTest, async_client_context + +from bson.codec_options import CodecOptions +from pymongo.asynchronous.encryption import ( + _HAVE_PYMONGOCRYPT, + AsyncClientEncryption, + EncryptionError, +) + +_IS_SYNC = False + +pytestmark = pytest.mark.kms + + +class TestonDemandGCPCredentials(AsyncIntegrationTest): + @unittest.skipUnless(_HAVE_PYMONGOCRYPT, "pymongocrypt is not installed") + @async_client_context.require_version_min(4, 2, -1) + async def asyncSetUp(self): + await super().asyncSetUp() + self.master_key = { + "projectId": "devprod-drivers", + "location": "global", + "keyRing": "key-ring-csfle", + "keyName": "key-name-csfle", + } + + @unittest.skipIf(not os.getenv("TEST_FLE_GCP_AUTO"), "Not testing FLE GCP auto") + async def test_01_failure(self): + if os.environ["SUCCESS"].lower() == "true": + self.skipTest("Expecting success") + self.client_encryption = AsyncClientEncryption( + kms_providers={"gcp": {}}, + key_vault_namespace="keyvault.datakeys", + key_vault_client=async_client_context.client, + codec_options=CodecOptions(), + ) + with self.assertRaises(EncryptionError): + await self.client_encryption.create_data_key("gcp", self.master_key) + + @unittest.skipIf(not os.getenv("TEST_FLE_GCP_AUTO"), "Not testing FLE GCP auto") + async def test_02_success(self): + if os.environ["SUCCESS"].lower() == "false": + self.skipTest("Expecting failure") + self.client_encryption = AsyncClientEncryption( + kms_providers={"gcp": {}}, + key_vault_namespace="keyvault.datakeys", + key_vault_client=async_client_context.client, + codec_options=CodecOptions(), + ) + await self.client_encryption.create_data_key("gcp", self.master_key) + + +class TestonDemandAzureCredentials(AsyncIntegrationTest): + @unittest.skipUnless(_HAVE_PYMONGOCRYPT, "pymongocrypt is not installed") + @async_client_context.require_version_min(4, 2, -1) + async def asyncSetUp(self): + await super().asyncSetUp() + self.master_key = { + "keyVaultEndpoint": os.environ["KEY_VAULT_ENDPOINT"], + "keyName": os.environ["KEY_NAME"], + } + + @unittest.skipIf(not os.getenv("TEST_FLE_AZURE_AUTO"), "Not testing FLE Azure auto") + async def test_01_failure(self): + if os.environ["SUCCESS"].lower() == "true": + self.skipTest("Expecting success") + self.client_encryption = AsyncClientEncryption( + kms_providers={"azure": {}}, + key_vault_namespace="keyvault.datakeys", + key_vault_client=async_client_context.client, + codec_options=CodecOptions(), + ) + with self.assertRaises(EncryptionError): + await 
self.client_encryption.create_data_key("azure", self.master_key)
+
+    @unittest.skipIf(not os.getenv("TEST_FLE_AZURE_AUTO"), "Not testing FLE Azure auto")
+    async def test_02_success(self):
+        if os.environ["SUCCESS"].lower() == "false":
+            self.skipTest("Expecting failure")
+        self.client_encryption = AsyncClientEncryption(
+            kms_providers={"azure": {}},
+            key_vault_namespace="keyvault.datakeys",
+            key_vault_client=async_client_context.client,
+            codec_options=CodecOptions(),
+        )
+        await self.client_encryption.create_data_key("azure", self.master_key)
+
+
+if __name__ == "__main__":
+    unittest.main(verbosity=2)
diff --git a/test/asynchronous/test_pooling.py b/test/asynchronous/test_pooling.py
new file mode 100644
index 0000000000..3193d9e3d5
--- /dev/null
+++ b/test/asynchronous/test_pooling.py
@@ -0,0 +1,614 @@
+# Copyright 2009-present MongoDB, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Test built-in connection pooling with threads."""
+from __future__ import annotations
+
+import asyncio
+import gc
+import random
+import socket
+import sys
+import time
+from test.asynchronous.utils import async_get_pool, async_joinall, flaky
+
+from bson.codec_options import DEFAULT_CODEC_OPTIONS
+from bson.son import SON
+from pymongo import AsyncMongoClient, message, timeout
+from pymongo.errors import AutoReconnect, ConnectionFailure, DuplicateKeyError
+from pymongo.hello import HelloCompat
+from pymongo.lock import _async_create_lock
+
+sys.path[0:0] = [""]
+
+from test.asynchronous import AsyncIntegrationTest, async_client_context, unittest
+from test.asynchronous.helpers import ConcurrentRunner
+from test.utils_shared import delay
+
+from pymongo.asynchronous.pool import Pool, PoolOptions
+from pymongo.socket_checker import SocketChecker
+
+_IS_SYNC = False
+
+
+N = 10
+DB = "pymongo-pooling-tests"
+
+
+async def gc_collect_until_done(tasks, timeout=60):
+    start = time.time()
+    running = list(tasks)
+    while running:
+        assert (time.time() - start) < timeout, "Tasks timed out"
+        for t in running:
+            await t.join(0.1)
+            if not t.is_alive():
+                running.remove(t)
+        gc.collect()
+
+
+class MongoTask(ConcurrentRunner):
+    """A thread/Task that uses an AsyncMongoClient."""
+
+    def __init__(self, client):
+        super().__init__()
+        self.daemon = True  # Don't hang whole test if task hangs.
+ self.client = client + self.db = self.client[DB] + self.passed = False + + async def run(self): + await self.run_mongo_thread() + self.passed = True + + async def run_mongo_thread(self): + raise NotImplementedError + + +class InsertOneAndFind(MongoTask): + async def run_mongo_thread(self): + for _ in range(N): + rand = random.randint(0, N) + _id = (await self.db.sf.insert_one({"x": rand})).inserted_id + assert rand == (await self.db.sf.find_one(_id))["x"] + + +class Unique(MongoTask): + async def run_mongo_thread(self): + for _ in range(N): + await self.db.unique.insert_one({}) # no error + + +class NonUnique(MongoTask): + async def run_mongo_thread(self): + for _ in range(N): + try: + await self.db.unique.insert_one({"_id": "jesse"}) + except DuplicateKeyError: + pass + else: + raise AssertionError("Should have raised DuplicateKeyError") + + +class SocketGetter(MongoTask): + """Utility for TestPooling. + + Checks out a socket and holds it forever. Used in + test_no_wait_queue_timeout. + """ + + def __init__(self, client, pool): + super().__init__(client) + self.state = "init" + self.pool = pool + self.sock = None + + async def run_mongo_thread(self): + self.state = "get_socket" + + # Call 'pin_cursor' so we can hold the socket. + async with self.pool.checkout() as sock: + sock.pin_cursor() + self.sock = sock + + self.state = "connection" + + async def release_conn(self): + if self.sock: + await self.sock.unpin() + self.sock = None + return True + return False + + +async def run_cases(client, cases): + tasks = [] + n_runs = 5 + + for case in cases: + for _i in range(n_runs): + t = case(client) + await t.start() + tasks.append(t) + + for t in tasks: + await t.join() + + for t in tasks: + assert t.passed, "%s.run() threw an exception" % repr(t) + + +class _TestPoolingBase(AsyncIntegrationTest): + """Base class for all connection-pool tests.""" + + @async_client_context.require_connection + async def asyncSetUp(self): + await super().asyncSetUp() + self.c = await self.async_rs_or_single_client() + db = self.c[DB] + await db.unique.drop() + await db.test.drop() + await db.unique.insert_one({"_id": "jesse"}) + await db.test.insert_many([{} for _ in range(10)]) + + async def create_pool(self, pair=None, *args, **kwargs): + if pair is None: + pair = (await async_client_context.host, await async_client_context.port) + # Start the pool with the correct ssl options. + pool_options = async_client_context.client._topology_settings.pool_options + kwargs["ssl_context"] = pool_options._ssl_context + kwargs["tls_allow_invalid_hostnames"] = pool_options.tls_allow_invalid_hostnames + kwargs["server_api"] = pool_options.server_api + pool = Pool(pair, PoolOptions(*args, **kwargs)) + await pool.ready() + return pool + + +class TestPooling(_TestPoolingBase): + async def test_max_pool_size_validation(self): + host, port = await async_client_context.host, await async_client_context.port + self.assertRaises(ValueError, AsyncMongoClient, host=host, port=port, maxPoolSize=-1) + + self.assertRaises(ValueError, AsyncMongoClient, host=host, port=port, maxPoolSize="foo") + + c = AsyncMongoClient(host=host, port=port, maxPoolSize=100, connect=False) + self.assertEqual(c.options.pool_options.max_pool_size, 100) + + async def test_no_disconnect(self): + await run_cases(self.c, [NonUnique, Unique, InsertOneAndFind]) + + async def test_pool_reuses_open_socket(self): + # Test Pool's _check_closed() method doesn't close a healthy socket. 
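+        # Setting _check_interval_seconds to 0 forces a liveness check on every
+        # checkout; a healthy connection must survive that check and be handed
+        # out again rather than discarded and replaced.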
+ cx_pool = await self.create_pool(max_pool_size=10) + cx_pool._check_interval_seconds = 0 # Always check. + async with cx_pool.checkout() as conn: + pass + + async with cx_pool.checkout() as new_connection: + self.assertEqual(conn, new_connection) + + self.assertEqual(1, len(cx_pool.conns)) + + async def test_get_socket_and_exception(self): + # get_socket() returns socket after a non-network error. + cx_pool = await self.create_pool(max_pool_size=1, wait_queue_timeout=1) + with self.assertRaises(ZeroDivisionError): + async with cx_pool.checkout() as conn: + 1 / 0 + + # Socket was returned, not closed. + async with cx_pool.checkout() as new_connection: + self.assertEqual(conn, new_connection) + + self.assertEqual(1, len(cx_pool.conns)) + + async def test_pool_removes_closed_socket(self): + # Test that Pool removes explicitly closed socket. + cx_pool = await self.create_pool() + + async with cx_pool.checkout() as conn: + # Use Connection's API to close the socket. + await conn.close_conn(None) + + self.assertEqual(0, len(cx_pool.conns)) + + async def test_pool_removes_dead_socket(self): + # Test that Pool removes dead socket and the socket doesn't return + # itself PYTHON-344 + cx_pool = await self.create_pool(max_pool_size=1, wait_queue_timeout=1) + cx_pool._check_interval_seconds = 0 # Always check. + + async with cx_pool.checkout() as conn: + # Simulate a closed socket without telling the Connection it's + # closed. + await conn.conn.close() + self.assertTrue(conn.conn_closed()) + + async with cx_pool.checkout() as new_connection: + self.assertEqual(0, len(cx_pool.conns)) + self.assertNotEqual(conn, new_connection) + + self.assertEqual(1, len(cx_pool.conns)) + + # Semaphore was released. + async with cx_pool.checkout(): + pass + + async def test_socket_closed(self): + s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + s.connect((await async_client_context.host, await async_client_context.port)) + socket_checker = SocketChecker() + self.assertFalse(socket_checker.socket_closed(s)) + s.close() + self.assertTrue(socket_checker.socket_closed(s)) + + async def test_socket_checker(self): + s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + s.connect((await async_client_context.host, await async_client_context.port)) + socket_checker = SocketChecker() + # Socket has nothing to read. + self.assertFalse(socket_checker.select(s, read=True)) + self.assertFalse(socket_checker.select(s, read=True, timeout=0)) + self.assertFalse(socket_checker.select(s, read=True, timeout=0.05)) + # Socket is writable. + self.assertTrue(socket_checker.select(s, write=True, timeout=None)) + self.assertTrue(socket_checker.select(s, write=True)) + self.assertTrue(socket_checker.select(s, write=True, timeout=0)) + self.assertTrue(socket_checker.select(s, write=True, timeout=0.05)) + # Make the socket readable + _, msg, _ = message._query( + 0, "admin.$cmd", 0, -1, SON([("ping", 1)]), None, DEFAULT_CODEC_OPTIONS + ) + s.sendall(msg) + # Block until the socket is readable. + self.assertTrue(socket_checker.select(s, read=True, timeout=None)) + self.assertTrue(socket_checker.select(s, read=True)) + self.assertTrue(socket_checker.select(s, read=True, timeout=0)) + self.assertTrue(socket_checker.select(s, read=True, timeout=0.05)) + # Socket is still writable. 
+ self.assertTrue(socket_checker.select(s, write=True, timeout=None)) + self.assertTrue(socket_checker.select(s, write=True)) + self.assertTrue(socket_checker.select(s, write=True, timeout=0)) + self.assertTrue(socket_checker.select(s, write=True, timeout=0.05)) + s.close() + self.assertTrue(socket_checker.socket_closed(s)) + + async def test_return_socket_after_reset(self): + pool = await self.create_pool() + async with pool.checkout() as sock: + self.assertEqual(pool.active_sockets, 1) + self.assertEqual(pool.operation_count, 1) + await pool.reset() + + self.assertTrue(sock.closed) + self.assertEqual(0, len(pool.conns)) + self.assertEqual(pool.active_sockets, 0) + self.assertEqual(pool.operation_count, 0) + + async def test_pool_check(self): + # Test that Pool recovers from two connection failures in a row. + # This exercises code at the end of Pool._check(). + cx_pool = await self.create_pool(max_pool_size=1, connect_timeout=1, wait_queue_timeout=1) + cx_pool._check_interval_seconds = 0 # Always check. + self.addAsyncCleanup(cx_pool.close) + + async with cx_pool.checkout() as conn: + # Simulate a closed socket without telling the Connection it's + # closed. + await conn.conn.close() + + # Swap pool's address with a bad one. + address, cx_pool.address = cx_pool.address, ("foo.com", 1234) + with self.assertRaises(AutoReconnect): + async with cx_pool.checkout(): + pass + + # Back to normal, semaphore was correctly released. + cx_pool.address = address + async with cx_pool.checkout(): + pass + + async def test_wait_queue_timeout(self): + wait_queue_timeout = 2 # Seconds + pool = await self.create_pool(max_pool_size=1, wait_queue_timeout=wait_queue_timeout) + self.addAsyncCleanup(pool.close) + + async with pool.checkout(): + start = time.time() + with self.assertRaises(ConnectionFailure): + async with pool.checkout(): + pass + + duration = time.time() - start + self.assertLess( + abs(wait_queue_timeout - duration), + 1, + f"Waited {duration:.2f} seconds for a socket, expected {wait_queue_timeout:f}", + ) + + async def test_no_wait_queue_timeout(self): + # Verify get_socket() with no wait_queue_timeout blocks forever. + pool = await self.create_pool(max_pool_size=1) + self.addAsyncCleanup(pool.close) + + # Reach max_size. + async with pool.checkout() as s1: + t = SocketGetter(self.c, pool) + await t.start() + while t.state != "get_socket": + await asyncio.sleep(0.1) + + await asyncio.sleep(1) + self.assertEqual(t.state, "get_socket") + + while t.state != "connection": + await asyncio.sleep(0.1) + + self.assertEqual(t.state, "connection") + self.assertEqual(t.sock, s1) + # Cleanup + await t.release_conn() + await t.join() + await pool.close() + + async def test_checkout_more_than_max_pool_size(self): + pool = await self.create_pool(max_pool_size=2) + + socks = [] + for _ in range(2): + # Call 'pin_cursor' so we can hold the socket. 
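+            # Pinning keeps the connection checked out even after the
+            # 'async with' block exits, so both pool slots stay occupied
+            # and the getters started below all block in "get_socket".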
+ async with pool.checkout() as sock: + sock.pin_cursor() + socks.append(sock) + + tasks = [] + for _ in range(10): + t = SocketGetter(self.c, pool) + await t.start() + tasks.append(t) + await asyncio.sleep(1) + for t in tasks: + self.assertEqual(t.state, "get_socket") + # Cleanup + for socket_info in socks: + await socket_info.unpin() + while tasks: + to_remove = [] + for t in tasks: + if await t.release_conn(): + to_remove.append(t) + await t.join() + for t in to_remove: + tasks.remove(t) + await asyncio.sleep(0.05) + await pool.close() + + async def test_maxConnecting(self): + client = await self.async_rs_or_single_client() + await self.client.test.test.insert_one({}) + self.addAsyncCleanup(self.client.test.test.delete_many, {}) + pool = await async_get_pool(client) + docs = [] + + # Run 50 short running operations + async def find_one(): + docs.append(await client.test.test.find_one({})) + + tasks = [ConcurrentRunner(target=find_one) for _ in range(50)] + for task in tasks: + await task.start() + for task in tasks: + await task.join(10) + + self.assertEqual(len(docs), 50) + self.assertLessEqual(len(pool.conns), 50) + # TLS and auth make connection establishment more expensive than + # the query which leads to more threads hitting maxConnecting. + # The end result is fewer total connections and better latency. + if async_client_context.tls and async_client_context.auth_enabled: + self.assertLessEqual(len(pool.conns), 30) + else: + self.assertLessEqual(len(pool.conns), 50) + # MongoDB 4.4.1 with auth + ssl: + # maxConnecting = 2: 6 connections in ~0.231+ seconds + # maxConnecting = unbounded: 50 connections in ~0.642+ seconds + # + # MongoDB 4.4.1 with no-auth no-ssl Python 3.8: + # maxConnecting = 2: 15-22 connections in ~0.108+ seconds + # maxConnecting = unbounded: 30+ connections in ~0.140+ seconds + print(len(pool.conns)) + + @async_client_context.require_failCommand_appName + async def test_csot_timeout_message(self): + client = await self.async_rs_or_single_client(appName="connectionTimeoutApp") + # Mock an operation failing due to pymongo.timeout(). + mock_connection_timeout = { + "configureFailPoint": "failCommand", + "mode": "alwaysOn", + "data": { + "blockConnection": True, + "blockTimeMS": 1000, + "failCommands": ["find"], + "appName": "connectionTimeoutApp", + }, + } + + await client.db.t.insert_one({"x": 1}) + + async with self.fail_point(mock_connection_timeout): + with self.assertRaises(Exception) as error: + with timeout(0.5): + await client.db.t.find_one({"$where": delay(2)}) + + self.assertIn("(configured timeouts: timeoutMS: 500.0ms", str(error.exception)) + + @async_client_context.require_failCommand_appName + async def test_socket_timeout_message(self): + client = await self.async_rs_or_single_client( + socketTimeoutMS=500, appName="connectionTimeoutApp" + ) + # Mock an operation failing due to socketTimeoutMS. 
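+        # blockTimeMS (1000) exceeds socketTimeoutMS (500), so the find below
+        # fails with a network timeout whose message names the configured
+        # socketTimeoutMS and connectTimeoutMS values.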
+ mock_connection_timeout = { + "configureFailPoint": "failCommand", + "mode": "alwaysOn", + "data": { + "blockConnection": True, + "blockTimeMS": 1000, + "failCommands": ["find"], + "appName": "connectionTimeoutApp", + }, + } + + await client.db.t.insert_one({"x": 1}) + + async with self.fail_point(mock_connection_timeout): + with self.assertRaises(Exception) as error: + await client.db.t.find_one({"$where": delay(2)}) + + self.assertIn( + "(configured timeouts: socketTimeoutMS: 500.0ms, connectTimeoutMS: 20000.0ms)", + str(error.exception), + ) + + @async_client_context.require_failCommand_appName + async def test_connection_timeout_message(self): + # Mock a connection creation failing due to timeout. + mock_connection_timeout = { + "configureFailPoint": "failCommand", + "mode": "alwaysOn", + "data": { + "blockConnection": True, + "blockTimeMS": 1000, + "failCommands": [HelloCompat.LEGACY_CMD, "hello"], + "appName": "connectionTimeoutApp", + }, + } + + client = await self.async_rs_or_single_client( + connectTimeoutMS=500, + socketTimeoutMS=500, + appName="connectionTimeoutApp", + heartbeatFrequencyMS=1000000, + ) + await client.admin.command("ping") + pool = await async_get_pool(client) + await pool.reset_without_pause() + async with self.fail_point(mock_connection_timeout): + with self.assertRaises(Exception) as error: + await client.admin.command("ping") + + self.assertIn( + "(configured timeouts: socketTimeoutMS: 500.0ms, connectTimeoutMS: 500.0ms)", + str(error.exception), + ) + + +class TestPoolMaxSize(_TestPoolingBase): + async def test_max_pool_size(self): + max_pool_size = 4 + c = await self.async_rs_or_single_client(maxPoolSize=max_pool_size) + collection = c[DB].test + + # Need one document. + await collection.drop() + await collection.insert_one({}) + + # ntasks had better be much larger than max_pool_size to ensure that + # max_pool_size connections are actually required at some point in this + # test's execution. + cx_pool = await async_get_pool(c) + ntasks = 10 + tasks = [] + lock = _async_create_lock() + self.n_passed = 0 + + async def f(): + for _ in range(5): + await collection.find_one({"$where": delay(0.1)}) + assert len(cx_pool.conns) <= max_pool_size + + async with lock: + self.n_passed += 1 + + for _i in range(ntasks): + t = ConcurrentRunner(target=f) + tasks.append(t) + await t.start() + + await async_joinall(tasks) + self.assertEqual(ntasks, self.n_passed) + self.assertGreater(len(cx_pool.conns), 1) + self.assertEqual(0, cx_pool.requests) + + async def test_max_pool_size_none(self): + c = await self.async_rs_or_single_client(maxPoolSize=None) + collection = c[DB].test + + # Need one document. 
+ await collection.drop() + await collection.insert_one({}) + + cx_pool = await async_get_pool(c) + ntasks = 10 + tasks = [] + lock = _async_create_lock() + self.n_passed = 0 + + async def f(): + for _ in range(5): + await collection.find_one({"$where": delay(0.1)}) + + async with lock: + self.n_passed += 1 + + for _i in range(ntasks): + t = ConcurrentRunner(target=f) + tasks.append(t) + await t.start() + + await async_joinall(tasks) + self.assertEqual(ntasks, self.n_passed) + self.assertGreater(len(cx_pool.conns), 1) + self.assertEqual(cx_pool.max_pool_size, float("inf")) + + async def test_max_pool_size_zero(self): + c = await self.async_rs_or_single_client(maxPoolSize=0) + pool = await async_get_pool(c) + self.assertEqual(pool.max_pool_size, float("inf")) + + async def test_max_pool_size_with_connection_failure(self): + # The pool acquires its semaphore before attempting to connect; ensure + # it releases the semaphore on connection failure. + test_pool = Pool( + ("somedomainthatdoesntexist.org", 27017), + PoolOptions(max_pool_size=1, connect_timeout=1, socket_timeout=1, wait_queue_timeout=1), + ) + await test_pool.ready() + + # First call to get_socket fails; if pool doesn't release its semaphore + # then the second call raises "ConnectionFailure: Timed out waiting for + # socket from pool" instead of AutoReconnect. + for _i in range(2): + with self.assertRaises(AutoReconnect) as context: + async with test_pool.checkout(): + pass + + # Testing for AutoReconnect instead of ConnectionFailure, above, + # is sufficient right *now* to catch a semaphore leak. But that + # seems error-prone, so check the message too. + self.assertNotIn("waiting for socket from pool", str(context.exception)) + + +if __name__ == "__main__": + unittest.main() diff --git a/test/asynchronous/test_read_concern.py b/test/asynchronous/test_read_concern.py new file mode 100644 index 0000000000..8659bf80b2 --- /dev/null +++ b/test/asynchronous/test_read_concern.py @@ -0,0 +1,122 @@ +# Copyright 2015-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Test the read_concern module.""" +from __future__ import annotations + +import sys +import unittest + +sys.path[0:0] = [""] + +from test.asynchronous import AsyncIntegrationTest, async_client_context +from test.utils_shared import OvertCommandListener + +from bson.son import SON +from pymongo.errors import OperationFailure +from pymongo.read_concern import ReadConcern + +_IS_SYNC = False + + +class TestReadConcern(AsyncIntegrationTest): + listener: OvertCommandListener + + @async_client_context.require_connection + async def asyncSetUp(self): + await super().asyncSetUp() + self.listener = OvertCommandListener() + self.client = await self.async_rs_or_single_client(event_listeners=[self.listener]) + self.db = self.client.pymongo_test + await async_client_context.client.pymongo_test.create_collection("coll") + + async def asyncTearDown(self): + await async_client_context.client.pymongo_test.drop_collection("coll") + + def test_read_concern(self): + rc = ReadConcern() + self.assertIsNone(rc.level) + self.assertTrue(rc.ok_for_legacy) + + rc = ReadConcern("majority") + self.assertEqual("majority", rc.level) + self.assertFalse(rc.ok_for_legacy) + + rc = ReadConcern("local") + self.assertEqual("local", rc.level) + self.assertTrue(rc.ok_for_legacy) + + self.assertRaises(TypeError, ReadConcern, 42) + + async def test_read_concern_uri(self): + uri = f"mongodb://{await async_client_context.pair}/?readConcernLevel=majority" + client = await self.async_rs_or_single_client(uri, connect=False) + self.assertEqual(ReadConcern("majority"), client.read_concern) + + async def test_invalid_read_concern(self): + coll = self.db.get_collection("coll", read_concern=ReadConcern("unknown")) + # We rely on the server to validate read concern. + with self.assertRaises(OperationFailure): + await coll.find_one() + + async def test_find_command(self): + # readConcern not sent in command if not specified. + coll = self.db.coll + await coll.find({"field": "value"}).to_list() + self.assertNotIn("readConcern", self.listener.started_events[0].command) + + self.listener.reset() + + # Explicitly set readConcern to 'local'. + coll = self.db.get_collection("coll", read_concern=ReadConcern("local")) + await coll.find({"field": "value"}).to_list() + self.assertEqualCommand( + SON( + [ + ("find", "coll"), + ("filter", {"field": "value"}), + ("readConcern", {"level": "local"}), + ] + ), + self.listener.started_events[0].command, + ) + + async def test_command_cursor(self): + # readConcern not sent in command if not specified. + coll = self.db.coll + await (await coll.aggregate([{"$match": {"field": "value"}}])).to_list() + self.assertNotIn("readConcern", self.listener.started_events[0].command) + + self.listener.reset() + + # Explicitly set readConcern to 'local'. + coll = self.db.get_collection("coll", read_concern=ReadConcern("local")) + await (await coll.aggregate([{"$match": {"field": "value"}}])).to_list() + self.assertEqual({"level": "local"}, self.listener.started_events[0].command["readConcern"]) + + async def test_aggregate_out(self): + coll = self.db.get_collection("coll", read_concern=ReadConcern("local")) + await ( + await coll.aggregate([{"$match": {"field": "value"}}, {"$out": "output_collection"}]) + ).to_list() + + # Aggregate with $out supports readConcern MongoDB 4.2 onwards. 
+ if async_client_context.version >= (4, 1): + self.assertIn("readConcern", self.listener.started_events[0].command) + else: + self.assertNotIn("readConcern", self.listener.started_events[0].command) + + +if __name__ == "__main__": + unittest.main() diff --git a/test/asynchronous/test_read_preferences.py b/test/asynchronous/test_read_preferences.py new file mode 100644 index 0000000000..d18887da40 --- /dev/null +++ b/test/asynchronous/test_read_preferences.py @@ -0,0 +1,742 @@ +# Copyright 2011-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Test the replica_set_connection module.""" +from __future__ import annotations + +import contextlib +import copy +import pickle +import random +import sys +from typing import Any + +from pymongo.operations import _Op + +sys.path[0:0] = [""] + +from test.asynchronous import ( + AsyncIntegrationTest, + SkipTest, + async_client_context, + connected, + unittest, +) +from test.utils_shared import ( + OvertCommandListener, + _ignore_deprecations, + async_wait_until, + one, +) +from test.version import Version + +from bson.son import SON +from pymongo.asynchronous.mongo_client import AsyncMongoClient +from pymongo.errors import ConfigurationError, OperationFailure +from pymongo.message import _maybe_add_read_preference +from pymongo.read_preferences import ( + MovingAverage, + Nearest, + Primary, + PrimaryPreferred, + ReadPreference, + Secondary, + SecondaryPreferred, +) +from pymongo.server_description import ServerDescription +from pymongo.server_selectors import Selection, readable_server_selector +from pymongo.server_type import SERVER_TYPE +from pymongo.write_concern import WriteConcern + +_IS_SYNC = False + + +class TestSelections(AsyncIntegrationTest): + @async_client_context.require_connection + async def test_bool(self): + client = await self.async_single_client() + + async def predicate(): + return await client.address + + await async_wait_until(predicate, "discover primary") + selection = Selection.from_topology_description(client._topology.description) + + self.assertTrue(selection) + self.assertFalse(selection.with_server_descriptions([])) + + +class TestReadPreferenceObjects(unittest.TestCase): + prefs = [ + Primary(), + PrimaryPreferred(), + Secondary(), + Nearest(tag_sets=[{"a": 1}, {"b": 2}]), + SecondaryPreferred(max_staleness=30), + ] + + def test_pickle(self): + for pref in self.prefs: + self.assertEqual(pref, pickle.loads(pickle.dumps(pref))) + + def test_copy(self): + for pref in self.prefs: + self.assertEqual(pref, copy.copy(pref)) + + def test_deepcopy(self): + for pref in self.prefs: + self.assertEqual(pref, copy.deepcopy(pref)) + + +class TestReadPreferencesBase(AsyncIntegrationTest): + @async_client_context.require_secondaries_count(1) + async def asyncSetUp(self): + await super().asyncSetUp() + # Insert some data so we can use cursors in read_from_which_host + await self.client.pymongo_test.test.drop() + await self.client.get_database( + "pymongo_test", write_concern=WriteConcern(w=async_client_context.w) + 
).test.insert_many([{"_id": i} for i in range(10)]) + + self.addAsyncCleanup(self.client.pymongo_test.test.drop) + + async def read_from_which_host(self, client): + """Do a find() on the client and return which host was used""" + cursor = client.pymongo_test.test.find() + await anext(cursor) + return cursor.address + + async def read_from_which_kind(self, client): + """Do a find() on the client and return 'primary' or 'secondary' + depending on which the client used. + """ + address = await self.read_from_which_host(client) + if address == await client.primary: + return "primary" + elif address in await client.secondaries: + return "secondary" + else: + self.fail( + f"Cursor used address {address}, expected either primary " + f"{client.primary} or secondaries {client.secondaries}" + ) + + async def assertReadsFrom(self, expected, **kwargs): + c = await self.async_rs_client(**kwargs) + + async def predicate(): + return len(c.nodes - await c.arbiters) == async_client_context.w + + await async_wait_until(predicate, "discovered all nodes") + + used = await self.read_from_which_kind(c) + self.assertEqual(expected, used, f"Cursor used {used}, expected {expected}") + + +class TestSingleSecondaryOk(TestReadPreferencesBase): + async def test_reads_from_secondary(self): + host, port = next(iter(await self.client.secondaries)) + # Direct connection to a secondary. + client = await self.async_single_client(host, port) + self.assertFalse(await client.is_primary) + + # Regardless of read preference, we should be able to do + # "reads" with a direct connection to a secondary. + # See server-selection.rst#topology-type-single. + self.assertEqual(client.read_preference, ReadPreference.PRIMARY) + + db = client.pymongo_test + coll = db.test + + # Test find and find_one. + self.assertIsNotNone(await coll.find_one()) + self.assertEqual(10, len(await coll.find().to_list())) + + # Test some database helpers. + self.assertIsNotNone(await db.list_collection_names()) + self.assertIsNotNone(await db.validate_collection("test")) + self.assertIsNotNone(await db.command("ping")) + + # Test some collection helpers. 
+ self.assertEqual(10, await coll.count_documents({})) + self.assertEqual(10, len(await coll.distinct("_id"))) + self.assertIsNotNone(await coll.aggregate([])) + self.assertIsNotNone(await coll.index_information()) + + +class TestReadPreferences(TestReadPreferencesBase): + async def test_mode_validation(self): + for mode in ( + ReadPreference.PRIMARY, + ReadPreference.PRIMARY_PREFERRED, + ReadPreference.SECONDARY, + ReadPreference.SECONDARY_PREFERRED, + ReadPreference.NEAREST, + ): + self.assertEqual( + mode, (await self.async_rs_client(read_preference=mode)).read_preference + ) + + with self.assertRaises(TypeError): + await self.async_rs_client(read_preference="foo") + + async def test_tag_sets_validation(self): + S = Secondary(tag_sets=[{}]) + self.assertEqual( + [{}], (await self.async_rs_client(read_preference=S)).read_preference.tag_sets + ) + + S = Secondary(tag_sets=[{"k": "v"}]) + self.assertEqual( + [{"k": "v"}], (await self.async_rs_client(read_preference=S)).read_preference.tag_sets + ) + + S = Secondary(tag_sets=[{"k": "v"}, {}]) + self.assertEqual( + [{"k": "v"}, {}], + (await self.async_rs_client(read_preference=S)).read_preference.tag_sets, + ) + + self.assertRaises(ValueError, Secondary, tag_sets=[]) + + # One dict not ok, must be a list of dicts + self.assertRaises(TypeError, Secondary, tag_sets={"k": "v"}) + + self.assertRaises(TypeError, Secondary, tag_sets="foo") + + self.assertRaises(TypeError, Secondary, tag_sets=["foo"]) + + async def test_threshold_validation(self): + self.assertEqual( + 17, + ( + await self.async_rs_client(localThresholdMS=17, connect=False) + ).options.local_threshold_ms, + ) + + self.assertEqual( + 42, + ( + await self.async_rs_client(localThresholdMS=42, connect=False) + ).options.local_threshold_ms, + ) + + self.assertEqual( + 666, + ( + await self.async_rs_client(localThresholdMS=666, connect=False) + ).options.local_threshold_ms, + ) + + self.assertEqual( + 0, + ( + await self.async_rs_client(localThresholdMS=0, connect=False) + ).options.local_threshold_ms, + ) + + with self.assertRaises(ValueError): + await self.async_rs_client(localthresholdms=-1) + + async def test_zero_latency(self): + ping_times: set = set() + # Generate unique ping times. 
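+        # Unique ping times ensure that, with localThresholdMS=0, exactly one
+        # member is "nearest", so every read below should use the same host.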
+ while len(ping_times) < len(self.client.nodes): + ping_times.add(random.random()) + for ping_time, host in zip(ping_times, self.client.nodes): + ServerDescription._host_to_round_trip_time[host] = ping_time + try: + client = await connected( + await self.async_rs_client(readPreference="nearest", localThresholdMS=0) + ) + await async_wait_until( + lambda: client.nodes == self.client.nodes, "discovered all nodes" + ) + host = await self.read_from_which_host(client) + for _ in range(5): + self.assertEqual(host, await self.read_from_which_host(client)) + finally: + ServerDescription._host_to_round_trip_time.clear() + + async def test_primary(self): + await self.assertReadsFrom("primary", read_preference=ReadPreference.PRIMARY) + + async def test_primary_with_tags(self): + # Tags not allowed with PRIMARY + with self.assertRaises(ConfigurationError): + await self.async_rs_client(tag_sets=[{"dc": "ny"}]) + + async def test_primary_preferred(self): + await self.assertReadsFrom("primary", read_preference=ReadPreference.PRIMARY_PREFERRED) + + async def test_secondary(self): + await self.assertReadsFrom("secondary", read_preference=ReadPreference.SECONDARY) + + async def test_secondary_preferred(self): + await self.assertReadsFrom("secondary", read_preference=ReadPreference.SECONDARY_PREFERRED) + + async def test_nearest(self): + # With high localThresholdMS, expect to read from any + # member + c = await self.async_rs_client( + read_preference=ReadPreference.NEAREST, localThresholdMS=10000 + ) # 10 seconds + + data_members = {await self.client.primary} | await self.client.secondaries + + # This is a probabilistic test; track which members we've read from so + # far, and keep reading until we've used all the members or give up. + # Chance of using only 2 of 3 members 10k times if there's no bug = + # 3 * (2/3)**10000, very low. 
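+        # (That figure is a union bound: each of the three members is
+        # individually unused with probability (2/3)**10000.)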
+ used: set = set() + i = 0 + while data_members.difference(used) and i < 10000: + address = await self.read_from_which_host(c) + used.add(address) + i += 1 + + not_used = data_members.difference(used) + latencies = ", ".join( + "%s: %sms" % (server.description.address, server.description.round_trip_time) + for server in await (await c._get_topology()).select_servers( + readable_server_selector, _Op.TEST + ) + ) + + self.assertFalse( + not_used, + "Expected to use primary and all secondaries for mode NEAREST," + f" but didn't use {not_used}\nlatencies: {latencies}", + ) + + +class ReadPrefTester(AsyncMongoClient): + def __init__(self, *args, **kwargs): + self.has_read_from = set() + client_options = async_client_context.client_options + client_options.update(kwargs) + super().__init__(*args, **client_options) + + async def _conn_for_reads(self, read_preference, session, operation): + context = await super()._conn_for_reads(read_preference, session, operation) + return context + + @contextlib.asynccontextmanager + async def _conn_from_server(self, read_preference, server, session): + context = super()._conn_from_server(read_preference, server, session) + async with context as (conn, read_preference): + await self.record_a_read(conn.address) + yield conn, read_preference + + async def record_a_read(self, address): + server = await (await self._get_topology()).select_server_by_address(address, _Op.TEST, 0) + self.has_read_from.add(server) + + +_PREF_MAP = [ + (Primary, SERVER_TYPE.RSPrimary), + (PrimaryPreferred, SERVER_TYPE.RSPrimary), + (Secondary, SERVER_TYPE.RSSecondary), + (SecondaryPreferred, SERVER_TYPE.RSSecondary), + (Nearest, "any"), +] + + +class TestCommandAndReadPreference(AsyncIntegrationTest): + c: ReadPrefTester + client_version: Version + + @async_client_context.require_secondaries_count(1) + async def asyncSetUp(self): + await super().asyncSetUp() + self.c = ReadPrefTester( + # Ignore round trip times, to test ReadPreference modes only. + localThresholdMS=1000 * 1000, + ) + self.client_version = await Version.async_from_client(self.c) + # mapReduce fails if the collection does not exist. 
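+        # (mapReduce itself is no longer supported by PyMongo; the insert below
+        # also guarantees the helpers under test have a collection to read.)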
+ coll = self.c.pymongo_test.get_collection( + "test", write_concern=WriteConcern(w=async_client_context.w) + ) + await coll.insert_one({}) + + async def asyncTearDown(self): + await self.c.drop_database("pymongo_test") + await self.c.close() + + async def executed_on_which_server(self, client, fn, *args, **kwargs): + """Execute fn(*args, **kwargs) and return the Server instance used.""" + client.has_read_from.clear() + await fn(*args, **kwargs) + self.assertEqual(1, len(client.has_read_from)) + return one(client.has_read_from) + + async def assertExecutedOn(self, server_type, client, fn, *args, **kwargs): + server = await self.executed_on_which_server(client, fn, *args, **kwargs) + self.assertEqual( + SERVER_TYPE._fields[server_type], SERVER_TYPE._fields[server.description.server_type] + ) + + async def _test_fn(self, server_type, fn): + for _ in range(10): + if server_type == "any": + used = set() + for _ in range(1000): + server = await self.executed_on_which_server(self.c, fn) + used.add(server.description.address) + if len(used) == len(await self.c.secondaries) + 1: + # Success + break + + assert await self.c.primary is not None + unused = (await self.c.secondaries).union({await self.c.primary}).difference(used) + if unused: + self.fail("Some members not used for NEAREST: %s" % (unused)) + else: + await self.assertExecutedOn(server_type, self.c, fn) + + async def _test_primary_helper(self, func): + # Helpers that ignore read preference. + await self._test_fn(SERVER_TYPE.RSPrimary, func) + + async def _test_coll_helper(self, secondary_ok, coll, meth, *args, **kwargs): + for mode, server_type in _PREF_MAP: + new_coll = coll.with_options(read_preference=mode()) + + async def func(): + return await getattr(new_coll, meth)(*args, **kwargs) + + if secondary_ok: + await self._test_fn(server_type, func) + else: + await self._test_fn(SERVER_TYPE.RSPrimary, func) + + async def test_command(self): + # Test that the generic command helper obeys the read preference + # passed to it. + for mode, server_type in _PREF_MAP: + + async def func(): + return await self.c.pymongo_test.command("dbStats", read_preference=mode()) + + await self._test_fn(server_type, func) + + async def test_create_collection(self): + # create_collection runs listCollections on the primary to check if + # the collection already exists. + async def func(): + return await self.c.pymongo_test.create_collection( + "some_collection%s" % random.randint(0, sys.maxsize) + ) + + await self._test_primary_helper(func) + + async def test_count_documents(self): + await self._test_coll_helper(True, self.c.pymongo_test.test, "count_documents", {}) + + async def test_estimated_document_count(self): + await self._test_coll_helper(True, self.c.pymongo_test.test, "estimated_document_count") + + async def test_distinct(self): + await self._test_coll_helper(True, self.c.pymongo_test.test, "distinct", "a") + + async def test_aggregate(self): + await self._test_coll_helper( + True, self.c.pymongo_test.test, "aggregate", [{"$project": {"_id": 1}}] + ) + + async def test_aggregate_write(self): + # 5.0 servers support $out on secondaries. 
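+        # On 5.0+ servers the $out aggregation follows the normal read
+        # preference mapping; on older servers it must run on the primary.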
+ secondary_ok = async_client_context.version.at_least(5, 0) + await self._test_coll_helper( + secondary_ok, + self.c.pymongo_test.test, + "aggregate", + [{"$project": {"_id": 1}}, {"$out": "agg_write_test"}], + ) + + +class TestMovingAverage(unittest.TestCase): + def test_moving_average(self): + avg = MovingAverage() + self.assertIsNone(avg.get()) + avg.add_sample(10) + self.assertAlmostEqual(10, avg.get()) # type: ignore + avg.add_sample(20) + self.assertAlmostEqual(12, avg.get()) # type: ignore + avg.add_sample(30) + self.assertAlmostEqual(15.6, avg.get()) # type: ignore + + +class TestMongosAndReadPreference(AsyncIntegrationTest): + def test_read_preference_document(self): + pref = Primary() + self.assertEqual(pref.document, {"mode": "primary"}) + + pref = PrimaryPreferred() + self.assertEqual(pref.document, {"mode": "primaryPreferred"}) + pref = PrimaryPreferred(tag_sets=[{"dc": "sf"}]) + self.assertEqual(pref.document, {"mode": "primaryPreferred", "tags": [{"dc": "sf"}]}) + pref = PrimaryPreferred(tag_sets=[{"dc": "sf"}], max_staleness=30) + self.assertEqual( + pref.document, + {"mode": "primaryPreferred", "tags": [{"dc": "sf"}], "maxStalenessSeconds": 30}, + ) + + pref = Secondary() + self.assertEqual(pref.document, {"mode": "secondary"}) + pref = Secondary(tag_sets=[{"dc": "sf"}]) + self.assertEqual(pref.document, {"mode": "secondary", "tags": [{"dc": "sf"}]}) + pref = Secondary(tag_sets=[{"dc": "sf"}], max_staleness=30) + self.assertEqual( + pref.document, {"mode": "secondary", "tags": [{"dc": "sf"}], "maxStalenessSeconds": 30} + ) + + pref = SecondaryPreferred() + self.assertEqual(pref.document, {"mode": "secondaryPreferred"}) + pref = SecondaryPreferred(tag_sets=[{"dc": "sf"}]) + self.assertEqual(pref.document, {"mode": "secondaryPreferred", "tags": [{"dc": "sf"}]}) + pref = SecondaryPreferred(tag_sets=[{"dc": "sf"}], max_staleness=30) + self.assertEqual( + pref.document, + {"mode": "secondaryPreferred", "tags": [{"dc": "sf"}], "maxStalenessSeconds": 30}, + ) + + pref = Nearest() + self.assertEqual(pref.document, {"mode": "nearest"}) + pref = Nearest(tag_sets=[{"dc": "sf"}]) + self.assertEqual(pref.document, {"mode": "nearest", "tags": [{"dc": "sf"}]}) + pref = Nearest(tag_sets=[{"dc": "sf"}], max_staleness=30) + self.assertEqual( + pref.document, {"mode": "nearest", "tags": [{"dc": "sf"}], "maxStalenessSeconds": 30} + ) + + with self.assertRaises(TypeError): + # Float is prohibited. + Nearest(max_staleness=1.5) # type: ignore + + with self.assertRaises(ValueError): + Nearest(max_staleness=0) + + with self.assertRaises(ValueError): + Nearest(max_staleness=-2) + + def test_read_preference_document_hedge(self): + cases = { + "primaryPreferred": PrimaryPreferred, + "secondary": Secondary, + "secondaryPreferred": SecondaryPreferred, + "nearest": Nearest, + } + for mode, cls in cases.items(): + with self.assertRaises(TypeError): + cls(hedge=[]) # type: ignore + with _ignore_deprecations(): + pref = cls(hedge={}) + self.assertEqual(pref.document, {"mode": mode}) + out = _maybe_add_read_preference({}, pref) + if cls == SecondaryPreferred: + # SecondaryPreferred without hedge doesn't add $readPreference. 
+ self.assertEqual(out, {}) + else: + self.assertEqual(out, SON([("$query", {}), ("$readPreference", pref.document)])) + + hedge: dict[str, Any] = {"enabled": True} + pref = cls(hedge=hedge) + self.assertEqual(pref.document, {"mode": mode, "hedge": hedge}) + out = _maybe_add_read_preference({}, pref) + self.assertEqual(out, SON([("$query", {}), ("$readPreference", pref.document)])) + + hedge = {"enabled": False} + pref = cls(hedge=hedge) + self.assertEqual(pref.document, {"mode": mode, "hedge": hedge}) + out = _maybe_add_read_preference({}, pref) + self.assertEqual(out, SON([("$query", {}), ("$readPreference", pref.document)])) + + hedge = {"enabled": False, "extra": "option"} + pref = cls(hedge=hedge) + self.assertEqual(pref.document, {"mode": mode, "hedge": hedge}) + out = _maybe_add_read_preference({}, pref) + self.assertEqual(out, SON([("$query", {}), ("$readPreference", pref.document)])) + + def test_read_preference_hedge_deprecated(self): + cases = { + "primaryPreferred": PrimaryPreferred, + "secondary": Secondary, + "secondaryPreferred": SecondaryPreferred, + "nearest": Nearest, + } + for _, cls in cases.items(): + with self.assertRaises(DeprecationWarning): + cls(hedge={"enabled": True}) + + async def test_send_hedge(self): + cases = { + "primaryPreferred": PrimaryPreferred, + "secondaryPreferred": SecondaryPreferred, + "nearest": Nearest, + } + if await async_client_context.supports_secondary_read_pref: + cases["secondary"] = Secondary + listener = OvertCommandListener() + client = await self.async_rs_client(event_listeners=[listener]) + await client.admin.command("ping") + for _mode, cls in cases.items(): + with _ignore_deprecations(): + pref = cls(hedge={"enabled": True}) + coll = client.test.get_collection("test", read_preference=pref) + listener.reset() + await coll.find_one() + started = listener.started_events + self.assertEqual(len(started), 1, started) + cmd = started[0].command + if async_client_context.is_rs or async_client_context.is_mongos: + self.assertIn("$readPreference", cmd) + self.assertEqual(cmd["$readPreference"], pref.document) + else: + self.assertNotIn("$readPreference", cmd) + + def test_maybe_add_read_preference(self): + # Primary doesn't add $readPreference + out = _maybe_add_read_preference({}, Primary()) + self.assertEqual(out, {}) + + pref = PrimaryPreferred() + out = _maybe_add_read_preference({}, pref) + self.assertEqual(out, SON([("$query", {}), ("$readPreference", pref.document)])) + pref = PrimaryPreferred(tag_sets=[{"dc": "nyc"}]) + out = _maybe_add_read_preference({}, pref) + self.assertEqual(out, SON([("$query", {}), ("$readPreference", pref.document)])) + + pref = Secondary() + out = _maybe_add_read_preference({}, pref) + self.assertEqual(out, SON([("$query", {}), ("$readPreference", pref.document)])) + pref = Secondary(tag_sets=[{"dc": "nyc"}]) + out = _maybe_add_read_preference({}, pref) + self.assertEqual(out, SON([("$query", {}), ("$readPreference", pref.document)])) + + # SecondaryPreferred without tag_sets or max_staleness doesn't add + # $readPreference + pref = SecondaryPreferred() + out = _maybe_add_read_preference({}, pref) + self.assertEqual(out, {}) + pref = SecondaryPreferred(tag_sets=[{"dc": "nyc"}]) + out = _maybe_add_read_preference({}, pref) + self.assertEqual(out, SON([("$query", {}), ("$readPreference", pref.document)])) + pref = SecondaryPreferred(max_staleness=120) + out = _maybe_add_read_preference({}, pref) + self.assertEqual(out, SON([("$query", {}), ("$readPreference", pref.document)])) + + pref = Nearest() + out = 
_maybe_add_read_preference({}, pref) + self.assertEqual(out, SON([("$query", {}), ("$readPreference", pref.document)])) + pref = Nearest(tag_sets=[{"dc": "nyc"}]) + out = _maybe_add_read_preference({}, pref) + self.assertEqual(out, SON([("$query", {}), ("$readPreference", pref.document)])) + + criteria = SON([("$query", {}), ("$orderby", SON([("_id", 1)]))]) + pref = Nearest() + out = _maybe_add_read_preference(criteria, pref) + self.assertEqual( + out, + SON( + [ + ("$query", {}), + ("$orderby", SON([("_id", 1)])), + ("$readPreference", pref.document), + ] + ), + ) + pref = Nearest(tag_sets=[{"dc": "nyc"}]) + out = _maybe_add_read_preference(criteria, pref) + self.assertEqual( + out, + SON( + [ + ("$query", {}), + ("$orderby", SON([("_id", 1)])), + ("$readPreference", pref.document), + ] + ), + ) + + @async_client_context.require_mongos + async def test_mongos(self): + res = await async_client_context.client.config.shards.find_one() + assert res is not None + shard = res["host"] + num_members = shard.count(",") + 1 + if num_members == 1: + raise SkipTest("Need a replica set shard to test.") + coll = async_client_context.client.pymongo_test.get_collection( + "test", write_concern=WriteConcern(w=num_members) + ) + await coll.drop() + res = await coll.insert_many([{} for _ in range(5)]) + first_id = res.inserted_ids[0] + last_id = res.inserted_ids[-1] + + # Note - this isn't a perfect test since there's no way to + # tell what shard member a query ran on. + for pref in (Primary(), PrimaryPreferred(), Secondary(), SecondaryPreferred(), Nearest()): + qcoll = coll.with_options(read_preference=pref) + results = await qcoll.find().sort([("_id", 1)]).to_list() + self.assertEqual(first_id, results[0]["_id"]) + self.assertEqual(last_id, results[-1]["_id"]) + results = await qcoll.find().sort([("_id", -1)]).to_list() + self.assertEqual(first_id, results[-1]["_id"]) + self.assertEqual(last_id, results[0]["_id"]) + + @async_client_context.require_mongos + async def test_mongos_max_staleness(self): + # Sanity check that we're sending maxStalenessSeconds + coll = async_client_context.client.pymongo_test.get_collection( + "test", read_preference=SecondaryPreferred(max_staleness=120) + ) + # No error + await coll.find_one() + + coll = async_client_context.client.pymongo_test.get_collection( + "test", read_preference=SecondaryPreferred(max_staleness=10) + ) + try: + await coll.find_one() + except OperationFailure as exc: + self.assertEqual(160, exc.code) + else: + self.fail("mongos accepted invalid staleness") + + coll = ( + await self.async_single_client( + readPreference="secondaryPreferred", maxStalenessSeconds=120 + ) + ).pymongo_test.test + # No error + await coll.find_one() + + coll = ( + await self.async_single_client( + readPreference="secondaryPreferred", maxStalenessSeconds=10 + ) + ).pymongo_test.test + try: + await coll.find_one() + except OperationFailure as exc: + self.assertEqual(160, exc.code) + else: + self.fail("mongos accepted invalid staleness") + + +if __name__ == "__main__": + unittest.main() diff --git a/test/asynchronous/test_read_write_concern_spec.py b/test/asynchronous/test_read_write_concern_spec.py new file mode 100644 index 0000000000..b5cb32932f --- /dev/null +++ b/test/asynchronous/test_read_write_concern_spec.py @@ -0,0 +1,348 @@ +# Copyright 2018-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Run the read and write concern tests.""" +from __future__ import annotations + +import json +import os +import sys +import warnings +from pathlib import Path + +sys.path[0:0] = [""] + +from test.asynchronous import AsyncIntegrationTest, async_client_context, unittest +from test.asynchronous.unified_format import generate_test_classes +from test.utils_shared import OvertCommandListener + +from pymongo import DESCENDING +from pymongo.asynchronous.mongo_client import AsyncMongoClient +from pymongo.errors import ( + BulkWriteError, + ConfigurationError, + WriteConcernError, + WriteError, + WTimeoutError, +) +from pymongo.operations import IndexModel, InsertOne +from pymongo.read_concern import ReadConcern +from pymongo.write_concern import WriteConcern + +_IS_SYNC = False + +# Location of JSON test specifications. +if _IS_SYNC: + TEST_PATH = os.path.join(Path(__file__).resolve().parent, "read_write_concern") +else: + TEST_PATH = os.path.join(Path(__file__).resolve().parent.parent, "read_write_concern") + + +class TestReadWriteConcernSpec(AsyncIntegrationTest): + async def test_omit_default_read_write_concern(self): + listener = OvertCommandListener() + # Client with default readConcern and writeConcern + client = await self.async_rs_or_single_client(event_listeners=[listener]) + collection = client.pymongo_test.collection + # Prepare for tests of find() and aggregate(). + await collection.insert_many([{} for _ in range(10)]) + self.addAsyncCleanup(collection.drop) + self.addAsyncCleanup(client.pymongo_test.collection2.drop) + # Commands MUST NOT send the default read/write concern to the server. + + async def rename_and_drop(): + # Ensure collection exists. 
+ await collection.insert_one({}) + await collection.rename("collection2") + await client.pymongo_test.collection2.drop() + + async def insert_command_default_write_concern(): + await collection.database.command( + "insert", "collection", documents=[{}], write_concern=WriteConcern() + ) + + async def aggregate_op(): + await (await collection.aggregate([])).to_list() + + ops = [ + ("aggregate", aggregate_op), + ("find", lambda: collection.find().to_list()), + ("insert_one", lambda: collection.insert_one({})), + ("update_one", lambda: collection.update_one({}, {"$set": {"x": 1}})), + ("update_many", lambda: collection.update_many({}, {"$set": {"x": 1}})), + ("delete_one", lambda: collection.delete_one({})), + ("delete_many", lambda: collection.delete_many({})), + ("bulk_write", lambda: collection.bulk_write([InsertOne({})])), + ("rename_and_drop", rename_and_drop), + ("command", insert_command_default_write_concern), + ] + + for name, f in ops: + listener.reset() + await f() + + self.assertGreaterEqual(len(listener.started_events), 1) + for _i, event in enumerate(listener.started_events): + self.assertNotIn( + "readConcern", + event.command, + f"{name} sent default readConcern with {event.command_name}", + ) + self.assertNotIn( + "writeConcern", + event.command, + f"{name} sent default writeConcern with {event.command_name}", + ) + + async def assertWriteOpsRaise(self, write_concern, expected_exception): + wc = write_concern.document + # Set socket timeout to avoid indefinite stalls + client = await self.async_rs_or_single_client( + w=wc["w"], wTimeoutMS=wc["wtimeout"], socketTimeoutMS=30000 + ) + db = client.get_database("pymongo_test") + coll = db.test + + async def insert_command(): + await coll.database.command( + "insert", + "new_collection", + documents=[{}], + writeConcern=write_concern.document, + parse_write_concern_error=True, + ) + + ops = [ + ("insert_one", lambda: coll.insert_one({})), + ("insert_many", lambda: coll.insert_many([{}, {}])), + ("update_one", lambda: coll.update_one({}, {"$set": {"x": 1}})), + ("update_many", lambda: coll.update_many({}, {"$set": {"x": 1}})), + ("delete_one", lambda: coll.delete_one({})), + ("delete_many", lambda: coll.delete_many({})), + ("bulk_write", lambda: coll.bulk_write([InsertOne({})])), + ("command", insert_command), + ("aggregate", lambda: coll.aggregate([{"$out": "out"}])), + # SERVER-46668 Delete all the documents in the collection to + # workaround a hang in createIndexes. + ("delete_many", lambda: coll.delete_many({})), + ("create_index", lambda: coll.create_index([("a", DESCENDING)])), + ("create_indexes", lambda: coll.create_indexes([IndexModel("b")])), + ("drop_index", lambda: coll.drop_index([("a", DESCENDING)])), + ("create", lambda: db.create_collection("new")), + ("rename", lambda: coll.rename("new")), + ("drop", lambda: db.new.drop()), + ] + # SERVER-47194: dropDatabase does not respect wtimeout in 3.6. + if async_client_context.version[:2] != (3, 6): + ops.append(("drop_database", lambda: client.drop_database(db))) + + for name, f in ops: + # Ensure insert_many and bulk_write still raise BulkWriteError. 
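+            # For those two, the write concern failure is surfaced through the
+            # exception's details["writeConcernErrors"], asserted below.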
+ if name in ("insert_many", "bulk_write"): + expected = BulkWriteError + else: + expected = expected_exception + with self.assertRaises(expected, msg=name) as cm: + await f() + if expected == BulkWriteError: + bulk_result = cm.exception.details + assert bulk_result is not None + wc_errors = bulk_result["writeConcernErrors"] + self.assertTrue(wc_errors) + + @async_client_context.require_replica_set + async def test_raise_write_concern_error(self): + self.addAsyncCleanup(async_client_context.client.drop_database, "pymongo_test") + assert async_client_context.w is not None + await self.assertWriteOpsRaise( + WriteConcern(w=async_client_context.w + 1, wtimeout=1), WriteConcernError + ) + + @async_client_context.require_secondaries_count(1) + @async_client_context.require_test_commands + async def test_raise_wtimeout(self): + self.addAsyncCleanup(async_client_context.client.drop_database, "pymongo_test") + self.addAsyncCleanup(self.enable_replication, async_client_context.client) + # Disable replication to guarantee a wtimeout error. + await self.disable_replication(async_client_context.client) + await self.assertWriteOpsRaise( + WriteConcern(w=async_client_context.w, wtimeout=1), WTimeoutError + ) + + # https://github.com/mongodb/specifications/tree/master/source/crud/tests + # Test 1 (included here instead of test_client_bulk_write.py) + @async_client_context.require_failCommand_fail_point + async def test_error_includes_errInfo(self): + expected_wce = { + "code": 100, + "codeName": "UnsatisfiableWriteConcern", + "errmsg": "Not enough data-bearing nodes", + "errInfo": {"writeConcern": {"w": 2, "wtimeout": 0, "provenance": "clientSupplied"}}, + } + cause_wce = { + "configureFailPoint": "failCommand", + "mode": {"times": 2}, + "data": {"failCommands": ["insert"], "writeConcernError": expected_wce}, + } + async with self.fail_point(cause_wce): + # Write concern error on insert includes errInfo. + with self.assertRaises(WriteConcernError) as ctx: + await self.db.test.insert_one({}) + self.assertEqual(ctx.exception.details, expected_wce) + + # Test bulk_write as well. 
+ with self.assertRaises(BulkWriteError) as ctx: + await self.db.test.bulk_write([InsertOne({})]) + expected_details = { + "writeErrors": [], + "writeConcernErrors": [expected_wce], + "nInserted": 1, + "nUpserted": 0, + "nMatched": 0, + "nModified": 0, + "nRemoved": 0, + "upserted": [], + } + self.assertEqual(ctx.exception.details, expected_details) + + # https://github.com/mongodb/specifications/tree/master/source/crud/tests + # Test 2 (included here instead of test_client_bulk_write.py) + @async_client_context.require_version_min(4, 9) + async def test_write_error_details_exposes_errinfo(self): + listener = OvertCommandListener() + client = await self.async_rs_or_single_client(event_listeners=[listener]) + db = client.errinfotest + self.addAsyncCleanup(client.drop_database, "errinfotest") + validator = {"x": {"$type": "string"}} + await db.create_collection("test", validator=validator) + with self.assertRaises(WriteError) as ctx: + await db.test.insert_one({"x": 1}) + self.assertEqual(ctx.exception.code, 121) + self.assertIsNotNone(ctx.exception.details) + assert ctx.exception.details is not None + self.assertIsNotNone(ctx.exception.details.get("errInfo")) + for event in listener.succeeded_events: + if event.command_name == "insert": + self.assertEqual(event.reply["writeErrors"][0], ctx.exception.details) + break + else: + self.fail("Couldn't find insert event.") + + +def normalize_write_concern(concern): + result = {} + for key in concern: + if key.lower() == "wtimeoutms": + result["wtimeout"] = concern[key] + elif key == "journal": + result["j"] = concern[key] + else: + result[key] = concern[key] + return result + + +def create_connection_string_test(test_case): + def run_test(self): + uri = test_case["uri"] + valid = test_case["valid"] + warning = test_case["warning"] + + if not valid: + if warning is False: + self.assertRaises( + (ConfigurationError, ValueError), AsyncMongoClient, uri, connect=False + ) + else: + with warnings.catch_warnings(): + warnings.simplefilter("error", UserWarning) + self.assertRaises(UserWarning, AsyncMongoClient, uri, connect=False) + else: + client = AsyncMongoClient(uri, connect=False) + if "writeConcern" in test_case: + document = client.write_concern.document + self.assertEqual(document, normalize_write_concern(test_case["writeConcern"])) + if "readConcern" in test_case: + document = client.read_concern.document + self.assertEqual(document, test_case["readConcern"]) + + return run_test + + +def create_document_test(test_case): + def run_test(self): + valid = test_case["valid"] + + if "writeConcern" in test_case: + normalized = normalize_write_concern(test_case["writeConcern"]) + if not valid: + self.assertRaises((ConfigurationError, ValueError), WriteConcern, **normalized) + else: + write_concern = WriteConcern(**normalized) + self.assertEqual(write_concern.document, test_case["writeConcernDocument"]) + self.assertEqual(write_concern.acknowledged, test_case["isAcknowledged"]) + self.assertEqual(write_concern.is_server_default, test_case["isServerDefault"]) + if "readConcern" in test_case: + # Any string for 'level' is equally valid + read_concern = ReadConcern(**test_case["readConcern"]) + self.assertEqual(read_concern.document, test_case["readConcernDocument"]) + self.assertEqual(not bool(read_concern.level), test_case["isServerDefault"]) + + return run_test + + +def create_tests(): + for dirpath, _, filenames in os.walk(TEST_PATH): + dirname = os.path.split(dirpath)[-1] + + if dirname == "operation": + # This directory is tested by 
TestOperations. + continue + elif dirname == "connection-string": + create_test = create_connection_string_test + else: + create_test = create_document_test + + for filename in filenames: + with open(os.path.join(dirpath, filename)) as test_stream: + test_cases = json.load(test_stream)["tests"] + + fname = os.path.splitext(filename)[0] + for test_case in test_cases: + new_test = create_test(test_case) + test_name = "test_{}_{}_{}".format( + dirname.replace("-", "_"), + fname.replace("-", "_"), + str(test_case["description"].lower().replace(" ", "_")), + ) + + new_test.__name__ = test_name + setattr(TestReadWriteConcernSpec, new_test.__name__, new_test) + + +create_tests() + + +# Generate unified tests. +# PyMongo does not support MapReduce. +globals().update( + generate_test_classes( + os.path.join(TEST_PATH, "operation"), + module=__name__, + expected_failures=["MapReduce .*"], + ) +) + + +if __name__ == "__main__": + unittest.main() diff --git a/test/asynchronous/test_retryable_reads.py b/test/asynchronous/test_retryable_reads.py index bde7a9f2ee..47ac91b0f5 100644 --- a/test/asynchronous/test_retryable_reads.py +++ b/test/asynchronous/test_retryable_reads.py @@ -19,8 +19,9 @@ import pprint import sys import threading +from test.asynchronous.utils import async_set_fail_point -from pymongo.errors import AutoReconnect +from pymongo.errors import OperationFailure sys.path[0:0] = [""] @@ -31,10 +32,9 @@ client_knobs, unittest, ) -from test.utils import ( +from test.utils_shared import ( CMAPListener, OvertCommandListener, - async_set_fail_point, ) from pymongo.monitoring import ( @@ -80,7 +80,6 @@ async def run(self): class TestPoolPausedError(AsyncIntegrationTest): # Pools don't get paused in load balanced mode. RUN_ON_LOAD_BALANCER = False - RUN_ON_SERVERLESS = False @async_client_context.require_sync @async_client_context.require_failCommand_blockConnection @@ -88,7 +87,7 @@ class TestPoolPausedError(AsyncIntegrationTest): async def test_pool_paused_error_is_retryable(self): if "PyPy" in sys.version: # Tracked in PYTHON-3519 - self.skipTest("Test is flakey on PyPy") + self.skipTest("Test is flaky on PyPy") cmap_listener = CMAPListener() cmd_listener = OvertCommandListener() client = await self.async_rs_or_single_client( @@ -148,15 +147,11 @@ async def test_pool_paused_error_is_retryable(self): class TestRetryableReads(AsyncIntegrationTest): @async_client_context.require_multiple_mongoses @async_client_context.require_failCommand_fail_point - async def test_retryable_reads_in_sharded_cluster_multiple_available(self): + async def test_retryable_reads_are_retried_on_a_different_mongos_when_one_is_available(self): fail_command = { "configureFailPoint": "failCommand", "mode": {"times": 1}, - "data": { - "failCommands": ["find"], - "closeConnection": True, - "appName": "retryableReadTest", - }, + "data": {"failCommands": ["find"], "errorCode": 6}, } mongos_clients = [] @@ -169,12 +164,11 @@ async def test_retryable_reads_in_sharded_cluster_multiple_available(self): listener = OvertCommandListener() client = await self.async_rs_or_single_client( async_client_context.mongos_seeds(), - appName="retryableReadTest", event_listeners=[listener], retryReads=True, ) - with self.assertRaises(AutoReconnect): + with self.assertRaises(OperationFailure): await client.t.t.find_one({}) # Disable failpoints on each mongos @@ -185,6 +179,88 @@ async def test_retryable_reads_in_sharded_cluster_multiple_available(self): self.assertEqual(len(listener.failed_events), 2) 
self.assertEqual(len(listener.succeeded_events), 0) + # Assert that both events occurred on different mongos. + assert listener.failed_events[0].connection_id != listener.failed_events[1].connection_id + + @async_client_context.require_multiple_mongoses + @async_client_context.require_failCommand_fail_point + async def test_retryable_reads_are_retried_on_the_same_mongos_when_no_others_are_available( + self + ): + fail_command = { + "configureFailPoint": "failCommand", + "mode": {"times": 1}, + "data": {"failCommands": ["find"], "errorCode": 6}, + } + + host = async_client_context.mongos_seeds().split(",")[0] + mongos_client = await self.async_rs_or_single_client(host) + await async_set_fail_point(mongos_client, fail_command) + + listener = OvertCommandListener() + client = await self.async_rs_or_single_client( + host, + directConnection=False, + event_listeners=[listener], + retryReads=True, + ) + + await client.t.t.find_one({}) + + # Disable failpoint. + fail_command["mode"] = "off" + await async_set_fail_point(mongos_client, fail_command) + + # Assert that exactly one failed command event and one succeeded command event occurred. + self.assertEqual(len(listener.failed_events), 1) + self.assertEqual(len(listener.succeeded_events), 1) + + # Assert that both events occurred on the same mongos. + assert listener.succeeded_events[0].connection_id == listener.failed_events[0].connection_id + + @async_client_context.require_failCommand_fail_point + async def test_retryable_reads_are_retried_on_the_same_implicit_session(self): + listener = OvertCommandListener() + client = await self.async_rs_or_single_client( + directConnection=False, + event_listeners=[listener], + retryReads=True, + ) + + await client.t.t.insert_one({"x": 1}) + + commands = [ + ("aggregate", lambda: client.t.t.count_documents({})), + ("aggregate", lambda: client.t.t.aggregate([{"$match": {}}])), + ("count", lambda: client.t.t.estimated_document_count()), + ("distinct", lambda: client.t.t.distinct("x")), + ("find", lambda: client.t.t.find_one({})), + ("listDatabases", lambda: client.list_databases()), + ("listCollections", lambda: client.t.list_collections()), + ("listIndexes", lambda: client.t.t.list_indexes()), + ] + + for command_name, operation in commands: + listener.reset() + fail_command = { + "configureFailPoint": "failCommand", + "mode": {"times": 1}, + "data": {"failCommands": [command_name], "errorCode": 6}, + } + + async with self.fail_point(fail_command): + await operation() + + # Assert that both events occurred on the same session. + command_docs = [ + event.command + for event in listener.started_events + if event.command_name == command_name + ] + self.assertEqual(len(command_docs), 2) + self.assertEqual(command_docs[0]["lsid"], command_docs[1]["lsid"]) + self.assertIsNot(command_docs[0], command_docs[1]) + if __name__ == "__main__": unittest.main() diff --git a/test/asynchronous/test_retryable_reads_unified.py b/test/asynchronous/test_retryable_reads_unified.py new file mode 100644 index 0000000000..e62d606810 --- /dev/null +++ b/test/asynchronous/test_retryable_reads_unified.py @@ -0,0 +1,46 @@ +# Copyright 2022-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Test the Retryable Reads unified spec tests.""" +from __future__ import annotations + +import os +import sys +from pathlib import Path + +sys.path[0:0] = [""] + +from test import unittest +from test.asynchronous.unified_format import generate_test_classes + +_IS_SYNC = False + +# Location of JSON test specifications. +if _IS_SYNC: + TEST_PATH = os.path.join(Path(__file__).resolve().parent, "retryable_reads/unified") +else: + TEST_PATH = os.path.join(Path(__file__).resolve().parent.parent, "retryable_reads/unified") + +# Generate unified tests. +# PyMongo does not support MapReduce, ListDatabaseObjects or ListCollectionObjects. +globals().update( + generate_test_classes( + TEST_PATH, + module=__name__, + expected_failures=["ListDatabaseObjects .*", "ListCollectionObjects .*", "MapReduce .*"], + ) +) + +if __name__ == "__main__": + unittest.main() diff --git a/test/asynchronous/test_retryable_writes.py b/test/asynchronous/test_retryable_writes.py index 738ce04192..ddb1d39eb7 100644 --- a/test/asynchronous/test_retryable_writes.py +++ b/test/asynchronous/test_retryable_writes.py @@ -20,6 +20,7 @@ import pprint import sys import threading +from test.asynchronous.utils import async_set_fail_point, flaky sys.path[0:0] = [""] @@ -30,12 +31,11 @@ unittest, ) from test.asynchronous.helpers import client_knobs -from test.utils import ( +from test.utils_shared import ( CMAPListener, DeprecationFilter, EventListener, OvertCommandListener, - async_set_fail_point, ) from test.version import Version @@ -129,7 +129,6 @@ def non_retryable_single_statement_ops(coll): class IgnoreDeprecationsTest(AsyncIntegrationTest): RUN_ON_LOAD_BALANCER = True - RUN_ON_SERVERLESS = True deprecation_filter: DeprecationFilter async def asyncSetUp(self) -> None: @@ -137,43 +136,14 @@ async def asyncSetUp(self) -> None: self.deprecation_filter = DeprecationFilter() async def asyncTearDown(self) -> None: + await super().asyncTearDown() self.deprecation_filter.stop() -class TestRetryableWritesMMAPv1(IgnoreDeprecationsTest): - knobs: client_knobs - - async def asyncSetUp(self) -> None: - await super().asyncSetUp() - # Speed up the tests by decreasing the heartbeat frequency. - self.knobs = client_knobs(heartbeat_frequency=0.1, min_heartbeat_interval=0.1) - self.knobs.enable() - self.client = await self.async_rs_or_single_client(retryWrites=True) - self.db = self.client.pymongo_test - - async def asyncTearDown(self) -> None: - self.knobs.disable() - - @async_client_context.require_no_standalone - async def test_actionable_error_message(self): - if async_client_context.storage_engine != "mmapv1": - raise SkipTest("This cluster is not running MMAPv1") - - expected_msg = ( - "This MongoDB deployment does not support retryable " - "writes. Please add retryWrites=false to your " - "connection string." 
- ) - for method, args, kwargs in retryable_single_statement_ops(self.db.retryable_write_test): - with self.assertRaisesRegex(OperationFailure, expected_msg): - await method(*args, **kwargs) - - class TestRetryableWrites(IgnoreDeprecationsTest): listener: OvertCommandListener knobs: client_knobs - @async_client_context.require_no_mmap async def asyncSetUp(self) -> None: await super().asyncSetUp() # Speed up the tests by decreasing the heartbeat frequency. @@ -196,6 +166,7 @@ async def asyncTearDown(self): SON([("configureFailPoint", "onPrimaryTransactionalWrite"), ("mode", "off")]) ) self.knobs.disable() + await super().asyncTearDown() async def test_supported_single_statement_no_retry(self): listener = OvertCommandListener() @@ -421,11 +392,9 @@ async def test_retryable_writes_in_sharded_cluster_multiple_available(self): class TestWriteConcernError(AsyncIntegrationTest): RUN_ON_LOAD_BALANCER = True - RUN_ON_SERVERLESS = True fail_insert: dict @async_client_context.require_replica_set - @async_client_context.require_no_mmap @async_client_context.require_failCommand_fail_point async def asyncSetUp(self) -> None: await super().asyncSetUp() @@ -492,12 +461,12 @@ async def run(self): class TestPoolPausedError(AsyncIntegrationTest): # Pools don't get paused in load balanced mode. RUN_ON_LOAD_BALANCER = False - RUN_ON_SERVERLESS = False @async_client_context.require_sync @async_client_context.require_failCommand_blockConnection @async_client_context.require_retryable_writes @client_knobs(heartbeat_frequency=0.05, min_heartbeat_interval=0.05) + @flaky(reason="PYTHON-5291") async def test_pool_paused_error_is_retryable(self): cmap_listener = CMAPListener() cmd_listener = OvertCommandListener() @@ -597,7 +566,6 @@ async def test_returns_original_error_code( # TODO: Make this a real integration test where we stepdown the primary. class TestRetryableWritesTxnNumber(IgnoreDeprecationsTest): @async_client_context.require_replica_set - @async_client_context.require_no_mmap async def test_increment_transaction_id_without_sending_command(self): """Test that the txnNumber field is properly incremented, even when the first attempt fails before sending the command. diff --git a/test/asynchronous/test_retryable_writes_unified.py b/test/asynchronous/test_retryable_writes_unified.py new file mode 100644 index 0000000000..bb493e6010 --- /dev/null +++ b/test/asynchronous/test_retryable_writes_unified.py @@ -0,0 +1,39 @@ +# Copyright 2021-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Test the Retryable Writes unified spec tests.""" +from __future__ import annotations + +import os +import sys +from pathlib import Path + +sys.path[0:0] = [""] + +from test import unittest +from test.asynchronous.unified_format import generate_test_classes + +_IS_SYNC = False + +# Location of JSON test specifications. 
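+# (_IS_SYNC is False in this asynchronous copy; the synchronous variant generated
+# from it presumably sets it to True, which is why the two branches below differ:
+# the shared JSON specs live one directory above test/asynchronous/.)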
+if _IS_SYNC: + TEST_PATH = os.path.join(Path(__file__).resolve().parent, "retryable_writes/unified") +else: + TEST_PATH = os.path.join(Path(__file__).resolve().parent.parent, "retryable_writes/unified") + +# Generate unified tests. +globals().update(generate_test_classes(TEST_PATH, module=__name__)) + +if __name__ == "__main__": + unittest.main() diff --git a/test/asynchronous/test_run_command.py b/test/asynchronous/test_run_command.py new file mode 100644 index 0000000000..3ac8c32706 --- /dev/null +++ b/test/asynchronous/test_run_command.py @@ -0,0 +1,41 @@ +# Copyright 2024-Present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Run Command unified tests.""" +from __future__ import annotations + +import os +import unittest +from pathlib import Path +from test.asynchronous.unified_format import generate_test_classes + +_IS_SYNC = False + +# Location of JSON test specifications. +if _IS_SYNC: + TEST_PATH = os.path.join(Path(__file__).resolve().parent, "run_command") +else: + TEST_PATH = os.path.join(Path(__file__).resolve().parent.parent, "run_command") + + +globals().update( + generate_test_classes( + os.path.join(TEST_PATH, "unified"), + module=__name__, + ) +) + + +if __name__ == "__main__": + unittest.main() diff --git a/test/asynchronous/test_sdam_monitoring_spec.py b/test/asynchronous/test_sdam_monitoring_spec.py new file mode 100644 index 0000000000..71ec6c6b46 --- /dev/null +++ b/test/asynchronous/test_sdam_monitoring_spec.py @@ -0,0 +1,374 @@ +# Copyright 2016 MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Run the sdam monitoring spec tests.""" +from __future__ import annotations + +import asyncio +import json +import os +import sys +import time +from pathlib import Path + +sys.path[0:0] = [""] + +from test.asynchronous import AsyncIntegrationTest, async_client_context, client_knobs, unittest +from test.utils_shared import ( + ServerAndTopologyEventListener, + async_wait_until, + server_name_to_type, +) + +from bson.json_util import object_hook +from pymongo import AsyncMongoClient, monitoring +from pymongo.asynchronous.collection import AsyncCollection +from pymongo.asynchronous.monitor import Monitor +from pymongo.common import clean_node +from pymongo.errors import ConnectionFailure, NotPrimaryError +from pymongo.hello import Hello +from pymongo.server_description import ServerDescription +from pymongo.topology_description import TOPOLOGY_TYPE + +_IS_SYNC = False + +# Location of JSON test specifications. 
+if _IS_SYNC:
+    TEST_PATH = os.path.join(Path(__file__).resolve().parent, "sdam_monitoring")
+else:
+    TEST_PATH = os.path.join(Path(__file__).resolve().parent.parent, "sdam_monitoring")
+
+
+def compare_server_descriptions(expected, actual):
+    if (expected["address"] != "{}:{}".format(*actual.address)) or (
+        server_name_to_type(expected["type"]) != actual.server_type
+    ):
+        return False
+    expected_hosts = set(expected["arbiters"] + expected["passives"] + expected["hosts"])
+    return expected_hosts == {"{}:{}".format(*s) for s in actual.all_hosts}
+
+
+def compare_topology_descriptions(expected, actual):
+    if TOPOLOGY_TYPE.__getattribute__(expected["topologyType"]) != actual.topology_type:
+        return False
+    expected = expected["servers"]
+    actual = actual.server_descriptions()
+    if len(expected) != len(actual):
+        return False
+    for exp_server in expected:
+        for _address, actual_server in actual.items():
+            if compare_server_descriptions(exp_server, actual_server):
+                break
+        else:
+            return False
+    return True
+
+
+def compare_events(expected_dict, actual):
+    if not expected_dict:
+        return False, "Error: Bad expected value in YAML test"
+    if not actual:
+        return False, "Error: Event published was None"
+
+    expected_type, expected = list(expected_dict.items())[0]
+
+    if expected_type == "server_opening_event":
+        if not isinstance(actual, monitoring.ServerOpeningEvent):
+            return False, "Expected ServerOpeningEvent, got %s" % (actual.__class__)
+        if expected["address"] != "{}:{}".format(*actual.server_address):
+            return (
+                False,
+                "ServerOpeningEvent published with wrong address (expected"
+                " {}, got {})".format(expected["address"], actual.server_address),
+            )
+
+    elif expected_type == "server_description_changed_event":
+        if not isinstance(actual, monitoring.ServerDescriptionChangedEvent):
+            return (False, "Expected ServerDescriptionChangedEvent, got %s" % (actual.__class__))
+        if expected["address"] != "{}:{}".format(*actual.server_address):
+            return (
+                False,
+                "ServerDescriptionChangedEvent has wrong address"
+                " (expected {}, got {})".format(expected["address"], actual.server_address),
+            )
+
+        if not compare_server_descriptions(expected["newDescription"], actual.new_description):
+            return (False, "New ServerDescription incorrect in ServerDescriptionChangedEvent")
+        if not compare_server_descriptions(
+            expected["previousDescription"], actual.previous_description
+        ):
+            return (
+                False,
+                "Previous ServerDescription incorrect in ServerDescriptionChangedEvent",
+            )
+
+    elif expected_type == "server_closed_event":
+        if not isinstance(actual, monitoring.ServerClosedEvent):
+            return False, "Expected ServerClosedEvent, got %s" % (actual.__class__)
+        if expected["address"] != "{}:{}".format(*actual.server_address):
+            return (
+                False,
+                "ServerClosedEvent published with wrong address"
+                " (expected {}, got {})".format(expected["address"], actual.server_address),
+            )
+
+    elif expected_type == "topology_opening_event":
+        if not isinstance(actual, monitoring.TopologyOpenedEvent):
+            return False, "Expected TopologyOpenedEvent, got %s" % (actual.__class__)
+
+    elif expected_type == "topology_description_changed_event":
+        if not isinstance(actual, monitoring.TopologyDescriptionChangedEvent):
+            return (
+                False,
+                "Expected TopologyDescriptionChangedEvent, got %s" % (actual.__class__),
+            )
+        if not compare_topology_descriptions(expected["newDescription"], actual.new_description):
+            return (
+                False,
+                "New TopologyDescription incorrect in TopologyDescriptionChangedEvent",
+            )
+        if not compare_topology_descriptions(
+            expected["previousDescription"], actual.previous_description
+        ):
+            return (
+                False,
+                "Previous TopologyDescription incorrect in TopologyDescriptionChangedEvent",
+            )
+
+    elif expected_type == "topology_closed_event":
+        if not isinstance(actual, monitoring.TopologyClosedEvent):
+            return False, "Expected TopologyClosedEvent, got %s" % (actual.__class__)
+
+    else:
+        return False, f"Incorrect event: expected {expected_type}, actual {actual}"
+
+    return True, ""
+
+
+def compare_multiple_events(i, expected_results, actual_results):
+    events_in_a_row = []
+    j = i
+    while j < len(expected_results) and isinstance(actual_results[j], actual_results[i].__class__):
+        events_in_a_row.append(actual_results[j])
+        j += 1
+    message = ""
+    for event in events_in_a_row:
+        for k in range(i, j):
+            passed, message = compare_events(expected_results[k], event)
+            if passed:
+                expected_results[k] = None
+                break
+        else:
+            return i, False, message
+    return j, True, ""
+
+
+class TestAllScenarios(AsyncIntegrationTest):
+    async def asyncSetUp(self):
+        await super().asyncSetUp()
+        self.all_listener = ServerAndTopologyEventListener()
+
+
+def create_test(scenario_def):
+    async def run_scenario(self):
+        with client_knobs(events_queue_frequency=0.05, min_heartbeat_interval=0.05):
+            await _run_scenario(self)
+
+    async def _run_scenario(self):
+        class NoopMonitor(Monitor):
+            """Override the _run method to do nothing."""
+
+            async def _run(self):
+                await asyncio.sleep(0.05)
+
+        m = AsyncMongoClient(
+            host=scenario_def["uri"],
+            port=27017,
+            event_listeners=[self.all_listener],
+            _monitor_class=NoopMonitor,
+        )
+        topology = await m._get_topology()
+
+        try:
+            for phase in scenario_def["phases"]:
+                for source, response in phase.get("responses", []):
+                    source_address = clean_node(source)
+                    await topology.on_change(
+                        ServerDescription(
+                            address=source_address, hello=Hello(response), round_trip_time=0
+                        )
+                    )
+
+                expected_results = phase["outcome"]["events"]
+                expected_len = len(expected_results)
+                await async_wait_until(
+                    lambda: len(self.all_listener.results) >= expected_len,
+                    "publish all events",
+                    timeout=15,
+                )
+
+                # Wait some time to catch possible lagging extra events.
+                await async_wait_until(lambda: topology._events.empty(), "publish lagging events")
+
+                i = 0
+                while i < expected_len:
+                    result = (
+                        self.all_listener.results[i] if len(self.all_listener.results) > i else None
+                    )
+                    # The order of ServerOpening/ClosedEvents doesn't matter
+                    if isinstance(
+                        result, (monitoring.ServerOpeningEvent, monitoring.ServerClosedEvent)
+                    ):
+                        i, passed, message = compare_multiple_events(
+                            i, expected_results, self.all_listener.results
+                        )
+                        self.assertTrue(passed, message)
+                    else:
+                        self.assertTrue(*compare_events(expected_results[i], result))
+                        i += 1
+
+                # Assert no extra events.
+                extra_events = self.all_listener.results[expected_len:]
+                if extra_events:
+                    self.fail(f"Extra events {extra_events!r}")
+
+                self.all_listener.reset()
+        finally:
+            await m.close()
+
+    return run_scenario
+
+
+def create_tests():
+    for dirpath, _, filenames in os.walk(TEST_PATH):
+        for filename in filenames:
+            with open(os.path.join(dirpath, filename)) as scenario_stream:
+                scenario_def = json.load(scenario_stream, object_hook=object_hook)
+            # Construct test from scenario.
+ new_test = create_test(scenario_def) + test_name = f"test_{os.path.splitext(filename)[0]}" + new_test.__name__ = test_name + setattr(TestAllScenarios, new_test.__name__, new_test) + + +create_tests() + + +class TestSdamMonitoring(AsyncIntegrationTest): + knobs: client_knobs + listener: ServerAndTopologyEventListener + test_client: AsyncMongoClient + coll: AsyncCollection + + @classmethod + def setUpClass(cls): + # Speed up the tests by decreasing the event publish frequency. + cls.knobs = client_knobs( + events_queue_frequency=0.1, heartbeat_frequency=0.1, min_heartbeat_interval=0.1 + ) + cls.knobs.enable() + cls.listener = ServerAndTopologyEventListener() + + @classmethod + def tearDownClass(cls): + cls.knobs.disable() + + @async_client_context.require_failCommand_fail_point + async def asyncSetUp(self): + await super().asyncSetUp() + + retry_writes = async_client_context.supports_transactions() + self.test_client = await self.async_rs_or_single_client( + event_listeners=[self.listener], retryWrites=retry_writes + ) + self.coll = self.test_client[self.client.db.name].test + await self.coll.insert_one({}) + self.listener.reset() + + async def asyncTearDown(self): + await super().asyncTearDown() + + async def _test_app_error(self, fail_command_opts, expected_error): + address = await self.test_client.address + + # Test that an application error causes a ServerDescriptionChangedEvent + # to be published. + data = {"failCommands": ["insert"]} + data.update(fail_command_opts) + fail_insert = { + "configureFailPoint": "failCommand", + "mode": {"times": 1}, + "data": data, + } + async with self.fail_point(fail_insert): + if self.test_client.options.retry_writes: + await self.coll.insert_one({}) + else: + with self.assertRaises(expected_error): + await self.coll.insert_one({}) + await self.coll.insert_one({}) + + def marked_unknown(event): + return ( + isinstance(event, monitoring.ServerDescriptionChangedEvent) + and event.server_address == address + and not event.new_description.is_server_type_known + ) + + def discovered_node(event): + return ( + isinstance(event, monitoring.ServerDescriptionChangedEvent) + and event.server_address == address + and not event.previous_description.is_server_type_known + and event.new_description.is_server_type_known + ) + + def marked_unknown_and_rediscovered(): + return ( + len(self.listener.matching(marked_unknown)) >= 1 + and len(self.listener.matching(discovered_node)) >= 1 + ) + + # Topology events are not published synchronously + await async_wait_until(marked_unknown_and_rediscovered, "rediscover node") + + # Expect a single ServerDescriptionChangedEvent for the network error. + marked_unknown_events = self.listener.matching(marked_unknown) + self.assertEqual(len(marked_unknown_events), 1, marked_unknown_events) + self.assertIsInstance(marked_unknown_events[0].new_description.error, expected_error) + + async def test_network_error_publishes_events(self): + await self._test_app_error({"closeConnection": True}, ConnectionFailure) + + # In 4.4+, not primary errors from failCommand don't cause SDAM state + # changes because topologyVersion is not incremented. 
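+    # (For reference: errorCode 10107 below is NotWritablePrimary, and errorCode 91
+    # in the shutdown test is ShutdownInProgress.)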
+ @async_client_context.require_version_max(4, 3) + async def test_not_primary_error_publishes_events(self): + await self._test_app_error( + {"errorCode": 10107, "closeConnection": False, "errorLabels": ["RetryableWriteError"]}, + NotPrimaryError, + ) + + async def test_shutdown_error_publishes_events(self): + await self._test_app_error( + {"errorCode": 91, "closeConnection": False, "errorLabels": ["RetryableWriteError"]}, + NotPrimaryError, + ) + + +if __name__ == "__main__": + unittest.main() diff --git a/test/asynchronous/test_server_selection.py b/test/asynchronous/test_server_selection.py new file mode 100644 index 0000000000..f570662b85 --- /dev/null +++ b/test/asynchronous/test_server_selection.py @@ -0,0 +1,212 @@ +# Copyright 2015-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Test the topology module's Server Selection Spec implementation.""" +from __future__ import annotations + +import os +import sys +from pathlib import Path + +from pymongo import AsyncMongoClient, ReadPreference +from pymongo.asynchronous.settings import TopologySettings +from pymongo.asynchronous.topology import Topology +from pymongo.errors import ServerSelectionTimeoutError +from pymongo.hello import HelloCompat +from pymongo.operations import _Op +from pymongo.server_selectors import writable_server_selector +from pymongo.typings import strip_optional + +sys.path[0:0] = [""] + +from test.asynchronous import AsyncIntegrationTest, async_client_context, unittest +from test.asynchronous.utils import async_wait_until +from test.asynchronous.utils_selection_tests import ( + create_selection_tests, + get_topology_settings_dict, +) +from test.utils_selection_tests_shared import ( + get_addresses, + make_server_description, +) +from test.utils_shared import ( + FunctionCallRecorder, + OvertCommandListener, +) + +_IS_SYNC = False + +# Location of JSON test specifications. +if _IS_SYNC: + TEST_PATH = os.path.join( + Path(__file__).resolve().parent, "server_selection", "server_selection" + ) +else: + TEST_PATH = os.path.join( + Path(__file__).resolve().parent.parent, "server_selection", "server_selection" + ) + + +class SelectionStoreSelector: + """No-op selector that keeps track of what was passed to it.""" + + def __init__(self): + self.selection = None + + def __call__(self, selection): + self.selection = selection + return selection + + +class TestAllScenarios(create_selection_tests(TEST_PATH)): # type: ignore + pass + + +class TestCustomServerSelectorFunction(AsyncIntegrationTest): + @async_client_context.require_replica_set + async def test_functional_select_max_port_number_host(self): + # Selector that returns server with highest port number. + def custom_selector(servers): + ports = [s.address[1] for s in servers] + idx = ports.index(max(ports)) + return [servers[idx]] + + # Initialize client with appropriate listeners. 
+        listener = OvertCommandListener()
+        client = await self.async_rs_or_single_client(
+            server_selector=custom_selector, event_listeners=[listener]
+        )
+        coll = client.get_database("testdb", read_preference=ReadPreference.NEAREST).coll
+        self.addAsyncCleanup(client.drop_database, "testdb")
+
+        # Wait for the node list to be fully populated.
+        async def all_hosts_started():
+            return len((await client.admin.command(HelloCompat.LEGACY_CMD))["hosts"]) == len(
+                client._topology._description.readable_servers
+            )
+
+        await async_wait_until(all_hosts_started, "receive heartbeat from all hosts")
+
+        expected_port = max(
+            [strip_optional(n.address[1]) for n in client._topology._description.readable_servers]
+        )
+
+        # Insert 1 record and access it 10 times.
+        await coll.insert_one({"name": "John Doe"})
+        for _ in range(10):
+            await coll.find_one({"name": "John Doe"})
+
+        # Confirm all find commands are run against appropriate host.
+        for command in listener.started_events:
+            if command.command_name == "find":
+                self.assertEqual(command.connection_id[1], expected_port)
+
+    async def test_invalid_server_selector(self):
+        # Client initialization must fail if server_selector is not callable.
+        for selector_candidate in [[], 10, "string", {}]:
+            with self.assertRaisesRegex(ValueError, "must be a callable"):
+                AsyncMongoClient(connect=False, server_selector=selector_candidate)
+
+        # None value for server_selector is OK.
+        AsyncMongoClient(connect=False, server_selector=None)
+
+    @async_client_context.require_replica_set
+    async def test_selector_called(self):
+        selector = FunctionCallRecorder(lambda x: x)
+
+        # Client setup.
+        mongo_client = await self.async_rs_or_single_client(server_selector=selector)
+        test_collection = mongo_client.testdb.test_collection
+        self.addAsyncCleanup(mongo_client.drop_database, "testdb")
+
+        # Do N operations and check that the selector is called at least N-1 times
+        # (the server-selection fast path may skip it).
+        await test_collection.insert_one({"age": 20, "name": "John"})
+        await test_collection.insert_one({"age": 31, "name": "Jane"})
+        await test_collection.update_one({"name": "Jane"}, {"$set": {"age": 21}})
+        await test_collection.find_one({"name": "Roe"})
+        self.assertGreaterEqual(selector.call_count, 3)
+
+    @async_client_context.require_replica_set
+    async def test_latency_threshold_application(self):
+        selector = SelectionStoreSelector()
+
+        scenario_def: dict = {
+            "topology_description": {
+                "type": "ReplicaSetWithPrimary",
+                "servers": [
+                    {"address": "b:27017", "avg_rtt_ms": 10000, "type": "RSSecondary", "tag": {}},
+                    {"address": "c:27017", "avg_rtt_ms": 20000, "type": "RSSecondary", "tag": {}},
+                    {"address": "a:27017", "avg_rtt_ms": 30000, "type": "RSPrimary", "tag": {}},
+                ],
+            }
+        }
+
+        # Create & populate Topology such that all but one server is too slow.
+        rtt_times = [srv["avg_rtt_ms"] for srv in scenario_def["topology_description"]["servers"]]
+        min_rtt_idx = rtt_times.index(min(rtt_times))
+        seeds, hosts = get_addresses(scenario_def["topology_description"]["servers"])
+        settings = get_topology_settings_dict(
+            heartbeat_frequency=1, local_threshold_ms=1, seeds=seeds, server_selector=selector
+        )
+        topology = Topology(TopologySettings(**settings))
+        await topology.open()
+        for server in scenario_def["topology_description"]["servers"]:
+            server_description = make_server_description(server, hosts)
+            await topology.on_change(server_description)
+
+        # Invoke server selection and assert no filtering based on latency
+        # prior to custom server selection logic kicking in.
+ server = await topology.select_server(ReadPreference.NEAREST, _Op.TEST) + assert selector.selection is not None + self.assertEqual(len(selector.selection), len(topology.description.server_descriptions())) + + # Ensure proper filtering based on latency after custom selection. + self.assertEqual(server.description.address, seeds[min_rtt_idx]) + + @async_client_context.require_replica_set + async def test_server_selector_bypassed(self): + selector = FunctionCallRecorder(lambda x: x) + + scenario_def = { + "topology_description": { + "type": "ReplicaSetNoPrimary", + "servers": [ + {"address": "b:27017", "avg_rtt_ms": 10000, "type": "RSSecondary", "tag": {}}, + {"address": "c:27017", "avg_rtt_ms": 20000, "type": "RSSecondary", "tag": {}}, + {"address": "a:27017", "avg_rtt_ms": 30000, "type": "RSSecondary", "tag": {}}, + ], + } + } + + # Create & populate Topology such that no server is writeable. + seeds, hosts = get_addresses(scenario_def["topology_description"]["servers"]) + settings = get_topology_settings_dict( + heartbeat_frequency=1, local_threshold_ms=1, seeds=seeds, server_selector=selector + ) + topology = Topology(TopologySettings(**settings)) + await topology.open() + for server in scenario_def["topology_description"]["servers"]: + server_description = make_server_description(server, hosts) + await topology.on_change(server_description) + + # Invoke server selection and assert no calls to our custom selector. + with self.assertRaisesRegex(ServerSelectionTimeoutError, "No primary available for writes"): + await topology.select_server( + writable_server_selector, _Op.TEST, server_selection_timeout=0.1 + ) + self.assertEqual(selector.call_count, 0) + + +if __name__ == "__main__": + unittest.main() diff --git a/test/asynchronous/test_server_selection_in_window.py b/test/asynchronous/test_server_selection_in_window.py new file mode 100644 index 0000000000..dd0ff734f7 --- /dev/null +++ b/test/asynchronous/test_server_selection_in_window.py @@ -0,0 +1,180 @@ +# Copyright 2020-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Test the topology module's Server Selection Spec implementation.""" +from __future__ import annotations + +import asyncio +import os +import threading +from pathlib import Path +from test.asynchronous import AsyncIntegrationTest, async_client_context, unittest +from test.asynchronous.helpers import ConcurrentRunner +from test.asynchronous.utils import flaky +from test.asynchronous.utils_selection_tests import create_topology +from test.asynchronous.utils_spec_runner import AsyncSpecTestCreator +from test.utils_shared import ( + CMAPListener, + OvertCommandListener, + async_wait_until, +) + +from pymongo.common import clean_node +from pymongo.monitoring import ConnectionReadyEvent +from pymongo.operations import _Op +from pymongo.read_preferences import ReadPreference + +_IS_SYNC = False +# Location of JSON test specifications. 
+if _IS_SYNC: + TEST_PATH = os.path.join(Path(__file__).resolve().parent, "server_selection", "in_window") +else: + TEST_PATH = os.path.join( + Path(__file__).resolve().parent.parent, "server_selection", "in_window" + ) + + +class TestAllScenarios(unittest.IsolatedAsyncioTestCase): + async def run_scenario(self, scenario_def): + topology = await create_topology(scenario_def) + + # Update mock operation_count state: + for mock in scenario_def["mocked_topology_state"]: + address = clean_node(mock["address"]) + server = topology.get_server_by_address(address) + server.pool.operation_count = mock["operation_count"] + + pref = ReadPreference.NEAREST + counts = {address: 0 for address in topology._description.server_descriptions()} + + # Number of times to repeat server selection + iterations = scenario_def["iterations"] + for _ in range(iterations): + server = await topology.select_server(pref, _Op.TEST, server_selection_timeout=0) + counts[server.description.address] += 1 + + # Verify expected_frequencies + outcome = scenario_def["outcome"] + tolerance = outcome["tolerance"] + expected_frequencies = outcome["expected_frequencies"] + for host_str, freq in expected_frequencies.items(): + address = clean_node(host_str) + actual_freq = float(counts[address]) / iterations + if freq == 0: + # Should be exactly 0. + self.assertEqual(actual_freq, 0) + else: + # Should be within 'tolerance'. + self.assertAlmostEqual(actual_freq, freq, delta=tolerance) + + +def create_test(scenario_def, test, name): + async def run_scenario(self): + await self.run_scenario(scenario_def) + + return run_scenario + + +class CustomSpecTestCreator(AsyncSpecTestCreator): + def tests(self, scenario_def): + """Extract the tests from a spec file. + + Server selection in_window tests do not have a 'tests' field. + The whole file represents a single test case. + """ + return [scenario_def] + + +CustomSpecTestCreator(create_test, TestAllScenarios, TEST_PATH).create_tests() + + +class FinderTask(ConcurrentRunner): + def __init__(self, collection, iterations): + super().__init__() + self.daemon = True + self.collection = collection + self.iterations = iterations + self.passed = False + + async def run(self): + for _ in range(self.iterations): + await self.collection.find_one({}) + self.passed = True + + +class TestProse(AsyncIntegrationTest): + async def frequencies(self, client, listener, n_finds=10): + coll = client.test.test + N_TASKS = 10 + tasks = [FinderTask(coll, n_finds) for _ in range(N_TASKS)] + for task in tasks: + await task.start() + for task in tasks: + await task.join() + for task in tasks: + self.assertTrue(task.passed) + + events = listener.started_events + self.assertEqual(len(events), n_finds * N_TASKS) + nodes = client.nodes + self.assertEqual(len(nodes), 2) + freqs = {address: 0.0 for address in nodes} + for event in events: + freqs[event.connection_id] += 1 + for address in freqs: + freqs[address] = freqs[address] / float(len(events)) + return freqs + + @async_client_context.require_failCommand_appName + @async_client_context.require_multiple_mongoses + @flaky(reason="PYTHON-3689") + async def test_load_balancing(self): + listener = OvertCommandListener() + cmap_listener = CMAPListener() + # PYTHON-2584: Use a large localThresholdMS to avoid the impact of + # varying RTTs. 
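+        # (localThresholdMS defines the acceptable-latency window for server
+        # selection; 30 seconds keeps both mongoses selectable regardless of RTT.)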
+ client = await self.async_rs_client( + async_client_context.mongos_seeds(), + appName="loadBalancingTest", + event_listeners=[listener, cmap_listener], + localThresholdMS=30000, + minPoolSize=10, + ) + await async_wait_until(lambda: len(client.nodes) == 2, "discover both nodes") + # Wait for both pools to be populated. + await cmap_listener.async_wait_for_event(ConnectionReadyEvent, 20) + # Delay find commands on only one mongos. + delay_finds = { + "configureFailPoint": "failCommand", + "mode": {"times": 10000}, + "data": { + "failCommands": ["find"], + "blockConnection": True, + "blockTimeMS": 500, + "appName": "loadBalancingTest", + }, + } + async with self.fail_point(delay_finds): + nodes = async_client_context.client.nodes + self.assertEqual(len(nodes), 1) + delayed_server = next(iter(nodes)) + freqs = await self.frequencies(client, listener) + self.assertLessEqual(freqs[delayed_server], 0.25) + listener.reset() + freqs = await self.frequencies(client, listener, n_finds=150) + self.assertAlmostEqual(freqs[delayed_server], 0.50, delta=0.15) + + +if __name__ == "__main__": + unittest.main() diff --git a/test/asynchronous/test_server_selection_logging.py b/test/asynchronous/test_server_selection_logging.py new file mode 100644 index 0000000000..6b0975318a --- /dev/null +++ b/test/asynchronous/test_server_selection_logging.py @@ -0,0 +1,45 @@ +# Copyright 2024-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Run the server selection logging unified format spec tests.""" +from __future__ import annotations + +import os +import sys +from pathlib import Path + +sys.path[0:0] = [""] + +from test import unittest +from test.asynchronous.unified_format import generate_test_classes + +_IS_SYNC = False + +# Location of JSON test specifications. +if _IS_SYNC: + TEST_PATH = os.path.join(Path(__file__).resolve().parent, "server_selection_logging") +else: + TEST_PATH = os.path.join(Path(__file__).resolve().parent.parent, "server_selection_logging") + + +globals().update( + generate_test_classes( + TEST_PATH, + module=__name__, + ) +) + + +if __name__ == "__main__": + unittest.main() diff --git a/test/asynchronous/test_server_selection_rtt.py b/test/asynchronous/test_server_selection_rtt.py new file mode 100644 index 0000000000..1f8f6bc7df --- /dev/null +++ b/test/asynchronous/test_server_selection_rtt.py @@ -0,0 +1,77 @@ +# Copyright 2015 MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
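[Reviewer note] The file added below exercises pymongo.read_preferences.MovingAverage, the exponentially weighted moving average PyMongo keeps per server for round-trip times, which is exactly what the JSON scenarios encode (avg_rtt_ms, new_rtt_ms, new_avg_rtt, with "NULL" meaning no sample). A quick usage sketch; the 0.2 weight for new samples follows the server selection spec, but treat the exact constant and the None-before-first-sample behavior as implementation details:

    from pymongo.read_preferences import MovingAverage

    avg = MovingAverage()
    assert avg.get() is None  # No samples yet (the scenarios encode this as "NULL").

    avg.add_sample(10.0)  # The first sample seeds the average.
    assert avg.get() == 10.0

    avg.add_sample(20.0)  # Later samples are blended: new = 0.8 * old + 0.2 * sample.
    assert abs(avg.get() - 12.0) < 1e-9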
+ +"""Test the topology module.""" +from __future__ import annotations + +import json +import os +import sys +from pathlib import Path + +sys.path[0:0] = [""] + +from test import unittest +from test.asynchronous import AsyncPyMongoTestCase + +from pymongo.read_preferences import MovingAverage + +_IS_SYNC = False + +# Location of JSON test specifications. +if _IS_SYNC: + TEST_PATH = os.path.join(Path(__file__).resolve().parent, "server_selection/rtt") +else: + TEST_PATH = os.path.join(Path(__file__).resolve().parent.parent, "server_selection/rtt") + + +class TestAllScenarios(AsyncPyMongoTestCase): + pass + + +def create_test(scenario_def): + def run_scenario(self): + moving_average = MovingAverage() + + if scenario_def["avg_rtt_ms"] != "NULL": + moving_average.add_sample(scenario_def["avg_rtt_ms"]) + + if scenario_def["new_rtt_ms"] != "NULL": + moving_average.add_sample(scenario_def["new_rtt_ms"]) + + self.assertAlmostEqual(moving_average.get(), scenario_def["new_avg_rtt"]) + + return run_scenario + + +def create_tests(): + for dirpath, _, filenames in os.walk(TEST_PATH): + dirname = os.path.split(dirpath)[-1] + + for filename in filenames: + with open(os.path.join(dirpath, filename)) as scenario_stream: + scenario_def = json.load(scenario_stream) + + # Construct test from scenario. + new_test = create_test(scenario_def) + test_name = f"test_{dirname}_{os.path.splitext(filename)[0]}" + + new_test.__name__ = test_name + setattr(TestAllScenarios, new_test.__name__, new_test) + + +create_tests() + +if __name__ == "__main__": + unittest.main() diff --git a/test/asynchronous/test_session.py b/test/asynchronous/test_session.py index 42bc253b56..ff0feebafc 100644 --- a/test/asynchronous/test_session.py +++ b/test/asynchronous/test_session.py @@ -15,10 +15,13 @@ """Test the client_session module.""" from __future__ import annotations +import asyncio import copy import sys import time +from inspect import iscoroutinefunction from io import BytesIO +from test.asynchronous.helpers import ExceptionCatchingTask from typing import Any, Callable, List, Set, Tuple from pymongo.synchronous.mongo_client import MongoClient @@ -27,25 +30,24 @@ from test.asynchronous import ( AsyncIntegrationTest, - AsyncPyMongoTestCase, AsyncUnitTest, SkipTest, async_client_context, unittest, ) -from test.utils import ( +from test.asynchronous.helpers import client_knobs +from test.utils_shared import ( EventListener, - ExceptionCatchingThread, + HeartbeatEventListener, OvertCommandListener, async_wait_until, ) from bson import DBRef from gridfs.asynchronous.grid_file import AsyncGridFS, AsyncGridFSBucket -from pymongo import ASCENDING, AsyncMongoClient, monitoring +from pymongo import ASCENDING, AsyncMongoClient, _csot, monitoring from pymongo.asynchronous.command_cursor import AsyncCommandCursor from pymongo.asynchronous.cursor import AsyncCursor -from pymongo.asynchronous.helpers import anext from pymongo.common import _MAX_END_SESSIONS from pymongo.errors import ConfigurationError, InvalidOperation, OperationFailure from pymongo.operations import IndexModel, InsertOne, UpdateOne @@ -131,8 +133,9 @@ async def _test_ops(self, client, *ops): await f(*args, **kw) self.assertGreaterEqual(len(listener.started_events), 1) for event in listener.started_events: - self.assertTrue( - "lsid" in event.command, + self.assertIn( + "lsid", + event.command, f"{f.__name__} sent no lsid with {event.command_name}", ) @@ -167,8 +170,9 @@ async def _test_ops(self, client, *ops): self.assertGreaterEqual(len(listener.started_events), 1) lsids = 
[] for event in listener.started_events: - self.assertTrue( - "lsid" in event.command, + self.assertIn( + "lsid", + event.command, f"{f.__name__} sent no lsid with {event.command_name}", ) @@ -184,16 +188,16 @@ async def _test_ops(self, client, *ops): f"{f.__name__} did not return implicit session to pool", ) - @async_client_context.require_sync - def test_implicit_sessions_checkout(self): + async def test_implicit_sessions_checkout(self): # "To confirm that implicit sessions only allocate their server session after a # successful connection checkout" test from Driver Sessions Spec. succeeded = False lsid_set = set() - failures = 0 - for _ in range(5): - listener = OvertCommandListener() - client = self.async_rs_or_single_client(event_listeners=[listener], maxPoolSize=1) + listener = OvertCommandListener() + client = await self.async_rs_or_single_client(event_listeners=[listener], maxPoolSize=1) + # Retry up to 10 times because there is a known race condition that can cause multiple + # sessions to be used: connection check in happens before session check in + for _ in range(10): cursor = client.db.test.find({}) ops: List[Tuple[Callable, List[Any]]] = [ (client.db.test.find_one, [{"_id": 1}]), @@ -210,34 +214,34 @@ def test_implicit_sessions_checkout(self): (cursor.distinct, ["_id"]), (client.db.list_collections, []), ] - threads = [] + tasks = [] listener.reset() - def thread_target(op, *args): - res = op(*args) + async def target(op, *args): + if iscoroutinefunction(op): + res = await op(*args) + else: + res = op(*args) if isinstance(res, (AsyncCursor, AsyncCommandCursor)): - list(res) # type: ignore[call-overload] + await res.to_list() for op, args in ops: - threads.append( - ExceptionCatchingThread( - target=thread_target, args=[op, *args], name=op.__name__ - ) + tasks.append( + ExceptionCatchingTask(target=target, args=[op, *args], name=op.__name__) ) - threads[-1].start() - self.assertEqual(len(threads), len(ops)) - for thread in threads: - thread.join() - self.assertIsNone(thread.exc) - client.close() + await tasks[-1].start() + self.assertEqual(len(tasks), len(ops)) + for t in tasks: + await t.join() + self.assertIsNone(t.exc) lsid_set.clear() for i in listener.started_events: if i.command.get("lsid"): lsid_set.add(i.command.get("lsid")["id"]) if len(lsid_set) == 1: + # Break on first success. succeeded = True - else: - failures += 1 + break self.assertTrue(succeeded, lsid_set) async def test_pool_lifo(self): @@ -373,9 +377,9 @@ async def test_cursor_clone(self): async with self.client.start_session() as s: cursor = coll.find(session=s) - self.assertTrue(cursor.session is s) + self.assertIs(cursor.session, s) clone = cursor.clone() - self.assertTrue(clone.session is s) + self.assertIs(clone.session, s) # No explicit session. 
cursor = coll.find(batch_size=2) @@ -387,7 +391,7 @@ async def test_cursor_clone(self): await anext(clone) self.assertIsNone(clone.session) self.assertIsNotNone(clone._session) - self.assertFalse(cursor._session is clone._session) + self.assertIsNot(cursor._session, clone._session) await cursor.close() await clone.close() @@ -419,8 +423,9 @@ async def test_cursor(self): await f(session=s) self.assertGreaterEqual(len(listener.started_events), 1) for event in listener.started_events: - self.assertTrue( - "lsid" in event.command, + self.assertIn( + "lsid", + event.command, f"{name} sent no lsid with {event.command_name}", ) @@ -438,15 +443,13 @@ async def test_cursor(self): listener.reset() await f(session=None) event0 = listener.first_command_started() - self.assertTrue( - "lsid" in event0.command, f"{name} sent no lsid with {event0.command_name}" - ) + self.assertIn("lsid", event0.command, f"{name} sent no lsid with {event0.command_name}") lsid = event0.command["lsid"] for event in listener.started_events[1:]: - self.assertTrue( - "lsid" in event.command, f"{name} sent no lsid with {event.command_name}" + self.assertIn( + "lsid", event.command, f"{name} sent no lsid with {event.command_name}" ) self.assertEqual( @@ -538,9 +541,10 @@ async def find(session=None): (bucket.download_to_stream_by_name, ["f", sio], {}), (find, [], {}), (bucket.rename, [1, "f2"], {}), + (bucket.rename_by_name, ["f2", "f3"], {}), # Delete both files so _test_ops can run these operations twice. (bucket.delete, [1], {}), - (bucket.delete, [2], {}), + (bucket.delete_by_name, ["f"], {}), ) async def test_gridfsbucket_cursor(self): @@ -1040,14 +1044,6 @@ async def test_writes_do_not_include_read_concern(self): lambda coll, session: coll.find({}, session=session).explain() ) - @async_client_context.require_no_standalone - @async_client_context.require_version_max(4, 1, 0) - async def test_aggregate_out_does_not_include_read_concern(self): - async def alambda(coll, session): - await (await coll.aggregate([{"$out": "aggout"}], session=session)).to_list() - - await self._test_no_read_concern(alambda) - @async_client_context.require_no_standalone async def test_get_more_does_not_include_read_concern(self): coll = self.client.pymongo_test.test @@ -1090,7 +1086,6 @@ async def test_server_not_causal(self): self.assertIsNone(act) @async_client_context.require_no_standalone - @async_client_context.require_no_mmap async def test_read_concern(self): async with self.client.start_session(causal_consistency=True) as s: coll = self.client.pymongo_test.test @@ -1133,12 +1128,10 @@ async def asyncSetUp(self): if "$clusterTime" not in (await async_client_context.hello): raise SkipTest("$clusterTime not supported") + # Sessions prose test: 3) $clusterTime in commands async def test_cluster_time(self): listener = SessionTestListener() - # Prevent heartbeats from updating $clusterTime between operations. - client = await self.async_rs_or_single_client( - event_listeners=[listener], heartbeatFrequencyMS=999999 - ) + client = await self.async_rs_or_single_client(event_listeners=[listener]) collection = client.pymongo_test.collection # Prepare for tests of find() and aggregate(). 
await collection.insert_many([{} for _ in range(10)]) @@ -1199,15 +1192,17 @@ async def aggregate(): self.assertGreaterEqual(len(listener.started_events), 1) for i, event in enumerate(listener.started_events): - self.assertTrue( - "$clusterTime" in event.command, + self.assertIn( + "$clusterTime", + event.command, f"{f.__name__} sent no $clusterTime with {event.command_name}", ) if i > 0: succeeded = listener.succeeded_events[i - 1] - self.assertTrue( - "$clusterTime" in succeeded.reply, + self.assertIn( + "$clusterTime", + succeeded.reply, f"{f.__name__} received no $clusterTime with {succeeded.command_name}", ) @@ -1217,6 +1212,40 @@ async def aggregate(): f"{f.__name__} sent wrong $clusterTime with {event.command_name}", ) + # Sessions prose test: 20) Drivers do not gossip `$clusterTime` on SDAM commands + async def test_cluster_time_not_used_by_sdam(self): + heartbeat_listener = HeartbeatEventListener() + cmd_listener = OvertCommandListener() + with client_knobs(min_heartbeat_interval=0.01): + c1 = await self.async_single_client( + event_listeners=[heartbeat_listener, cmd_listener], heartbeatFrequencyMS=10 + ) + cluster_time = (await c1.admin.command({"ping": 1}))["$clusterTime"] + self.assertEqual(c1._topology.max_cluster_time(), cluster_time) + + # Advance the server's $clusterTime by performing an insert via another client. + await self.db.test.insert_one({"advance": "$clusterTime"}) + # Wait until the client C1 processes the next pair of SDAM heartbeat started + succeeded events. + heartbeat_listener.reset() + + async def next_heartbeat(): + events = heartbeat_listener.events + for i in range(len(events) - 1): + if isinstance(events[i], monitoring.ServerHeartbeatStartedEvent): + if isinstance(events[i + 1], monitoring.ServerHeartbeatSucceededEvent): + return True + return False + + await async_wait_until( + next_heartbeat, "never found pair of heartbeat started + succeeded events" + ) + # Assert that C1's max $clusterTime is still the same and has not been updated by SDAM. + cmd_listener.reset() + await c1.admin.command({"ping": 1}) + started = cmd_listener.started_events[0] + self.assertEqual(started.command_name, "ping") + self.assertEqual(started.command["$clusterTime"], cluster_time) + if __name__ == "__main__": unittest.main() diff --git a/test/asynchronous/test_sessions_unified.py b/test/asynchronous/test_sessions_unified.py new file mode 100644 index 0000000000..b4cbac5704 --- /dev/null +++ b/test/asynchronous/test_sessions_unified.py @@ -0,0 +1,40 @@ +# Copyright 2021-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Test the Sessions unified spec tests.""" +from __future__ import annotations + +import os +import sys +from pathlib import Path + +sys.path[0:0] = [""] + +from test import unittest +from test.asynchronous.unified_format import generate_test_classes + +_IS_SYNC = False + +# Location of JSON test specifications. 
+if _IS_SYNC: + TEST_PATH = os.path.join(Path(__file__).resolve().parent, "sessions") +else: + TEST_PATH = os.path.join(Path(__file__).resolve().parent.parent, "sessions") + + +# Generate unified tests. +globals().update(generate_test_classes(TEST_PATH, module=__name__)) + +if __name__ == "__main__": + unittest.main() diff --git a/test/asynchronous/test_srv_polling.py b/test/asynchronous/test_srv_polling.py new file mode 100644 index 0000000000..3d4aed1bc1 --- /dev/null +++ b/test/asynchronous/test_srv_polling.py @@ -0,0 +1,387 @@ +# Copyright 2019-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Run the SRV support tests.""" +from __future__ import annotations + +import asyncio +import sys +import time +from test.asynchronous.utils import flaky +from test.utils_shared import FunctionCallRecorder +from typing import Any + +sys.path[0:0] = [""] + +from test.asynchronous import AsyncPyMongoTestCase, client_knobs, unittest +from test.asynchronous.utils import async_wait_until + +import pymongo +from pymongo import common +from pymongo.asynchronous.srv_resolver import _have_dnspython +from pymongo.errors import ConfigurationError + +_IS_SYNC = False + +WAIT_TIME = 0.1 + + +class SrvPollingKnobs: + def __init__( + self, + ttl_time=None, + min_srv_rescan_interval=None, + nodelist_callback=None, + count_resolver_calls=False, + ): + self.ttl_time = ttl_time + self.min_srv_rescan_interval = min_srv_rescan_interval + self.nodelist_callback = nodelist_callback + self.count_resolver_calls = count_resolver_calls + + self.old_min_srv_rescan_interval = None + self.old_dns_resolver_response = None + + def enable(self): + self.old_min_srv_rescan_interval = common.MIN_SRV_RESCAN_INTERVAL + self.old_dns_resolver_response = ( + pymongo.asynchronous.srv_resolver._SrvResolver.get_hosts_and_min_ttl + ) + + if self.min_srv_rescan_interval is not None: + common.MIN_SRV_RESCAN_INTERVAL = self.min_srv_rescan_interval + + async def mock_get_hosts_and_min_ttl(resolver, *args): + assert self.old_dns_resolver_response is not None + nodes, ttl = await self.old_dns_resolver_response(resolver) + if self.nodelist_callback is not None: + nodes = self.nodelist_callback() + if self.ttl_time is not None: + ttl = self.ttl_time + return nodes, ttl + + patch_func: Any + if self.count_resolver_calls: + patch_func = FunctionCallRecorder(mock_get_hosts_and_min_ttl) + else: + patch_func = mock_get_hosts_and_min_ttl + + pymongo.asynchronous.srv_resolver._SrvResolver.get_hosts_and_min_ttl = patch_func # type: ignore + + def __enter__(self): + self.enable() + + def disable(self): + common.MIN_SRV_RESCAN_INTERVAL = self.old_min_srv_rescan_interval # type: ignore + pymongo.asynchronous.srv_resolver._SrvResolver.get_hosts_and_min_ttl = ( # type: ignore + self.old_dns_resolver_response + ) + + def __exit__(self, exc_type, exc_val, exc_tb): + self.disable() + + +class TestSrvPolling(AsyncPyMongoTestCase): + BASE_SRV_RESPONSE = [ + ("localhost.test.build.10gen.cc", 27017), + ("localhost.test.build.10gen.cc", 27018), + ] 
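[Reviewer note] SrvPollingKnobs above works by monkeypatching _SrvResolver.get_hosts_and_min_ttl, so a test can dictate both the node list an SRV rescan returns and the TTL that schedules the next one. A minimal sketch of the pattern the scenarios below follow, reusing WAIT_TIME and the drivers-evergreen-tools SRV fixtures defined in this file:

    def two_hosts():
        # Pretend DNS now returns only these two SRV records.
        return [
            ("localhost.test.build.10gen.cc", 27017),
            ("localhost.test.build.10gen.cc", 27019),
        ]

    with SrvPollingKnobs(
        ttl_time=WAIT_TIME,  # Expire SRV answers almost immediately...
        min_srv_rescan_interval=WAIT_TIME,  # ...and allow rescans just as fast.
        nodelist_callback=two_hosts,
    ):
        # Any mongodb+srv:// client polling here sees two_hosts() on its next
        # rescan and updates its topology accordingly.
        ...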
+ + CONNECTION_STRING = "mongodb+srv://test1.test.build.10gen.cc" + + async def asyncSetUp(self): + # Patch timeouts to ensure short rescan SRV interval. + self.client_knobs = client_knobs( + heartbeat_frequency=WAIT_TIME, + min_heartbeat_interval=WAIT_TIME, + events_queue_frequency=WAIT_TIME, + ) + self.client_knobs.enable() + + async def asyncTearDown(self): + self.client_knobs.disable() + + def get_nodelist(self, client): + return client._topology.description.server_descriptions().keys() + + async def assert_nodelist_change(self, expected_nodelist, client, timeout=(100 * WAIT_TIME)): + """Check if the client._topology eventually sees all nodes in the + expected_nodelist. + """ + + def predicate(): + nodelist = self.get_nodelist(client) + if set(expected_nodelist) == set(nodelist): + return True + return False + + await async_wait_until(predicate, "see expected nodelist", timeout=timeout) + + async def assert_nodelist_nochange(self, expected_nodelist, client, timeout=(100 * WAIT_TIME)): + """Check if the client._topology ever deviates from seeing all nodes + in the expected_nodelist. Consistency is checked after sleeping for + (WAIT_TIME * 10) seconds. Also check that the resolver is called at + least once. + """ + + def predicate(): + if set(expected_nodelist) == set(self.get_nodelist(client)): + return ( + pymongo.asynchronous.srv_resolver._SrvResolver.get_hosts_and_min_ttl.call_count + >= 1 + ) + return False + + await async_wait_until(predicate, "Node list equals expected nodelist", timeout=timeout) + + nodelist = self.get_nodelist(client) + if set(expected_nodelist) != set(nodelist): + msg = "Client nodelist %s changed unexpectedly (expected %s)" + raise self.fail(msg % (nodelist, expected_nodelist)) + self.assertGreaterEqual( + pymongo.asynchronous.srv_resolver._SrvResolver.get_hosts_and_min_ttl.call_count, # type: ignore + 1, + "resolver was never called", + ) + return True + + async def run_scenario(self, dns_response, expect_change): + self.assertEqual(_have_dnspython(), True) + if callable(dns_response): + dns_resolver_response = dns_response + else: + + def dns_resolver_response(): + return dns_response + + if expect_change: + assertion_method = self.assert_nodelist_change + count_resolver_calls = False + expected_response = dns_response + else: + assertion_method = self.assert_nodelist_nochange + count_resolver_calls = True + expected_response = self.BASE_SRV_RESPONSE + + # Patch timeouts to ensure short test running times. + with SrvPollingKnobs(ttl_time=WAIT_TIME, min_srv_rescan_interval=WAIT_TIME): + client = self.simple_client(self.CONNECTION_STRING) + await client.aconnect() + await self.assert_nodelist_change(self.BASE_SRV_RESPONSE, client) + # Patch list of hosts returned by DNS query. + with SrvPollingKnobs( + nodelist_callback=dns_resolver_response, count_resolver_calls=count_resolver_calls + ): + await assertion_method(expected_response, client) + + # Close the client early to avoid affecting the next scenario run. 
+ await client.close() + + async def test_addition(self): + response = self.BASE_SRV_RESPONSE[:] + response.append(("localhost.test.build.10gen.cc", 27019)) + await self.run_scenario(response, True) + + async def test_removal(self): + response = self.BASE_SRV_RESPONSE[:] + response.remove(("localhost.test.build.10gen.cc", 27018)) + await self.run_scenario(response, True) + + async def test_replace_one(self): + response = self.BASE_SRV_RESPONSE[:] + response.remove(("localhost.test.build.10gen.cc", 27018)) + response.append(("localhost.test.build.10gen.cc", 27019)) + await self.run_scenario(response, True) + + async def test_replace_both_with_one(self): + response = [("localhost.test.build.10gen.cc", 27019)] + await self.run_scenario(response, True) + + async def test_replace_both_with_two(self): + response = [ + ("localhost.test.build.10gen.cc", 27019), + ("localhost.test.build.10gen.cc", 27020), + ] + await self.run_scenario(response, True) + + async def test_dns_failures(self): + from dns import exception + + for exc in (exception.FormError, exception.TooBig, exception.Timeout): + + def response_callback(*args): + raise exc("DNS Failure!") + + await self.run_scenario(response_callback, False) + + @flaky(reason="PYTHON-5500", max_runs=3) + async def test_dns_failures_logging(self): + from dns import exception + + with self.assertLogs("pymongo.topology", level="DEBUG") as cm: + + def response_callback(*args): + raise exception.Timeout("DNS Failure!") + + await self.run_scenario(response_callback, False) + + srv_failure_logs = [r for r in cm.records if "SRV monitor check failed" in r.getMessage()] + self.assertEqual(len(srv_failure_logs), 1) + + async def test_dns_record_lookup_empty(self): + response: list = [] + await self.run_scenario(response, False) + + async def _test_recover_from_initial(self, initial_callback): + # Construct a valid final response callback distinct from base. + response_final = self.BASE_SRV_RESPONSE[:] + response_final.pop() + + def final_callback(): + return response_final + + with SrvPollingKnobs( + ttl_time=WAIT_TIME, + min_srv_rescan_interval=WAIT_TIME, + nodelist_callback=initial_callback, + count_resolver_calls=True, + ): + # Client uses unpatched method to get initial nodelist + client = self.simple_client(self.CONNECTION_STRING) + await client.aconnect() + # Invalid DNS resolver response should not change nodelist. + await self.assert_nodelist_nochange(self.BASE_SRV_RESPONSE, client) + + with SrvPollingKnobs( + ttl_time=WAIT_TIME, min_srv_rescan_interval=WAIT_TIME, nodelist_callback=final_callback + ): + # Nodelist should reflect new valid DNS resolver response. 
+ await self.assert_nodelist_change(response_final, client) + + @flaky(reason="PYTHON-5315") + async def test_recover_from_initially_empty_seedlist(self): + def empty_seedlist(): + return [] + + await self._test_recover_from_initial(empty_seedlist) + + @flaky(reason="PYTHON-5315") + async def test_recover_from_initially_erroring_seedlist(self): + def erroring_seedlist(): + raise ConfigurationError + + await self._test_recover_from_initial(erroring_seedlist) + + async def test_10_all_dns_selected(self): + response = [ + ("localhost.test.build.10gen.cc", 27017), + ("localhost.test.build.10gen.cc", 27019), + ("localhost.test.build.10gen.cc", 27020), + ] + + def nodelist_callback(): + return response + + with SrvPollingKnobs(ttl_time=WAIT_TIME, min_srv_rescan_interval=WAIT_TIME): + client = self.simple_client(self.CONNECTION_STRING, srvMaxHosts=0) + await client.aconnect() + with SrvPollingKnobs(nodelist_callback=nodelist_callback): + await self.assert_nodelist_change(response, client) + + async def test_11_all_dns_selected(self): + response = [ + ("localhost.test.build.10gen.cc", 27019), + ("localhost.test.build.10gen.cc", 27020), + ] + + def nodelist_callback(): + return response + + with SrvPollingKnobs(ttl_time=WAIT_TIME, min_srv_rescan_interval=WAIT_TIME): + client = self.simple_client(self.CONNECTION_STRING, srvMaxHosts=2) + await client.aconnect() + with SrvPollingKnobs(nodelist_callback=nodelist_callback): + await self.assert_nodelist_change(response, client) + + async def test_12_new_dns_randomly_selected(self): + response = [ + ("localhost.test.build.10gen.cc", 27020), + ("localhost.test.build.10gen.cc", 27019), + ("localhost.test.build.10gen.cc", 27017), + ] + + def nodelist_callback(): + return response + + with SrvPollingKnobs(ttl_time=WAIT_TIME, min_srv_rescan_interval=WAIT_TIME): + client = self.simple_client(self.CONNECTION_STRING, srvMaxHosts=2) + await client.aconnect() + with SrvPollingKnobs(nodelist_callback=nodelist_callback): + await asyncio.sleep(2 * common.MIN_SRV_RESCAN_INTERVAL) + final_topology = set(client.topology_description.server_descriptions()) + self.assertIn(("localhost.test.build.10gen.cc", 27017), final_topology) + self.assertEqual(len(final_topology), 2) + + async def test_does_not_flipflop(self): + with SrvPollingKnobs(ttl_time=WAIT_TIME, min_srv_rescan_interval=WAIT_TIME): + client = self.simple_client(self.CONNECTION_STRING, srvMaxHosts=1) + await client.aconnect() + old = set(client.topology_description.server_descriptions()) + await asyncio.sleep(4 * WAIT_TIME) + new = set(client.topology_description.server_descriptions()) + self.assertSetEqual(old, new) + + async def test_srv_service_name(self): + # Construct a valid final response callback distinct from base. 
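+        # With srvServiceName=customname the driver queries the
+        # _customname._tcp.test22.test.build.10gen.cc SRV records instead of
+        # the default _mongodb._tcp records.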
+ response = [ + ("localhost.test.build.10gen.cc.", 27019), + ("localhost.test.build.10gen.cc.", 27020), + ] + + def nodelist_callback(): + return response + + with SrvPollingKnobs(ttl_time=WAIT_TIME, min_srv_rescan_interval=WAIT_TIME): + client = self.simple_client( + "mongodb+srv://test22.test.build.10gen.cc/?srvServiceName=customname" + ) + await client.aconnect() + with SrvPollingKnobs(nodelist_callback=nodelist_callback): + await self.assert_nodelist_change(response, client) + + async def test_srv_waits_to_poll(self): + modified = [("localhost.test.build.10gen.cc", 27019)] + + def resolver_response(): + return modified + + with SrvPollingKnobs( + ttl_time=WAIT_TIME, + min_srv_rescan_interval=WAIT_TIME, + nodelist_callback=resolver_response, + ): + client = self.simple_client(self.CONNECTION_STRING) + await client.aconnect() + with self.assertRaises(AssertionError): + await self.assert_nodelist_change(modified, client, timeout=WAIT_TIME / 2) + + def test_import_dns_resolver(self): + # Regression test for PYTHON-4407 + import dns.resolver + + self.assertTrue(hasattr(dns.resolver, "resolve") or hasattr(dns.resolver, "query")) + + +if __name__ == "__main__": + unittest.main() diff --git a/test/asynchronous/test_ssl.py b/test/asynchronous/test_ssl.py new file mode 100644 index 0000000000..0ce3e8bbac --- /dev/null +++ b/test/asynchronous/test_ssl.py @@ -0,0 +1,691 @@ +# Copyright 2011-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Tests for SSL support.""" +from __future__ import annotations + +import os +import pathlib +import socket +import sys + +sys.path[0:0] = [""] + +from test.asynchronous import ( + HAVE_IPADDRESS, + AsyncIntegrationTest, + AsyncPyMongoTestCase, + SkipTest, + async_client_context, + connected, + remove_all_users, + unittest, +) +from test.utils_shared import ( + EventListener, + OvertCommandListener, + cat_files, + ignore_deprecations, +) +from urllib.parse import quote_plus + +from pymongo import AsyncMongoClient, ssl_support +from pymongo.errors import ConfigurationError, ConnectionFailure, OperationFailure +from pymongo.hello import HelloCompat +from pymongo.ssl_support import HAVE_PYSSL, HAVE_SSL, _ssl, get_ssl_context +from pymongo.write_concern import WriteConcern + +_HAVE_PYOPENSSL = False +try: + # All of these must be available to use PyOpenSSL + import OpenSSL + import requests + import service_identity + + # Ensure service_identity>=18.1 is installed + from service_identity.pyopenssl import verify_ip_address + + from pymongo.ocsp_support import _load_trusted_ca_certs + + _HAVE_PYOPENSSL = True +except ImportError: + _load_trusted_ca_certs = None # type: ignore + + +if HAVE_SSL: + import ssl + +_IS_SYNC = False + +if _IS_SYNC: + CERT_PATH = os.path.join(pathlib.Path(__file__).resolve().parent, "certificates") +else: + CERT_PATH = os.path.join(pathlib.Path(__file__).resolve().parent.parent, "certificates") + +CLIENT_PEM = os.path.join(CERT_PATH, "client.pem") +CLIENT_ENCRYPTED_PEM = os.path.join(CERT_PATH, "password_protected.pem") +CA_PEM = os.path.join(CERT_PATH, "ca.pem") +CA_BUNDLE_PEM = os.path.join(CERT_PATH, "trusted-ca.pem") +CRL_PEM = os.path.join(CERT_PATH, "crl.pem") +MONGODB_X509_USERNAME = "C=US,ST=New York,L=New York City,O=MDB,OU=Drivers,CN=client" + +# To fully test this start a mongod instance (built with SSL support) like so: +# mongod --dbpath /path/to/data/directory --sslOnNormalPorts \ +# --sslPEMKeyFile /path/to/pymongo/test/certificates/server.pem \ +# --sslCAFile /path/to/pymongo/test/certificates/ca.pem \ +# --sslWeakCertificateValidation +# Also, make sure you have 'server' as an alias for localhost in /etc/hosts +# +# Note: For all replica set tests to pass, the replica set configuration must +# use 'localhost' for the hostname of all hosts. 
+ + +class TestClientSSL(AsyncPyMongoTestCase): + @unittest.skipIf(HAVE_SSL, "The ssl module is available, can't test what happens without it.") + def test_no_ssl_module(self): + # Explicit + self.assertRaises(ConfigurationError, self.simple_client, ssl=True) + + # Implied + self.assertRaises(ConfigurationError, self.simple_client, tlsCertificateKeyFile=CLIENT_PEM) + + @unittest.skipUnless(HAVE_SSL, "The ssl module is not available.") + @ignore_deprecations + def test_config_ssl(self): + # Tests various ssl configurations + self.assertRaises(ValueError, self.simple_client, ssl="foo") + self.assertRaises( + ConfigurationError, self.simple_client, tls=False, tlsCertificateKeyFile=CLIENT_PEM + ) + self.assertRaises(TypeError, self.simple_client, ssl=0) + self.assertRaises(TypeError, self.simple_client, ssl=5.5) + self.assertRaises(TypeError, self.simple_client, ssl=[]) + + self.assertRaises(IOError, self.simple_client, tlsCertificateKeyFile="NoSuchFile") + self.assertRaises(TypeError, self.simple_client, tlsCertificateKeyFile=True) + self.assertRaises(TypeError, self.simple_client, tlsCertificateKeyFile=[]) + + # Test invalid combinations + self.assertRaises( + ConfigurationError, self.simple_client, tls=False, tlsCertificateKeyFile=CLIENT_PEM + ) + self.assertRaises(ConfigurationError, self.simple_client, tls=False, tlsCAFile=CA_PEM) + self.assertRaises(ConfigurationError, self.simple_client, tls=False, tlsCRLFile=CRL_PEM) + self.assertRaises( + ConfigurationError, self.simple_client, tls=False, tlsAllowInvalidCertificates=False + ) + self.assertRaises( + ConfigurationError, self.simple_client, tls=False, tlsAllowInvalidHostnames=False + ) + self.assertRaises( + ConfigurationError, self.simple_client, tls=False, tlsDisableOCSPEndpointCheck=False + ) + + @unittest.skipUnless(_HAVE_PYOPENSSL, "PyOpenSSL is not available.") + def test_use_pyopenssl_when_available(self): + self.assertTrue(HAVE_PYSSL) + + @unittest.skipUnless(_HAVE_PYOPENSSL, "Cannot test without PyOpenSSL") + def test_load_trusted_ca_certs(self): + trusted_ca_certs = _load_trusted_ca_certs(CA_BUNDLE_PEM) + self.assertEqual(2, len(trusted_ca_certs)) + + +class TestSSL(AsyncIntegrationTest): + saved_port: int + + async def assertClientWorks(self, client): + coll = client.pymongo_test.ssl_test.with_options( + write_concern=WriteConcern(w=async_client_context.w) + ) + await coll.drop() + await coll.insert_one({"ssl": True}) + self.assertTrue((await coll.find_one())["ssl"]) + await coll.drop() + + @unittest.skipUnless(HAVE_SSL, "The ssl module is not available.") + async def asyncSetUp(self): + await super().asyncSetUp() + # MongoClient should connect to the primary by default. 
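+        # Patch the class-level default port so clients created with bare
+        # hostnames in these tests reach the test deployment; asyncTearDown
+        # restores it.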
+ self.saved_port = AsyncMongoClient.PORT + AsyncMongoClient.PORT = await async_client_context.port + + async def asyncTearDown(self): + AsyncMongoClient.PORT = self.saved_port + + @async_client_context.require_tls + async def test_simple_ssl(self): + if "PyPy" in sys.version: + self.skipTest("Test is flaky on PyPy") + # Expects the server to be running with ssl and with + # no --sslPEMKeyFile or with --sslWeakCertificateValidation + await self.assertClientWorks(self.client) + + @async_client_context.require_tlsCertificateKeyFile + @async_client_context.require_no_api_version + @ignore_deprecations + async def test_tlsCertificateKeyFilePassword(self): + # Expects the server to be running with server.pem and ca.pem + # + # --sslPEMKeyFile=/path/to/pymongo/test/certificates/server.pem + # --sslCAFile=/path/to/pymongo/test/certificates/ca.pem + if not hasattr(ssl, "SSLContext") and not HAVE_PYSSL: + self.assertRaises( + ConfigurationError, + self.simple_client, + "localhost", + ssl=True, + tlsCertificateKeyFile=CLIENT_ENCRYPTED_PEM, + tlsCertificateKeyFilePassword="qwerty", + tlsCAFile=CA_PEM, + serverSelectionTimeoutMS=1000, + ) + else: + await connected( + self.simple_client( + "localhost", + ssl=True, + tlsCertificateKeyFile=CLIENT_ENCRYPTED_PEM, + tlsCertificateKeyFilePassword="qwerty", + tlsCAFile=CA_PEM, + serverSelectionTimeoutMS=5000, + **self.credentials, # type: ignore[arg-type] + ) + ) + + uri_fmt = ( + "mongodb://localhost/?ssl=true" + "&tlsCertificateKeyFile=%s&tlsCertificateKeyFilePassword=qwerty" + "&tlsCAFile=%s&serverSelectionTimeoutMS=5000" + ) + await connected( + self.simple_client(uri_fmt % (CLIENT_ENCRYPTED_PEM, CA_PEM), **self.credentials) # type: ignore[arg-type] + ) + + @async_client_context.require_tlsCertificateKeyFile + @async_client_context.require_no_auth + @ignore_deprecations + async def test_cert_ssl_implicitly_set(self): + # Expects the server to be running with server.pem and ca.pem + # + # --sslPEMKeyFile=/path/to/pymongo/test/certificates/server.pem + # --sslCAFile=/path/to/pymongo/test/certificates/ca.pem + # + + # test that setting tlsCertificateKeyFile causes ssl to be set to True + client = self.simple_client( + await async_client_context.host, + await async_client_context.port, + tlsAllowInvalidCertificates=True, + tlsCertificateKeyFile=CLIENT_PEM, + ) + response = await client.admin.command(HelloCompat.LEGACY_CMD) + if "setName" in response: + client = self.simple_client( + await async_client_context.pair, + replicaSet=response["setName"], + w=len(response["hosts"]), + tlsAllowInvalidCertificates=True, + tlsCertificateKeyFile=CLIENT_PEM, + ) + + await self.assertClientWorks(client) + + @async_client_context.require_tlsCertificateKeyFile + @async_client_context.require_no_auth + @ignore_deprecations + async def test_cert_ssl_validation(self): + # Expects the server to be running with server.pem and ca.pem + # + # --sslPEMKeyFile=/path/to/pymongo/test/certificates/server.pem + # --sslCAFile=/path/to/pymongo/test/certificates/ca.pem + # + client = self.simple_client( + "localhost", + ssl=True, + tlsCertificateKeyFile=CLIENT_PEM, + tlsAllowInvalidCertificates=False, + tlsCAFile=CA_PEM, + ) + response = await client.admin.command(HelloCompat.LEGACY_CMD) + if "setName" in response: + if response["primary"].split(":")[0] != "localhost": + raise SkipTest( + "No hosts in the replicaset for 'localhost'. 
" + "Cannot validate hostname in the certificate" + ) + + client = self.simple_client( + "localhost", + replicaSet=response["setName"], + w=len(response["hosts"]), + ssl=True, + tlsCertificateKeyFile=CLIENT_PEM, + tlsAllowInvalidCertificates=False, + tlsCAFile=CA_PEM, + ) + + await self.assertClientWorks(client) + + if HAVE_IPADDRESS: + client = self.simple_client( + "127.0.0.1", + ssl=True, + tlsCertificateKeyFile=CLIENT_PEM, + tlsAllowInvalidCertificates=False, + tlsCAFile=CA_PEM, + ) + await self.assertClientWorks(client) + + @async_client_context.require_tlsCertificateKeyFile + @async_client_context.require_no_auth + @ignore_deprecations + async def test_cert_ssl_uri_support(self): + # Expects the server to be running with server.pem and ca.pem + # + # --sslPEMKeyFile=/path/to/pymongo/test/certificates/server.pem + # --sslCAFile=/path/to/pymongo/test/certificates/ca.pem + # + uri_fmt = ( + "mongodb://localhost/?ssl=true&tlsCertificateKeyFile=%s&tlsAllowInvalidCertificates" + "=%s&tlsCAFile=%s&tlsAllowInvalidHostnames=false" + ) + client = self.simple_client(uri_fmt % (CLIENT_PEM, "true", CA_PEM)) + await self.assertClientWorks(client) + + @unittest.skipIf( + "PyPy" in sys.version and not _IS_SYNC, + "https://github.com/pypy/pypy/issues/5131 flaky on async PyPy due to SSL EOF", + ) + @async_client_context.require_tlsCertificateKeyFile + @async_client_context.require_server_resolvable + @async_client_context.require_no_api_version + @ignore_deprecations + async def test_cert_ssl_validation_hostname_matching(self): + # Expects the server to be running with server.pem and ca.pem + # + # --sslPEMKeyFile=/path/to/pymongo/test/certificates/server.pem + # --sslCAFile=/path/to/pymongo/test/certificates/ca.pem + ctx = get_ssl_context(None, None, None, None, True, True, False, _IS_SYNC) + self.assertFalse(ctx.check_hostname) + ctx = get_ssl_context(None, None, None, None, True, False, False, _IS_SYNC) + self.assertFalse(ctx.check_hostname) + ctx = get_ssl_context(None, None, None, None, False, True, False, _IS_SYNC) + self.assertFalse(ctx.check_hostname) + ctx = get_ssl_context(None, None, None, None, False, False, False, _IS_SYNC) + self.assertTrue(ctx.check_hostname) + + response = await self.client.admin.command(HelloCompat.LEGACY_CMD) + + with self.assertRaises(ConnectionFailure) as cm: + await connected( + self.simple_client( + "server", + ssl=True, + tlsCertificateKeyFile=CLIENT_PEM, + tlsAllowInvalidCertificates=False, + tlsCAFile=CA_PEM, + serverSelectionTimeoutMS=500, + **self.credentials, # type: ignore[arg-type] + ) + ) + # PYTHON-5414 Check for "module service_identity has no attribute SICertificateError" + self.assertNotIn("has no attribute", str(cm.exception)) + + await connected( + self.simple_client( + "server", + ssl=True, + tlsCertificateKeyFile=CLIENT_PEM, + tlsAllowInvalidCertificates=False, + tlsCAFile=CA_PEM, + tlsAllowInvalidHostnames=True, + serverSelectionTimeoutMS=500, + **self.credentials, # type: ignore[arg-type] + ) + ) + + if "setName" in response: + with self.assertRaises(ConnectionFailure): + await connected( + self.simple_client( + "server", + replicaSet=response["setName"], + ssl=True, + tlsCertificateKeyFile=CLIENT_PEM, + tlsAllowInvalidCertificates=False, + tlsCAFile=CA_PEM, + serverSelectionTimeoutMS=500, + **self.credentials, # type: ignore[arg-type] + ) + ) + + await connected( + self.simple_client( + "server", + replicaSet=response["setName"], + ssl=True, + tlsCertificateKeyFile=CLIENT_PEM, + tlsAllowInvalidCertificates=False, + 
tlsCAFile=CA_PEM, + tlsAllowInvalidHostnames=True, + serverSelectionTimeoutMS=500, + **self.credentials, # type: ignore[arg-type] + ) + ) + + @async_client_context.require_tlsCertificateKeyFile + @async_client_context.require_sync + @async_client_context.require_no_api_version + @ignore_deprecations + async def test_tlsCRLFile_support(self): + if not hasattr(ssl, "VERIFY_CRL_CHECK_LEAF") or HAVE_PYSSL: + self.assertRaises( + ConfigurationError, + self.simple_client, + "localhost", + ssl=True, + tlsCAFile=CA_PEM, + tlsCRLFile=CRL_PEM, + serverSelectionTimeoutMS=1000, + ) + else: + await connected( + self.simple_client( + "localhost", + ssl=True, + tlsCAFile=CA_PEM, + serverSelectionTimeoutMS=1000, + **self.credentials, # type: ignore[arg-type] + ) + ) + + with self.assertRaises(ConnectionFailure): + await connected( + self.simple_client( + "localhost", + ssl=True, + tlsCAFile=CA_PEM, + tlsCRLFile=CRL_PEM, + serverSelectionTimeoutMS=1000, + **self.credentials, # type: ignore[arg-type] + ) + ) + + uri_fmt = "mongodb://localhost/?ssl=true&tlsCAFile=%s&serverSelectionTimeoutMS=1000" + await connected(self.simple_client(uri_fmt % (CA_PEM,), **self.credentials)) # type: ignore + + uri_fmt = ( + "mongodb://localhost/?ssl=true&tlsCRLFile=%s" + "&tlsCAFile=%s&serverSelectionTimeoutMS=1000" + ) + with self.assertRaises(ConnectionFailure): + await connected( + self.simple_client(uri_fmt % (CRL_PEM, CA_PEM), **self.credentials) # type: ignore[arg-type] + ) + + @unittest.skipIf( + "PyPy" in sys.version and not _IS_SYNC, + "https://github.com/pypy/pypy/issues/5131 flaky on async PyPy due to SSL EOF", + ) + @async_client_context.require_tlsCertificateKeyFile + @async_client_context.require_server_resolvable + @async_client_context.require_no_api_version + @ignore_deprecations + async def test_validation_with_system_ca_certs(self): + # Expects the server to be running with server.pem and ca.pem. + # + # --sslPEMKeyFile=/path/to/pymongo/test/certificates/server.pem + # --sslCAFile=/path/to/pymongo/test/certificates/ca.pem + # --sslWeakCertificateValidation + # + self.patch_system_certs(CA_PEM) + with self.assertRaises(ConnectionFailure): + # Server cert is verified but hostname matching fails + await connected( + self.simple_client( + "server", ssl=True, serverSelectionTimeoutMS=1000, **self.credentials + ) # type: ignore[arg-type] + ) + + # Server cert is verified. Disable hostname matching. + await connected( + self.simple_client( + "server", + ssl=True, + tlsAllowInvalidHostnames=True, + serverSelectionTimeoutMS=1000, + **self.credentials, # type: ignore[arg-type] + ) + ) + + # Server cert and hostname are verified. + await connected( + self.simple_client( + "localhost", ssl=True, serverSelectionTimeoutMS=1000, **self.credentials + ) # type: ignore[arg-type] + ) + + # Server cert and hostname are verified. + await connected( + self.simple_client( + "mongodb://localhost/?ssl=true&serverSelectionTimeoutMS=1000", + **self.credentials, # type: ignore[arg-type] + ) + ) + + def test_system_certs_config_error(self): + ctx = get_ssl_context(None, None, None, None, True, True, False, _IS_SYNC) + if (sys.platform != "win32" and hasattr(ctx, "set_default_verify_paths")) or hasattr( + ctx, "load_default_certs" + ): + raise SkipTest("Can't test when system CA certificates are loadable.") + + have_certifi = ssl_support.HAVE_CERTIFI + have_wincertstore = ssl_support.HAVE_WINCERTSTORE + # Force the test regardless of environment. 
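+        # Monkeypatch the module-level flags and restore them in the finally
+        # block so later tests still see the real environment.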
+ ssl_support.HAVE_CERTIFI = False + ssl_support.HAVE_WINCERTSTORE = False + try: + with self.assertRaises(ConfigurationError): + self.simple_client("mongodb://localhost/?ssl=true") + finally: + ssl_support.HAVE_CERTIFI = have_certifi + ssl_support.HAVE_WINCERTSTORE = have_wincertstore + + def test_certifi_support(self): + if hasattr(ssl, "SSLContext"): + # SSLSocket doesn't provide ca_certs attribute on pythons + # with SSLContext and SSLContext provides no information + # about ca_certs. + raise SkipTest("Can't test when SSLContext available.") + if not ssl_support.HAVE_CERTIFI: + raise SkipTest("Need certifi to test certifi support.") + + have_wincertstore = ssl_support.HAVE_WINCERTSTORE + # Force the test on Windows, regardless of environment. + ssl_support.HAVE_WINCERTSTORE = False + try: + ctx = get_ssl_context(None, None, CA_PEM, None, False, False, False, _IS_SYNC) + ssl_sock = ctx.wrap_socket(socket.socket()) + self.assertEqual(ssl_sock.ca_certs, CA_PEM) + + ctx = get_ssl_context(None, None, None, None, False, False, False, _IS_SYNC) + ssl_sock = ctx.wrap_socket(socket.socket()) + self.assertEqual(ssl_sock.ca_certs, ssl_support.certifi.where()) + finally: + ssl_support.HAVE_WINCERTSTORE = have_wincertstore + + def test_wincertstore(self): + if sys.platform != "win32": + raise SkipTest("Only valid on Windows.") + if hasattr(ssl, "SSLContext"): + # SSLSocket doesn't provide ca_certs attribute on pythons + # with SSLContext and SSLContext provides no information + # about ca_certs. + raise SkipTest("Can't test when SSLContext available.") + if not ssl_support.HAVE_WINCERTSTORE: + raise SkipTest("Need wincertstore to test wincertstore.") + + ctx = get_ssl_context(None, None, CA_PEM, None, False, False, False, _IS_SYNC) + ssl_sock = ctx.wrap_socket(socket.socket()) + self.assertEqual(ssl_sock.ca_certs, CA_PEM) + + ctx = get_ssl_context(None, None, None, None, False, False, False, _IS_SYNC) + ssl_sock = ctx.wrap_socket(socket.socket()) + self.assertEqual(ssl_sock.ca_certs, ssl_support._WINCERTS.name) + + @async_client_context.require_auth + @async_client_context.require_tlsCertificateKeyFile + @async_client_context.require_no_api_version + @ignore_deprecations + async def test_mongodb_x509_auth(self): + host, port = await async_client_context.host, await async_client_context.port + self.addAsyncCleanup(remove_all_users, async_client_context.client["$external"]) + + # Give x509 user all necessary privileges. + await async_client_context.create_user( + "$external", + MONGODB_X509_USERNAME, + roles=[ + {"role": "readWriteAnyDatabase", "db": "admin"}, + {"role": "userAdminAnyDatabase", "db": "admin"}, + ], + ) + + noauth = self.simple_client( + await async_client_context.pair, + ssl=True, + tlsAllowInvalidCertificates=True, + tlsCertificateKeyFile=CLIENT_PEM, + ) + + with self.assertRaises(OperationFailure): + await noauth.pymongo_test.test.find_one() + + listener = EventListener() + auth = self.simple_client( + await async_client_context.pair, + authMechanism="MONGODB-X509", + ssl=True, + tlsAllowInvalidCertificates=True, + tlsCertificateKeyFile=CLIENT_PEM, + event_listeners=[listener], + ) + + # No error + await auth.pymongo_test.test.find_one() + names = listener.started_command_names() + if async_client_context.version.at_least(4, 4, -1): + # Speculative auth skips the authenticate command. 
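+            # (On 4.4+ the initial connection handshake performs the
+            # authentication exchange speculatively, so no separate
+            # "authenticate" command is observed.)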
+ self.assertEqual(names, ["find"]) + else: + self.assertEqual(names, ["authenticate", "find"]) + + uri = "mongodb://%s@%s:%d/?authMechanism=MONGODB-X509" % ( + quote_plus(MONGODB_X509_USERNAME), + host, + port, + ) + client = self.simple_client( + uri, ssl=True, tlsAllowInvalidCertificates=True, tlsCertificateKeyFile=CLIENT_PEM + ) + # No error + await client.pymongo_test.test.find_one() + + uri = "mongodb://%s:%d/?authMechanism=MONGODB-X509" % (host, port) + client = self.simple_client( + uri, ssl=True, tlsAllowInvalidCertificates=True, tlsCertificateKeyFile=CLIENT_PEM + ) + # No error + await client.pymongo_test.test.find_one() + # Auth should fail if username and certificate do not match + uri = "mongodb://%s@%s:%d/?authMechanism=MONGODB-X509" % ( + quote_plus("not the username"), + host, + port, + ) + + bad_client = self.simple_client( + uri, ssl=True, tlsAllowInvalidCertificates=True, tlsCertificateKeyFile=CLIENT_PEM + ) + + with self.assertRaises(OperationFailure): + await bad_client.pymongo_test.test.find_one() + + bad_client = self.simple_client( + await async_client_context.pair, + username="not the username", + authMechanism="MONGODB-X509", + ssl=True, + tlsAllowInvalidCertificates=True, + tlsCertificateKeyFile=CLIENT_PEM, + ) + + with self.assertRaises(OperationFailure): + await bad_client.pymongo_test.test.find_one() + + # Invalid certificate (using CA certificate as client certificate) + uri = "mongodb://%s@%s:%d/?authMechanism=MONGODB-X509" % ( + quote_plus(MONGODB_X509_USERNAME), + host, + port, + ) + try: + await connected( + self.simple_client( + uri, + ssl=True, + tlsAllowInvalidCertificates=True, + tlsCertificateKeyFile=CA_PEM, + serverSelectionTimeoutMS=1000, + ) + ) + except (ConnectionFailure, ConfigurationError): + pass + else: + self.fail("Invalid certificate accepted.") + + @async_client_context.require_tlsCertificateKeyFile + @async_client_context.require_no_api_version + @ignore_deprecations + async def test_connect_with_ca_bundle(self): + def remove(path): + try: + os.remove(path) + except OSError: + pass + + temp_ca_bundle = os.path.join(CERT_PATH, "trusted-ca-bundle.pem") + self.addCleanup(remove, temp_ca_bundle) + # Add the CA cert file to the bundle. + cat_files(temp_ca_bundle, CA_BUNDLE_PEM, CA_PEM) + async with self.simple_client( + "localhost", tls=True, tlsCertificateKeyFile=CLIENT_PEM, tlsCAFile=temp_ca_bundle + ) as client: + self.assertTrue(await client.admin.command("ping")) + + @async_client_context.require_async + @unittest.skipUnless(_HAVE_PYOPENSSL, "PyOpenSSL is not available.") + @unittest.skipUnless(HAVE_SSL, "The ssl module is not available.") + async def test_pyopenssl_ignored_in_async(self): + client = AsyncMongoClient( + "mongodb://localhost:27017?tls=true&tlsAllowInvalidCertificates=true" + ) + await client.admin.command("ping") # command doesn't matter, just needs it to connect + await client.close() + + +if __name__ == "__main__": + unittest.main() diff --git a/test/asynchronous/test_streaming_protocol.py b/test/asynchronous/test_streaming_protocol.py new file mode 100644 index 0000000000..70ec49de80 --- /dev/null +++ b/test/asynchronous/test_streaming_protocol.py @@ -0,0 +1,228 @@ +# Copyright 2020-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Test the streaming protocol."""
+from __future__ import annotations
+
+import sys
+import time
+
+sys.path[0:0] = [""]
+
+from test.asynchronous import AsyncIntegrationTest, async_client_context, unittest
+from test.utils_shared import (
+    HeartbeatEventListener,
+    ServerEventListener,
+    async_wait_until,
+)
+
+from pymongo import monitoring
+from pymongo.hello import HelloCompat
+
+_IS_SYNC = False
+
+
+class TestStreamingProtocol(AsyncIntegrationTest):
+    @async_client_context.require_failCommand_appName
+    async def test_failCommand_streaming(self):
+        listener = ServerEventListener()
+        hb_listener = HeartbeatEventListener()
+        client = await self.async_rs_or_single_client(
+            event_listeners=[listener, hb_listener],
+            heartbeatFrequencyMS=500,
+            appName="failingHeartbeatTest",
+        )
+        # Force a connection.
+        await client.admin.command("ping")
+        address = await client.address
+        listener.reset()
+
+        fail_hello = {
+            "configureFailPoint": "failCommand",
+            "mode": {"times": 4},
+            "data": {
+                "failCommands": [HelloCompat.LEGACY_CMD, "hello"],
+                "closeConnection": False,
+                "errorCode": 10107,
+                "appName": "failingHeartbeatTest",
+            },
+        }
+        async with self.fail_point(fail_hello):
+
+            def _marked_unknown(event):
+                return (
+                    event.server_address == address
+                    and not event.new_description.is_server_type_known
+                )
+
+            def _discovered_node(event):
+                return (
+                    event.server_address == address
+                    and not event.previous_description.is_server_type_known
+                    and event.new_description.is_server_type_known
+                )
+
+            def marked_unknown():
+                return len(listener.matching(_marked_unknown)) >= 1
+
+            def rediscovered():
+                return len(listener.matching(_discovered_node)) >= 1
+
+            # Topology events are not published synchronously.
+            await async_wait_until(marked_unknown, "mark node unknown")
+            await async_wait_until(rediscovered, "rediscover node")
+
+        # Server should be selectable.
+        await client.admin.command("ping")
+
+    @async_client_context.require_failCommand_appName
+    async def test_streaming_rtt(self):
+        listener = ServerEventListener()
+        hb_listener = HeartbeatEventListener()
+        # On Windows, RTT can actually be 0.0 because time.time() only has
+        # 1-15 millisecond resolution. We need to delay the initial hello
+        # to ensure that RTT is never zero.
+        name = "streamingRttTest"
+        delay_hello: dict = {
+            "configureFailPoint": "failCommand",
+            "mode": {"times": 1000},
+            "data": {
+                "failCommands": [HelloCompat.LEGACY_CMD, "hello"],
+                "blockConnection": True,
+                "blockTimeMS": 20,
+                # This can be uncommented after SERVER-49220 is fixed.
+                # 'appName': name,
+            },
+        }
+        async with self.fail_point(delay_hello):
+            client = await self.async_rs_or_single_client(
+                event_listeners=[listener, hb_listener], heartbeatFrequencyMS=500, appName=name
+            )
+            # Force a connection.
+            await client.admin.command("ping")
+            address = await client.address
+
+        delay_hello["data"]["blockTimeMS"] = 500
+        delay_hello["data"]["appName"] = name
+        async with self.fail_point(delay_hello):
+
+            def rtt_exceeds_250_ms():
+                # XXX: Add a public TopologyDescription getter to MongoClient?
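+                # RTT is tracked as an exponentially weighted moving average,
+                # so while hello responses are blocked for 500ms it should
+                # quickly climb past the 250ms threshold checked below.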
+ topology = client._topology + sd = topology.description.server_descriptions()[address] + assert sd.round_trip_time is not None + return sd.round_trip_time > 0.250 + + await async_wait_until(rtt_exceeds_250_ms, "exceed 250ms RTT") + + # Server should be selectable. + await client.admin.command("ping") + + def changed_event(event): + return event.server_address == address and isinstance( + event, monitoring.ServerDescriptionChangedEvent + ) + + # There should only be one event published, for the initial discovery. + events = listener.matching(changed_event) + self.assertEqual(1, len(events)) + self.assertGreater(events[0].new_description.round_trip_time, 0) + + @async_client_context.require_failCommand_appName + async def test_monitor_waits_after_server_check_error(self): + # This test implements: + # https://github.com/mongodb/specifications/blob/master/source/server-discovery-and-monitoring/server-discovery-and-monitoring-tests.md#monitors-sleep-at-least-minheartbeatfreqencyms-between-checks + fail_hello = { + "mode": {"times": 5}, + "data": { + "failCommands": [HelloCompat.LEGACY_CMD, "hello"], + "errorCode": 1234, + "appName": "SDAMMinHeartbeatFrequencyTest", + }, + } + async with self.fail_point(fail_hello): + start = time.time() + client = await self.async_single_client( + appName="SDAMMinHeartbeatFrequencyTest", serverSelectionTimeoutMS=5000 + ) + # Force a connection. + await client.admin.command("ping") + duration = time.time() - start + # Explanation of the expected events: + # 0ms: run configureFailPoint + # 1ms: create MongoClient + # 2ms: failed monitor handshake, 1 + # 502ms: failed monitor handshake, 2 + # 1002ms: failed monitor handshake, 3 + # 1502ms: failed monitor handshake, 4 + # 2002ms: failed monitor handshake, 5 + # 2502ms: monitor handshake succeeds + # 2503ms: run awaitable hello + # 2504ms: application handshake succeeds + # 2505ms: ping command succeeds + self.assertGreaterEqual(duration, 2) + self.assertLessEqual(duration, 4.0) + + @async_client_context.require_failCommand_appName + async def test_heartbeat_awaited_flag(self): + hb_listener = HeartbeatEventListener() + client = await self.async_single_client( + event_listeners=[hb_listener], + heartbeatFrequencyMS=500, + appName="heartbeatEventAwaitedFlag", + ) + # Force a connection. + await client.admin.command("ping") + + def hb_succeeded(event): + return isinstance(event, monitoring.ServerHeartbeatSucceededEvent) + + def hb_failed(event): + return isinstance(event, monitoring.ServerHeartbeatFailedEvent) + + fail_heartbeat = { + "mode": {"times": 2}, + "data": { + "failCommands": [HelloCompat.LEGACY_CMD, "hello"], + "closeConnection": True, + "appName": "heartbeatEventAwaitedFlag", + }, + } + async with self.fail_point(fail_heartbeat): + await async_wait_until( + lambda: hb_listener.matching(hb_failed), "published failed event" + ) + # Reconnect. + await client.admin.command("ping") + + hb_succeeded_events = hb_listener.matching(hb_succeeded) + hb_failed_events = hb_listener.matching(hb_failed) + self.assertFalse(hb_succeeded_events[0].awaited) + self.assertTrue(hb_failed_events[0].awaited) + # Depending on thread scheduling, the failed heartbeat could occur on + # the second or third check. 
+ events = [type(e) for e in hb_listener.events[:4]] + if events == [ + monitoring.ServerHeartbeatStartedEvent, + monitoring.ServerHeartbeatSucceededEvent, + monitoring.ServerHeartbeatStartedEvent, + monitoring.ServerHeartbeatFailedEvent, + ]: + self.assertFalse(hb_succeeded_events[1].awaited) + else: + self.assertTrue(hb_succeeded_events[1].awaited) + + +if __name__ == "__main__": + unittest.main() diff --git a/test/asynchronous/test_transactions.py b/test/asynchronous/test_transactions.py index d11d0a9776..478710362e 100644 --- a/test/asynchronous/test_transactions.py +++ b/test/asynchronous/test_transactions.py @@ -20,11 +20,13 @@ from test.asynchronous.utils_spec_runner import AsyncSpecRunner from gridfs.asynchronous.grid_file import AsyncGridFS, AsyncGridFSBucket +from pymongo.asynchronous.pool import PoolState +from pymongo.server_selectors import writable_server_selector sys.path[0:0] = [""] from test.asynchronous import AsyncIntegrationTest, async_client_context, unittest -from test.utils import ( +from test.utils_shared import ( OvertCommandListener, async_wait_until, ) @@ -32,13 +34,13 @@ from bson import encode from bson.raw_bson import RawBSONDocument -from pymongo import WriteConcern +from pymongo import WriteConcern, _csot from pymongo.asynchronous import client_session from pymongo.asynchronous.client_session import TransactionOptions from pymongo.asynchronous.command_cursor import AsyncCommandCursor from pymongo.asynchronous.cursor import AsyncCursor -from pymongo.asynchronous.helpers import anext from pymongo.errors import ( + AutoReconnect, CollectionInvalid, ConfigurationError, ConnectionFailure, @@ -70,8 +72,6 @@ def maybe_skip_scenario(self, test): class TestTransactions(AsyncTransactionsBase): - RUN_ON_SERVERLESS = True - @async_client_context.require_transactions def test_transaction_options_validation(self): default_options = TransactionOptions() @@ -295,6 +295,14 @@ async def gridfs_open_upload_stream(*args, **kwargs): "new-name", ), ), + ( + bucket.rename_by_name, + ( + "new-name", + "new-name2", + ), + ), + (bucket.delete_by_name, ("new-name2",)), ] async with client.start_session() as s, await s.start_transaction(): @@ -386,6 +394,22 @@ async def find_raw_batches(*args, **kwargs): if isinstance(res, (AsyncCommandCursor, AsyncCursor)): await res.to_list() + @async_client_context.require_transactions + async def test_transaction_pool_cleared_error_labelled_transient(self): + c = await self.async_single_client() + + with self.assertRaises(AutoReconnect) as context: + async with c.start_session() as session: + async with await session.start_transaction(): + server = await c._select_server(writable_server_selector, session, "test") + # Pause the server's pool, causing it to fail connection checkout. + server.pool.state = PoolState.PAUSED + async with c._checkout(server, session): + pass + + # Verify that the TransientTransactionError label is present in the error. 
+ self.assertTrue(context.exception.has_error_label("TransientTransactionError")) + class PatchSessionTimeout: """Patches the client_session's with_transaction timeout for testing.""" @@ -410,15 +434,10 @@ async def asyncSetUp(self) -> None: for address in async_client_context.mongoses: self.mongos_clients.append(await self.async_single_client("{}:{}".format(*address))) - async def _set_fail_point(self, client, command_args): - cmd = {"configureFailPoint": "failCommand"} - cmd.update(command_args) - await client.admin.command(cmd) - async def set_fail_point(self, command_args): clients = self.mongos_clients if self.mongos_clients else [self.client] for client in clients: - await self._set_fail_point(client, command_args) + await self.configure_fail_point(client, command_args) @async_client_context.require_transactions async def test_callback_raises_custom_error(self): @@ -583,5 +602,29 @@ async def callback(session): self.assertFalse(s.in_transaction) +class TestOptionsInsideTransactionProse(AsyncTransactionsBase): + @async_client_context.require_transactions + @async_client_context.require_no_standalone + async def test_case_1(self): + # Write concern not inherited from collection object inside transaction + # Create a MongoClient running against a configured sharded/replica set/load balanced cluster. + client = async_client_context.client + coll = client[self.db.name].test + await coll.delete_many({}) + # Start a new session on the client. + async with client.start_session() as s: + # Start a transaction on the session. + await s.start_transaction() + # Instantiate a collection object in the driver with a default write concern of { w: 0 }. + inner_coll = coll.with_options(write_concern=WriteConcern(w=0)) + # Insert the document { n: 1 } on the instantiated collection. + result = await inner_coll.insert_one({"n": 1}, session=s) + # Commit the transaction. + await s.commit_transaction() + # End the session. + # Ensure the document was inserted and no error was thrown from the transaction. + assert result.inserted_id is not None + + if __name__ == "__main__": unittest.main() diff --git a/test/asynchronous/test_transactions_unified.py b/test/asynchronous/test_transactions_unified.py new file mode 100644 index 0000000000..8e5b1ae181 --- /dev/null +++ b/test/asynchronous/test_transactions_unified.py @@ -0,0 +1,55 @@ +# Copyright 2021-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Test the Transactions unified spec tests.""" +from __future__ import annotations + +import os +import sys +from pathlib import Path + +sys.path[0:0] = [""] + +from test import client_context, unittest +from test.asynchronous.unified_format import generate_test_classes + +_IS_SYNC = False + + +def setUpModule(): + pass + + +# Location of JSON test specifications. +if _IS_SYNC: + TEST_PATH = os.path.join(Path(__file__).resolve().parent, "transactions/unified") +else: + TEST_PATH = os.path.join(Path(__file__).resolve().parent.parent, "transactions/unified") + +# Generate unified tests. 
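+# generate_test_classes builds a TestCase subclass for every JSON spec file
+# found under TEST_PATH and injects the classes into this module's namespace
+# so unittest discovery picks them up.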
+globals().update(generate_test_classes(TEST_PATH, module=__name__)) + +# Location of JSON test specifications for transactions-convenient-api. +if _IS_SYNC: + TEST_PATH = os.path.join(Path(__file__).resolve().parent, "transactions-convenient-api/unified") +else: + TEST_PATH = os.path.join( + Path(__file__).resolve().parent.parent, "transactions-convenient-api/unified" + ) + +# Generate unified tests. +globals().update(generate_test_classes(TEST_PATH, module=__name__)) + +if __name__ == "__main__": + unittest.main() diff --git a/test/asynchronous/test_unified_format.py b/test/asynchronous/test_unified_format.py new file mode 100644 index 0000000000..58a1ea3326 --- /dev/null +++ b/test/asynchronous/test_unified_format.py @@ -0,0 +1,97 @@ +# Copyright 2020-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import annotations + +import os +import sys +from pathlib import Path +from typing import Any + +sys.path[0:0] = [""] + +from test import UnitTest, unittest +from test.asynchronous.unified_format import MatchEvaluatorUtil, generate_test_classes + +from bson import ObjectId + +_IS_SYNC = False + +# Location of JSON test specifications. +if _IS_SYNC: + TEST_PATH = os.path.join(Path(__file__).resolve().parent, "unified-test-format") +else: + TEST_PATH = os.path.join(Path(__file__).resolve().parent.parent, "unified-test-format") + + +globals().update( + generate_test_classes( + os.path.join(TEST_PATH, "valid-pass"), + module=__name__, + class_name_prefix="UnifiedTestFormat", + expected_failures=[ + "Client side error in command starting transaction", # PYTHON-1894 + ], + ) +) + + +globals().update( + generate_test_classes( + os.path.join(TEST_PATH, "valid-fail"), + module=__name__, + class_name_prefix="UnifiedTestFormat", + bypass_test_generation_errors=True, + expected_failures=[ + ".*", # All tests expected to fail + ], + ) +) + + +class TestMatchEvaluatorUtil(UnitTest): + def setUp(self): + self.match_evaluator = MatchEvaluatorUtil(self) + + def test_unsetOrMatches(self): + spec: dict[str, Any] = {"$$unsetOrMatches": {"y": {"$$unsetOrMatches": 2}}} + for actual in [{}, {"y": 2}, None]: + self.match_evaluator.match_result(spec, actual) + + spec = {"x": {"$$unsetOrMatches": {"y": {"$$unsetOrMatches": 2}}}} + for actual in [{}, {"x": {}}, {"x": {"y": 2}}]: + self.match_evaluator.match_result(spec, actual) + + spec = {"y": {"$$unsetOrMatches": {"$$exists": True}}} + self.match_evaluator.match_result(spec, {}) + self.match_evaluator.match_result(spec, {"y": 2}) + self.match_evaluator.match_result(spec, {"x": 1}) + self.match_evaluator.match_result(spec, {"y": {}}) + + def test_type(self): + self.match_evaluator.match_result( + { + "operationType": "insert", + "ns": {"db": "change-stream-tests", "coll": "test"}, + "fullDocument": {"_id": {"$$type": "objectId"}, "x": 1}, + }, + { + "operationType": "insert", + "fullDocument": {"_id": ObjectId("5fc93511ac93941052098f0c"), "x": 1}, + "ns": {"db": "change-stream-tests", "coll": "test"}, + }, + ) + + +if __name__ == 
"__main__": + unittest.main() diff --git a/test/asynchronous/test_versioned_api_integration.py b/test/asynchronous/test_versioned_api_integration.py new file mode 100644 index 0000000000..0f6b544465 --- /dev/null +++ b/test/asynchronous/test_versioned_api_integration.py @@ -0,0 +1,85 @@ +# Copyright 2020-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import annotations + +import os +import sys +from pathlib import Path +from test.asynchronous.unified_format import generate_test_classes + +sys.path[0:0] = [""] + +from test.asynchronous import AsyncIntegrationTest, async_client_context, unittest +from test.utils_shared import OvertCommandListener + +from pymongo.server_api import ServerApi + +_IS_SYNC = False + +# Location of JSON test specifications. +if _IS_SYNC: + TEST_PATH = os.path.join(Path(__file__).resolve().parent, "versioned-api") +else: + TEST_PATH = os.path.join(Path(__file__).resolve().parent.parent, "versioned-api") + + +# Generate unified tests. +globals().update(generate_test_classes(TEST_PATH, module=__name__)) + + +class TestServerApiIntegration(AsyncIntegrationTest): + RUN_ON_LOAD_BALANCER = True + + def assertServerApi(self, event): + self.assertIn("apiVersion", event.command) + self.assertEqual(event.command["apiVersion"], "1") + + def assertServerApiInAllCommands(self, events): + for event in events: + self.assertServerApi(event) + + @async_client_context.require_version_min(4, 7) + async def test_command_options(self): + listener = OvertCommandListener() + client = await self.async_rs_or_single_client( + server_api=ServerApi("1"), event_listeners=[listener] + ) + coll = client.test.test + await coll.insert_many([{} for _ in range(100)]) + self.addAsyncCleanup(coll.delete_many, {}) + await coll.find(batch_size=25).to_list() + await client.admin.command("ping") + self.assertServerApiInAllCommands(listener.started_events) + + @async_client_context.require_version_min(4, 7) + @async_client_context.require_transactions + async def test_command_options_txn(self): + listener = OvertCommandListener() + client = await self.async_rs_or_single_client( + server_api=ServerApi("1"), event_listeners=[listener] + ) + coll = client.test.test + await coll.insert_many([{} for _ in range(100)]) + self.addAsyncCleanup(coll.delete_many, {}) + + listener.reset() + async with client.start_session() as s, await s.start_transaction(): + await coll.insert_many([{} for _ in range(100)], session=s) + await coll.find(batch_size=25, session=s).to_list() + await client.test.command("find", "test", session=s) + self.assertServerApiInAllCommands(listener.started_events) + + +if __name__ == "__main__": + unittest.main() diff --git a/test/asynchronous/unified_format.py b/test/asynchronous/unified_format.py index 52d964eb3e..0c9e8c10c8 100644 --- a/test/asynchronous/unified_format.py +++ b/test/asynchronous/unified_format.py @@ -27,18 +27,20 @@ import sys import time import traceback -from asyncio import iscoroutinefunction from collections import defaultdict +from 
inspect import iscoroutinefunction from test.asynchronous import ( AsyncIntegrationTest, async_client_context, client_knobs, unittest, ) +from test.asynchronous.utils import async_get_pool, flaky +from test.asynchronous.utils_spec_runner import SpecRunnerTask +from test.helpers_shared import ALL_KMS_PROVIDERS, DEFAULT_KMS_TLS from test.unified_format_shared import ( KMS_TLS_OPTS, PLACEHOLDER_MAP, - SKIP_CSOT_TESTS, EventListenerUtil, MatchEvaluatorUtil, coerce_result, @@ -48,8 +50,7 @@ parse_collection_or_database_options, with_metaclass, ) -from test.utils import ( - async_get_pool, +from test.utils_shared import ( async_wait_until, camel_to_snake, camel_to_snake_args, @@ -58,15 +59,17 @@ snake_to_camel, wait_until, ) -from test.utils_spec_runner import SpecRunnerThread from test.version import Version from typing import Any, Dict, List, Mapping, Optional +import pytest + import pymongo from bson import SON, json_util from bson.codec_options import DEFAULT_CODEC_OPTIONS from bson.objectid import ObjectId -from gridfs import AsyncGridFSBucket, GridOut +from gridfs import AsyncGridFSBucket, GridOut, NoFile +from gridfs.errors import CorruptGridFile from pymongo import ASCENDING, AsyncMongoClient, CursorType, _csot from pymongo.asynchronous.change_stream import AsyncChangeStream from pymongo.asynchronous.client_session import AsyncClientSession, TransactionOptions, _TxnState @@ -74,8 +77,8 @@ from pymongo.asynchronous.command_cursor import AsyncCommandCursor from pymongo.asynchronous.database import AsyncDatabase from pymongo.asynchronous.encryption import AsyncClientEncryption -from pymongo.asynchronous.helpers import anext -from pymongo.encryption_options import _HAVE_PYMONGOCRYPT +from pymongo.driver_info import DriverInfo +from pymongo.encryption_options import _HAVE_PYMONGOCRYPT, AutoEncryptionOpts from pymongo.errors import ( AutoReconnect, BulkWriteError, @@ -131,14 +134,6 @@ async def is_run_on_requirement_satisfied(requirement): if req_max_server_version: max_version_satisfied = Version.from_string(req_max_server_version) >= server_version - serverless = requirement.get("serverless") - if serverless == "require": - serverless_satisfied = async_client_context.serverless - elif serverless == "forbid": - serverless_satisfied = not async_client_context.serverless - else: # unset or "allow" - serverless_satisfied = True - params_satisfied = True params = requirement.get("serverParameters") if params: @@ -161,14 +156,23 @@ async def is_run_on_requirement_satisfied(requirement): csfle_satisfied = True req_csfle = requirement.get("csfle") if req_csfle is True: - min_version_satisfied = Version.from_string("4.2") <= server_version + # Don't overwrite unsatisfied minimum version requirements. 
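+            # ("csfle": true implies at least server 4.2, but a stricter
+            # minServerVersion in the same requirement must still win.)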
+ if min_version_satisfied: + min_version_satisfied = Version.from_string("4.2") <= server_version csfle_satisfied = _HAVE_PYMONGOCRYPT and min_version_satisfied + elif isinstance(req_csfle, dict) and "minLibmongocryptVersion" in req_csfle: + csfle_satisfied = False + req_version = req_csfle["minLibmongocryptVersion"] + if _HAVE_PYMONGOCRYPT: + from pymongocrypt import libmongocrypt_version + + if Version.from_string(libmongocrypt_version()) >= Version.from_string(req_version): + csfle_satisfied = True return ( topology_satisfied and min_version_satisfied and max_version_satisfied - and serverless_satisfied and params_satisfied and auth_satisfied and csfle_satisfied @@ -222,7 +226,6 @@ def __init__(self, test_class): self._listeners: Dict[str, EventListenerUtil] = {} self._session_lsids: Dict[str, Mapping[str, Any]] = {} self.test: UnifiedSpecTestMixinV1 = test_class - self._cluster_time: Mapping[str, Any] = {} def __contains__(self, item): return item in self._entities @@ -251,6 +254,10 @@ def _handle_placeholders(self, spec: dict, current: dict, path: str) -> Any: raise ValueError(f"Could not find a placeholder value for {path}") return PLACEHOLDER_MAP[path] + # Distinguish between temp and non-temp aws credentials. + if path.endswith("/kmsProviders/aws") and "sessionToken" in current: + path = path.replace("aws", "aws_temp") + for key in list(current): value = current[key] if isinstance(value, dict): @@ -268,6 +275,21 @@ async def _create_entity(self, entity_spec, uri=None): kwargs: dict = {} observe_events = spec.get("observeEvents", []) + if "autoEncryptOpts" in spec: + auto_encrypt_opts = spec["autoEncryptOpts"].copy() + auto_encrypt_kwargs: dict = dict(kms_tls_options=DEFAULT_KMS_TLS) + kms_providers = auto_encrypt_opts.pop("kmsProviders", ALL_KMS_PROVIDERS.copy()) + key_vault_namespace = auto_encrypt_opts.pop("keyVaultNamespace") + extra_opts = auto_encrypt_opts.pop("extraOptions", {}) + for key, value in extra_opts.items(): + auto_encrypt_kwargs[camel_to_snake(key)] = value + for key, value in auto_encrypt_opts.items(): + auto_encrypt_kwargs[camel_to_snake(key)] = value + auto_encryption_opts = AutoEncryptionOpts( + kms_providers, key_vault_namespace, **auto_encrypt_kwargs + ) + kwargs["auto_encryption_opts"] = auto_encryption_opts + # The unified tests use topologyOpeningEvent, we use topologyOpenedEvent for i in range(len(observe_events)): if "topologyOpeningEvent" == observe_events[i]: @@ -285,7 +307,7 @@ async def _create_entity(self, entity_spec, uri=None): self._listeners[spec["id"]] = listener kwargs["event_listeners"] = [listener] if spec.get("useMultipleMongoses"): - if async_client_context.load_balancer or async_client_context.serverless: + if async_client_context.load_balancer: kwargs["h"] = async_client_context.MULTI_MONGOS_LB_URI elif async_client_context.is_mongos: kwargs["h"] = async_client_context.mongos_seeds() @@ -304,6 +326,7 @@ async def _create_entity(self, entity_spec, uri=None): if uri: kwargs["h"] = uri client = await self.test.async_rs_or_single_client(**kwargs) + await client.aconnect() self[spec["id"]] = client return elif entity_type == "database": @@ -378,12 +401,14 @@ async def drop(self: AsyncGridFSBucket, *args: Any, **kwargs: Any) -> None: opts["key_vault_client"], DEFAULT_CODEC_OPTIONS, opts.get("kms_tls_options", kms_tls_options), + opts.get("key_expiration_ms"), ) return elif entity_type == "thread": name = spec["id"] - thread = SpecRunnerThread(name) - thread.start() + thread = SpecRunnerTask(name) + await thread.start() + 
self.test.addAsyncCleanup(thread.join, 5) self[name] = thread return @@ -419,13 +444,11 @@ def get_lsid_for_session(self, session_name): # session has been closed. return self._session_lsids[session_name] - async def advance_cluster_times(self) -> None: + async def advance_cluster_times(self, cluster_time) -> None: """Manually synchronize entities when desired""" - if not self._cluster_time: - self._cluster_time = (await self.test.client.admin.command("ping")).get("$clusterTime") for entity in self._entities.values(): - if isinstance(entity, AsyncClientSession) and self._cluster_time: - entity.advance_cluster_time(self._cluster_time) + if isinstance(entity, AsyncClientSession) and cluster_time: + entity.advance_cluster_time(cluster_time) class UnifiedSpecTestMixinV1(AsyncIntegrationTest): @@ -438,9 +461,8 @@ class UnifiedSpecTestMixinV1(AsyncIntegrationTest): a class attribute ``TEST_SPEC``. """ - SCHEMA_VERSION = Version.from_string("1.21") + SCHEMA_VERSION = Version.from_string("1.25") RUN_ON_LOAD_BALANCER = True - RUN_ON_SERVERLESS = True TEST_SPEC: Any TEST_PATH = "" # This gets filled in by generate_test_classes mongos_clients: list[AsyncMongoClient] = [] @@ -471,6 +493,13 @@ async def insert_initial_data(self, initial_data): wc = WriteConcern(w="majority") else: wc = WriteConcern(w=1) + + # Remove any encryption collections associated with the collection. + collections = await db.list_collection_names() + for collection in collections: + if collection in [f"enxcol_.{coll_name}.esc", f"enxcol_.{coll_name}.ecoc"]: + await db.drop_collection(collection) + if documents: if opts: await db.create_collection(coll_name, **opts) @@ -503,19 +532,10 @@ async def asyncSetUp(self): raise unittest.SkipTest(f"{self.__class__.__name__} runOnRequirements not satisfied") # add any special-casing for skipping tests here - if async_client_context.storage_engine == "mmapv1": - if "retryable-writes" in self.TEST_SPEC["description"] or "retryable_writes" in str( - self.TEST_PATH - ): - raise unittest.SkipTest("MMAPv1 does not support retryWrites=True") # Handle mongos_clients for transactions tests. 
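+        # A direct client per mongos lets the runner configure fail points on
+        # every router, not only the one the driver happens to be pinned to.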
self.mongos_clients = [] - if ( - async_client_context.supports_transactions() - and not async_client_context.load_balancer - and not async_client_context.serverless - ): + if async_client_context.supports_transactions() and not async_client_context.load_balancer: for address in async_client_context.mongoses: self.mongos_clients.append(await self.async_single_client("{}:{}".format(*address))) @@ -533,32 +553,63 @@ async def asyncSetUp(self): def maybe_skip_test(self, spec): # add any special-casing for skipping tests here - if async_client_context.storage_engine == "mmapv1": - if ( - "Dirty explicit session is discarded" in spec["description"] - or "Dirty implicit session is discarded" in spec["description"] - or "Cancel server check" in spec["description"] - ): - self.skipTest("MMAPv1 does not support retryWrites=True") - if "Client side error in command starting transaction" in spec["description"]: + class_name = self.__class__.__name__.lower() + description = spec["description"].lower() + + if "client side error in command starting transaction" in description: self.skipTest("Implement PYTHON-1894") - if "timeoutMS applied to entire download" in spec["description"]: + if "type=symbol" in description: + self.skipTest("PyMongo does not support the symbol type") + if "timeoutms applied to entire download" in description: self.skipTest("PyMongo's open_download_stream does not cap the stream's lifetime") + if any( + x in description + for x in [ + "first insertone is never committed", + "second updateone is never committed", + "third updateone is never committed", + ] + ): + self.skipTest("Implement PYTHON-4597") - class_name = self.__class__.__name__.lower() - description = spec["description"].lower() if "csot" in class_name: - if "gridfs" in class_name and sys.platform == "win32": - self.skipTest("PYTHON-3522 CSOT GridFS tests are flaky on Windows") - if async_client_context.storage_engine == "mmapv1": - self.skipTest( - "MMAPv1 does not support retryable writes which is required for CSOT tests" - ) + # Skip tests that are too slow to run on a given platform. 
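+            # Each entry below is a regex matched against the lowercased test
+            # description.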
+ slow_macos = [ + "operation fails after two consecutive socket timeouts.*", + "operation succeeds after one socket timeout.*", + "Non-tailable cursor lifetime remaining timeoutMS applied to getMore if timeoutMode is unset", + ] + slow_win32 = [ + *slow_macos, + "maxTimeMS value in the command is less than timeoutMS", + "timeoutMS applies to whole operation.*", + ] + slow_pypy = [ + "timeoutMS applies to whole operation.*", + ] + if "CI" in os.environ and sys.platform == "win32" and "gridfs" in class_name: + self.skipTest("PYTHON-3522 CSOT GridFS test runs too slow on Windows") + if "CI" in os.environ and sys.platform == "win32": + for pat in slow_win32: + if re.match(pat.lower(), description): + self.skipTest("PYTHON-3522 CSOT test runs too slow on Windows") + if "CI" in os.environ and sys.platform == "darwin": + for pat in slow_macos: + if re.match(pat.lower(), description): + self.skipTest("PYTHON-3522 CSOT test runs too slow on MacOS") + if "CI" in os.environ and sys.implementation.name.lower() == "pypy": + for pat in slow_pypy: + if re.match(pat.lower(), description): + self.skipTest("PYTHON-3522 CSOT test runs too slow on PyPy") if "change" in description or "change" in class_name: self.skipTest("CSOT not implemented for watch()") if "cursors" in class_name: self.skipTest("CSOT not implemented for cursors") - if "tailable" in class_name: + if ( + "tailable" in class_name + or "tailable" in description + and "non-tailable" not in description + ): self.skipTest("CSOT not implemented for tailable cursors") if "sessions" in class_name: self.skipTest("CSOT not implemented for sessions") @@ -574,11 +625,6 @@ def maybe_skip_test(self, spec): self.skipTest("PyMongo does not support count()") if name == "listIndexNames": self.skipTest("PyMongo does not support list_index_names()") - if async_client_context.storage_engine == "mmapv1": - if name == "createChangeStream": - self.skipTest("MMAPv1 does not support change streams") - if name == "withTransaction" or name == "startTransaction": - self.skipTest("MMAPv1 does not support document-level locking") if not async_client_context.test_commands_enabled: if name == "failPoint" or name == "targetedFailPoint": self.skipTest("Test commands must be enabled to use fail points") @@ -618,7 +664,9 @@ def process_error(self, exception, spec): # Connection errors are considered client errors. 
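The error-classification checks that follow lean on PyMongo's exception hierarchy; as a reminder (illustrative sanity checks, not part of the patch):

from gridfs.errors import CorruptGridFile, NoFile
from pymongo.errors import AutoReconnect, ConnectionFailure, NotPrimaryError, PyMongoError

# NotPrimaryError subclasses ConnectionFailure (via AutoReconnect), which is
# why it must be asserted out separately even though connection failures are
# otherwise treated as client errors.
assert issubclass(NotPrimaryError, AutoReconnect)
assert issubclass(AutoReconnect, ConnectionFailure)
# The GridFS errors added to the allowlist are PyMongoError subclasses too.
assert issubclass(CorruptGridFile, PyMongoError)
assert issubclass(NoFile, PyMongoError)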
if isinstance(error, ConnectionFailure): self.assertNotIsInstance(error, NotPrimaryError) - elif isinstance(error, (InvalidOperation, ConfigurationError, EncryptionError)): + elif isinstance(error, CorruptGridFile): + pass + elif isinstance(error, (InvalidOperation, ConfigurationError, EncryptionError, NoFile)): pass else: self.assertNotIsInstance(error, PyMongoError) @@ -674,7 +722,7 @@ def process_error(self, exception, spec): self.match_evaluator.match_result(expect_result, result) else: self.fail( - f"expectResult can only be specified with {BulkWriteError} or {ClientBulkWriteException} exceptions" + f"expectResult can only be specified with {BulkWriteError} or {ClientBulkWriteException} exceptions, got {exception}" ) return exception @@ -684,8 +732,6 @@ def __raise_if_unsupported(self, opname, target, *target_types): self.fail(f"Operation {opname} not supported for entity of type {type(target)}") async def __entityOperation_createChangeStream(self, target, *args, **kwargs): - if async_client_context.storage_engine == "mmapv1": - self.skipTest("MMAPv1 does not support change streams") self.__raise_if_unsupported( "createChangeStream", target, AsyncMongoClient, AsyncDatabase, AsyncCollection ) @@ -711,7 +757,7 @@ async def _databaseOperation_runCommand(self, target, **kwargs): return await target.command(**kwargs) async def _databaseOperation_runCursorCommand(self, target, **kwargs): - return list(await self._databaseOperation_createCommandCursor(target, **kwargs)) + return await (await self._databaseOperation_createCommandCursor(target, **kwargs)).to_list() async def _databaseOperation_createCommandCursor(self, target, **kwargs): self.__raise_if_unsupported("createCommandCursor", target, AsyncDatabase) @@ -743,6 +789,38 @@ async def _databaseOperation_createCommandCursor(self, target, **kwargs): return cursor + async def _collectionOperation_assertIndexExists(self, target, **kwargs): + collection = self.client[kwargs["database_name"]][kwargs["collection_name"]] + index_names = [idx["name"] async for idx in await collection.list_indexes()] + self.assertIn(kwargs["index_name"], index_names) + + async def _collectionOperation_assertIndexNotExists(self, target, **kwargs): + collection = self.client[kwargs["database_name"]][kwargs["collection_name"]] + async for index in await collection.list_indexes(): + self.assertNotEqual(kwargs["indexName"], index["name"]) + + async def _collectionOperation_assertCollectionExists(self, target, **kwargs): + database_name = kwargs["database_name"] + collection_name = kwargs["collection_name"] + collection_name_list = await self.client.get_database(database_name).list_collection_names() + self.assertIn(collection_name, collection_name_list) + + async def _databaseOperation_assertIndexExists(self, target, **kwargs): + collection = self.client[kwargs["database_name"]][kwargs["collection_name"]] + index_names = [idx["name"] async for idx in await collection.list_indexes()] + self.assertIn(kwargs["index_name"], index_names) + + async def _databaseOperation_assertIndexNotExists(self, target, **kwargs): + collection = self.client[kwargs["database_name"]][kwargs["collection_name"]] + async for index in await collection.list_indexes(): + self.assertNotEqual(kwargs["indexName"], index["name"]) + + async def _databaseOperation_assertCollectionExists(self, target, **kwargs): + database_name = kwargs["database_name"] + collection_name = kwargs["collection_name"] + collection_name_list = await self.client.get_database(database_name).list_collection_names() + 
self.assertIn(collection_name, collection_name_list) + async def kill_all_sessions(self): if getattr(self, "client", None) is None: return @@ -812,14 +890,10 @@ async def _collectionOperation_listSearchIndexes(self, target, *args, **kwargs): return await (await target.list_search_indexes(name, **agg_kwargs)).to_list() async def _sessionOperation_withTransaction(self, target, *args, **kwargs): - if async_client_context.storage_engine == "mmapv1": - self.skipTest("MMAPv1 does not support document-level locking") self.__raise_if_unsupported("withTransaction", target, AsyncClientSession) return await target.with_transaction(*args, **kwargs) async def _sessionOperation_startTransaction(self, target, *args, **kwargs): - if async_client_context.storage_engine == "mmapv1": - self.skipTest("MMAPv1 does not support document-level locking") self.__raise_if_unsupported("startTransaction", target, AsyncClientSession) return await target.start_transaction(*args, **kwargs) @@ -842,6 +916,11 @@ async def _cursor_close(self, target, *args, **kwargs): self.__raise_if_unsupported("close", target, NonLazyCursor, AsyncCommandCursor) return await target.close() + async def _clientOperation_appendMetadata(self, target, *args, **kwargs): + info_opts = kwargs["driver_info_options"] + driver_info = DriverInfo(info_opts["name"], info_opts["version"], info_opts["platform"]) + target.append_metadata(driver_info) + async def _clientEncryptionOperation_createDataKey(self, target, *args, **kwargs): if "opts" in kwargs: kwargs.update(camel_to_snake_args(kwargs.pop("opts"))) @@ -985,15 +1064,11 @@ async def run_entity_operation(self, spec): if ignore and isinstance(exc, (PyMongoError,)): return exc if expect_error: - if method_name == "_collectionOperation_bulkWrite": - self.skipTest("Skipping test pending PYTHON-4598") return self.process_error(exc, expect_error) raise else: - if method_name == "_collectionOperation_bulkWrite": - self.skipTest("Skipping test pending PYTHON-4598") if expect_error: - self.fail(f'Excepted error {expect_error} but "{opname}" succeeded: {result}') + self.fail(f'Expected error {expect_error} but "{opname}" succeeded: {result}') if expect_result: actual = coerce_result(opname, result) @@ -1008,12 +1083,8 @@ async def __set_fail_point(self, client, command_args): if not async_client_context.test_commands_enabled: self.skipTest("Test commands must be enabled") - cmd_on = SON([("configureFailPoint", "failCommand")]) - cmd_on.update(command_args) - await client.admin.command(cmd_on) - self.addAsyncCleanup( - client.admin.command, "configureFailPoint", cmd_on["configureFailPoint"], mode="off" - ) + await self.configure_fail_point(client, command_args) + self.addAsyncCleanup(self.configure_fail_point, client, command_args, off=True) async def _testOperation_failPoint(self, spec): await self.__set_fail_point( @@ -1034,7 +1105,7 @@ async def _testOperation_targetedFailPoint(self, spec): async def _testOperation_createEntities(self, spec): await self.entity_map.create_entities_from_spec(spec["entities"], uri=self._uri) - await self.entity_map.advance_cluster_times() + await self.entity_map.advance_cluster_times(self._cluster_time) def _testOperation_assertSessionTransactionState(self, spec): session = self.entity_map[spec["session"]] @@ -1155,7 +1226,7 @@ def _testOperation_assertTopologyType(self, spec): self.assertIsInstance(description, TopologyDescription) self.assertEqual(description.topology_type_name, spec["topologyType"]) - def _testOperation_waitForPrimaryChange(self, spec: dict) -> None: + 
async def _testOperation_waitForPrimaryChange(self, spec: dict) -> None: """Run the waitForPrimaryChange test operation.""" client = self.entity_map[spec["client"]] old_description: TopologyDescription = self.entity_map[spec["priorTopologyDescription"]] @@ -1169,24 +1240,24 @@ def get_primary(td: TopologyDescription) -> Optional[_Address]: old_primary = get_primary(old_description) - def primary_changed() -> bool: - primary = client.primary + async def primary_changed() -> bool: + primary = await client.primary if primary is None: return False return primary != old_primary - wait_until(primary_changed, "change primary", timeout=timeout) + await async_wait_until(primary_changed, "change primary", timeout=timeout) - def _testOperation_runOnThread(self, spec): + async def _testOperation_runOnThread(self, spec): """Run the 'runOnThread' operation.""" thread = self.entity_map[spec["thread"]] - thread.schedule(lambda: self.run_entity_operation(spec["operation"])) + await thread.schedule(functools.partial(self.run_entity_operation, spec["operation"])) - def _testOperation_waitForThread(self, spec): + async def _testOperation_waitForThread(self, spec): """Run the 'waitForThread' operation.""" thread = self.entity_map[spec["thread"]] - thread.stop() - thread.join(10) + await thread.stop() + await thread.join(10) if thread.exc: raise thread.exc self.assertFalse(thread.is_alive(), "Thread {} is still running".format(spec["thread"])) @@ -1380,35 +1451,31 @@ async def verify_outcome(self, spec): self.assertListEqual(sorted_expected_documents, actual_documents) async def run_scenario(self, spec, uri=None): - if "csot" in self.id().lower() and SKIP_CSOT_TESTS: - raise unittest.SkipTest("SKIP_CSOT_TESTS is set, skipping...") - # Kill all sessions before and after each test to prevent an open # transaction (from a test failure) from blocking collection/database # operations during test set up and tear down. await self.kill_all_sessions() - self.addAsyncCleanup(self.kill_all_sessions) - - if "csot" in self.id().lower(): - # Retry CSOT tests up to 2 times to deal with flakey tests. - attempts = 3 - for i in range(attempts): - try: - return await self._run_scenario(spec, uri) - except AssertionError: - if i < attempts - 1: - print( - f"Retrying after attempt {i+1} of {self.id()} failed with:\n" - f"{traceback.format_exc()}", - file=sys.stderr, - ) - await self.asyncSetUp() - continue - raise - return None - else: - await self._run_scenario(spec, uri) - return None + + # Handle flaky tests. 
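The retry machinery used below is the flaky() decorator added later in this patch (test/asynchronous/utils.py). Reduced to its essentials, under hypothetical names, it is a retry wrapper like this toy sketch:

import asyncio
import functools

def retry_async(max_runs=2, delay=0, reset_func=None):
    """Toy stand-in for flaky(): retry a failing coroutine up to max_runs times."""

    def decorator(fn):
        @functools.wraps(fn)
        async def wrapper(*args, **kwargs):
            for attempt in range(max_runs):
                try:
                    return await fn(*args, **kwargs)
                except Exception:
                    if attempt == max_runs - 1:
                        raise
                    await asyncio.sleep(delay)
                    if reset_func:
                        await reset_func()

        return wrapper

    return decorator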
+ flaky_tests = [ + ("PYTHON-5170", ".*test_discovery_and_monitoring.*"), + ("PYTHON-5174", ".*Driver_extends_timeout_while_streaming"), + ("PYTHON-5315", ".*TestSrvPolling.test_recover_from_initially_.*"), + ("PYTHON-4987", ".*UnknownTransactionCommitResult_labels_to_connection_errors"), + ("PYTHON-3689", ".*TestProse.test_load_balancing"), + ("PYTHON-3522", ".*csot.*"), + ] + for reason, flaky_test in flaky_tests: + if re.match(flaky_test.lower(), self.id().lower()) is not None: + func_name = self.id() + options = dict(reason=reason, reset_func=self.asyncSetUp, func_name=func_name) + if "csot" in func_name.lower(): + options["max_runs"] = 3 + options["affects_cpython_linux"] = True + decorator = flaky(**options) + await decorator(self._run_scenario)(spec, uri) + return + await self._run_scenario(spec, uri) async def _run_scenario(self, spec, uri=None): # maybe skip test manually @@ -1430,11 +1497,12 @@ async def _run_scenario(self, spec, uri=None): await self.entity_map.create_entities_from_spec( self.TEST_SPEC.get("createEntities", []), uri=uri ) + self._cluster_time = None # process initialData if "initialData" in self.TEST_SPEC: await self.insert_initial_data(self.TEST_SPEC["initialData"]) - self._cluster_time = (await self.client.admin.command("ping")).get("$clusterTime") - await self.entity_map.advance_cluster_times() + self._cluster_time = self.client._topology.max_cluster_time() + await self.entity_map.advance_cluster_times(self._cluster_time) if "expectLogMessages" in spec: expect_log_messages = spec["expectLogMessages"] @@ -1519,7 +1587,14 @@ class SpecTestBase(with_metaclass(UnifiedSpecTestMeta)): # type: ignore TEST_SPEC = test_spec EXPECTED_FAILURES = expected_failures - return SpecTestBase + base = SpecTestBase + + # Add "encryption" marker if the "csfle" runOnRequirement is set. + for req in test_spec.get("runOnRequirements", []): + if "csfle" in req: + base = pytest.mark.encryption(base) + + return base for dirpath, _, filenames in os.walk(test_path): dirname = os.path.split(dirpath)[-1] diff --git a/test/asynchronous/utils.py b/test/asynchronous/utils.py new file mode 100644 index 0000000000..02ba46c71a --- /dev/null +++ b/test/asynchronous/utils.py @@ -0,0 +1,276 @@ +# Copyright 2012-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Utilities for testing pymongo that require synchronization.""" +from __future__ import annotations + +import asyncio +import contextlib +import os +import random +import sys +import threading # Used in the synchronized version of this file +import time +import traceback +from functools import wraps +from inspect import iscoroutinefunction + +from bson.son import SON +from pymongo import AsyncMongoClient +from pymongo.errors import ConfigurationError +from pymongo.hello import HelloCompat +from pymongo.lock import _async_create_lock +from pymongo.operations import _Op +from pymongo.read_preferences import ReadPreference +from pymongo.server_selectors import any_server_selector, writable_server_selector +from pymongo.synchronous.pool import _CancellationContext, _PoolGeneration + +_IS_SYNC = False + + +async def async_get_pool(client): + """Get the standalone, primary, or mongos pool.""" + topology = await client._get_topology() + server = await topology._select_server(writable_server_selector, _Op.TEST) + return server.pool + + +async def async_get_pools(client): + """Get all pools.""" + return [ + server.pool + for server in await (await client._get_topology()).select_servers( + any_server_selector, _Op.TEST + ) + ] + + +async def async_wait_until(predicate, success_description, timeout=10): + """Wait up to 10 seconds (by default) for predicate to be true. + + E.g.: + + wait_until(lambda: client.primary == ('a', 1), + 'connect to the primary') + + If the lambda-expression isn't true after 10 seconds, we raise + AssertionError("Didn't ever connect to the primary"). + + Returns the predicate's first true value. + """ + start = time.time() + interval = min(float(timeout) / 100, 0.1) + while True: + if iscoroutinefunction(predicate): + retval = await predicate() + else: + retval = predicate() + if retval: + return retval + + if time.time() - start > timeout: + raise AssertionError("Didn't ever %s" % success_description) + + await asyncio.sleep(interval) + + +async def async_is_mongos(client): + res = await client.admin.command(HelloCompat.LEGACY_CMD) + return res.get("msg", "") == "isdbgrid" + + +async def async_ensure_all_connected(client: AsyncMongoClient) -> None: + """Ensure that the client's connection pool has socket connections to all + members of a replica set. Raises ConfigurationError when called with a + non-replica set client. + + Depending on the use-case, the caller may need to clear any event listeners + that are configured on the client. + """ + hello: dict = await client.admin.command(HelloCompat.LEGACY_CMD) + if "setName" not in hello: + raise ConfigurationError("cluster is not a replica set") + + target_host_list = set(hello["hosts"] + hello.get("passives", [])) + connected_host_list = {hello["me"]} + + # Run hello until we have connected to each host at least once. 
+    async def discover():
+        i = 0
+        while i < 100 and connected_host_list != target_host_list:
+            hello: dict = await client.admin.command(
+                HelloCompat.LEGACY_CMD, read_preference=ReadPreference.SECONDARY
+            )
+            connected_host_list.update([hello["me"]])
+            i += 1
+        return connected_host_list
+
+    try:
+
+        async def predicate():
+            return target_host_list == await discover()
+
+        await async_wait_until(predicate, "connected to all hosts")
+    except AssertionError as exc:
+        raise AssertionError(
+            f"{exc}, {connected_host_list} != {target_host_list}, {client.topology_description}"
+        )
+
+
+async def asyncAssertRaisesExactly(cls, fn, *args, **kwargs):
+    """
+    Unlike the standard assertRaises, this checks that a function raises a
+    specific class of exception, and not a subclass. E.g., check that
+    MongoClient() raises ConnectionFailure but not its subclass, AutoReconnect.
+    """
+    try:
+        await fn(*args, **kwargs)
+    except Exception as e:
+        assert e.__class__ == cls, f"got {e.__class__.__name__}, expected {cls.__name__}"
+    else:
+        raise AssertionError("%s not raised" % cls)
+
+
+async def async_set_fail_point(client, command_args):
+    cmd = SON([("configureFailPoint", "failCommand")])
+    cmd.update(command_args)
+    await client.admin.command(cmd)
+
+
+async def async_joinall(tasks):
+    """Join threads with a 5-minute timeout, assert joins succeeded"""
+    if _IS_SYNC:
+        for t in tasks:
+            t.join(300)
+            assert not t.is_alive(), "Thread %s hung" % t
+    else:
+        await asyncio.wait([t.task for t in tasks if t is not None], timeout=300)
+
+
+def flaky(
+    *,
+    reason=None,
+    max_runs=2,
+    min_passes=1,
+    delay=1,
+    affects_cpython_linux=False,
+    func_name=None,
+    reset_func=None,
+):
+    """Decorate a test as flaky.
+
+    :param reason: the reason why the test is flaky
+    :param max_runs: the maximum number of runs before raising an error
+    :param min_passes: the minimum number of passing runs
+    :param delay: the delay in seconds between retries
+    :param affects_cpython_linux: whether the test is flaky on CPython on Linux
+    :param func_name: the name of the function, used for the retry message
+    :param reset_func: a function to call before retrying
+
+    """
+    if reason is None:
+        raise ValueError("flaky requires a reason input")
+    is_cpython_linux = sys.platform == "linux" and sys.implementation.name == "cpython"
+    disable_flaky = "DISABLE_FLAKY" in os.environ
+    if "CI" not in os.environ and "ENABLE_FLAKY" not in os.environ:
+        disable_flaky = True
+
+    if disable_flaky or (is_cpython_linux and not affects_cpython_linux):
+        max_runs = 1
+        min_passes = 1
+
+    def decorator(target_func):
+        @wraps(target_func)
+        async def wrapper(*args, **kwargs):
+            passes = 0
+            for i in range(max_runs):
+                try:
+                    result = await target_func(*args, **kwargs)
+                    passes += 1
+                    if passes == min_passes:
+                        return result
+                except Exception as e:
+                    if i == max_runs - 1:
+                        raise e
+                    print(
+                        f"Retrying after attempt {i+1} of {func_name or target_func.__name__} failed with ({reason}):\n"
+                        f"{traceback.format_exc()}",
+                        file=sys.stderr,
+                    )
+                    await asyncio.sleep(delay)
+                    if reset_func:
+                        await reset_func()
+
+        return wrapper
+
+    return decorator
+
+
+class AsyncMockConnection:
+    def __init__(self):
+        self.cancel_context = _CancellationContext()
+        self.more_to_come = False
+        self.id = random.randint(0, 100)
+        self.is_sdam = False
+        self.server_connection_id = random.randint(0, 100)
+
+    def close_conn(self, reason):
+        pass
+
+    async def __aenter__(self):
+        return self
+
+    async def __aexit__(self, exc_type, exc_val, exc_tb):
+        pass
+
+
+class AsyncMockPool:
+    def __init__(self, address, options, is_sdam=False, client_id=None):
+        self.gen = _PoolGeneration()
+        self._lock = _async_create_lock()
+        self.opts = options
+        self.operation_count = 0
+        self.conns = []
+
+    def stale_generation(self, gen, service_id):
+        return self.gen.stale(gen, service_id)
+
+    @contextlib.asynccontextmanager
+    async def checkout(self, handler=None):
+        yield AsyncMockConnection()
+
+    async def checkin(self, *args, **kwargs):
+        pass
+
+    async def _reset(self, service_id=None):
+        async with self._lock:
+            self.gen.inc(service_id)
+
+    async def ready(self):
+        pass
+
+    async def reset(self, service_id=None, interrupt_connections=False):
+        await self._reset()
+
+    async def reset_without_pause(self):
+        await self._reset()
+
+    async def close(self):
+        await self._reset()
+
+    async def update_is_writable(self, is_writable):
+        pass
+
+    async def remove_stale_sockets(self, *args, **kwargs):
+        pass
diff --git a/test/asynchronous/utils_selection_tests.py b/test/asynchronous/utils_selection_tests.py
new file mode 100644
index 0000000000..d6b92fadb4
--- /dev/null
+++ b/test/asynchronous/utils_selection_tests.py
@@ -0,0 +1,204 @@
+# Copyright 2015-present MongoDB, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Utilities for testing Server Selection and Max Staleness."""
+from __future__ import annotations
+
+import datetime
+import os
+import sys
+from test.asynchronous import AsyncPyMongoTestCase
+from test.asynchronous.utils import AsyncMockPool
+
+sys.path[0:0] = [""]
+
+from test import unittest
+from test.pymongo_mocks import DummyMonitor
+from test.utils_selection_tests_shared import (
+    get_addresses,
+    get_topology_type_name,
+    make_server_description,
+)
+from test.utils_shared import parse_read_preference
+
+from bson import json_util
+from pymongo.asynchronous.settings import TopologySettings
+from pymongo.asynchronous.topology import Topology
+from pymongo.common import HEARTBEAT_FREQUENCY
+from pymongo.errors import AutoReconnect, ConfigurationError
+from pymongo.operations import _Op
+from pymongo.server_selectors import writable_server_selector
+
+_IS_SYNC = False
+
+
+def get_topology_settings_dict(**kwargs):
+    settings = {
+        "monitor_class": DummyMonitor,
+        "heartbeat_frequency": HEARTBEAT_FREQUENCY,
+        "pool_class": AsyncMockPool,
+    }
+    settings.update(kwargs)
+    return settings
+
+
+async def create_topology(scenario_def, **kwargs):
+    # Initialize topologies.
+ if "heartbeatFrequencyMS" in scenario_def: + frequency = int(scenario_def["heartbeatFrequencyMS"]) / 1000.0 + else: + frequency = HEARTBEAT_FREQUENCY + + seeds, hosts = get_addresses(scenario_def["topology_description"]["servers"]) + + topology_type = get_topology_type_name(scenario_def) + if topology_type == "LoadBalanced": + kwargs.setdefault("load_balanced", True) + # Force topology description to ReplicaSet + elif topology_type in ["ReplicaSetNoPrimary", "ReplicaSetWithPrimary"]: + kwargs.setdefault("replica_set_name", "rs") + settings = get_topology_settings_dict(heartbeat_frequency=frequency, seeds=seeds, **kwargs) + + # "Eligible servers" is defined in the server selection spec as + # the set of servers matching both the ReadPreference's mode + # and tag sets. + topology = Topology(TopologySettings(**settings)) + await topology.open() + + # Update topologies with server descriptions. + for server in scenario_def["topology_description"]["servers"]: + server_description = make_server_description(server, hosts) + await topology.on_change(server_description) + + # Assert that descriptions match + assert ( + scenario_def["topology_description"]["type"] == topology.description.topology_type_name + ), topology.description.topology_type_name + + return topology + + +def create_test(scenario_def): + async def run_scenario(self): + _, hosts = get_addresses(scenario_def["topology_description"]["servers"]) + # "Eligible servers" is defined in the server selection spec as + # the set of servers matching both the ReadPreference's mode + # and tag sets. + top_latency = await create_topology(scenario_def) + + # "In latency window" is defined in the server selection + # spec as the subset of suitable_servers that falls within the + # allowable latency window. + top_suitable = await create_topology(scenario_def, local_threshold_ms=1000000) + + # Create server selector. + if scenario_def.get("operation") == "write": + pref = writable_server_selector + else: + # Make first letter lowercase to match read_pref's modes. + pref_def = scenario_def["read_preference"] + if scenario_def.get("error"): + with self.assertRaises((ConfigurationError, ValueError)): + # Error can be raised when making Read Pref or selecting. + pref = parse_read_preference(pref_def) + await top_latency.select_server(pref, _Op.TEST) + return + + pref = parse_read_preference(pref_def) + + # Select servers. 
+ if not scenario_def.get("suitable_servers"): + with self.assertRaises(AutoReconnect): + await top_suitable.select_server(pref, _Op.TEST, server_selection_timeout=0) + + return + + if not scenario_def["in_latency_window"]: + with self.assertRaises(AutoReconnect): + await top_latency.select_server(pref, _Op.TEST, server_selection_timeout=0) + + return + + actual_suitable_s = await top_suitable.select_servers( + pref, _Op.TEST, server_selection_timeout=0 + ) + actual_latency_s = await top_latency.select_servers( + pref, _Op.TEST, server_selection_timeout=0 + ) + + expected_suitable_servers = {} + for server in scenario_def["suitable_servers"]: + server_description = make_server_description(server, hosts) + expected_suitable_servers[server["address"]] = server_description + + actual_suitable_servers = {} + for s in actual_suitable_s: + actual_suitable_servers[ + "%s:%d" % (s.description.address[0], s.description.address[1]) + ] = s.description + + self.assertEqual(len(actual_suitable_servers), len(expected_suitable_servers)) + for k, actual in actual_suitable_servers.items(): + expected = expected_suitable_servers[k] + self.assertEqual(expected.address, actual.address) + self.assertEqual(expected.server_type, actual.server_type) + self.assertEqual(expected.round_trip_time, actual.round_trip_time) + self.assertEqual(expected.tags, actual.tags) + self.assertEqual(expected.all_hosts, actual.all_hosts) + + expected_latency_servers = {} + for server in scenario_def["in_latency_window"]: + server_description = make_server_description(server, hosts) + expected_latency_servers[server["address"]] = server_description + + actual_latency_servers = {} + for s in actual_latency_s: + actual_latency_servers[ + "%s:%d" % (s.description.address[0], s.description.address[1]) + ] = s.description + + self.assertEqual(len(actual_latency_servers), len(expected_latency_servers)) + for k, actual in actual_latency_servers.items(): + expected = expected_latency_servers[k] + self.assertEqual(expected.address, actual.address) + self.assertEqual(expected.server_type, actual.server_type) + self.assertEqual(expected.round_trip_time, actual.round_trip_time) + self.assertEqual(expected.tags, actual.tags) + self.assertEqual(expected.all_hosts, actual.all_hosts) + + return run_scenario + + +def create_selection_tests(test_dir): + class TestAllScenarios(AsyncPyMongoTestCase): + pass + + for dirpath, _, filenames in os.walk(test_dir): + dirname = os.path.split(dirpath) + dirname = os.path.split(dirname[-2])[-1] + "_" + dirname[-1] + + for filename in filenames: + if os.path.splitext(filename)[1] != ".json": + continue + with open(os.path.join(dirpath, filename)) as scenario_stream: + scenario_def = json_util.loads(scenario_stream.read()) + + # Construct test from scenario. 
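As the comment above notes, each JSON scenario file becomes one generated test method. In miniature, with hypothetical names, the generation pattern used by create_selection_tests() below is:

def _make_test(expected_type):
    async def run_scenario(self):
        self.assertIsInstance(expected_type, str)

    return run_scenario

class _ExampleScenarios(AsyncPyMongoTestCase):
    pass

for i, scenario_type in enumerate(["ReplicaSetWithPrimary", "Sharded"]):
    test = _make_test(scenario_type)
    test.__name__ = f"test_scenario_{i}"
    # Attach the closure to the class so unittest discovery finds it.
    setattr(_ExampleScenarios, test.__name__, test)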
+ new_test = create_test(scenario_def) + test_name = f"test_{dirname}_{os.path.splitext(filename)[0]}" + + new_test.__name__ = test_name + setattr(TestAllScenarios, new_test.__name__, new_test) + + return TestAllScenarios diff --git a/test/asynchronous/utils_spec_runner.py b/test/asynchronous/utils_spec_runner.py index b79e5258b5..496c28a045 100644 --- a/test/asynchronous/utils_spec_runner.py +++ b/test/asynchronous/utils_spec_runner.py @@ -18,12 +18,13 @@ import asyncio import functools import os -import threading +import time import unittest -from asyncio import iscoroutinefunction from collections import abc +from inspect import iscoroutinefunction from test.asynchronous import AsyncIntegrationTest, async_client_context, client_knobs -from test.utils import ( +from test.asynchronous.helpers import ConcurrentRunner +from test.utils_shared import ( CMAPListener, CompareType, EventListener, @@ -47,6 +48,7 @@ from pymongo.asynchronous.command_cursor import AsyncCommandCursor from pymongo.asynchronous.cursor import AsyncCursor from pymongo.errors import AutoReconnect, BulkWriteError, OperationFailure, PyMongoError +from pymongo.lock import _async_cond_wait, _async_create_condition, _async_create_lock from pymongo.read_concern import ReadConcern from pymongo.read_preferences import ReadPreference from pymongo.results import BulkWriteResult, _WriteResult @@ -55,38 +57,36 @@ _IS_SYNC = False -class SpecRunnerThread(threading.Thread): +class SpecRunnerTask(ConcurrentRunner): def __init__(self, name): - super().__init__() - self.name = name + super().__init__(name=name) self.exc = None self.daemon = True - self.cond = threading.Condition() + self.cond = _async_create_condition(_async_create_lock()) self.ops = [] - self.stopped = False - def schedule(self, work): + async def schedule(self, work): self.ops.append(work) - with self.cond: + async with self.cond: self.cond.notify() - def stop(self): + async def stop(self): self.stopped = True - with self.cond: + async with self.cond: self.cond.notify() - def run(self): + async def run(self): while not self.stopped or self.ops: if not self.ops: - with self.cond: - self.cond.wait(10) + async with self.cond: + await _async_cond_wait(self.cond, 10) if self.ops: try: work = self.ops.pop(0) - work() + await work() except Exception as exc: self.exc = exc - self.stop() + await self.stop() class AsyncSpecTestCreator: @@ -124,18 +124,6 @@ def _ensure_min_max_server_version(self, scenario_def, method): if max_ver is not None: method = async_client_context.require_version_max(*max_ver)(method) - if "serverless" in scenario_def: - serverless = scenario_def["serverless"] - if serverless == "require": - serverless_satisfied = async_client_context.serverless - elif serverless == "forbid": - serverless_satisfied = not async_client_context.serverless - else: # unset or "allow" - serverless_satisfied = True - method = unittest.skipUnless( - serverless_satisfied, "Serverless requirement not satisfied" - )(method) - return method @staticmethod @@ -168,16 +156,6 @@ def valid_auth_enabled(run_on_req): return not async_client_context.auth_enabled return True - @staticmethod - def serverless_ok(run_on_req): - serverless = run_on_req["serverless"] - if serverless == "require": - return async_client_context.serverless - elif serverless == "forbid": - return not async_client_context.serverless - else: # unset or "allow" - return True - async def should_run_on(self, scenario_def): run_on = scenario_def.get("runOn", []) if not run_on: @@ -190,7 +168,6 @@ async def 
should_run_on(self, scenario_def): and self.min_server_version(req) and self.max_server_version(req) and self.valid_auth_enabled(req) - and self.serverless_ok(req) ): return True return False @@ -230,7 +207,7 @@ async def _create_tests(self): str(test_def["description"].replace(" ", "_").replace(".", "_")), ) - new_test = await self._create_test(scenario_def, test_def, test_name) + new_test = self._create_test(scenario_def, test_def, test_name) new_test = self._ensure_min_max_server_version(scenario_def, new_test) new_test = self.ensure_run_on(scenario_def, new_test) @@ -265,15 +242,10 @@ async def asyncSetUp(self) -> None: async def asyncTearDown(self) -> None: self.knobs.disable() - async def _set_fail_point(self, client, command_args): - cmd = SON([("configureFailPoint", "failCommand")]) - cmd.update(command_args) - await client.admin.command(cmd) - async def set_fail_point(self, command_args): clients = self.mongos_clients if self.mongos_clients else [self.client] for client in clients: - await self._set_fail_point(client, command_args) + await self.configure_fail_point(client, command_args) async def targeted_fail_point(self, session, fail_point): """Run the targetedFailPoint test operation. @@ -282,7 +254,7 @@ async def targeted_fail_point(self, session, fail_point): """ clients = {c.address: c for c in self.mongos_clients} client = clients[session._pinned_address] - await self._set_fail_point(client, fail_point) + await self.configure_fail_point(client, fail_point) self.addAsyncCleanup(self.set_fail_point, {"mode": "off"}) def assert_session_pinned(self, session): @@ -320,6 +292,10 @@ async def assert_index_not_exists(self, database, collection, index): coll = self.client[database][collection] self.assertNotIn(index, [doc["name"] async for doc in await coll.list_indexes()]) + async def wait(self, ms): + """Run the "wait" test operation.""" + await asyncio.sleep(ms / 1000.0) + def assertErrorLabelsContain(self, exc, expected_labels): labels = [l for l in expected_labels if exc.has_error_label(l)] self.assertEqual(labels, expected_labels) @@ -672,16 +648,10 @@ async def run_scenario(self, scenario_def, test): server_listener = ServerAndTopologyEventListener() # Create a new client, to avoid interference from pooled sessions. client_options = self.parse_client_options(test["clientOptions"]) - # MMAPv1 does not support retryable writes. 
- if ( - client_options.get("retryWrites") is True - and async_client_context.storage_engine == "mmapv1" - ): - self.skipTest("MMAPv1 does not support retryWrites=True") use_multi_mongos = test["useMultipleMongoses"] host = None if use_multi_mongos: - if async_client_context.load_balancer or async_client_context.serverless: + if async_client_context.load_balancer: host = async_client_context.MULTI_MONGOS_LB_URI elif async_client_context.is_mongos: host = async_client_context.mongos_seeds() diff --git a/test/atlas/test_connection.py b/test/atlas/test_connection.py index 4dcbba6d11..ac217ab40d 100644 --- a/test/atlas/test_connection.py +++ b/test/atlas/test_connection.py @@ -26,9 +26,9 @@ sys.path[0:0] = [""] import pymongo -from pymongo.ssl_support import HAS_SNI +from pymongo.ssl_support import _has_sni -pytestmark = pytest.mark.atlas +pytestmark = pytest.mark.atlas_connect URIS = { @@ -37,13 +37,12 @@ "ATLAS_FREE": os.environ.get("ATLAS_FREE"), "ATLAS_TLS11": os.environ.get("ATLAS_TLS11"), "ATLAS_TLS12": os.environ.get("ATLAS_TLS12"), - "ATLAS_SERVERLESS": os.environ.get("ATLAS_SERVERLESS"), "ATLAS_SRV_REPL": os.environ.get("ATLAS_SRV_REPL"), "ATLAS_SRV_SHRD": os.environ.get("ATLAS_SRV_SHRD"), "ATLAS_SRV_FREE": os.environ.get("ATLAS_SRV_FREE"), "ATLAS_SRV_TLS11": os.environ.get("ATLAS_SRV_TLS11"), "ATLAS_SRV_TLS12": os.environ.get("ATLAS_SRV_TLS12"), - "ATLAS_SRV_SERVERLESS": os.environ.get("ATLAS_SRV_SERVERLESS"), + "ATLAS_X509_DEV_WITH_CERT": os.environ.get("ATLAS_X509_DEV_WITH_CERT"), } @@ -57,7 +56,7 @@ def connect(self, uri): # No auth error client.test.test.count_documents({}) - @unittest.skipUnless(HAS_SNI, "Free tier requires SNI support") + @unittest.skipUnless(_has_sni(True), "Free tier requires SNI support") def test_free_tier(self): self.connect(URIS["ATLAS_FREE"]) @@ -73,14 +72,11 @@ def test_tls_11(self): def test_tls_12(self): self.connect(URIS["ATLAS_TLS12"]) - def test_serverless(self): - self.connect(URIS["ATLAS_SERVERLESS"]) - def connect_srv(self, uri): self.connect(uri) self.assertIn("mongodb+srv://", uri) - @unittest.skipUnless(HAS_SNI, "Free tier requires SNI support") + @unittest.skipUnless(_has_sni(True), "Free tier requires SNI support") def test_srv_free_tier(self): self.connect_srv(URIS["ATLAS_SRV_FREE"]) @@ -96,8 +92,8 @@ def test_srv_tls_11(self): def test_srv_tls_12(self): self.connect_srv(URIS["ATLAS_SRV_TLS12"]) - def test_srv_serverless(self): - self.connect_srv(URIS["ATLAS_SRV_SERVERLESS"]) + def test_x509_with_cert(self): + self.connect(URIS["ATLAS_X509_DEV_WITH_CERT"]) def test_uniqueness(self): """Ensure that we don't accidentally duplicate the test URIs.""" diff --git a/test/auth_aws/test_auth_aws.py b/test/auth_aws/test_auth_aws.py index a7660f2f67..9738694d85 100644 --- a/test/auth_aws/test_auth_aws.py +++ b/test/auth_aws/test_auth_aws.py @@ -32,7 +32,7 @@ from pymongo import MongoClient from pymongo.errors import OperationFailure -from pymongo.uri_parser import parse_uri +from pymongo.synchronous.uri_parser import parse_uri pytestmark = pytest.mark.auth_aws diff --git a/test/bson_binary_vector/float32.json b/test/bson_binary_vector/float32.json index bbbe00b758..72dafce10f 100644 --- a/test/bson_binary_vector/float32.json +++ b/test/bson_binary_vector/float32.json @@ -11,6 +11,15 @@ "padding": 0, "canonical_bson": "1C00000005766563746F72000A0000000927000000FE420000E04000" }, + { + "description": "Vector with decimals and negative value FLOAT32", + "valid": true, + "vector": [127.7, -7.7], + "dtype_hex": "0x27", + "dtype_alias": "FLOAT32", + 
"padding": 0, + "canonical_bson": "1C00000005766563746F72000A0000000927006666FF426666F6C000" + }, { "description": "Empty Vector FLOAT32", "valid": true, @@ -23,7 +32,7 @@ { "description": "Infinity Vector FLOAT32", "valid": true, - "vector": ["-inf", 0.0, "inf"], + "vector": [{"$numberDouble": "-Infinity"}, 0.0, {"$numberDouble": "Infinity"} ], "dtype_hex": "0x27", "dtype_alias": "FLOAT32", "padding": 0, @@ -35,8 +44,22 @@ "vector": [127.0, 7.0], "dtype_hex": "0x27", "dtype_alias": "FLOAT32", - "padding": 3 + "padding": 3, + "canonical_bson": "1C00000005766563746F72000A0000000927030000FE420000E04000" + }, + { + "description": "Insufficient vector data with 3 bytes FLOAT32", + "valid": false, + "dtype_hex": "0x27", + "dtype_alias": "FLOAT32", + "canonical_bson": "1700000005766563746F7200050000000927002A2A2A00" + }, + { + "description": "Insufficient vector data with 5 bytes FLOAT32", + "valid": false, + "dtype_hex": "0x27", + "dtype_alias": "FLOAT32", + "canonical_bson": "1900000005766563746F7200070000000927002A2A2A2A2A00" } ] } - diff --git a/test/bson_binary_vector/int8.json b/test/bson_binary_vector/int8.json index 7529721e5e..29524fb617 100644 --- a/test/bson_binary_vector/int8.json +++ b/test/bson_binary_vector/int8.json @@ -42,7 +42,8 @@ "vector": [127, 7], "dtype_hex": "0x03", "dtype_alias": "INT8", - "padding": 3 + "padding": 3, + "canonical_bson": "1600000005766563746F7200040000000903037F0700" }, { "description": "INT8 with float inputs", @@ -54,4 +55,3 @@ } ] } - diff --git a/test/bson_binary_vector/packed_bit.json b/test/bson_binary_vector/packed_bit.json index a41cd593f5..7cc272e38b 100644 --- a/test/bson_binary_vector/packed_bit.json +++ b/test/bson_binary_vector/packed_bit.json @@ -2,6 +2,15 @@ "description": "Tests of Binary subtype 9, Vectors, with dtype PACKED_BIT", "test_key": "vector", "tests": [ + { + "description": "Padding specified with no vector data PACKED_BIT", + "valid": false, + "vector": [], + "dtype_hex": "0x10", + "dtype_alias": "PACKED_BIT", + "padding": 1, + "canonical_bson": "1400000005766563746F72000200000009100100" + }, { "description": "Simple Vector PACKED_BIT", "valid": true, @@ -12,22 +21,22 @@ "canonical_bson": "1600000005766563746F7200040000000910007F0700" }, { - "description": "Empty Vector PACKED_BIT", + "description": "PACKED_BIT with padding", "valid": true, - "vector": [], + "vector": [127, 8], "dtype_hex": "0x10", "dtype_alias": "PACKED_BIT", - "padding": 0, - "canonical_bson": "1400000005766563746F72000200000009100000" + "padding": 3, + "canonical_bson": "1600000005766563746F7200040000000910037F0800" }, { - "description": "PACKED_BIT with padding", + "description": "Empty Vector PACKED_BIT", "valid": true, - "vector": [127, 7], + "vector": [], "dtype_hex": "0x10", "dtype_alias": "PACKED_BIT", - "padding": 3, - "canonical_bson": "1600000005766563746F7200040000000910037F0700" + "padding": 0, + "canonical_bson": "1400000005766563746F72000200000009100000" }, { "description": "Overflow Vector PACKED_BIT", @@ -44,7 +53,31 @@ "dtype_hex": "0x10", "dtype_alias": "PACKED_BIT", "padding": 0 + }, + { + "description": "Vector with float values PACKED_BIT", + "valid": false, + "vector": [127.5], + "dtype_hex": "0x10", + "dtype_alias": "PACKED_BIT", + "padding": 0 + }, + { + "description": "Exceeding maximum padding PACKED_BIT", + "valid": false, + "vector": [1], + "dtype_hex": "0x10", + "dtype_alias": "PACKED_BIT", + "padding": 8, + "canonical_bson": "1500000005766563746F7200030000000910080100" + }, + { + "description": "Negative padding PACKED_BIT", + 
"valid": false, + "vector": [1], + "dtype_hex": "0x10", + "dtype_alias": "PACKED_BIT", + "padding": -1 } ] } - diff --git a/test/bson_corpus/datetime.json b/test/bson_corpus/datetime.json index f857afdc36..1554341d29 100644 --- a/test/bson_corpus/datetime.json +++ b/test/bson_corpus/datetime.json @@ -24,6 +24,7 @@ { "description" : "Y10K", "canonical_bson" : "1000000009610000DC1FD277E6000000", + "relaxed_extjson" : "{\"a\":{\"$date\":{\"$numberLong\":\"253402300800000\"}}}", "canonical_extjson" : "{\"a\":{\"$date\":{\"$numberLong\":\"253402300800000\"}}}" }, { diff --git a/test/bson_corpus/decimal128-1.json b/test/bson_corpus/decimal128-1.json index 7eefec6bf7..8e7fbc93c6 100644 --- a/test/bson_corpus/decimal128-1.json +++ b/test/bson_corpus/decimal128-1.json @@ -312,6 +312,30 @@ "canonical_bson": "18000000136400000000000a5bc138938d44c64d31cc3700", "degenerate_extjson": "{\"d\" : {\"$numberDecimal\" : \"1000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000\"}}", "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000000000000000000000000000000E+999\"}}" + }, + { + "description": "Clamped zeros with a large positive exponent", + "canonical_bson": "180000001364000000000000000000000000000000FE5F00", + "degenerate_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+2147483647\"}}", + "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+6111\"}}" + }, + { + "description": "Clamped zeros with a large negative exponent", + "canonical_bson": "180000001364000000000000000000000000000000000000", + "degenerate_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E-2147483647\"}}", + "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E-6176\"}}" + }, + { + "description": "Clamped negative zeros with a large positive exponent", + "canonical_bson": "180000001364000000000000000000000000000000FEDF00", + "degenerate_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0E+2147483647\"}}", + "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0E+6111\"}}" + }, + { + "description": "Clamped negative zeros with a large negative exponent", + "canonical_bson": "180000001364000000000000000000000000000000008000", + "degenerate_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0E-2147483647\"}}", + "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0E-6176\"}}" } ] } diff --git a/test/change_streams/unified/change-streams-clusterTime.json b/test/change_streams/unified/change-streams-clusterTime.json index 55b4ae3fbc..2b09e548f1 100644 --- a/test/change_streams/unified/change-streams-clusterTime.json +++ b/test/change_streams/unified/change-streams-clusterTime.json @@ -28,7 +28,6 @@ 
"minServerVersion": "4.0.0", "topologies": [ "replicaset", - "sharded-replicaset", "load-balanced", "sharded" ], diff --git a/test/change_streams/unified/change-streams-disambiguatedPaths.json b/test/change_streams/unified/change-streams-disambiguatedPaths.json index 91d8e66da2..a8667b5436 100644 --- a/test/change_streams/unified/change-streams-disambiguatedPaths.json +++ b/test/change_streams/unified/change-streams-disambiguatedPaths.json @@ -28,7 +28,6 @@ "minServerVersion": "6.1.0", "topologies": [ "replicaset", - "sharded-replicaset", "load-balanced", "sharded" ], @@ -43,70 +42,6 @@ } ], "tests": [ - { - "description": "disambiguatedPaths is not present when showExpandedEvents is false/unset", - "operations": [ - { - "name": "insertOne", - "object": "collection0", - "arguments": { - "document": { - "_id": 1, - "a": { - "1": 1 - } - } - } - }, - { - "name": "createChangeStream", - "object": "collection0", - "arguments": { - "pipeline": [] - }, - "saveResultAsEntity": "changeStream0" - }, - { - "name": "updateOne", - "object": "collection0", - "arguments": { - "filter": { - "_id": 1 - }, - "update": { - "$set": { - "a.1": 2 - } - } - } - }, - { - "name": "iterateUntilDocumentOrError", - "object": "changeStream0", - "expectResult": { - "operationType": "update", - "ns": { - "db": "database0", - "coll": "collection0" - }, - "updateDescription": { - "updatedFields": { - "$$exists": true - }, - "removedFields": { - "$$exists": true - }, - "truncatedArrays": { - "$$exists": true - }, - "disambiguatedPaths": { - "$$exists": false - } - } - } - } - ] - }, { "description": "disambiguatedPaths is present on updateDescription when an ambiguous path is present", "operations": [ diff --git a/test/change_streams/unified/change-streams-errors.json b/test/change_streams/unified/change-streams-errors.json index 04fe8f04f3..65e99e541e 100644 --- a/test/change_streams/unified/change-streams-errors.json +++ b/test/change_streams/unified/change-streams-errors.json @@ -145,7 +145,7 @@ "minServerVersion": "4.1.11", "topologies": [ "replicaset", - "sharded-replicaset", + "sharded", "load-balanced" ] } @@ -190,7 +190,7 @@ "minServerVersion": "4.2", "topologies": [ "replicaset", - "sharded-replicaset", + "sharded", "load-balanced" ] } diff --git a/test/change_streams/unified/change-streams-nsType.json b/test/change_streams/unified/change-streams-nsType.json new file mode 100644 index 0000000000..1861c9a5e0 --- /dev/null +++ b/test/change_streams/unified/change-streams-nsType.json @@ -0,0 +1,145 @@ +{ + "description": "change-streams-nsType", + "schemaVersion": "1.7", + "runOnRequirements": [ + { + "minServerVersion": "8.1.0", + "topologies": [ + "replicaset", + "sharded" + ], + "serverless": "forbid" + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "useMultipleMongoses": false + } + }, + { + "database": { + "id": "database0", + "client": "client0", + "databaseName": "database0" + } + } + ], + "tests": [ + { + "description": "nsType is present when creating collections", + "operations": [ + { + "name": "dropCollection", + "object": "database0", + "arguments": { + "collection": "foo" + } + }, + { + "name": "createChangeStream", + "object": "database0", + "arguments": { + "pipeline": [], + "showExpandedEvents": true + }, + "saveResultAsEntity": "changeStream0" + }, + { + "name": "createCollection", + "object": "database0", + "arguments": { + "collection": "foo" + } + }, + { + "name": "iterateUntilDocumentOrError", + "object": "changeStream0", + "expectResult": { + "operationType": "create", + 
"nsType": "collection" + } + } + ] + }, + { + "description": "nsType is present when creating timeseries", + "operations": [ + { + "name": "dropCollection", + "object": "database0", + "arguments": { + "collection": "foo" + } + }, + { + "name": "createChangeStream", + "object": "database0", + "arguments": { + "pipeline": [], + "showExpandedEvents": true + }, + "saveResultAsEntity": "changeStream0" + }, + { + "name": "createCollection", + "object": "database0", + "arguments": { + "collection": "foo", + "timeseries": { + "timeField": "time", + "metaField": "meta", + "granularity": "minutes" + } + } + }, + { + "name": "iterateUntilDocumentOrError", + "object": "changeStream0", + "expectResult": { + "operationType": "create", + "nsType": "timeseries" + } + } + ] + }, + { + "description": "nsType is present when creating views", + "operations": [ + { + "name": "dropCollection", + "object": "database0", + "arguments": { + "collection": "foo" + } + }, + { + "name": "createChangeStream", + "object": "database0", + "arguments": { + "pipeline": [], + "showExpandedEvents": true + }, + "saveResultAsEntity": "changeStream0" + }, + { + "name": "createCollection", + "object": "database0", + "arguments": { + "collection": "foo", + "viewOn": "testName" + } + }, + { + "name": "iterateUntilDocumentOrError", + "object": "changeStream0", + "expectResult": { + "operationType": "create", + "nsType": "view" + } + } + ] + } + ] +} diff --git a/test/change_streams/unified/change-streams-pre_and_post_images.json b/test/change_streams/unified/change-streams-pre_and_post_images.json index 8beefb2bc8..e62fc03459 100644 --- a/test/change_streams/unified/change-streams-pre_and_post_images.json +++ b/test/change_streams/unified/change-streams-pre_and_post_images.json @@ -6,7 +6,7 @@ "minServerVersion": "6.0.0", "topologies": [ "replicaset", - "sharded-replicaset", + "sharded", "load-balanced" ], "serverless": "forbid" diff --git a/test/change_streams/unified/change-streams-resume-allowlist.json b/test/change_streams/unified/change-streams-resume-allowlist.json index b4953ec736..1ec72b432b 100644 --- a/test/change_streams/unified/change-streams-resume-allowlist.json +++ b/test/change_streams/unified/change-streams-resume-allowlist.json @@ -6,7 +6,7 @@ "minServerVersion": "3.6", "topologies": [ "replicaset", - "sharded-replicaset", + "sharded", "load-balanced" ], "serverless": "forbid" diff --git a/test/change_streams/unified/change-streams-resume-errorLabels.json b/test/change_streams/unified/change-streams-resume-errorLabels.json index f5f4505a9f..7fd70108f0 100644 --- a/test/change_streams/unified/change-streams-resume-errorLabels.json +++ b/test/change_streams/unified/change-streams-resume-errorLabels.json @@ -6,7 +6,7 @@ "minServerVersion": "4.3.1", "topologies": [ "replicaset", - "sharded-replicaset", + "sharded", "load-balanced" ], "serverless": "forbid" diff --git a/test/change_streams/unified/change-streams-showExpandedEvents.json b/test/change_streams/unified/change-streams-showExpandedEvents.json index 3eed2f534a..b9594e0c1e 100644 --- a/test/change_streams/unified/change-streams-showExpandedEvents.json +++ b/test/change_streams/unified/change-streams-showExpandedEvents.json @@ -6,9 +6,9 @@ "minServerVersion": "6.0.0", "topologies": [ "replicaset", - "sharded-replicaset", "sharded" - ] + ], + "serverless": "forbid" } ], "createEntities": [ @@ -462,7 +462,6 @@ "runOnRequirements": [ { "topologies": [ - "sharded-replicaset", "sharded" ] } diff --git a/test/change_streams/unified/change-streams.json 
b/test/change_streams/unified/change-streams.json index c8b60ed4e2..a155d85b6e 100644 --- a/test/change_streams/unified/change-streams.json +++ b/test/change_streams/unified/change-streams.json @@ -181,7 +181,12 @@ "field": "array", "newSize": 2 } - ] + ], + "disambiguatedPaths": { + "$$unsetOrMatches": { + "$$exists": true + } + } } } } @@ -1408,6 +1413,11 @@ "$$unsetOrMatches": { "$$exists": true } + }, + "disambiguatedPaths": { + "$$unsetOrMatches": { + "$$exists": true + } } } } diff --git a/test/client-side-encryption/etc/data/encryptedFields-prefix-suffix.json b/test/client-side-encryption/etc/data/encryptedFields-prefix-suffix.json new file mode 100644 index 0000000000..ec4489fa09 --- /dev/null +++ b/test/client-side-encryption/etc/data/encryptedFields-prefix-suffix.json @@ -0,0 +1,38 @@ +{ + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedText", + "bsonType": "string", + "queries": [ + { + "queryType": "prefixPreview", + "strMinQueryLength": { + "$numberInt": "2" + }, + "strMaxQueryLength": { + "$numberInt": "10" + }, + "caseSensitive": true, + "diacriticSensitive": true + }, + { + "queryType": "suffixPreview", + "strMinQueryLength": { + "$numberInt": "2" + }, + "strMaxQueryLength": { + "$numberInt": "10" + }, + "caseSensitive": true, + "diacriticSensitive": true + } + ] + } + ] +} diff --git a/test/client-side-encryption/etc/data/encryptedFields-substring.json b/test/client-side-encryption/etc/data/encryptedFields-substring.json new file mode 100644 index 0000000000..ee22def77b --- /dev/null +++ b/test/client-side-encryption/etc/data/encryptedFields-substring.json @@ -0,0 +1,30 @@ +{ + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedText", + "bsonType": "string", + "queries": [ + { + "queryType": "substringPreview", + "strMaxLength": { + "$numberInt": "10" + }, + "strMinQueryLength": { + "$numberInt": "2" + }, + "strMaxQueryLength": { + "$numberInt": "10" + }, + "caseSensitive": true, + "diacriticSensitive": true + } + ] + } + ] +} diff --git a/test/client-side-encryption/etc/data/lookup/key-doc.json b/test/client-side-encryption/etc/data/lookup/key-doc.json new file mode 100644 index 0000000000..566b56c354 --- /dev/null +++ b/test/client-side-encryption/etc/data/lookup/key-doc.json @@ -0,0 +1,30 @@ +{ + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } +} diff --git a/test/client-side-encryption/etc/data/lookup/schema-csfle.json b/test/client-side-encryption/etc/data/lookup/schema-csfle.json new file mode 100644 index 0000000000..29ac9ad5da --- /dev/null +++ b/test/client-side-encryption/etc/data/lookup/schema-csfle.json @@ -0,0 +1,19 @@ +{ + "properties": { + "csfle": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": 
"AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" +} diff --git a/test/client-side-encryption/etc/data/lookup/schema-csfle2.json b/test/client-side-encryption/etc/data/lookup/schema-csfle2.json new file mode 100644 index 0000000000..3f1c02781c --- /dev/null +++ b/test/client-side-encryption/etc/data/lookup/schema-csfle2.json @@ -0,0 +1,19 @@ +{ + "properties": { + "csfle2": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" +} diff --git a/test/client-side-encryption/etc/data/lookup/schema-qe.json b/test/client-side-encryption/etc/data/lookup/schema-qe.json new file mode 100644 index 0000000000..9428ea1b45 --- /dev/null +++ b/test/client-side-encryption/etc/data/lookup/schema-qe.json @@ -0,0 +1,20 @@ +{ + "escCollection": "enxcol_.qe.esc", + "ecocCollection": "enxcol_.qe.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "qe", + "bsonType": "string", + "queries": { + "queryType": "equality", + "contention": 0 + } + } + ] +} diff --git a/test/client-side-encryption/etc/data/lookup/schema-qe2.json b/test/client-side-encryption/etc/data/lookup/schema-qe2.json new file mode 100644 index 0000000000..77d5bd37cb --- /dev/null +++ b/test/client-side-encryption/etc/data/lookup/schema-qe2.json @@ -0,0 +1,20 @@ +{ + "escCollection": "enxcol_.qe2.esc", + "ecocCollection": "enxcol_.qe2.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "qe2", + "bsonType": "string", + "queries": { + "queryType": "equality", + "contention": 0 + } + } + ] +} diff --git a/test/client-side-encryption/limits/limits-encryptedFields.json b/test/client-side-encryption/limits/limits-encryptedFields.json new file mode 100644 index 0000000000..c52a0271e1 --- /dev/null +++ b/test/client-side-encryption/limits/limits-encryptedFields.json @@ -0,0 +1,14 @@ +{ + "fields": [ + { + "keyId": { + "$binary": { + "base64": "LOCALAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + }, + "path": "foo", + "bsonType": "string" + } + ] +} \ No newline at end of file diff --git a/test/client-side-encryption/limits/limits-qe-doc.json b/test/client-side-encryption/limits/limits-qe-doc.json new file mode 100644 index 0000000000..71efbf4068 --- /dev/null +++ b/test/client-side-encryption/limits/limits-qe-doc.json @@ -0,0 +1,3 @@ +{ + "foo": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" +} \ No newline at end of file diff --git a/test/client-side-encryption/spec/legacy/fle2v2-Rangev2-Compact.json b/test/client-side-encryption/spec/legacy/fle2v2-Rangev2-Compact.json index bba9f25535..59241927ca 100644 --- a/test/client-side-encryption/spec/legacy/fle2v2-Rangev2-Compact.json +++ b/test/client-side-encryption/spec/legacy/fle2v2-Rangev2-Compact.json @@ -6,8 +6,7 @@ "replicaset", "sharded", "load-balanced" - ], - "serverless": "forbid" + ] } ], "database_name": "default", diff --git a/test/client-side-encryption/spec/legacy/keyCache.json b/test/client-side-encryption/spec/legacy/keyCache.json new file mode 100644 index 0000000000..912ce80020 --- /dev/null +++ b/test/client-side-encryption/spec/legacy/keyCache.json @@ -0,0 +1,270 @@ +{ + "runOn": [ + { + "minServerVersion": "4.1.10" + } + ], + "database_name": "default", + 
"collection_name": "default", + "data": [], + "json_schema": { + "properties": { + "encrypted_w_altname": { + "encrypt": { + "keyId": "/altname", + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + }, + "random": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string_equivalent": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + }, + "key_vault_data": [ + { + "status": 1, + "_id": { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + }, + "masterKey": { + "provider": "aws", + "key": "arn:aws:kms:us-east-1:579766882180:key/89fcc2c4-08b0-4bd9-9f25-e30687b580d0", + "region": "us-east-1" + }, + "updateDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyMaterial": { + "$binary": { + "base64": "AQICAHhQNmWG2CzOm1dq3kWLM+iDUZhEqnhJwH9wZVpuZ94A8gEqnsxXlR51T5EbEVezUqqKAAAAwjCBvwYJKoZIhvcNAQcGoIGxMIGuAgEAMIGoBgkqhkiG9w0BBwEwHgYJYIZIAWUDBAEuMBEEDHa4jo6yp0Z18KgbUgIBEIB74sKxWtV8/YHje5lv5THTl0HIbhSwM6EqRlmBiFFatmEWaeMk4tO4xBX65eq670I5TWPSLMzpp8ncGHMmvHqRajNBnmFtbYxN3E3/WjxmdbOOe+OXpnGJPcGsftc7cB2shRfA4lICPnE26+oVNXT6p0Lo20nY5XC7jyCO", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyAltNames": [ + "altname", + "another_altname" + ] + } + ], + "tests": [ + { + "description": "Insert with deterministic encryption, then find it", + "clientOptions": { + "autoEncryptOpts": { + "kmsProviders": { + "aws": {} + }, + "keyExpirationMS": 1 + } + }, + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encrypted_string": "string0" + } + } + }, + { + "name": "wait", + "object": "testRunner", + "arguments": { + "ms": 50 + } + }, + { + "name": "find", + "arguments": { + "filter": { + "_id": 1 + } + }, + "result": [ + { + "_id": 1, + "encrypted_string": "string0" + } + ] + } + ], + "expectations": [ + { + "command_started_event": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "command_name": "listCollections" + } + }, + { + "command_started_event": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "command_name": "find" + } + }, + { + "command_started_event": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + } + } + ], + "ordered": true + }, + "command_name": "insert" + } + }, + { + "command_started_event": { + "command": { + "find": "default", + "filter": { + "_id": 1 + } + }, + "command_name": "find" + } + }, + { + "command_started_event": { + 
"command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "command_name": "find" + } + } + ], + "outcome": { + "collection": { + "data": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + } + } + ] + } + } + } + ] +} diff --git a/test/client-side-encryption/spec/legacy/timeoutMS.json b/test/client-side-encryption/spec/legacy/timeoutMS.json index 8411306224..b667767cfc 100644 --- a/test/client-side-encryption/spec/legacy/timeoutMS.json +++ b/test/client-side-encryption/spec/legacy/timeoutMS.json @@ -110,7 +110,7 @@ "listCollections" ], "blockConnection": true, - "blockTimeMS": 600 + "blockTimeMS": 60 } }, "clientOptions": { @@ -119,7 +119,7 @@ "aws": {} } }, - "timeoutMS": 500 + "timeoutMS": 50 }, "operations": [ { diff --git a/test/client-side-encryption/spec/unified/QE-Text-cleanupStructuredEncryptionData.json b/test/client-side-encryption/spec/unified/QE-Text-cleanupStructuredEncryptionData.json new file mode 100644 index 0000000000..24f33ab3ec --- /dev/null +++ b/test/client-side-encryption/spec/unified/QE-Text-cleanupStructuredEncryptionData.json @@ -0,0 +1,219 @@ +{ + "description": "QE-Text-cleanupStructuredEncryptionData", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.2.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.0" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client", + "databaseName": "db" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "coll" + } + } + ], + "initialData": [ + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "HBk9BWihXExNDvTp1lUxOuxuZK2Pe2ZdVdlsxPEBkiO1bS4mG5NNDsQ7zVxJAH8BtdOYp72Ku4Y3nwc0BUpIKsvAKX4eYXtlhv5zUQxWdeNFhg9qK7qb8nqhnnLeT0f25jFSqzWJoT379hfwDeu0bebJHr35QrJ8myZdPMTEDYF08QYQ48ShRBli0S+QzBHHAQiM2iJNr4svg2WR8JSeWQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + }, + { + "databaseName": "db", + "collectionName": "coll", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedText", + "bsonType": "string", + "queries": [ + { + "queryType": "suffixPreview", + "contention": { + "$numberLong": "0" + }, + "strMinQueryLength": { + "$numberLong": "3" + }, + "strMaxQueryLength": { + "$numberLong": "30" + }, + 
"caseSensitive": true, + "diacriticSensitive": true + } + ] + } + ] + } + } + } + ], + "tests": [ + { + "description": "QE Text cleanupStructuredEncryptionData works", + "operations": [ + { + "name": "runCommand", + "object": "db", + "arguments": { + "command": { + "cleanupStructuredEncryptionData": "coll" + }, + "commandName": "cleanupStructuredEncryptionData" + }, + "expectResult": { + "ok": 1 + } + } + ], + "expectEvents": [ + { + "client": "client", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "coll" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "cleanupStructuredEncryptionData": "coll", + "cleanupTokens": { + "encryptedText": { + "ecoc": { + "$binary": { + "base64": "SWO8WEoZ2r2Kx/muQKb7+COizy85nIIUFiHh4K9kcvA=", + "subType": "00" + } + }, + "anchorPaddingToken": { + "$binary": { + "base64": "YAiF7Iwhqq1UyfxPvm70xfQJtrIRPrjfD2yRLG1+saQ=", + "subType": "00" + } + } + } + } + }, + "commandName": "cleanupStructuredEncryptionData" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/QE-Text-compactStructuredEncryptionData.json b/test/client-side-encryption/spec/unified/QE-Text-compactStructuredEncryptionData.json new file mode 100644 index 0000000000..c7abfe2d4b --- /dev/null +++ b/test/client-side-encryption/spec/unified/QE-Text-compactStructuredEncryptionData.json @@ -0,0 +1,261 @@ +{ + "description": "QE-Text-compactStructuredEncryptionData", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.2.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.0" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client", + "databaseName": "db" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "coll" + } + } + ], + "initialData": [ + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "HBk9BWihXExNDvTp1lUxOuxuZK2Pe2ZdVdlsxPEBkiO1bS4mG5NNDsQ7zVxJAH8BtdOYp72Ku4Y3nwc0BUpIKsvAKX4eYXtlhv5zUQxWdeNFhg9qK7qb8nqhnnLeT0f25jFSqzWJoT379hfwDeu0bebJHr35QrJ8myZdPMTEDYF08QYQ48ShRBli0S+QzBHHAQiM2iJNr4svg2WR8JSeWQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + }, + { + "databaseName": "db", + "collectionName": "coll", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + 
"$binary": { + "base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedText", + "bsonType": "string", + "queries": [ + { + "queryType": "suffixPreview", + "contention": { + "$numberLong": "0" + }, + "strMinQueryLength": { + "$numberLong": "3" + }, + "strMaxQueryLength": { + "$numberLong": "30" + }, + "caseSensitive": true, + "diacriticSensitive": true + } + ] + } + ] + } + } + } + ], + "tests": [ + { + "description": "QE Text compactStructuredEncryptionData works", + "operations": [ + { + "name": "runCommand", + "object": "db", + "arguments": { + "command": { + "compactStructuredEncryptionData": "coll" + }, + "commandName": "compactStructuredEncryptionData" + }, + "expectResult": { + "ok": 1 + } + } + ], + "expectEvents": [ + { + "client": "client", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "coll" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "compactStructuredEncryptionData": "coll", + "encryptionInformation": { + "type": { + "$numberInt": "1" + }, + "schema": { + "db.coll": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedText", + "bsonType": "string", + "queries": [ + { + "queryType": "suffixPreview", + "contention": { + "$numberLong": "0" + }, + "strMinQueryLength": { + "$numberLong": "3" + }, + "strMaxQueryLength": { + "$numberLong": "30" + }, + "caseSensitive": true, + "diacriticSensitive": true + } + ] + } + ], + "strEncodeVersion": { + "$numberInt": "1" + }, + "escCollection": "enxcol_.coll.esc", + "ecocCollection": "enxcol_.coll.ecoc" + } + } + }, + "compactionTokens": { + "encryptedText": { + "ecoc": { + "$binary": { + "base64": "SWO8WEoZ2r2Kx/muQKb7+COizy85nIIUFiHh4K9kcvA=", + "subType": "00" + } + }, + "anchorPaddingToken": { + "$binary": { + "base64": "YAiF7Iwhqq1UyfxPvm70xfQJtrIRPrjfD2yRLG1+saQ=", + "subType": "00" + } + } + } + } + }, + "commandName": "compactStructuredEncryptionData" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/QE-Text-prefixPreview.json b/test/client-side-encryption/spec/unified/QE-Text-prefixPreview.json new file mode 100644 index 0000000000..7279385743 --- /dev/null +++ b/test/client-side-encryption/spec/unified/QE-Text-prefixPreview.json @@ -0,0 +1,338 @@ +{ + "description": "QE-Text-prefixPreview", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.2.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.0" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client", + "databaseName": "db" + } + }, + { + "collection": { + "id": "coll", + "database": 
"db", + "collectionName": "coll" + } + } + ], + "initialData": [ + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "HBk9BWihXExNDvTp1lUxOuxuZK2Pe2ZdVdlsxPEBkiO1bS4mG5NNDsQ7zVxJAH8BtdOYp72Ku4Y3nwc0BUpIKsvAKX4eYXtlhv5zUQxWdeNFhg9qK7qb8nqhnnLeT0f25jFSqzWJoT379hfwDeu0bebJHr35QrJ8myZdPMTEDYF08QYQ48ShRBli0S+QzBHHAQiM2iJNr4svg2WR8JSeWQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + }, + { + "databaseName": "db", + "collectionName": "coll", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedText", + "bsonType": "string", + "queries": [ + { + "queryType": "prefixPreview", + "contention": { + "$numberLong": "0" + }, + "strMinQueryLength": { + "$numberLong": "3" + }, + "strMaxQueryLength": { + "$numberLong": "30" + }, + "caseSensitive": true, + "diacriticSensitive": true + } + ] + } + ] + } + } + } + ], + "tests": [ + { + "description": "Insert QE prefixPreview", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedText": "foobar" + } + }, + "object": "coll" + } + ], + "expectEvents": [ + { + "client": "client", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "coll" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "coll", + "documents": [ + { + "_id": 1, + "encryptedText": { + "$$type": "binData" + } + } + ], + "ordered": true + }, + "commandName": "insert" + } + } + ] + } + ] + }, + { + "description": "Query with matching $encStrStartsWith", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedText": "foobar" + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "$expr": { + "$encStrStartsWith": { + "input": "$encryptedText", + "prefix": "foo" + } + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": { + "$numberInt": "1" + }, + "encryptedText": "foobar", + "__safeContent__": [ + { + "$binary": { + "base64": "wpaMBVDjL4bHf9EtSP52PJFzyNn1R19+iNI/hWtvzdk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "fmUMXTMV/XRiN0IL3VXxSEn6SQG9E6Po30kJKB8JJlQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "vZIDMiFDgjmLNYVrrbnq1zT4hg7sGpe/PMtighSsnRc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "26Z5G+sHTzV3D7F8Y0m08389USZ2afinyFV3ez9UEBQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "q/JEq8of7bE0QE5Id0XuOsNQ4qVpANYymcPQDUL2Ywk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Uvvv46LkfbgLoPqZ6xTBzpgoYRTM6FUgRdqZ9eaVojI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": 
"nMxdq2lladuBJA3lv3JC2MumIUtRJBNJVLp3PVE6nQk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "hS3V0qq5CF/SkTl3ZWWWgXcAJ8G5yGtkY2RwcHNc5Oc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "McgwYUxfKj5+4D0vskZymy4KA82s71MR25iV/Enutww=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Ciqdk1b+t+Vrr6oIlFFk0Zdym5BPmwN3glQ0/VcsVdM=", + "subType": "00" + } + } + ] + } + ] + } + ] + }, + { + "description": "Query with non-matching $encStrStartsWith", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedText": "foobar" + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "$expr": { + "$encStrStartsWith": { + "input": "$encryptedText", + "prefix": "bar" + } + } + } + }, + "object": "coll", + "expectResult": [] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/QE-Text-substringPreview.json b/test/client-side-encryption/spec/unified/QE-Text-substringPreview.json new file mode 100644 index 0000000000..6a8f133eac --- /dev/null +++ b/test/client-side-encryption/spec/unified/QE-Text-substringPreview.json @@ -0,0 +1,551 @@ +{ + "description": "QE-Text-substringPreview", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.2.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.0" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client", + "databaseName": "db" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "coll" + } + } + ], + "initialData": [ + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "HBk9BWihXExNDvTp1lUxOuxuZK2Pe2ZdVdlsxPEBkiO1bS4mG5NNDsQ7zVxJAH8BtdOYp72Ku4Y3nwc0BUpIKsvAKX4eYXtlhv5zUQxWdeNFhg9qK7qb8nqhnnLeT0f25jFSqzWJoT379hfwDeu0bebJHr35QrJ8myZdPMTEDYF08QYQ48ShRBli0S+QzBHHAQiM2iJNr4svg2WR8JSeWQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + }, + { + "databaseName": "db", + "collectionName": "coll", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedText", + "bsonType": "string", + "queries": [ + { + "queryType": "substringPreview", + "contention": { + "$numberLong": "0" + }, + "strMinQueryLength": { + "$numberLong": "3" + }, + "strMaxQueryLength": { + "$numberLong": "10" + }, + "strMaxLength": { + "$numberLong": "20" + }, + "caseSensitive": true, + "diacriticSensitive": true + } + ] + } + ] + } + } + } + ], + "tests": [ + { + "description": "Insert QE suffixPreview", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedText": "foobar" + } + }, + "object": "coll" + } + 
], + "expectEvents": [ + { + "client": "client", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "coll" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "coll", + "documents": [ + { + "_id": 1, + "encryptedText": { + "$$type": "binData" + } + } + ], + "ordered": true + }, + "commandName": "insert" + } + } + ] + } + ] + }, + { + "description": "Query with matching $encStrContains", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedText": "foobar" + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "$expr": { + "$encStrContains": { + "input": "$encryptedText", + "substring": "oba" + } + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": { + "$numberInt": "1" + }, + "encryptedText": "foobar", + "__safeContent__": [ + { + "$binary": { + "base64": "wpaMBVDjL4bHf9EtSP52PJFzyNn1R19+iNI/hWtvzdk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "IpY3x/jjm8j/74jAdUhgxdM5hk68zR0zv/lTKm/72Vg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "G+ky260C6QiOfIxKz14FmaMbAxvui1BKJO/TnLOHlGk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "7dv3gAKe9vwJMZmpB40pRCwRTmc7ds9UkGhxH8j084E=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "o0V+Efn6x8XQdE80F1tztNaT3qxHjcsd9DOQ47BtmQk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "sJvrCjyVot7PIZFsdRehWFANKAj6fmBaj3FLbz/dZLE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "e98auxFmu02h5MfBIARk29MI7hSmvN3F9DaQ0xjqoEM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "US83krGNov/ezL6IhsY5eEOCxv1xUPDIEL/nmY0IKi0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "P2Aq5+OHZPG0CWIdmZvWq9c/18ZKVYW3vbxd+WU/TXU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "8AdPRPnSzcd5uhq4TZfNvNeF0XjLNVwAsJJMTtktw84=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "9O6u/G51I4ZHFLhL4ZLuudbr0s202A2QnPfThmOXPhI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "N7AjYVyVlv6+lVSTM+cIxRL3SMgs3G5LgxSs+jrgDkI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "RbGF7dQbPGYQFd9DDO1hPz1UlLOJ77FAC6NsjGwJeos=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "m7srHMgKm6kZwsNx8rc45pmw0/9Qro6xuQ8lZS3+RYk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "K75CNU3JyKFqZWPiIsVi4+n7DhYmcPl/nEhQ3d88mVI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "c7bwGpUZc/7JzEnMS7qQ/TPuXZyrmMihFaAV6zIqbZc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "rDvEdUgEk8u4Srt3ETokWs2FXcnyJaRGQ+NbkFwi2rQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "VcdZj9zfveRBRlpCR2OYWau2+GokOFb73TE3gpElNiU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "eOa9o2xfA6OgkbYUxd6wQJicaeN6guhy2V66W3ALsaA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "1xGkJh+um70XiRd8lKLDtyHgDqrf7/59Mg7X0+KZh8k=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "OSvllqHxycbcZN4phR6NDujY3ttA59o7nQJ6V9eJpX0=", + "subType": "00" + } + }, + { + 
"$binary": { + "base64": "ZTX1pyk8Vdw0BSbJx7GeJNcQf3tGKxbrrNSTqBqUWkg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "cn7V05zb5iXwYrePGMHztC+GRq+Tj8IMpRDraauPhSE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "E9bV9KyrZxHJSUmMg0HrDK4gGN+75ruelAnrM6hXQgY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "WrssTNmdgXoTGpbaF0JLRCGH6cDQuz1XEFNTy98nrb0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "jZmyOJP35dsxQ/OY5U4ISpVRIYr8iedNfcwZiKt29Qc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "d2mocORMbX9MX+/itAW8r1kxVw2/uii4vzXtc+2CIRQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "JBnJy58eRPhDo3DuZvsHbvQDiHXxdtAx1Eif66k5SfA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "OjbDulC8s62v0pgweBSsQqtJjJBwH5JinfJpj7nVr+A=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "85i7KT2GP9nSda3Gsil5LKubhq0LDtc22pxBxHpR+nE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "u9Fvsclwrs9lwIcMPV/fMZD7L3d5anSfJQVjQb9mgLg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "LZ32ttmLJGOIw9oFaUCn3Sx5uHPTYJPSFpeGRWNqlUc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "mMsZvGEePTqtl0FJAL/jAdyWNQIlpwN61YIlZsSIZ6s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "XZcu1a/ZGsIzAl3j4MXQlLo4v2p7kvIqRHtIQYFmL6k=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Zse27LinlYCEnX6iTmJceI33mEJxFb0LdPxp0RiMOaQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "vOv2Hgb2/sBpnX9XwFbIN6yDxhjchwlmczUf82W2tp4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "oQxZ9A6j3x5j6x1Jqw/N9tpP4rfWMjcV3y+a3PkrL7c=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "/D7ew3EijyUnmT22awVFspcuyo3JChJcDeCPwpljzVM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "BEmmwqyamt9X3bcWDld61P01zquy8fBHAXq3SHAPP0M=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "wygD9/kAo1KsRvtr1v+9/lvqoWdKwgh6gDHvAQfXPPk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "pRTKgF/uksrF1c1AcfSTY6ZhqBKVud1vIztQ4/36SLs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "C4iUo8oNJsjJ37BqnBgIgSQpf99X2Bb4W5MZEAmakHU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "icoE53jIq6Fu/YGKUiSUTYyZ8xdiTQY9jJiGxVJObpw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "oubCwk0V6G2RFWtcOnYDU4uUBoXBrhBRi4nZgrYj9JY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "IyqhQ9nGhzEi5YW2W6v1kGU5DY2u2qSqbM/qXdLdWVU=", + "subType": "00" + } + } + ] + } + ] + } + ] + }, + { + "description": "Query with non-matching $encStrContains", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedText": "foobar" + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "$expr": { + "$encStrContains": { + "input": "$encryptedText", + "substring": "blah" + } + } + } + }, + "object": "coll", + "expectResult": [] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/QE-Text-suffixPreview.json b/test/client-side-encryption/spec/unified/QE-Text-suffixPreview.json new file mode 100644 index 0000000000..deec5e63b0 --- /dev/null +++ b/test/client-side-encryption/spec/unified/QE-Text-suffixPreview.json @@ -0,0 +1,338 @@ +{ + "description": "QE-Text-suffixPreview", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.2.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.0" 
+ } + } + ], + "createEntities": [ + { + "client": { + "id": "client", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client", + "databaseName": "db" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "coll" + } + } + ], + "initialData": [ + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "HBk9BWihXExNDvTp1lUxOuxuZK2Pe2ZdVdlsxPEBkiO1bS4mG5NNDsQ7zVxJAH8BtdOYp72Ku4Y3nwc0BUpIKsvAKX4eYXtlhv5zUQxWdeNFhg9qK7qb8nqhnnLeT0f25jFSqzWJoT379hfwDeu0bebJHr35QrJ8myZdPMTEDYF08QYQ48ShRBli0S+QzBHHAQiM2iJNr4svg2WR8JSeWQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + }, + { + "databaseName": "db", + "collectionName": "coll", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedText", + "bsonType": "string", + "queries": [ + { + "queryType": "suffixPreview", + "contention": { + "$numberLong": "0" + }, + "strMinQueryLength": { + "$numberLong": "3" + }, + "strMaxQueryLength": { + "$numberLong": "30" + }, + "caseSensitive": true, + "diacriticSensitive": true + } + ] + } + ] + } + } + } + ], + "tests": [ + { + "description": "Insert QE suffixPreview", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedText": "foobar" + } + }, + "object": "coll" + } + ], + "expectEvents": [ + { + "client": "client", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "coll" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "coll", + "documents": [ + { + "_id": 1, + "encryptedText": { + "$$type": "binData" + } + } + ], + "ordered": true + }, + "commandName": "insert" + } + } + ] + } + ] + }, + { + "description": "Query with matching $encStrStartsWith", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedText": "foobar" + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "$expr": { + "$encStrEndsWith": { + "input": "$encryptedText", + "suffix": "bar" + } + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": { + "$numberInt": "1" + }, + "encryptedText": "foobar", + "__safeContent__": [ + { + "$binary": { + "base64": "wpaMBVDjL4bHf9EtSP52PJFzyNn1R19+iNI/hWtvzdk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": 
"uDCWsucUsJemUP7pmeb+Kd8B9qupVzI8wnLFqX1rkiU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "W3E1x4bHZ8SEHFz4zwXM0G5Z5WSwBhnxE8x5/qdP6JM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6g/TXVDDf6z+ntResIvTKWdmIy4ajQ1rhwdNZIiEG7A=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "hU+u/T3D6dHDpT3d/v5AlgtRoAufCXCAyO2jQlgsnCw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "vrPnq0AtBIURNgNGA6HJL+5/p5SBWe+qz8505TRo/dE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "W5pylBxdv2soY2NcBfPiHDVLTS6tx+0ULkI8gysBeFY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "oWO3xX3x0bYUJGK2S1aPAmlU3Xtfsgb9lTZ6flGAlsg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SjZGucTEUbdpd86O8yj1pyMyBOOKxvAQ9C8ngZ9C5UE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "CEaMZkxVDVbnXr+To0DOyvsva04UQkIYP3KtgYVVwf8=", + "subType": "00" + } + } + ] + } + ] + } + ] + }, + { + "description": "Query with non-matching $encStrEndsWith", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedText": "foobar" + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "$expr": { + "$encStrEndsWith": { + "input": "$encryptedText", + "suffix": "foo" + } + } + } + }, + "object": "coll", + "expectResult": [] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/aggregate.json b/test/client-side-encryption/spec/unified/aggregate.json new file mode 100644 index 0000000000..d04ce49d28 --- /dev/null +++ b/test/client-side-encryption/spec/unified/aggregate.json @@ -0,0 +1,433 @@ +{ + "description": "aggregate", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "4.1.10", + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + } + } + ], + "createOptions": { + "validator": { + "$jsonSchema": { + "properties": { + "encrypted_w_altname": { + "encrypt": { + "keyId": "/altname", + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + }, + "random": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string_equivalent": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": 
"AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + } + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "status": 1, + "_id": { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + }, + "masterKey": { + "provider": "aws", + "key": "arn:aws:kms:us-east-1:579766882180:key/89fcc2c4-08b0-4bd9-9f25-e30687b580d0", + "region": "us-east-1" + }, + "updateDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyMaterial": { + "$binary": { + "base64": "AQICAHhQNmWG2CzOm1dq3kWLM+iDUZhEqnhJwH9wZVpuZ94A8gEqnsxXlR51T5EbEVezUqqKAAAAwjCBvwYJKoZIhvcNAQcGoIGxMIGuAgEAMIGoBgkqhkiG9w0BBwEwHgYJYIZIAWUDBAEuMBEEDHa4jo6yp0Z18KgbUgIBEIB74sKxWtV8/YHje5lv5THTl0HIbhSwM6EqRlmBiFFatmEWaeMk4tO4xBX65eq670I5TWPSLMzpp8ncGHMmvHqRajNBnmFtbYxN3E3/WjxmdbOOe+OXpnGJPcGsftc7cB2shRfA4lICPnE26+oVNXT6p0Lo20nY5XC7jyCO", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyAltNames": [ + "altname", + "another_altname" + ] + } + ] + } + ], + "tests": [ + { + "description": "Aggregate with deterministic encryption", + "skipReason": "SERVER-39395", + "operations": [ + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encrypted_string": "457-55-5642" + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encrypted_string": "string0" + } + ] + } + ], + "outcome": [ + { + "documents": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + } + } + ], + "collectionName": "default", + "databaseName": "default" + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "aggregate": "default", + "pipeline": [ + { + "$match": { + "encrypted_string": "457-55-5642" + } + } + ] + }, + "commandName": "aggregate" + } + } + ] + } + ] + }, + { + "description": "Aggregate with empty pipeline", + "skipReason": "SERVER-40829 hides agg support behind enableTestCommands flag.", + "operations": [ + { + "name": "aggregate", + "arguments": { + "pipeline": [] + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encrypted_string": "string0" + } + ] + } + ], + "outcome": [ + { + "documents": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + } + } + ], + "collectionName": "default", + "databaseName": "default" + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "aggregate": "default", + "pipeline": [], + "cursor": {} + }, + 
"commandName": "aggregate" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + } + ] + } + ] + }, + { + "description": "Aggregate should fail with random encryption", + "skipReason": "SERVER-39395", + "operations": [ + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "random": "abc" + } + } + ] + }, + "object": "coll", + "expectError": { + "errorContains": "Cannot query on fields encrypted with the randomized encryption" + } + } + ] + }, + { + "description": "Database aggregate should fail", + "operations": [ + { + "name": "aggregate", + "object": "db", + "arguments": { + "pipeline": [ + { + "$currentOp": { + "allUsers": false, + "idleConnections": false, + "localOps": true + } + }, + { + "$match": { + "command.aggregate": { + "$eq": 1 + } + } + }, + { + "$project": { + "command": 1 + } + }, + { + "$project": { + "command.lsid": 0 + } + } + ] + }, + "expectError": { + "errorContains": "non-collection command not supported for auto encryption: aggregate" + } + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/awsTemporary.json b/test/client-side-encryption/spec/unified/awsTemporary.json new file mode 100644 index 0000000000..24b732a5eb --- /dev/null +++ b/test/client-side-encryption/spec/unified/awsTemporary.json @@ -0,0 +1,313 @@ +{ + "description": "awsTemporary", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "4.1.10", + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + }, + "sessionToken": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client1", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + }, + "sessionToken": "bad" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db1", + "client": "client1", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll1", + "database": "db1", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "validator": { + "$jsonSchema": { + "properties": { + "encrypted_w_altname": { + "encrypt": { + "keyId": "/altname", + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + }, + "random": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": 
"AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string_equivalent": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + } + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "status": 1, + "_id": { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + }, + "masterKey": { + "provider": "aws", + "key": "arn:aws:kms:us-east-1:579766882180:key/89fcc2c4-08b0-4bd9-9f25-e30687b580d0", + "region": "us-east-1" + }, + "updateDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyMaterial": { + "$binary": { + "base64": "AQICAHhQNmWG2CzOm1dq3kWLM+iDUZhEqnhJwH9wZVpuZ94A8gEqnsxXlR51T5EbEVezUqqKAAAAwjCBvwYJKoZIhvcNAQcGoIGxMIGuAgEAMIGoBgkqhkiG9w0BBwEwHgYJYIZIAWUDBAEuMBEEDHa4jo6yp0Z18KgbUgIBEIB74sKxWtV8/YHje5lv5THTl0HIbhSwM6EqRlmBiFFatmEWaeMk4tO4xBX65eq670I5TWPSLMzpp8ncGHMmvHqRajNBnmFtbYxN3E3/WjxmdbOOe+OXpnGJPcGsftc7cB2shRfA4lICPnE26+oVNXT6p0Lo20nY5XC7jyCO", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyAltNames": [ + "altname", + "another_altname" + ] + } + ] + } + ], + "tests": [ + { + "description": "Insert a document with auto encryption using the AWS provider with temporary credentials", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encrypted_string": "string0" + } + }, + "object": "coll" + } + ], + "outcome": [ + { + "documents": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + } + } + ], + "collectionName": "default", + "databaseName": "default" + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault" + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + } + } + ], + "ordered": true + }, + "commandName": "insert" + } + } + ] + } + ] + }, + { + "description": "Insert with invalid temporary credentials", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encrypted_string": "string0" + } + }, + "object": "coll1", + "expectError": { + "errorContains": "security token" + } + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/azureKMS.json b/test/client-side-encryption/spec/unified/azureKMS.json new file mode 100644 index 0000000000..b70959217f --- /dev/null +++ b/test/client-side-encryption/spec/unified/azureKMS.json @@ -0,0 +1,293 @@ +{ + "description": "azureKMS", + 
"schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "4.1.10", + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "azure": { + "tenantId": { + "$$placeholder": 1 + }, + "clientId": { + "$$placeholder": 1 + }, + "clientSecret": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "validator": { + "$jsonSchema": { + "properties": { + "encrypted_string_aws": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + }, + "encrypted_string_azure": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AZURE+AAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + }, + "encrypted_string_gcp": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "GCP+AAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + }, + "encrypted_string_local": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + }, + "encrypted_string_kmip": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "dBHpr8aITfeBQ15grpbLpQ==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + }, + "encrypted_string_kmip_delegated": { + "encrypt": { + "keyId": [ + { + "$uuid": "7411e9af-c688-4df7-8143-5e60ae96cba6" + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + } + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "AZURE+AAAAAAAAAAAAAAAA==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "n+HWZ0ZSVOYA3cvQgP7inN4JSXfOH85IngmeQxRpQHjCCcqT3IFqEWNlrsVHiz3AELimHhX4HKqOLWMUeSIT6emUDDoQX9BAv8DR1+E1w4nGs/NyEneac78EYFkK3JysrFDOgl2ypCCTKAypkn9CkAx1if4cfgQE93LW4kczcyHdGiH36CIxrCDGv1UzAvERN5Qa47DVwsM6a+hWsF2AAAJVnF0wYLLJU07TuRHdMrrphPWXZsFgyV+lRqJ7DDpReKNO8nMPLV/mHqHBHGPGQiRdb9NoJo8CvokGz4+KE8oLwzKf6V24dtwZmRkrsDV4iOhvROAzz+Euo1ypSkL3mw==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1601573901680" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1601573901680" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "azure", + "keyVaultEndpoint": "key-vault-csfle.vault.azure.net", + "keyName": "key-name-csfle" + }, + "keyAltNames": [ + "altname", + "azure_altname" + ] + } + ] + } + ], + "tests": [ + { + "description": "Insert a document with auto encryption using Azure KMS provider", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + 
"encrypted_string_azure": "string0" + } + }, + "object": "coll" + } + ], + "outcome": [ + { + "documents": [ + { + "_id": 1, + "encrypted_string_azure": { + "$binary": { + "base64": "AQGVERPgAAAAAAAAAAAAAAAC5DbBSwPwfSlBrDtRuglvNvCXD1KzDuCKY2P+4bRFtHDjpTOE2XuytPAUaAbXf1orsPq59PVZmsbTZbt2CB8qaQ==", + "subType": "06" + } + } + } + ], + "collectionName": "default", + "databaseName": "default" + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "AZURE+AAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault" + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encrypted_string_azure": { + "$binary": { + "base64": "AQGVERPgAAAAAAAAAAAAAAAC5DbBSwPwfSlBrDtRuglvNvCXD1KzDuCKY2P+4bRFtHDjpTOE2XuytPAUaAbXf1orsPq59PVZmsbTZbt2CB8qaQ==", + "subType": "06" + } + } + } + ], + "ordered": true + }, + "commandName": "insert" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/badQueries.json b/test/client-side-encryption/spec/unified/badQueries.json new file mode 100644 index 0000000000..7a4f30d5b7 --- /dev/null +++ b/test/client-side-encryption/spec/unified/badQueries.json @@ -0,0 +1,1393 @@ +{ + "description": "badQueries", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "4.1.10", + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "collection": { + "id": "coll_with_encrypted_id", + "database": "db", + "collectionName": "coll_with_encrypted_id" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + } + }, + { + "_id": 2, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACDdw4KFz3ZLquhsbt7RmDjD0N67n0uSXx7IGnQNCLeIKvot6s/ouI21Eo84IOtb6lhwUNPlSEBNY0/hbszWAKJg==", + "subType": "06" + } + } + } + ], + "createOptions": { + "validator": { + "$jsonSchema": { + "properties": { + "encrypted_w_altname": { + "encrypt": { + "keyId": "/altname", + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + }, + "random": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": 
"04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string_equivalent": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + } + } + } + }, + { + "databaseName": "default", + "collectionName": "coll_with_encrypted_id", + "documents": [], + "createOptions": { + "validator": { + "$jsonSchema": { + "properties": { + "_id": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + } + } + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "status": 1, + "_id": { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + }, + "masterKey": { + "provider": "aws", + "key": "arn:aws:kms:us-east-1:579766882180:key/89fcc2c4-08b0-4bd9-9f25-e30687b580d0", + "region": "us-east-1" + }, + "updateDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyMaterial": { + "$binary": { + "base64": "AQICAHhQNmWG2CzOm1dq3kWLM+iDUZhEqnhJwH9wZVpuZ94A8gEqnsxXlR51T5EbEVezUqqKAAAAwjCBvwYJKoZIhvcNAQcGoIGxMIGuAgEAMIGoBgkqhkiG9w0BBwEwHgYJYIZIAWUDBAEuMBEEDHa4jo6yp0Z18KgbUgIBEIB74sKxWtV8/YHje5lv5THTl0HIbhSwM6EqRlmBiFFatmEWaeMk4tO4xBX65eq670I5TWPSLMzpp8ncGHMmvHqRajNBnmFtbYxN3E3/WjxmdbOOe+OXpnGJPcGsftc7cB2shRfA4lICPnE26+oVNXT6p0Lo20nY5XC7jyCO", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyAltNames": [ + "altname", + "another_altname" + ] + } + ] + } + ], + "tests": [ + { + "description": "$text unconditionally fails", + "operations": [ + { + "name": "find", + "arguments": { + "filter": { + "$text": { + "$search": "search text" + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "Unsupported match expression operator for encryption" + } + } + ] + }, + { + "description": "$where unconditionally fails", + "operations": [ + { + "name": "find", + "arguments": { + "filter": { + "$where": { + "$code": "function() { return true }" + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "Unsupported match expression operator for encryption" + } + } + ] + }, + { + "description": "$bit operators succeed on unencrypted, error on encrypted", + "operations": [ + { + "name": "find", + "arguments": { + "filter": { + "unencrypted": { + "$bitsAllClear": 35 + } + } + }, + "object": "coll", + "expectResult": [] + }, + { + "name": "find", + "arguments": { + "filter": { + "encrypted_string": { + "$bitsAllClear": 35 + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "Invalid match expression operator on encrypted field" + } + }, + { + "name": "find", + "arguments": { + "filter": { + "unencrypted": { + "$bitsAllSet": 35 + } + } + }, + "object": "coll", + "expectResult": [] + }, + { + "name": "find", + "arguments": { + "filter": { + "encrypted_string": { + "$bitsAllSet": 35 + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "Invalid match expression operator on encrypted field" + } + }, + { + "name": "find", + "arguments": { + "filter": { + "unencrypted": { + "$bitsAnyClear": 35 + } + } + }, + "object": "coll", + "expectResult": [] + }, + { + "name": "find", + "arguments": { + "filter": { + "encrypted_string": { + "$bitsAnyClear": 
35 + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "Invalid match expression operator on encrypted field" + } + }, + { + "name": "find", + "arguments": { + "filter": { + "unencrypted": { + "$bitsAnySet": 35 + } + } + }, + "object": "coll", + "expectResult": [] + }, + { + "name": "find", + "arguments": { + "filter": { + "encrypted_string": { + "$bitsAnySet": 35 + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "Invalid match expression operator on encrypted field" + } + } + ] + }, + { + "description": "geo operators succeed on unencrypted, error on encrypted", + "operations": [ + { + "name": "find", + "arguments": { + "filter": { + "unencrypted": { + "$near": [ + 0, + 0 + ] + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "unable to find index" + } + }, + { + "name": "find", + "arguments": { + "filter": { + "encrypted_string": { + "$near": [ + 0, + 0 + ] + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "Invalid match expression operator on encrypted field" + } + }, + { + "name": "find", + "arguments": { + "filter": { + "unencrypted": { + "$nearSphere": [ + 0, + 0 + ] + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "unable to find index" + } + }, + { + "name": "find", + "arguments": { + "filter": { + "encrypted_string": { + "$nearSphere": [ + 0, + 0 + ] + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "Invalid match expression operator on encrypted field" + } + }, + { + "name": "find", + "arguments": { + "filter": { + "unencrypted": { + "$geoIntersects": { + "$geometry": { + "type": "Polygon", + "coordinates": [ + [ + [ + 0, + 0 + ], + [ + 1, + 0 + ], + [ + 1, + 1 + ], + [ + 0, + 0 + ] + ] + ] + } + } + } + } + }, + "object": "coll", + "expectResult": [] + }, + { + "name": "find", + "arguments": { + "filter": { + "encrypted_string": { + "$geoIntersects": { + "$geometry": { + "type": "Polygon", + "coordinates": [ + [ + [ + 0, + 0 + ], + [ + 1, + 0 + ], + [ + 1, + 1 + ], + [ + 0, + 0 + ] + ] + ] + } + } + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "Invalid match expression operator on encrypted field" + } + }, + { + "name": "find", + "arguments": { + "filter": { + "unencrypted": { + "$geoWithin": { + "$geometry": { + "type": "Polygon", + "coordinates": [ + [ + [ + 0, + 0 + ], + [ + 1, + 0 + ], + [ + 1, + 1 + ], + [ + 0, + 0 + ] + ] + ] + } + } + } + } + }, + "object": "coll", + "expectResult": [] + }, + { + "name": "find", + "arguments": { + "filter": { + "encrypted_string": { + "$geoWithin": { + "$geometry": { + "type": "Polygon", + "coordinates": [ + [ + [ + 0, + 0 + ], + [ + 1, + 0 + ], + [ + 1, + 1 + ], + [ + 0, + 0 + ] + ] + ] + } + } + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "Invalid match expression operator on encrypted field" + } + } + ] + }, + { + "description": "inequality operators succeed on unencrypted, error on encrypted", + "operations": [ + { + "name": "find", + "arguments": { + "filter": { + "unencrypted": { + "$gt": 1 + } + } + }, + "object": "coll", + "expectResult": [] + }, + { + "name": "find", + "arguments": { + "filter": { + "encrypted_string": { + "$gt": 1 + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "Invalid match expression operator on encrypted field" + } + }, + { + "name": "find", + "arguments": { + "filter": { + "unencrypted": { + "$lt": 1 + } + } + }, + "object": "coll", + "expectResult": [] + }, + { + "name": "find", + "arguments": { + "filter": { + 
"encrypted_string": { + "$lt": 1 + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "Invalid match expression operator on encrypted field" + } + }, + { + "name": "find", + "arguments": { + "filter": { + "unencrypted": { + "$gte": 1 + } + } + }, + "object": "coll", + "expectResult": [] + }, + { + "name": "find", + "arguments": { + "filter": { + "encrypted_string": { + "$gte": 1 + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "Invalid match expression operator on encrypted field" + } + }, + { + "name": "find", + "arguments": { + "filter": { + "unencrypted": { + "$lte": 1 + } + } + }, + "object": "coll", + "expectResult": [] + }, + { + "name": "find", + "arguments": { + "filter": { + "encrypted_string": { + "$lte": 1 + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "Invalid match expression operator on encrypted field" + } + } + ] + }, + { + "description": "other misc operators succeed on unencrypted, error on encrypted", + "operations": [ + { + "name": "find", + "arguments": { + "filter": { + "unencrypted": { + "$mod": [ + 3, + 1 + ] + } + } + }, + "object": "coll", + "expectResult": [] + }, + { + "name": "find", + "arguments": { + "filter": { + "encrypted_string": { + "$mod": [ + 3, + 1 + ] + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "Invalid match expression operator on encrypted field" + } + }, + { + "name": "find", + "arguments": { + "filter": { + "unencrypted": { + "$regex": "pattern", + "$options": "" + } + } + }, + "object": "coll", + "expectResult": [] + }, + { + "name": "find", + "arguments": { + "filter": { + "encrypted_string": { + "$regex": "pattern", + "$options": "" + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "Invalid match expression operator on encrypted field" + } + }, + { + "name": "find", + "arguments": { + "filter": { + "unencrypted": { + "$size": 2 + } + } + }, + "object": "coll", + "expectResult": [] + }, + { + "name": "find", + "arguments": { + "filter": { + "encrypted_string": { + "$size": 2 + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "Invalid match expression operator on encrypted field" + } + }, + { + "name": "find", + "arguments": { + "filter": { + "unencrypted": { + "$eq": null + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encrypted_string": "string0" + }, + { + "_id": 2, + "encrypted_string": "string1" + } + ] + }, + { + "name": "find", + "arguments": { + "filter": { + "encrypted_string": { + "$eq": null + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "Illegal equality to null predicate for encrypted field" + } + }, + { + "name": "find", + "arguments": { + "filter": { + "unencrypted": { + "$in": [ + null + ] + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encrypted_string": "string0" + }, + { + "_id": 2, + "encrypted_string": "string1" + } + ] + }, + { + "name": "find", + "arguments": { + "filter": { + "encrypted_string": { + "$in": [ + null + ] + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "Illegal equality to null inside $in against an encrypted field" + } + } + ] + }, + { + "description": "$addToSet succeeds on unencrypted, error on encrypted", + "operations": [ + { + "name": "updateOne", + "arguments": { + "filter": {}, + "update": { + "$addToSet": { + "unencrypted": [ + "a" + ] + } + } + }, + "object": "coll", + "expectResult": { + "matchedCount": 1, + "modifiedCount": 1, + "upsertedCount": 0 + } + }, + { + "name": 
"updateOne", + "arguments": { + "filter": {}, + "update": { + "$addToSet": { + "encrypted_string": [ + "a" + ] + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "$addToSet not allowed on encrypted values" + } + } + ] + }, + { + "description": "$inc succeeds on unencrypted, error on encrypted", + "operations": [ + { + "name": "updateOne", + "arguments": { + "filter": {}, + "update": { + "$inc": { + "unencrypted": 1 + } + } + }, + "object": "coll", + "expectResult": { + "matchedCount": 1, + "modifiedCount": 1, + "upsertedCount": 0 + } + }, + { + "name": "updateOne", + "arguments": { + "filter": {}, + "update": { + "$inc": { + "encrypted_string": 1 + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "$inc and $mul not allowed on encrypted values" + } + } + ] + }, + { + "description": "$mul succeeds on unencrypted, error on encrypted", + "operations": [ + { + "name": "updateOne", + "arguments": { + "filter": {}, + "update": { + "$mul": { + "unencrypted": 1 + } + } + }, + "object": "coll", + "expectResult": { + "matchedCount": 1, + "modifiedCount": 1, + "upsertedCount": 0 + } + }, + { + "name": "updateOne", + "arguments": { + "filter": {}, + "update": { + "$mul": { + "encrypted_string": 1 + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "$inc and $mul not allowed on encrypted values" + } + } + ] + }, + { + "description": "$max succeeds on unencrypted, error on encrypted", + "operations": [ + { + "name": "updateOne", + "arguments": { + "filter": {}, + "update": { + "$max": { + "unencrypted": 1 + } + } + }, + "object": "coll", + "expectResult": { + "matchedCount": 1, + "modifiedCount": 1, + "upsertedCount": 0 + } + }, + { + "name": "updateOne", + "arguments": { + "filter": {}, + "update": { + "$max": { + "encrypted_string": 1 + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "$max and $min not allowed on encrypted values" + } + } + ] + }, + { + "description": "$min succeeds on unencrypted, error on encrypted", + "operations": [ + { + "name": "updateOne", + "arguments": { + "filter": {}, + "update": { + "$min": { + "unencrypted": 1 + } + } + }, + "object": "coll", + "expectResult": { + "matchedCount": 1, + "modifiedCount": 1, + "upsertedCount": 0 + } + }, + { + "name": "updateOne", + "arguments": { + "filter": {}, + "update": { + "$min": { + "encrypted_string": 1 + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "$max and $min not allowed on encrypted values" + } + } + ] + }, + { + "description": "$currentDate succeeds on unencrypted, error on encrypted", + "operations": [ + { + "name": "updateOne", + "arguments": { + "filter": {}, + "update": { + "$currentDate": { + "unencrypted": true + } + } + }, + "object": "coll", + "expectResult": { + "matchedCount": 1, + "modifiedCount": 1, + "upsertedCount": 0 + } + }, + { + "name": "updateOne", + "arguments": { + "filter": {}, + "update": { + "$currentDate": { + "encrypted_string": true + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "$currentDate not allowed on encrypted values" + } + } + ] + }, + { + "description": "$pop succeeds on unencrypted, error on encrypted", + "operations": [ + { + "name": "updateOne", + "arguments": { + "filter": {}, + "update": { + "$pop": { + "unencrypted": 1 + } + } + }, + "object": "coll", + "expectResult": { + "matchedCount": 1, + "modifiedCount": 0, + "upsertedCount": 0 + } + }, + { + "name": "updateOne", + "arguments": { + "filter": {}, + "update": { + "$pop": { + "encrypted_string": 1 + } + } + 
}, + "object": "coll", + "expectError": { + "errorContains": "$pop not allowed on encrypted values" + } + } + ] + }, + { + "description": "$pull succeeds on unencrypted, error on encrypted", + "operations": [ + { + "name": "updateOne", + "arguments": { + "filter": {}, + "update": { + "$pull": { + "unencrypted": 1 + } + } + }, + "object": "coll", + "expectResult": { + "matchedCount": 1, + "modifiedCount": 0, + "upsertedCount": 0 + } + }, + { + "name": "updateOne", + "arguments": { + "filter": {}, + "update": { + "$pull": { + "encrypted_string": 1 + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "$pull not allowed on encrypted values" + } + } + ] + }, + { + "description": "$pullAll succeeds on unencrypted, error on encrypted", + "operations": [ + { + "name": "updateOne", + "arguments": { + "filter": {}, + "update": { + "$pullAll": { + "unencrypted": [ + 1 + ] + } + } + }, + "object": "coll", + "expectResult": { + "matchedCount": 1, + "modifiedCount": 0, + "upsertedCount": 0 + } + }, + { + "name": "updateOne", + "arguments": { + "filter": {}, + "update": { + "$pullAll": { + "encrypted_string": [ + 1 + ] + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "$pullAll not allowed on encrypted values" + } + } + ] + }, + { + "description": "$push succeeds on unencrypted, error on encrypted", + "operations": [ + { + "name": "updateOne", + "arguments": { + "filter": {}, + "update": { + "$push": { + "unencrypted": 1 + } + } + }, + "object": "coll", + "expectResult": { + "matchedCount": 1, + "modifiedCount": 1, + "upsertedCount": 0 + } + }, + { + "name": "updateOne", + "arguments": { + "filter": {}, + "update": { + "$push": { + "encrypted_string": 1 + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "$push not allowed on encrypted values" + } + } + ] + }, + { + "description": "array filters on encrypted fields does not error in mongocryptd, but errors in mongod", + "operations": [ + { + "name": "updateOne", + "arguments": { + "filter": {}, + "update": { + "$set": { + "encrypted_string.$[i].x": 1 + } + }, + "arrayFilters": [ + { + "i.x": 1 + } + ] + }, + "object": "coll", + "expectError": { + "errorContains": "Array update operations not allowed on encrypted values" + } + } + ] + }, + { + "description": "positional operator succeeds on unencrypted, errors on encrypted", + "operations": [ + { + "name": "updateOne", + "arguments": { + "filter": { + "unencrypted": 1 + }, + "update": { + "$set": { + "unencrypted.$": 1 + } + } + }, + "object": "coll", + "expectResult": { + "matchedCount": 0, + "modifiedCount": 0, + "upsertedCount": 0 + } + }, + { + "name": "updateOne", + "arguments": { + "filter": { + "encrypted_string": "abc" + }, + "update": { + "$set": { + "encrypted_string.$": "abc" + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "Cannot encrypt fields below '$' positional update operator" + } + } + ] + }, + { + "description": "an update that would produce an array on an encrypted field errors", + "operations": [ + { + "name": "updateOne", + "arguments": { + "filter": {}, + "update": { + "$set": { + "encrypted_string": [ + 1, + 2 + ] + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "Cannot encrypt element of type" + } + } + ] + }, + { + "description": "an insert with encrypted field on _id errors", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1 + } + }, + "object": "coll_with_encrypted_id", + "expectError": { + "errorContains": "Invalid schema containing the 
'encrypt' keyword." + } + } + ] + }, + { + "description": "an insert with an array value for an encrypted field fails", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "encrypted_string": [ + "123", + "456" + ] + } + }, + "object": "coll", + "expectError": { + "errorContains": "Cannot encrypt element of type" + } + } + ] + }, + { + "description": "an insert with a Timestamp(0,0) value in the top-level fails", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "random": { + "$timestamp": { + "t": 0, + "i": 0 + } + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "A command that inserts cannot supply Timestamp(0, 0) for an encrypted" + } + } + ] + }, + { + "description": "distinct with the key referring to a field where the keyID is a JSON Pointer errors", + "operations": [ + { + "name": "distinct", + "arguments": { + "filter": {}, + "fieldName": "encrypted_w_altname" + }, + "object": "coll", + "expectError": { + "errorContains": "The distinct key is not allowed to be marked for encryption with a non-UUID keyId" + } + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/badSchema.json b/test/client-side-encryption/spec/unified/badSchema.json new file mode 100644 index 0000000000..af93d659d4 --- /dev/null +++ b/test/client-side-encryption/spec/unified/badSchema.json @@ -0,0 +1,393 @@ +{ + "description": "badSchema", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "4.1.10", + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "schemaMap": { + "default.default": { + "properties": { + "encrypted_string": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + } + }, + "bsonType": "array" + } + }, + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db0", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll0", + "database": "db0", + "collectionName": "default" + } + }, + { + "client": { + "id": "client1", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "schemaMap": { + "default.default": { + "properties": { + "foo": { + "properties": { + "bar": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + } + } + } + } + } + }, + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db1", + "client": "client1", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll1", + "database": "db1", + "collectionName": "default" + } + }, + { + "client": { + "id": "client2", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "schemaMap": { + "default.default": { + "properties": { + "encrypted_string": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + 
"bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + }, + "bsonType": "object" + } + } + } + }, + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db2", + "client": "client2", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll2", + "database": "db2", + "collectionName": "default" + } + }, + { + "client": { + "id": "client3", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "schemaMap": { + "default.default": { + "anyOf": [ + { + "properties": { + "encrypted_string": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + } + } + } + ] + } + }, + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db3", + "client": "client3", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll3", + "database": "db3", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [] + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "status": 1, + "_id": { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + }, + "masterKey": { + "provider": "aws", + "key": "arn:aws:kms:us-east-1:579766882180:key/89fcc2c4-08b0-4bd9-9f25-e30687b580d0", + "region": "us-east-1" + }, + "updateDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyMaterial": { + "$binary": { + "base64": "AQICAHhQNmWG2CzOm1dq3kWLM+iDUZhEqnhJwH9wZVpuZ94A8gEqnsxXlR51T5EbEVezUqqKAAAAwjCBvwYJKoZIhvcNAQcGoIGxMIGuAgEAMIGoBgkqhkiG9w0BBwEwHgYJYIZIAWUDBAEuMBEEDHa4jo6yp0Z18KgbUgIBEIB74sKxWtV8/YHje5lv5THTl0HIbhSwM6EqRlmBiFFatmEWaeMk4tO4xBX65eq670I5TWPSLMzpp8ncGHMmvHqRajNBnmFtbYxN3E3/WjxmdbOOe+OXpnGJPcGsftc7cB2shRfA4lICPnE26+oVNXT6p0Lo20nY5XC7jyCO", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyAltNames": [ + "altname", + "another_altname" + ] + } + ] + } + ], + "tests": [ + { + "description": "Schema with an encrypted field in an array", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encrypted_string": "string0" + } + }, + "object": "coll0", + "expectError": { + "errorContains": "Invalid schema" + } + } + ], + "outcome": [ + { + "documents": [], + "collectionName": "default", + "databaseName": "default" + } + ] + }, + { + "description": "Schema without specifying parent object types", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encrypted_string": "string0" + } + }, + "object": "coll1", + "expectError": { + "errorContains": "Invalid schema" + } + } + ], + "outcome": [ + { + "documents": [], + "collectionName": "default", + "databaseName": "default" + } + ] + }, + { + "description": "Schema with siblings of encrypt document", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encrypted_string": "string0" + } + }, + "object": "coll2", + "expectError": { + "errorContains": "'encrypt' cannot be used in conjunction with 'bsonType'" + } + } + 
], + "outcome": [ + { + "documents": [], + "collectionName": "default", + "databaseName": "default" + } + ] + }, + { + "description": "Schema with logical keywords", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encrypted_string": "string0" + } + }, + "object": "coll3", + "expectError": { + "errorContains": "Invalid schema" + } + } + ], + "outcome": [ + { + "documents": [], + "collectionName": "default", + "databaseName": "default" + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/basic.json b/test/client-side-encryption/spec/unified/basic.json new file mode 100644 index 0000000000..5522f585da --- /dev/null +++ b/test/client-side-encryption/spec/unified/basic.json @@ -0,0 +1,431 @@ +{ + "description": "basic", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "4.1.10", + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted", + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll_unencrypted", + "database": "db_unencrypted", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "validator": { + "$jsonSchema": { + "properties": { + "encrypted_w_altname": { + "encrypt": { + "keyId": "/altname", + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + }, + "random": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string_equivalent": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + } + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "status": 1, + "_id": { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + }, + "masterKey": { + "provider": "aws", + "key": "arn:aws:kms:us-east-1:579766882180:key/89fcc2c4-08b0-4bd9-9f25-e30687b580d0", + "region": "us-east-1" + }, + "updateDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyMaterial": { + "$binary": { + "base64": 
"AQICAHhQNmWG2CzOm1dq3kWLM+iDUZhEqnhJwH9wZVpuZ94A8gEqnsxXlR51T5EbEVezUqqKAAAAwjCBvwYJKoZIhvcNAQcGoIGxMIGuAgEAMIGoBgkqhkiG9w0BBwEwHgYJYIZIAWUDBAEuMBEEDHa4jo6yp0Z18KgbUgIBEIB74sKxWtV8/YHje5lv5THTl0HIbhSwM6EqRlmBiFFatmEWaeMk4tO4xBX65eq670I5TWPSLMzpp8ncGHMmvHqRajNBnmFtbYxN3E3/WjxmdbOOe+OXpnGJPcGsftc7cB2shRfA4lICPnE26+oVNXT6p0Lo20nY5XC7jyCO", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyAltNames": [ + "altname", + "another_altname" + ] + } + ] + } + ], + "tests": [ + { + "description": "Insert with deterministic encryption, then find it", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encrypted_string": "string0" + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "_id": 1 + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encrypted_string": "string0" + } + ] + } + ], + "outcome": [ + { + "documents": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + } + } + ], + "collectionName": "default", + "databaseName": "default" + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + } + } + ], + "ordered": true + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "default", + "filter": { + "_id": 1 + } + }, + "commandName": "find" + } + } + ] + } + ] + }, + { + "description": "Insert with randomized encryption, then find it", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "random": "123" + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "_id": 1 + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "random": "123" + } + ] + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": 1, + "random": { + "$$type": "binData" + } + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { 
+ "insert": "default", + "documents": [ + { + "_id": 1, + "random": { + "$$type": "binData" + } + } + ], + "ordered": true + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "default", + "filter": { + "_id": 1 + } + }, + "commandName": "find" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/bulk.json b/test/client-side-encryption/spec/unified/bulk.json new file mode 100644 index 0000000000..90922b88d0 --- /dev/null +++ b/test/client-side-encryption/spec/unified/bulk.json @@ -0,0 +1,407 @@ +{ + "description": "bulk", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "4.1.10", + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted", + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll_unencrypted", + "database": "db_unencrypted", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "validator": { + "$jsonSchema": { + "properties": { + "encrypted_w_altname": { + "encrypt": { + "keyId": "/altname", + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + }, + "random": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string_equivalent": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + } + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "status": 1, + "_id": { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + }, + "masterKey": { + "provider": "aws", + "key": "arn:aws:kms:us-east-1:579766882180:key/89fcc2c4-08b0-4bd9-9f25-e30687b580d0", + "region": "us-east-1" + }, + "updateDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyMaterial": { + "$binary": { + "base64": "AQICAHhQNmWG2CzOm1dq3kWLM+iDUZhEqnhJwH9wZVpuZ94A8gEqnsxXlR51T5EbEVezUqqKAAAAwjCBvwYJKoZIhvcNAQcGoIGxMIGuAgEAMIGoBgkqhkiG9w0BBwEwHgYJYIZIAWUDBAEuMBEEDHa4jo6yp0Z18KgbUgIBEIB74sKxWtV8/YHje5lv5THTl0HIbhSwM6EqRlmBiFFatmEWaeMk4tO4xBX65eq670I5TWPSLMzpp8ncGHMmvHqRajNBnmFtbYxN3E3/WjxmdbOOe+OXpnGJPcGsftc7cB2shRfA4lICPnE26+oVNXT6p0Lo20nY5XC7jyCO", + "subType": "00" + } + }, + "creationDate": { + 
"$date": { + "$numberLong": "1552949630483" + } + }, + "keyAltNames": [ + "altname", + "another_altname" + ] + } + ] + } + ], + "tests": [ + { + "description": "Bulk write with encryption", + "operations": [ + { + "name": "bulkWrite", + "arguments": { + "requests": [ + { + "insertOne": { + "document": { + "_id": 1, + "encrypted_string": "string0", + "random": "abc" + } + } + }, + { + "insertOne": { + "document": { + "_id": 2, + "encrypted_string": "string1" + } + } + }, + { + "updateOne": { + "filter": { + "encrypted_string": "string0" + }, + "update": { + "$set": { + "encrypted_string": "string1" + } + } + } + }, + { + "deleteOne": { + "filter": { + "$and": [ + { + "encrypted_string": "string1" + }, + { + "_id": 2 + } + ] + } + } + } + ], + "ordered": true + }, + "object": "coll" + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACDdw4KFz3ZLquhsbt7RmDjD0N67n0uSXx7IGnQNCLeIKvot6s/ouI21Eo84IOtb6lhwUNPlSEBNY0/hbszWAKJg==", + "subType": "06" + } + }, + "random": { + "$$type": "binData" + } + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + }, + "random": { + "$$type": "binData" + } + }, + { + "_id": 2, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACDdw4KFz3ZLquhsbt7RmDjD0N67n0uSXx7IGnQNCLeIKvot6s/ouI21Eo84IOtb6lhwUNPlSEBNY0/hbszWAKJg==", + "subType": "06" + } + } + } + ], + "ordered": true + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "update": "default", + "updates": [ + { + "q": { + "encrypted_string": { + "$eq": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + } + } + }, + "u": { + "$set": { + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACDdw4KFz3ZLquhsbt7RmDjD0N67n0uSXx7IGnQNCLeIKvot6s/ouI21Eo84IOtb6lhwUNPlSEBNY0/hbszWAKJg==", + "subType": "06" + } + } + } + }, + "multi": false, + "upsert": false + } + ], + "ordered": true + }, + "commandName": "update" + } + }, + { + "commandStartedEvent": { + "command": { + "delete": "default", + "deletes": [ + { + "q": { + "$and": [ + { + "encrypted_string": { + "$eq": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACDdw4KFz3ZLquhsbt7RmDjD0N67n0uSXx7IGnQNCLeIKvot6s/ouI21Eo84IOtb6lhwUNPlSEBNY0/hbszWAKJg==", + "subType": "06" + } + } + } + }, + { + "_id": { + "$eq": 2 + } + } + ] + }, + "limit": 1 + } + ], + "ordered": true + }, + "commandName": "delete" + } + } + ] + } + ] + } + ] +} diff --git 
a/test/client-side-encryption/spec/unified/bypassAutoEncryption.json b/test/client-side-encryption/spec/unified/bypassAutoEncryption.json new file mode 100644 index 0000000000..3254c43781 --- /dev/null +++ b/test/client-side-encryption/spec/unified/bypassAutoEncryption.json @@ -0,0 +1,403 @@ +{ + "description": "bypassAutoEncryption", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "4.1.10", + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "bypassAutoEncryption": true, + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + } + } + ], + "createOptions": { + "validator": { + "$jsonSchema": { + "properties": { + "encrypted_w_altname": { + "encrypt": { + "keyId": "/altname", + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + }, + "random": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string_equivalent": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + } + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "status": 1, + "_id": { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + }, + "masterKey": { + "provider": "aws", + "key": "arn:aws:kms:us-east-1:579766882180:key/89fcc2c4-08b0-4bd9-9f25-e30687b580d0", + "region": "us-east-1" + }, + "updateDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyMaterial": { + "$binary": { + "base64": "AQICAHhQNmWG2CzOm1dq3kWLM+iDUZhEqnhJwH9wZVpuZ94A8gEqnsxXlR51T5EbEVezUqqKAAAAwjCBvwYJKoZIhvcNAQcGoIGxMIGuAgEAMIGoBgkqhkiG9w0BBwEwHgYJYIZIAWUDBAEuMBEEDHa4jo6yp0Z18KgbUgIBEIB74sKxWtV8/YHje5lv5THTl0HIbhSwM6EqRlmBiFFatmEWaeMk4tO4xBX65eq670I5TWPSLMzpp8ncGHMmvHqRajNBnmFtbYxN3E3/WjxmdbOOe+OXpnGJPcGsftc7cB2shRfA4lICPnE26+oVNXT6p0Lo20nY5XC7jyCO", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyAltNames": [ + "altname", + "another_altname" + ] + } + ] + } + ], + "tests": [ + { + "description": "Insert with bypassAutoEncryption", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 2, + "encrypted_string": "string0" + }, + "bypassDocumentValidation": true + }, + 
"object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": {} + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encrypted_string": "string0" + }, + { + "_id": 2, + "encrypted_string": "string0" + } + ] + } + ], + "outcome": [ + { + "documents": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + } + }, + { + "_id": 2, + "encrypted_string": "string0" + } + ], + "collectionName": "default", + "databaseName": "default" + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 2, + "encrypted_string": "string0" + } + ], + "ordered": true + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "default", + "filter": {} + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + } + ] + } + ] + }, + { + "description": "Insert with bypassAutoEncryption for local schema", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 2, + "encrypted_string": "string0" + }, + "bypassDocumentValidation": true + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": {} + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encrypted_string": "string0" + }, + { + "_id": 2, + "encrypted_string": "string0" + } + ] + } + ], + "outcome": [ + { + "documents": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + } + }, + { + "_id": 2, + "encrypted_string": "string0" + } + ], + "collectionName": "default", + "databaseName": "default" + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 2, + "encrypted_string": "string0" + } + ], + "ordered": true + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "default", + "filter": {} + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/bypassedCommand.json b/test/client-side-encryption/spec/unified/bypassedCommand.json new file mode 100644 index 0000000000..b0c4c56322 --- /dev/null +++ b/test/client-side-encryption/spec/unified/bypassedCommand.json @@ -0,0 +1,147 @@ +{ + "description": "bypassedCommand", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "4.1.10", + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": 
"client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [] + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "status": 1, + "_id": { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + }, + "masterKey": { + "provider": "aws", + "key": "arn:aws:kms:us-east-1:579766882180:key/89fcc2c4-08b0-4bd9-9f25-e30687b580d0", + "region": "us-east-1" + }, + "updateDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyMaterial": { + "$binary": { + "base64": "AQICAHhQNmWG2CzOm1dq3kWLM+iDUZhEqnhJwH9wZVpuZ94A8gEqnsxXlR51T5EbEVezUqqKAAAAwjCBvwYJKoZIhvcNAQcGoIGxMIGuAgEAMIGoBgkqhkiG9w0BBwEwHgYJYIZIAWUDBAEuMBEEDHa4jo6yp0Z18KgbUgIBEIB74sKxWtV8/YHje5lv5THTl0HIbhSwM6EqRlmBiFFatmEWaeMk4tO4xBX65eq670I5TWPSLMzpp8ncGHMmvHqRajNBnmFtbYxN3E3/WjxmdbOOe+OXpnGJPcGsftc7cB2shRfA4lICPnE26+oVNXT6p0Lo20nY5XC7jyCO", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyAltNames": [ + "altname", + "another_altname" + ] + } + ] + } + ], + "tests": [ + { + "description": "ping is bypassed", + "operations": [ + { + "name": "runCommand", + "object": "db", + "arguments": { + "command": { + "ping": 1 + }, + "commandName": "ping" + } + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "ping": 1 + }, + "commandName": "ping" + } + } + ] + } + ] + }, + { + "description": "kill op is not bypassed", + "operations": [ + { + "name": "runCommand", + "object": "db", + "arguments": { + "command": { + "killOp": 1, + "op": 1234 + }, + "commandName": "killOp" + }, + "expectError": { + "errorContains": "command not supported for auto encryption: killOp" + } + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/count.json b/test/client-side-encryption/spec/unified/count.json new file mode 100644 index 0000000000..d44b3e827d --- /dev/null +++ b/test/client-side-encryption/spec/unified/count.json @@ -0,0 +1,293 @@ +{ + "description": "count", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "4.1.10", + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + } + }, + { + "_id": 2, + 
"encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + } + } + ], + "createOptions": { + "validator": { + "$jsonSchema": { + "properties": { + "encrypted_w_altname": { + "encrypt": { + "keyId": "/altname", + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + }, + "random": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string_equivalent": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + } + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "status": 1, + "_id": { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + }, + "masterKey": { + "provider": "aws", + "key": "arn:aws:kms:us-east-1:579766882180:key/89fcc2c4-08b0-4bd9-9f25-e30687b580d0", + "region": "us-east-1" + }, + "updateDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyMaterial": { + "$binary": { + "base64": "AQICAHhQNmWG2CzOm1dq3kWLM+iDUZhEqnhJwH9wZVpuZ94A8gEqnsxXlR51T5EbEVezUqqKAAAAwjCBvwYJKoZIhvcNAQcGoIGxMIGuAgEAMIGoBgkqhkiG9w0BBwEwHgYJYIZIAWUDBAEuMBEEDHa4jo6yp0Z18KgbUgIBEIB74sKxWtV8/YHje5lv5THTl0HIbhSwM6EqRlmBiFFatmEWaeMk4tO4xBX65eq670I5TWPSLMzpp8ncGHMmvHqRajNBnmFtbYxN3E3/WjxmdbOOe+OXpnGJPcGsftc7cB2shRfA4lICPnE26+oVNXT6p0Lo20nY5XC7jyCO", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyAltNames": [ + "altname", + "another_altname" + ] + } + ] + } + ], + "tests": [ + { + "description": "Count with deterministic encryption", + "operations": [ + { + "name": "countDocuments", + "arguments": { + "filter": { + "encrypted_string": "string0" + } + }, + "object": "coll", + "expectResult": 2 + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "aggregate": "default", + "cursor": {}, + "pipeline": [ + { + "$match": { + "encrypted_string": { + "$eq": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + } + } + } + }, + { + "$group": { + "_id": { + "$const": 1 + }, + "n": { + "$sum": { + "$const": 1 + } + } + } + } + ] + }, + "commandName": "aggregate" + } + } + ] + } + ] + }, + { + "description": "Count fails when filtering on a random 
encrypted field", + "operations": [ + { + "name": "countDocuments", + "arguments": { + "filter": { + "random": "abc" + } + }, + "object": "coll", + "expectError": { + "errorContains": "Cannot query on fields encrypted with the randomized encryption" + } + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/countDocuments.json b/test/client-side-encryption/spec/unified/countDocuments.json new file mode 100644 index 0000000000..c0202258b8 --- /dev/null +++ b/test/client-side-encryption/spec/unified/countDocuments.json @@ -0,0 +1,296 @@ +{ + "description": "countDocuments", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "4.1.10", + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + } + }, + { + "_id": 2, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACDdw4KFz3ZLquhsbt7RmDjD0N67n0uSXx7IGnQNCLeIKvot6s/ouI21Eo84IOtb6lhwUNPlSEBNY0/hbszWAKJg==", + "subType": "06" + } + } + } + ], + "createOptions": { + "validator": { + "$jsonSchema": { + "properties": { + "encrypted_w_altname": { + "encrypt": { + "keyId": "/altname", + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + }, + "random": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string_equivalent": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + } + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "status": 1, + "_id": { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + }, + "masterKey": { + "provider": "aws", + "key": "arn:aws:kms:us-east-1:579766882180:key/89fcc2c4-08b0-4bd9-9f25-e30687b580d0", + "region": "us-east-1" + }, + "updateDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyMaterial": { + "$binary": { + "base64": 
"AQICAHhQNmWG2CzOm1dq3kWLM+iDUZhEqnhJwH9wZVpuZ94A8gEqnsxXlR51T5EbEVezUqqKAAAAwjCBvwYJKoZIhvcNAQcGoIGxMIGuAgEAMIGoBgkqhkiG9w0BBwEwHgYJYIZIAWUDBAEuMBEEDHa4jo6yp0Z18KgbUgIBEIB74sKxWtV8/YHje5lv5THTl0HIbhSwM6EqRlmBiFFatmEWaeMk4tO4xBX65eq670I5TWPSLMzpp8ncGHMmvHqRajNBnmFtbYxN3E3/WjxmdbOOe+OXpnGJPcGsftc7cB2shRfA4lICPnE26+oVNXT6p0Lo20nY5XC7jyCO", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyAltNames": [ + "altname", + "another_altname" + ] + } + ] + } + ], + "tests": [ + { + "description": "countDocuments with deterministic encryption", + "skipReason": "waiting on SERVER-39395", + "operations": [ + { + "name": "countDocuments", + "arguments": { + "filter": { + "encrypted_string": "string0" + } + }, + "object": "coll", + "expectResult": 1 + } + ], + "outcome": [ + { + "documents": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + } + }, + { + "_id": 2, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACDdw4KFz3ZLquhsbt7RmDjD0N67n0uSXx7IGnQNCLeIKvot6s/ouI21Eo84IOtb6lhwUNPlSEBNY0/hbszWAKJg==", + "subType": "06" + } + } + } + ], + "collectionName": "default", + "databaseName": "default" + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "aggregate": "default", + "pipeline": [ + { + "$match": { + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + } + } + }, + { + "$group": { + "_id": 1, + "n": { + "$sum": 1 + } + } + } + ] + }, + "commandName": "aggregate" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/create-and-createIndexes.json b/test/client-side-encryption/spec/unified/create-and-createIndexes.json new file mode 100644 index 0000000000..5debd15945 --- /dev/null +++ b/test/client-side-encryption/spec/unified/create-and-createIndexes.json @@ -0,0 +1,121 @@ +{ + "description": "create-and-createIndexes", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "4.1.10", + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "unencryptedCollection" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": 
"unencryptedCollection", + "documents": [] + } + ], + "tests": [ + { + "description": "create is OK", + "operations": [ + { + "name": "dropCollection", + "object": "db", + "arguments": { + "collection": "unencryptedCollection" + } + }, + { + "name": "createCollection", + "object": "db", + "arguments": { + "collection": "unencryptedCollection", + "validator": { + "unencrypted_string": "foo" + } + } + }, + { + "name": "assertCollectionExists", + "object": "testRunner", + "arguments": { + "databaseName": "default", + "collectionName": "unencryptedCollection" + } + } + ] + }, + { + "description": "createIndexes is OK", + "operations": [ + { + "name": "dropCollection", + "object": "db", + "arguments": { + "collection": "unencryptedCollection" + } + }, + { + "name": "createCollection", + "object": "db", + "arguments": { + "collection": "unencryptedCollection" + } + }, + { + "name": "createIndex", + "object": "coll", + "arguments": { + "keys": { + "x": 1 + }, + "name": "name" + } + }, + { + "name": "assertIndexExists", + "object": "testRunner", + "arguments": { + "databaseName": "default", + "collectionName": "unencryptedCollection", + "indexName": "name" + } + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/delete.json b/test/client-side-encryption/spec/unified/delete.json new file mode 100644 index 0000000000..242bcdba8c --- /dev/null +++ b/test/client-side-encryption/spec/unified/delete.json @@ -0,0 +1,396 @@ +{ + "description": "delete", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "4.1.10", + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + } + }, + { + "_id": 2, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACDdw4KFz3ZLquhsbt7RmDjD0N67n0uSXx7IGnQNCLeIKvot6s/ouI21Eo84IOtb6lhwUNPlSEBNY0/hbszWAKJg==", + "subType": "06" + } + } + } + ], + "createOptions": { + "validator": { + "$jsonSchema": { + "properties": { + "encrypted_w_altname": { + "encrypt": { + "keyId": "/altname", + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + }, + "random": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string_equivalent": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + 
"algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + } + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "status": 1, + "_id": { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + }, + "masterKey": { + "provider": "aws", + "key": "arn:aws:kms:us-east-1:579766882180:key/89fcc2c4-08b0-4bd9-9f25-e30687b580d0", + "region": "us-east-1" + }, + "updateDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyMaterial": { + "$binary": { + "base64": "AQICAHhQNmWG2CzOm1dq3kWLM+iDUZhEqnhJwH9wZVpuZ94A8gEqnsxXlR51T5EbEVezUqqKAAAAwjCBvwYJKoZIhvcNAQcGoIGxMIGuAgEAMIGoBgkqhkiG9w0BBwEwHgYJYIZIAWUDBAEuMBEEDHa4jo6yp0Z18KgbUgIBEIB74sKxWtV8/YHje5lv5THTl0HIbhSwM6EqRlmBiFFatmEWaeMk4tO4xBX65eq670I5TWPSLMzpp8ncGHMmvHqRajNBnmFtbYxN3E3/WjxmdbOOe+OXpnGJPcGsftc7cB2shRfA4lICPnE26+oVNXT6p0Lo20nY5XC7jyCO", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyAltNames": [ + "altname", + "another_altname" + ] + } + ] + } + ], + "tests": [ + { + "description": "deleteOne with deterministic encryption", + "operations": [ + { + "name": "deleteOne", + "arguments": { + "filter": { + "encrypted_string": "string0" + } + }, + "object": "coll", + "expectResult": { + "deletedCount": 1 + } + } + ], + "outcome": [ + { + "documents": [ + { + "_id": 2, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACDdw4KFz3ZLquhsbt7RmDjD0N67n0uSXx7IGnQNCLeIKvot6s/ouI21Eo84IOtb6lhwUNPlSEBNY0/hbszWAKJg==", + "subType": "06" + } + } + } + ], + "collectionName": "default", + "databaseName": "default" + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "delete": "default", + "deletes": [ + { + "q": { + "encrypted_string": { + "$eq": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + } + } + }, + "limit": 1 + } + ], + "ordered": true + }, + "commandName": "delete" + } + } + ] + } + ] + }, + { + "description": "deleteMany with deterministic encryption", + "operations": [ + { + "name": "deleteMany", + "arguments": { + "filter": { + "encrypted_string": { + "$in": [ + "string0", + "string1" + ] + } + } + }, + "object": "coll", + "expectResult": { + "deletedCount": 2 + } + } + ], + "outcome": [ + { + "documents": [], + "collectionName": "default", + "databaseName": "default" + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } 
+ } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "delete": "default", + "deletes": [ + { + "q": { + "encrypted_string": { + "$in": [ + { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + }, + { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACDdw4KFz3ZLquhsbt7RmDjD0N67n0uSXx7IGnQNCLeIKvot6s/ouI21Eo84IOtb6lhwUNPlSEBNY0/hbszWAKJg==", + "subType": "06" + } + } + ] + } + }, + "limit": 0 + } + ], + "ordered": true + }, + "commandName": "delete" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/distinct.json b/test/client-side-encryption/spec/unified/distinct.json new file mode 100644 index 0000000000..a7ac0fc7f1 --- /dev/null +++ b/test/client-side-encryption/spec/unified/distinct.json @@ -0,0 +1,325 @@ +{ + "description": "distinct", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "4.1.10", + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + } + }, + { + "_id": 2, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + } + }, + { + "_id": 3, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACDdw4KFz3ZLquhsbt7RmDjD0N67n0uSXx7IGnQNCLeIKvot6s/ouI21Eo84IOtb6lhwUNPlSEBNY0/hbszWAKJg==", + "subType": "06" + } + } + } + ], + "createOptions": { + "validator": { + "$jsonSchema": { + "properties": { + "encrypted_w_altname": { + "encrypt": { + "keyId": "/altname", + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + }, + "random": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string_equivalent": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + } + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "status": 1, + "_id": { + "$binary": { + "base64": 
"AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + }, + "masterKey": { + "provider": "aws", + "key": "arn:aws:kms:us-east-1:579766882180:key/89fcc2c4-08b0-4bd9-9f25-e30687b580d0", + "region": "us-east-1" + }, + "updateDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyMaterial": { + "$binary": { + "base64": "AQICAHhQNmWG2CzOm1dq3kWLM+iDUZhEqnhJwH9wZVpuZ94A8gEqnsxXlR51T5EbEVezUqqKAAAAwjCBvwYJKoZIhvcNAQcGoIGxMIGuAgEAMIGoBgkqhkiG9w0BBwEwHgYJYIZIAWUDBAEuMBEEDHa4jo6yp0Z18KgbUgIBEIB74sKxWtV8/YHje5lv5THTl0HIbhSwM6EqRlmBiFFatmEWaeMk4tO4xBX65eq670I5TWPSLMzpp8ncGHMmvHqRajNBnmFtbYxN3E3/WjxmdbOOe+OXpnGJPcGsftc7cB2shRfA4lICPnE26+oVNXT6p0Lo20nY5XC7jyCO", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyAltNames": [ + "altname", + "another_altname" + ] + } + ] + } + ], + "tests": [ + { + "description": "distinct with deterministic encryption", + "operations": [ + { + "name": "distinct", + "arguments": { + "filter": { + "encrypted_string": "string0" + }, + "fieldName": "encrypted_string" + }, + "object": "coll", + "expectResult": [ + "string0" + ] + } + ], + "outcome": [ + { + "documents": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + } + }, + { + "_id": 2, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + } + }, + { + "_id": 3, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACDdw4KFz3ZLquhsbt7RmDjD0N67n0uSXx7IGnQNCLeIKvot6s/ouI21Eo84IOtb6lhwUNPlSEBNY0/hbszWAKJg==", + "subType": "06" + } + } + } + ], + "collectionName": "default", + "databaseName": "default" + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "distinct": "default", + "key": "encrypted_string", + "query": { + "encrypted_string": { + "$eq": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + } + } + } + }, + "commandName": "distinct" + } + } + ] + } + ] + }, + { + "description": "Distinct fails when filtering on a random encrypted field", + "operations": [ + { + "name": "distinct", + "arguments": { + "filter": { + "random": "abc" + }, + "fieldName": "encrypted_string" + }, + "object": "coll", + "expectError": { + "errorContains": "Cannot query on fields encrypted with the randomized encryption" + } + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/explain.json b/test/client-side-encryption/spec/unified/explain.json new file mode 100644 index 0000000000..667f921165 --- /dev/null +++ b/test/client-side-encryption/spec/unified/explain.json @@ -0,0 +1,293 @@ +{ + "description": "explain", + "schemaVersion": "1.25", + 
"runOnRequirements": [ + { + "minServerVersion": "7.0.0", + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + } + }, + { + "_id": 2, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACDdw4KFz3ZLquhsbt7RmDjD0N67n0uSXx7IGnQNCLeIKvot6s/ouI21Eo84IOtb6lhwUNPlSEBNY0/hbszWAKJg==", + "subType": "06" + } + } + } + ], + "createOptions": { + "validator": { + "$jsonSchema": { + "properties": { + "encrypted_w_altname": { + "encrypt": { + "keyId": "/altname", + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + }, + "random": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string_equivalent": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + } + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "status": 1, + "_id": { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + }, + "masterKey": { + "provider": "aws", + "key": "arn:aws:kms:us-east-1:579766882180:key/89fcc2c4-08b0-4bd9-9f25-e30687b580d0", + "region": "us-east-1" + }, + "updateDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyMaterial": { + "$binary": { + "base64": "AQICAHhQNmWG2CzOm1dq3kWLM+iDUZhEqnhJwH9wZVpuZ94A8gEqnsxXlR51T5EbEVezUqqKAAAAwjCBvwYJKoZIhvcNAQcGoIGxMIGuAgEAMIGoBgkqhkiG9w0BBwEwHgYJYIZIAWUDBAEuMBEEDHa4jo6yp0Z18KgbUgIBEIB74sKxWtV8/YHje5lv5THTl0HIbhSwM6EqRlmBiFFatmEWaeMk4tO4xBX65eq670I5TWPSLMzpp8ncGHMmvHqRajNBnmFtbYxN3E3/WjxmdbOOe+OXpnGJPcGsftc7cB2shRfA4lICPnE26+oVNXT6p0Lo20nY5XC7jyCO", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyAltNames": [ + "altname", + "another_altname" + ] + } + ] + } + ], + "tests": [ + { + "description": "Explain a find with deterministic encryption", + "operations": [ + { + "name": "runCommand", + "object": "db", + "arguments": { + "command": { + "explain": { + "find": "default", + "filter": { + "encrypted_string": "string1" + } + } + }, + "commandName": "explain" + } + } + ], + "outcome": [ + { + "documents": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": 
"AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + } + }, + { + "_id": 2, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACDdw4KFz3ZLquhsbt7RmDjD0N67n0uSXx7IGnQNCLeIKvot6s/ouI21Eo84IOtb6lhwUNPlSEBNY0/hbszWAKJg==", + "subType": "06" + } + } + } + ], + "collectionName": "default", + "databaseName": "default" + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "explain": { + "find": "default", + "filter": { + "encrypted_string": { + "$eq": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACDdw4KFz3ZLquhsbt7RmDjD0N67n0uSXx7IGnQNCLeIKvot6s/ouI21Eo84IOtb6lhwUNPlSEBNY0/hbszWAKJg==", + "subType": "06" + } + } + } + } + }, + "verbosity": "allPlansExecution" + }, + "commandName": "explain" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/find.json b/test/client-side-encryption/spec/unified/find.json new file mode 100644 index 0000000000..7f358d9c08 --- /dev/null +++ b/test/client-side-encryption/spec/unified/find.json @@ -0,0 +1,458 @@ +{ + "description": "find", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "4.1.10", + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + } + }, + { + "_id": 2, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACDdw4KFz3ZLquhsbt7RmDjD0N67n0uSXx7IGnQNCLeIKvot6s/ouI21Eo84IOtb6lhwUNPlSEBNY0/hbszWAKJg==", + "subType": "06" + } + }, + "random": { + "$binary": { + "base64": "AgAAAAAAAAAAAAAAAAAAAAACyfp+lXvKOi7f5vh6ZsCijLEaXFKq1X06RmyS98ZvmMQGixTw8HM1f/bGxZjGwvYwjXOkIEb7Exgb8p2KCDI5TQ==", + "subType": "06" + } + } + } + ], + "createOptions": { + "validator": { + "$jsonSchema": { + "properties": { + "encrypted_w_altname": { + "encrypt": { + "keyId": "/altname", + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": 
"AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + }, + "random": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string_equivalent": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + } + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "status": 1, + "_id": { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + }, + "masterKey": { + "provider": "aws", + "key": "arn:aws:kms:us-east-1:579766882180:key/89fcc2c4-08b0-4bd9-9f25-e30687b580d0", + "region": "us-east-1" + }, + "updateDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyMaterial": { + "$binary": { + "base64": "AQICAHhQNmWG2CzOm1dq3kWLM+iDUZhEqnhJwH9wZVpuZ94A8gEqnsxXlR51T5EbEVezUqqKAAAAwjCBvwYJKoZIhvcNAQcGoIGxMIGuAgEAMIGoBgkqhkiG9w0BBwEwHgYJYIZIAWUDBAEuMBEEDHa4jo6yp0Z18KgbUgIBEIB74sKxWtV8/YHje5lv5THTl0HIbhSwM6EqRlmBiFFatmEWaeMk4tO4xBX65eq670I5TWPSLMzpp8ncGHMmvHqRajNBnmFtbYxN3E3/WjxmdbOOe+OXpnGJPcGsftc7cB2shRfA4lICPnE26+oVNXT6p0Lo20nY5XC7jyCO", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyAltNames": [ + "altname", + "another_altname" + ] + } + ] + } + ], + "tests": [ + { + "description": "Find with deterministic encryption", + "operations": [ + { + "name": "find", + "arguments": { + "filter": { + "encrypted_string": "string0" + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encrypted_string": "string0" + } + ] + } + ], + "outcome": [ + { + "documents": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + } + }, + { + "_id": 2, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACDdw4KFz3ZLquhsbt7RmDjD0N67n0uSXx7IGnQNCLeIKvot6s/ouI21Eo84IOtb6lhwUNPlSEBNY0/hbszWAKJg==", + "subType": "06" + } + }, + "random": { + "$binary": { + "base64": "AgAAAAAAAAAAAAAAAAAAAAACyfp+lXvKOi7f5vh6ZsCijLEaXFKq1X06RmyS98ZvmMQGixTw8HM1f/bGxZjGwvYwjXOkIEb7Exgb8p2KCDI5TQ==", + "subType": "06" + } + } + } + ], + "collectionName": "default", + "databaseName": "default" + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "default", + "filter": { + "encrypted_string": { + "$eq": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + } + } + } + }, + "commandName": "find" + } + } + ] + } + ] + }, + { + "description": "Find with $in with deterministic 
encryption", + "operations": [ + { + "name": "find", + "arguments": { + "filter": { + "encrypted_string": { + "$in": [ + "string0", + "string1" + ] + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encrypted_string": "string0" + }, + { + "_id": 2, + "encrypted_string": "string1", + "random": "abc" + } + ] + } + ], + "outcome": [ + { + "documents": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + } + }, + { + "_id": 2, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACDdw4KFz3ZLquhsbt7RmDjD0N67n0uSXx7IGnQNCLeIKvot6s/ouI21Eo84IOtb6lhwUNPlSEBNY0/hbszWAKJg==", + "subType": "06" + } + }, + "random": { + "$binary": { + "base64": "AgAAAAAAAAAAAAAAAAAAAAACyfp+lXvKOi7f5vh6ZsCijLEaXFKq1X06RmyS98ZvmMQGixTw8HM1f/bGxZjGwvYwjXOkIEb7Exgb8p2KCDI5TQ==", + "subType": "06" + } + } + } + ], + "collectionName": "default", + "databaseName": "default" + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "default", + "filter": { + "encrypted_string": { + "$in": [ + { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + }, + { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACDdw4KFz3ZLquhsbt7RmDjD0N67n0uSXx7IGnQNCLeIKvot6s/ouI21Eo84IOtb6lhwUNPlSEBNY0/hbszWAKJg==", + "subType": "06" + } + } + ] + } + } + }, + "commandName": "find" + } + } + ] + } + ] + }, + { + "description": "Find fails when filtering on a random encrypted field", + "operations": [ + { + "name": "find", + "arguments": { + "filter": { + "random": "abc" + } + }, + "object": "coll", + "expectError": { + "errorContains": "Cannot query on fields encrypted with the randomized encryption" + } + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/findOneAndDelete.json b/test/client-side-encryption/spec/unified/findOneAndDelete.json new file mode 100644 index 0000000000..ff1103cb9b --- /dev/null +++ b/test/client-side-encryption/spec/unified/findOneAndDelete.json @@ -0,0 +1,276 @@ +{ + "description": "findOneAndDelete", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "4.1.10", + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + 
"collectionName": "default", + "documents": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + } + }, + { + "_id": 2, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACDdw4KFz3ZLquhsbt7RmDjD0N67n0uSXx7IGnQNCLeIKvot6s/ouI21Eo84IOtb6lhwUNPlSEBNY0/hbszWAKJg==", + "subType": "06" + } + } + } + ], + "createOptions": { + "validator": { + "$jsonSchema": { + "properties": { + "encrypted_w_altname": { + "encrypt": { + "keyId": "/altname", + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + }, + "random": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string_equivalent": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + } + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "status": 1, + "_id": { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + }, + "masterKey": { + "provider": "aws", + "key": "arn:aws:kms:us-east-1:579766882180:key/89fcc2c4-08b0-4bd9-9f25-e30687b580d0", + "region": "us-east-1" + }, + "updateDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyMaterial": { + "$binary": { + "base64": "AQICAHhQNmWG2CzOm1dq3kWLM+iDUZhEqnhJwH9wZVpuZ94A8gEqnsxXlR51T5EbEVezUqqKAAAAwjCBvwYJKoZIhvcNAQcGoIGxMIGuAgEAMIGoBgkqhkiG9w0BBwEwHgYJYIZIAWUDBAEuMBEEDHa4jo6yp0Z18KgbUgIBEIB74sKxWtV8/YHje5lv5THTl0HIbhSwM6EqRlmBiFFatmEWaeMk4tO4xBX65eq670I5TWPSLMzpp8ncGHMmvHqRajNBnmFtbYxN3E3/WjxmdbOOe+OXpnGJPcGsftc7cB2shRfA4lICPnE26+oVNXT6p0Lo20nY5XC7jyCO", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyAltNames": [ + "altname", + "another_altname" + ] + } + ] + } + ], + "tests": [ + { + "description": "findOneAndDelete with deterministic encryption", + "operations": [ + { + "name": "findOneAndDelete", + "arguments": { + "filter": { + "encrypted_string": "string0" + } + }, + "object": "coll" + } + ], + "outcome": [ + { + "documents": [ + { + "_id": 2, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACDdw4KFz3ZLquhsbt7RmDjD0N67n0uSXx7IGnQNCLeIKvot6s/ouI21Eo84IOtb6lhwUNPlSEBNY0/hbszWAKJg==", + "subType": "06" + } + } + } + ], + "collectionName": "default", + "databaseName": "default" + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + 
"commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "findAndModify": "default", + "query": { + "encrypted_string": { + "$eq": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + } + } + }, + "remove": true + }, + "commandName": "findAndModify" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/findOneAndReplace.json b/test/client-side-encryption/spec/unified/findOneAndReplace.json new file mode 100644 index 0000000000..c1a89fd2f6 --- /dev/null +++ b/test/client-side-encryption/spec/unified/findOneAndReplace.json @@ -0,0 +1,282 @@ +{ + "description": "findOneAndReplace", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "4.1.10", + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + } + } + ], + "createOptions": { + "validator": { + "$jsonSchema": { + "properties": { + "encrypted_w_altname": { + "encrypt": { + "keyId": "/altname", + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + }, + "random": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string_equivalent": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + } + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "status": 1, + "_id": { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + }, + "masterKey": { + "provider": "aws", + "key": "arn:aws:kms:us-east-1:579766882180:key/89fcc2c4-08b0-4bd9-9f25-e30687b580d0", + "region": "us-east-1" + }, + "updateDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyMaterial": { + "$binary": { + "base64": "AQICAHhQNmWG2CzOm1dq3kWLM+iDUZhEqnhJwH9wZVpuZ94A8gEqnsxXlR51T5EbEVezUqqKAAAAwjCBvwYJKoZIhvcNAQcGoIGxMIGuAgEAMIGoBgkqhkiG9w0BBwEwHgYJYIZIAWUDBAEuMBEEDHa4jo6yp0Z18KgbUgIBEIB74sKxWtV8/YHje5lv5THTl0HIbhSwM6EqRlmBiFFatmEWaeMk4tO4xBX65eq670I5TWPSLMzpp8ncGHMmvHqRajNBnmFtbYxN3E3/WjxmdbOOe+OXpnGJPcGsftc7cB2shRfA4lICPnE26+oVNXT6p0Lo20nY5XC7jyCO", + "subType": "00" + } + }, + 
"creationDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyAltNames": [ + "altname", + "another_altname" + ] + } + ] + } + ], + "tests": [ + { + "description": "findOneAndReplace with deterministic encryption", + "operations": [ + { + "name": "findOneAndReplace", + "arguments": { + "filter": { + "encrypted_string": "string0" + }, + "replacement": { + "encrypted_string": "string1" + }, + "returnDocument": "Before" + }, + "object": "coll", + "expectResult": { + "_id": 1, + "encrypted_string": "string0" + } + } + ], + "outcome": [ + { + "documents": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACDdw4KFz3ZLquhsbt7RmDjD0N67n0uSXx7IGnQNCLeIKvot6s/ouI21Eo84IOtb6lhwUNPlSEBNY0/hbszWAKJg==", + "subType": "06" + } + } + } + ], + "collectionName": "default", + "databaseName": "default" + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "findAndModify": "default", + "query": { + "encrypted_string": { + "$eq": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + } + } + }, + "update": { + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACDdw4KFz3ZLquhsbt7RmDjD0N67n0uSXx7IGnQNCLeIKvot6s/ouI21Eo84IOtb6lhwUNPlSEBNY0/hbszWAKJg==", + "subType": "06" + } + } + } + }, + "commandName": "findAndModify" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/findOneAndUpdate.json b/test/client-side-encryption/spec/unified/findOneAndUpdate.json new file mode 100644 index 0000000000..ffcb0e79e4 --- /dev/null +++ b/test/client-side-encryption/spec/unified/findOneAndUpdate.json @@ -0,0 +1,286 @@ +{ + "description": "findOneAndUpdate", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "4.1.10", + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + } + } + ], + "createOptions": { + "validator": { + "$jsonSchema": { + "properties": { + "encrypted_w_altname": { + "encrypt": { + "keyId": "/altname", + "bsonType": "string", + "algorithm": 
"AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + }, + "random": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string_equivalent": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + } + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "status": 1, + "_id": { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + }, + "masterKey": { + "provider": "aws", + "key": "arn:aws:kms:us-east-1:579766882180:key/89fcc2c4-08b0-4bd9-9f25-e30687b580d0", + "region": "us-east-1" + }, + "updateDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyMaterial": { + "$binary": { + "base64": "AQICAHhQNmWG2CzOm1dq3kWLM+iDUZhEqnhJwH9wZVpuZ94A8gEqnsxXlR51T5EbEVezUqqKAAAAwjCBvwYJKoZIhvcNAQcGoIGxMIGuAgEAMIGoBgkqhkiG9w0BBwEwHgYJYIZIAWUDBAEuMBEEDHa4jo6yp0Z18KgbUgIBEIB74sKxWtV8/YHje5lv5THTl0HIbhSwM6EqRlmBiFFatmEWaeMk4tO4xBX65eq670I5TWPSLMzpp8ncGHMmvHqRajNBnmFtbYxN3E3/WjxmdbOOe+OXpnGJPcGsftc7cB2shRfA4lICPnE26+oVNXT6p0Lo20nY5XC7jyCO", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyAltNames": [ + "altname", + "another_altname" + ] + } + ] + } + ], + "tests": [ + { + "description": "findOneAndUpdate with deterministic encryption", + "operations": [ + { + "name": "findOneAndUpdate", + "arguments": { + "filter": { + "encrypted_string": "string0" + }, + "update": { + "$set": { + "encrypted_string": "string1" + } + }, + "returnDocument": "Before" + }, + "object": "coll", + "expectResult": { + "_id": 1, + "encrypted_string": "string0" + } + } + ], + "outcome": [ + { + "documents": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACDdw4KFz3ZLquhsbt7RmDjD0N67n0uSXx7IGnQNCLeIKvot6s/ouI21Eo84IOtb6lhwUNPlSEBNY0/hbszWAKJg==", + "subType": "06" + } + } + } + ], + "collectionName": "default", + "databaseName": "default" + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "findAndModify": "default", + "query": { + "encrypted_string": { + "$eq": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + } + } + }, + "update": { + "$set": { + "encrypted_string": { + "$binary": { + "base64": 
"AQAAAAAAAAAAAAAAAAAAAAACDdw4KFz3ZLquhsbt7RmDjD0N67n0uSXx7IGnQNCLeIKvot6s/ouI21Eo84IOtb6lhwUNPlSEBNY0/hbszWAKJg==", + "subType": "06" + } + } + } + } + }, + "commandName": "findAndModify" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-BypassQueryAnalysis.json b/test/client-side-encryption/spec/unified/fle2v2-BypassQueryAnalysis.json new file mode 100644 index 0000000000..671413b83f --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-BypassQueryAnalysis.json @@ -0,0 +1,324 @@ +{ + "description": "fle2v2-BypassQueryAnalysis", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "7.0.0", + "serverless": "forbid", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + }, + "keyVaultNamespace": "keyvault.datakeys", + "bypassQueryAnalysis": true + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "encryptedDB", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "encryptedColl", + "database": "encryptedDB", + "collectionName": "default" + } + }, + { + "client": { + "id": "client1" + } + }, + { + "database": { + "id": "unencryptedDB", + "client": "client1", + "databaseName": "default" + } + }, + { + "collection": { + "id": "unencryptedColl", + "database": "unencryptedDB", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + }, + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedIndexed", + "bsonType": "string", + "queries": { + "queryType": "equality", + "contention": { + "$numberLong": "0" + } + } + }, + { + "keyId": { + "$binary": { + "base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedUnindexed", + "bsonType": "string" + } + ] + } + } + } + ], + "tests": [ + { + "description": "BypassQueryAnalysis decrypts", + "operations": [ + { + "object": "encryptedColl", + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedIndexed": { + "$binary": { + "base64": 
"C18BAAAFZAAgAAAAANnt+eLTkv4GdDPl8IAfJOvTzArOgFJQ2S/DcLza4W0DBXMAIAAAAAD2u+omZme3P2gBPehMQyQHQ153tPN1+z7bksYA9jKTpAVwADAAAAAAUnCOQqIvmR65YKyYnsiVfVrg9hwUVO3RhhKExo3RWOzgaS0QdsBL5xKFS0JhZSoWBXUAEAAAAAQSNFZ4EjSYdhI0EjRWeJASEHQAAgAAAAV2AFAAAAAAEjRWeBI0mHYSNBI0VniQEpQbp/ZJpWBKeDtKLiXb0P2E9wvc0g3f373jnYQYlJquOrlPOoEy3ngsHPJuSUijvWDsrQzqYa349K7G/66qaXEFZQAgAAAAAOuac/eRLYakKX6B0vZ1r3QodOQFfjqJD+xlGiPu4/PsBWwAIAAAAACkm0o9bj6j0HuADKc0svbqO2UHj6GrlNdF6yKNxh63xRJrAAAAAAAAAAAAAA==", + "subType": "06" + } + } + } + } + }, + { + "object": "encryptedColl", + "name": "find", + "arguments": { + "filter": { + "_id": 1 + } + }, + "expectResult": [ + { + "_id": 1, + "encryptedIndexed": "123" + } + ] + }, + { + "object": "unencryptedColl", + "name": "find", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": 1, + "encryptedIndexed": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "31eCYlbQoVboc5zwC8IoyJVSkag9PxREka8dkmbXJeY=", + "subType": "00" + } + } + ] + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encryptedIndexed": { + "$binary": { + "base64": "C18BAAAFZAAgAAAAANnt+eLTkv4GdDPl8IAfJOvTzArOgFJQ2S/DcLza4W0DBXMAIAAAAAD2u+omZme3P2gBPehMQyQHQ153tPN1+z7bksYA9jKTpAVwADAAAAAAUnCOQqIvmR65YKyYnsiVfVrg9hwUVO3RhhKExo3RWOzgaS0QdsBL5xKFS0JhZSoWBXUAEAAAAAQSNFZ4EjSYdhI0EjRWeJASEHQAAgAAAAV2AFAAAAAAEjRWeBI0mHYSNBI0VniQEpQbp/ZJpWBKeDtKLiXb0P2E9wvc0g3f373jnYQYlJquOrlPOoEy3ngsHPJuSUijvWDsrQzqYa349K7G/66qaXEFZQAgAAAAAOuac/eRLYakKX6B0vZ1r3QodOQFfjqJD+xlGiPu4/PsBWwAIAAAAACkm0o9bj6j0HuADKc0svbqO2UHj6GrlNdF6yKNxh63xRJrAAAAAAAAAAAAAA==", + "subType": "06" + } + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedIndexed", + "bsonType": "string", + "queries": { + "queryType": "equality", + "contention": { + "$numberLong": "0" + } + } + }, + { + "keyId": { + "$binary": { + "base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedUnindexed", + "bsonType": "string" + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "default", + "filter": { + "_id": 1 + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-Compact.json b/test/client-side-encryption/spec/unified/fle2v2-Compact.json new file mode 100644 index 0000000000..07ebf4351b --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-Compact.json @@ -0,0 +1,312 @@ +{ + "description": "fle2v2-Compact", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "7.0.0", + "topologies": [ + "replicaset", + "sharded", + 
"load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent", + "commandSucceededEvent" + ] + } + }, + { + "client": { + "id": "client1" + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "database": { + "id": "db1", + "client": "client1", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll1", + "database": "db1", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedIndexed", + "bsonType": "string", + "queries": { + "queryType": "equality", + "contention": { + "$numberLong": "0" + } + } + }, + { + "keyId": { + "$binary": { + "base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedUnindexed", + "bsonType": "string" + } + ] + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + }, + { + "_id": { + "$binary": { + "base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "HBk9BWihXExNDvTp1lUxOuxuZK2Pe2ZdVdlsxPEBkiO1bS4mG5NNDsQ7zVxJAH8BtdOYp72Ku4Y3nwc0BUpIKsvAKX4eYXtlhv5zUQxWdeNFhg9qK7qb8nqhnnLeT0f25jFSqzWJoT379hfwDeu0bebJHr35QrJ8myZdPMTEDYF08QYQ48ShRBli0S+QzBHHAQiM2iJNr4svg2WR8JSeWQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "Compact works", + "operations": [ + { + "name": "runCommand", + "object": "db", + "arguments": { + "command": { + "compactStructuredEncryptionData": "default" + }, + "commandName": "compactStructuredEncryptionData" + } + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandSucceededEvent": { + "commandName": "listCollections", + "reply": { + "ok": 1 + } + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + 
"$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + { + "$binary": { + "base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandSucceededEvent": { + "commandName": "find", + "reply": { + "ok": 1 + } + } + }, + { + "commandStartedEvent": { + "command": { + "compactStructuredEncryptionData": "default", + "compactionTokens": { + "encryptedIndexed": { + "$binary": { + "base64": "noN+05JsuO1oDg59yypIGj45i+eFH6HOTXOPpeZ//Mk=", + "subType": "00" + } + }, + "encryptedUnindexed": { + "$binary": { + "base64": "SWO8WEoZ2r2Kx/muQKb7+COizy85nIIUFiHh4K9kcvA=", + "subType": "00" + } + } + } + }, + "commandName": "compactStructuredEncryptionData" + } + }, + { + "commandSucceededEvent": { + "commandName": "compactStructuredEncryptionData", + "reply": { + "ok": 1 + } + } + } + ] + } + ] + }, + { + "description": "Compact errors on an unencrypted client", + "operations": [ + { + "name": "runCommand", + "object": "db1", + "arguments": { + "command": { + "compactStructuredEncryptionData": "default" + }, + "commandName": "compactStructuredEncryptionData" + }, + "expectError": { + "errorContains": "'compactStructuredEncryptionData.compactionTokens' is missing" + } + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-CreateCollection-OldServer.json b/test/client-side-encryption/spec/unified/fle2v2-CreateCollection-OldServer.json new file mode 100644 index 0000000000..fc069d55b2 --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-CreateCollection-OldServer.json @@ -0,0 +1,127 @@ +{ + "description": "fle2v2-CreateCollection-OldServer", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "6.0.0", + "maxServerVersion": "6.3.99", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + }, + "encryptedFieldsMap": { + "default.encryptedCollection": { + "fields": [ + { + "path": "firstName", + "bsonType": "string", + "keyId": { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + } + ] + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + } + ], + "tests": [ + { + "description": "driver returns an error if creating a QEv2 collection on unsupported server", + "operations": [ + { + "name": "dropCollection", + "object": "db", + "arguments": { + "collection": "encryptedCollection" + } + }, + { + "name": "createCollection", + "object": "db", + "arguments": { + "collection": "encryptedCollection" + }, + "expectError": { + "errorContains": "Driver support of Queryable Encryption is incompatible with server. Upgrade server to use Queryable Encryption." 
+ } + }, + { + "name": "assertCollectionNotExists", + "object": "testRunner", + "arguments": { + "databaseName": "default", + "collectionName": "enxcol_.encryptedCollection.esc" + } + }, + { + "name": "assertCollectionNotExists", + "object": "testRunner", + "arguments": { + "databaseName": "default", + "collectionName": "enxcol_.encryptedCollection.ecc" + } + }, + { + "name": "assertCollectionNotExists", + "object": "testRunner", + "arguments": { + "databaseName": "default", + "collectionName": "enxcol_.encryptedCollection.ecoc" + } + }, + { + "name": "assertCollectionNotExists", + "object": "testRunner", + "arguments": { + "databaseName": "default", + "collectionName": "encryptedCollection" + } + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-CreateCollection.json b/test/client-side-encryption/spec/unified/fle2v2-CreateCollection.json new file mode 100644 index 0000000000..3dfb76c461 --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-CreateCollection.json @@ -0,0 +1,1748 @@ +{ + "description": "fle2v2-CreateCollection", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "7.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + }, + "encryptedFieldsMap": { + "default.encryptedCollection": { + "fields": [ + { + "path": "firstName", + "bsonType": "string", + "keyId": { + "$binary": { + "subType": "04", + "base64": "AAAAAAAAAAAAAAAAAAAAAA==" + } + } + } + ] + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client1", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + }, + "encryptedFieldsMap": { + "default.encryptedCollection": { + "escCollection": "invalid_esc_name", + "ecocCollection": "invalid_ecoc_name", + "fields": [ + { + "path": "firstName", + "bsonType": "string", + "keyId": { + "$binary": { + "subType": "04", + "base64": "AAAAAAAAAAAAAAAAAAAAAA==" + } + } + } + ] + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db1", + "client": "client1", + "databaseName": "default" + } + }, + { + "client": { + "id": "client2", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + }, + "encryptedFieldsMap": {} + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db2", + "client": "client2", + "databaseName": "default" + } + } + ], + "tests": [ + { + "description": "state collections and index are created", + "operations": [ + { + "name": "dropCollection", + "object": "db", + "arguments": { + "collection": "encryptedCollection" + } + }, + { + "name": "createCollection", + "object": "db", + "arguments": { + "collection": "encryptedCollection" + } + }, + { + "name": 
"assertCollectionExists", + "object": "testRunner", + "arguments": { + "databaseName": "default", + "collectionName": "enxcol_.encryptedCollection.esc" + } + }, + { + "name": "assertCollectionNotExists", + "object": "testRunner", + "arguments": { + "databaseName": "default", + "collectionName": "enxcol_.encryptedCollection.ecc" + } + }, + { + "name": "assertCollectionExists", + "object": "testRunner", + "arguments": { + "databaseName": "default", + "collectionName": "enxcol_.encryptedCollection.ecoc" + } + }, + { + "name": "assertCollectionExists", + "object": "testRunner", + "arguments": { + "databaseName": "default", + "collectionName": "encryptedCollection" + } + }, + { + "name": "assertIndexExists", + "object": "testRunner", + "arguments": { + "databaseName": "default", + "collectionName": "encryptedCollection", + "indexName": "__safeContent___1" + } + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "drop": "enxcol_.encryptedCollection.esc" + }, + "databaseName": "default", + "commandName": "drop" + } + }, + { + "commandStartedEvent": { + "command": { + "drop": "enxcol_.encryptedCollection.ecoc" + }, + "databaseName": "default", + "commandName": "drop" + } + }, + { + "commandStartedEvent": { + "command": { + "drop": "encryptedCollection" + }, + "databaseName": "default", + "commandName": "drop" + } + }, + { + "commandStartedEvent": { + "command": { + "create": "enxcol_.encryptedCollection.esc", + "clusteredIndex": { + "key": { + "_id": 1 + }, + "unique": true + } + }, + "databaseName": "default", + "commandName": "create" + } + }, + { + "commandStartedEvent": { + "command": { + "create": "enxcol_.encryptedCollection.ecoc", + "clusteredIndex": { + "key": { + "_id": 1 + }, + "unique": true + } + }, + "databaseName": "default", + "commandName": "create" + } + }, + { + "commandStartedEvent": { + "command": { + "create": "encryptedCollection", + "encryptedFields": { + "fields": [ + { + "path": "firstName", + "bsonType": "string", + "keyId": { + "$binary": { + "subType": "04", + "base64": "AAAAAAAAAAAAAAAAAAAAAA==" + } + } + } + ] + } + }, + "databaseName": "default", + "commandName": "create" + } + }, + { + "commandStartedEvent": { + "command": { + "createIndexes": "encryptedCollection", + "indexes": [ + { + "name": "__safeContent___1", + "key": { + "__safeContent__": 1 + } + } + ] + }, + "databaseName": "default", + "commandName": "createIndexes" + } + } + ] + } + ] + }, + { + "description": "default state collection names are applied", + "operations": [ + { + "name": "dropCollection", + "object": "db", + "arguments": { + "collection": "encryptedCollection" + } + }, + { + "name": "createCollection", + "object": "db", + "arguments": { + "collection": "encryptedCollection" + } + }, + { + "name": "assertCollectionExists", + "object": "testRunner", + "arguments": { + "databaseName": "default", + "collectionName": "enxcol_.encryptedCollection.esc" + } + }, + { + "name": "assertCollectionNotExists", + "object": "testRunner", + "arguments": { + "databaseName": "default", + "collectionName": "enxcol_.encryptedCollection.ecc" + } + }, + { + "name": "assertCollectionExists", + "object": "testRunner", + "arguments": { + "databaseName": "default", + "collectionName": "enxcol_.encryptedCollection.ecoc" + } + }, + { + "name": "assertCollectionExists", + "object": "testRunner", + "arguments": { + "databaseName": "default", + "collectionName": "encryptedCollection" + } + }, + { + "name": "assertIndexExists", + "object": "testRunner", + 
"arguments": { + "databaseName": "default", + "collectionName": "encryptedCollection", + "indexName": "__safeContent___1" + } + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "drop": "enxcol_.encryptedCollection.esc" + }, + "databaseName": "default", + "commandName": "drop" + } + }, + { + "commandStartedEvent": { + "command": { + "drop": "enxcol_.encryptedCollection.ecoc" + }, + "databaseName": "default", + "commandName": "drop" + } + }, + { + "commandStartedEvent": { + "command": { + "drop": "encryptedCollection" + }, + "databaseName": "default", + "commandName": "drop" + } + }, + { + "commandStartedEvent": { + "command": { + "create": "enxcol_.encryptedCollection.esc", + "clusteredIndex": { + "key": { + "_id": 1 + }, + "unique": true + } + }, + "databaseName": "default", + "commandName": "create" + } + }, + { + "commandStartedEvent": { + "command": { + "create": "enxcol_.encryptedCollection.ecoc", + "clusteredIndex": { + "key": { + "_id": 1 + }, + "unique": true + } + }, + "databaseName": "default", + "commandName": "create" + } + }, + { + "commandStartedEvent": { + "command": { + "create": "encryptedCollection", + "encryptedFields": { + "fields": [ + { + "path": "firstName", + "bsonType": "string", + "keyId": { + "$binary": { + "subType": "04", + "base64": "AAAAAAAAAAAAAAAAAAAAAA==" + } + } + } + ] + } + }, + "databaseName": "default", + "commandName": "create" + } + }, + { + "commandStartedEvent": { + "command": { + "createIndexes": "encryptedCollection", + "indexes": [ + { + "name": "__safeContent___1", + "key": { + "__safeContent__": 1 + } + } + ] + }, + "databaseName": "default", + "commandName": "createIndexes" + } + } + ] + } + ] + }, + { + "description": "drop removes all state collections", + "operations": [ + { + "name": "dropCollection", + "object": "db", + "arguments": { + "collection": "encryptedCollection" + } + }, + { + "name": "createCollection", + "object": "db", + "arguments": { + "collection": "encryptedCollection" + } + }, + { + "name": "assertCollectionExists", + "object": "testRunner", + "arguments": { + "databaseName": "default", + "collectionName": "enxcol_.encryptedCollection.esc" + } + }, + { + "name": "assertCollectionNotExists", + "object": "testRunner", + "arguments": { + "databaseName": "default", + "collectionName": "enxcol_.encryptedCollection.ecc" + } + }, + { + "name": "assertCollectionExists", + "object": "testRunner", + "arguments": { + "databaseName": "default", + "collectionName": "enxcol_.encryptedCollection.ecoc" + } + }, + { + "name": "assertCollectionExists", + "object": "testRunner", + "arguments": { + "databaseName": "default", + "collectionName": "encryptedCollection" + } + }, + { + "name": "assertIndexExists", + "object": "testRunner", + "arguments": { + "databaseName": "default", + "collectionName": "encryptedCollection", + "indexName": "__safeContent___1" + } + }, + { + "name": "dropCollection", + "object": "db", + "arguments": { + "collection": "encryptedCollection" + } + }, + { + "name": "assertCollectionNotExists", + "object": "testRunner", + "arguments": { + "databaseName": "default", + "collectionName": "enxcol_.encryptedCollection.ecoc" + } + }, + { + "name": "assertCollectionNotExists", + "object": "testRunner", + "arguments": { + "databaseName": "default", + "collectionName": "encryptedCollection" + } + }, + { + "name": "assertIndexNotExists", + "object": "db", + "arguments": { + "databaseName": "default", + "collectionName": "encryptedCollection", + "indexName": 
"__safeContent___1" + } + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "drop": "enxcol_.encryptedCollection.esc" + }, + "databaseName": "default", + "commandName": "drop" + } + }, + { + "commandStartedEvent": { + "command": { + "drop": "enxcol_.encryptedCollection.ecoc" + }, + "databaseName": "default", + "commandName": "drop" + } + }, + { + "commandStartedEvent": { + "command": { + "drop": "encryptedCollection" + }, + "databaseName": "default", + "commandName": "drop" + } + }, + { + "commandStartedEvent": { + "command": { + "create": "enxcol_.encryptedCollection.esc", + "clusteredIndex": { + "key": { + "_id": 1 + }, + "unique": true + } + }, + "databaseName": "default", + "commandName": "create" + } + }, + { + "commandStartedEvent": { + "command": { + "create": "enxcol_.encryptedCollection.ecoc", + "clusteredIndex": { + "key": { + "_id": 1 + }, + "unique": true + } + }, + "databaseName": "default", + "commandName": "create" + } + }, + { + "commandStartedEvent": { + "command": { + "create": "encryptedCollection", + "encryptedFields": { + "fields": [ + { + "path": "firstName", + "bsonType": "string", + "keyId": { + "$binary": { + "subType": "04", + "base64": "AAAAAAAAAAAAAAAAAAAAAA==" + } + } + } + ] + } + }, + "databaseName": "default", + "commandName": "create" + } + }, + { + "commandStartedEvent": { + "command": { + "createIndexes": "encryptedCollection", + "indexes": [ + { + "name": "__safeContent___1", + "key": { + "__safeContent__": 1 + } + } + ] + }, + "databaseName": "default", + "commandName": "createIndexes" + } + }, + { + "commandStartedEvent": { + "command": { + "drop": "enxcol_.encryptedCollection.esc" + }, + "databaseName": "default", + "commandName": "drop" + } + }, + { + "commandStartedEvent": { + "command": { + "drop": "enxcol_.encryptedCollection.ecoc" + }, + "databaseName": "default", + "commandName": "drop" + } + }, + { + "commandStartedEvent": { + "command": { + "drop": "encryptedCollection" + }, + "databaseName": "default", + "commandName": "drop" + } + } + ] + } + ] + }, + { + "description": "CreateCollection without encryptedFields.", + "operations": [ + { + "name": "dropCollection", + "object": "db", + "arguments": { + "collection": "plaintextCollection" + } + }, + { + "name": "createCollection", + "object": "db", + "arguments": { + "collection": "plaintextCollection" + } + }, + { + "name": "assertCollectionExists", + "object": "testRunner", + "arguments": { + "databaseName": "default", + "collectionName": "plaintextCollection" + } + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "plaintextCollection" + } + }, + "databaseName": "default", + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "drop": "plaintextCollection" + }, + "databaseName": "default", + "commandName": "drop" + } + }, + { + "commandStartedEvent": { + "command": { + "create": "plaintextCollection" + }, + "databaseName": "default", + "commandName": "create" + } + } + ] + } + ] + }, + { + "description": "CreateCollection from encryptedFieldsMap.", + "operations": [ + { + "name": "dropCollection", + "object": "db", + "arguments": { + "collection": "encryptedCollection" + } + }, + { + "name": "createCollection", + "object": "db", + "arguments": { + "collection": "encryptedCollection" + } + }, + { + "name": "assertCollectionExists", + "object": "testRunner", + "arguments": { + 
"databaseName": "default", + "collectionName": "enxcol_.encryptedCollection.esc" + } + }, + { + "name": "assertCollectionNotExists", + "object": "testRunner", + "arguments": { + "databaseName": "default", + "collectionName": "enxcol_.encryptedCollection.ecc" + } + }, + { + "name": "assertCollectionExists", + "object": "testRunner", + "arguments": { + "databaseName": "default", + "collectionName": "enxcol_.encryptedCollection.ecoc" + } + }, + { + "name": "assertCollectionExists", + "object": "testRunner", + "arguments": { + "databaseName": "default", + "collectionName": "encryptedCollection" + } + }, + { + "name": "assertIndexExists", + "object": "testRunner", + "arguments": { + "databaseName": "default", + "collectionName": "encryptedCollection", + "indexName": "__safeContent___1" + } + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "drop": "enxcol_.encryptedCollection.esc" + }, + "databaseName": "default", + "commandName": "drop" + } + }, + { + "commandStartedEvent": { + "command": { + "drop": "enxcol_.encryptedCollection.ecoc" + }, + "databaseName": "default", + "commandName": "drop" + } + }, + { + "commandStartedEvent": { + "command": { + "drop": "encryptedCollection" + }, + "databaseName": "default", + "commandName": "drop" + } + }, + { + "commandStartedEvent": { + "command": { + "create": "enxcol_.encryptedCollection.esc", + "clusteredIndex": { + "key": { + "_id": 1 + }, + "unique": true + } + }, + "databaseName": "default", + "commandName": "create" + } + }, + { + "commandStartedEvent": { + "command": { + "create": "enxcol_.encryptedCollection.ecoc", + "clusteredIndex": { + "key": { + "_id": 1 + }, + "unique": true + } + }, + "databaseName": "default", + "commandName": "create" + } + }, + { + "commandStartedEvent": { + "command": { + "create": "encryptedCollection", + "encryptedFields": { + "fields": [ + { + "path": "firstName", + "bsonType": "string", + "keyId": { + "$binary": { + "subType": "04", + "base64": "AAAAAAAAAAAAAAAAAAAAAA==" + } + } + } + ] + } + }, + "databaseName": "default", + "commandName": "create" + } + }, + { + "commandStartedEvent": { + "command": { + "createIndexes": "encryptedCollection", + "indexes": [ + { + "name": "__safeContent___1", + "key": { + "__safeContent__": 1 + } + } + ] + }, + "databaseName": "default", + "commandName": "createIndexes" + } + } + ] + } + ] + }, + { + "description": "CreateCollection from encryptedFields.", + "operations": [ + { + "name": "dropCollection", + "object": "db2", + "arguments": { + "collection": "encryptedCollection", + "encryptedFields": { + "fields": [ + { + "path": "firstName", + "bsonType": "string", + "keyId": { + "$binary": { + "subType": "04", + "base64": "AAAAAAAAAAAAAAAAAAAAAA==" + } + } + } + ] + } + } + }, + { + "name": "createCollection", + "object": "db2", + "arguments": { + "collection": "encryptedCollection", + "encryptedFields": { + "fields": [ + { + "path": "firstName", + "bsonType": "string", + "keyId": { + "$binary": { + "subType": "04", + "base64": "AAAAAAAAAAAAAAAAAAAAAA==" + } + } + } + ] + } + } + }, + { + "name": "assertCollectionExists", + "object": "testRunner", + "arguments": { + "databaseName": "default", + "collectionName": "enxcol_.encryptedCollection.esc" + } + }, + { + "name": "assertCollectionNotExists", + "object": "testRunner", + "arguments": { + "databaseName": "default", + "collectionName": "enxcol_.encryptedCollection.ecc" + } + }, + { + "name": "assertCollectionExists", + "object": "testRunner", + "arguments": { + 
"databaseName": "default", + "collectionName": "enxcol_.encryptedCollection.ecoc" + } + }, + { + "name": "assertCollectionExists", + "object": "testRunner", + "arguments": { + "databaseName": "default", + "collectionName": "encryptedCollection" + } + }, + { + "name": "assertIndexExists", + "object": "testRunner", + "arguments": { + "databaseName": "default", + "collectionName": "encryptedCollection", + "indexName": "__safeContent___1" + } + } + ], + "expectEvents": [ + { + "client": "client2", + "events": [ + { + "commandStartedEvent": { + "command": { + "drop": "enxcol_.encryptedCollection.esc" + }, + "databaseName": "default", + "commandName": "drop" + } + }, + { + "commandStartedEvent": { + "command": { + "drop": "enxcol_.encryptedCollection.ecoc" + }, + "databaseName": "default", + "commandName": "drop" + } + }, + { + "commandStartedEvent": { + "command": { + "drop": "encryptedCollection" + }, + "databaseName": "default", + "commandName": "drop" + } + }, + { + "commandStartedEvent": { + "command": { + "create": "enxcol_.encryptedCollection.esc", + "clusteredIndex": { + "key": { + "_id": 1 + }, + "unique": true + } + }, + "databaseName": "default", + "commandName": "create" + } + }, + { + "commandStartedEvent": { + "command": { + "create": "enxcol_.encryptedCollection.ecoc", + "clusteredIndex": { + "key": { + "_id": 1 + }, + "unique": true + } + }, + "databaseName": "default", + "commandName": "create" + } + }, + { + "commandStartedEvent": { + "command": { + "create": "encryptedCollection", + "encryptedFields": { + "fields": [ + { + "path": "firstName", + "bsonType": "string", + "keyId": { + "$binary": { + "subType": "04", + "base64": "AAAAAAAAAAAAAAAAAAAAAA==" + } + } + } + ] + } + }, + "databaseName": "default", + "commandName": "create" + } + }, + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "encryptedCollection" + } + }, + "databaseName": "default", + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "createIndexes": "encryptedCollection", + "indexes": [ + { + "name": "__safeContent___1", + "key": { + "__safeContent__": 1 + } + } + ] + }, + "databaseName": "default", + "commandName": "createIndexes" + } + } + ] + } + ] + }, + { + "description": "DropCollection from encryptedFieldsMap", + "operations": [ + { + "name": "dropCollection", + "object": "db", + "arguments": { + "collection": "encryptedCollection" + } + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "drop": "enxcol_.encryptedCollection.esc" + }, + "databaseName": "default", + "commandName": "drop" + } + }, + { + "commandStartedEvent": { + "command": { + "drop": "enxcol_.encryptedCollection.ecoc" + }, + "databaseName": "default", + "commandName": "drop" + } + }, + { + "commandStartedEvent": { + "command": { + "drop": "encryptedCollection" + }, + "databaseName": "default", + "commandName": "drop" + } + } + ] + } + ] + }, + { + "description": "DropCollection from encryptedFields", + "operations": [ + { + "name": "dropCollection", + "object": "db2", + "arguments": { + "collection": "encryptedCollection", + "encryptedFields": { + "fields": [ + { + "path": "firstName", + "bsonType": "string", + "keyId": { + "$binary": { + "subType": "04", + "base64": "AAAAAAAAAAAAAAAAAAAAAA==" + } + } + } + ] + } + } + }, + { + "name": "createCollection", + "object": "db2", + "arguments": { + "collection": "encryptedCollection", + "encryptedFields": { + "fields": [ + { + "path": "firstName", + 
"bsonType": "string", + "keyId": { + "$binary": { + "subType": "04", + "base64": "AAAAAAAAAAAAAAAAAAAAAA==" + } + } + } + ] + } + } + }, + { + "name": "assertCollectionExists", + "object": "testRunner", + "arguments": { + "databaseName": "default", + "collectionName": "enxcol_.encryptedCollection.esc" + } + }, + { + "name": "assertCollectionNotExists", + "object": "testRunner", + "arguments": { + "databaseName": "default", + "collectionName": "enxcol_.encryptedCollection.ecc" + } + }, + { + "name": "assertCollectionExists", + "object": "testRunner", + "arguments": { + "databaseName": "default", + "collectionName": "enxcol_.encryptedCollection.ecoc" + } + }, + { + "name": "assertCollectionExists", + "object": "testRunner", + "arguments": { + "databaseName": "default", + "collectionName": "encryptedCollection" + } + }, + { + "name": "assertIndexExists", + "object": "testRunner", + "arguments": { + "databaseName": "default", + "collectionName": "encryptedCollection", + "indexName": "__safeContent___1" + } + }, + { + "name": "dropCollection", + "object": "db2", + "arguments": { + "collection": "encryptedCollection", + "encryptedFields": { + "fields": [ + { + "path": "firstName", + "bsonType": "string", + "keyId": { + "$binary": { + "subType": "04", + "base64": "AAAAAAAAAAAAAAAAAAAAAA==" + } + } + } + ] + } + } + }, + { + "name": "assertCollectionNotExists", + "object": "testRunner", + "arguments": { + "databaseName": "default", + "collectionName": "enxcol_.encryptedCollection.esc" + } + }, + { + "name": "assertCollectionNotExists", + "object": "testRunner", + "arguments": { + "databaseName": "default", + "collectionName": "enxcol_.encryptedCollection.ecoc" + } + }, + { + "name": "assertCollectionNotExists", + "object": "testRunner", + "arguments": { + "databaseName": "default", + "collectionName": "encryptedCollection" + } + } + ], + "expectEvents": [ + { + "client": "client2", + "events": [ + { + "commandStartedEvent": { + "command": { + "drop": "enxcol_.encryptedCollection.esc" + }, + "databaseName": "default", + "commandName": "drop" + } + }, + { + "commandStartedEvent": { + "command": { + "drop": "enxcol_.encryptedCollection.ecoc" + }, + "databaseName": "default", + "commandName": "drop" + } + }, + { + "commandStartedEvent": { + "command": { + "drop": "encryptedCollection" + }, + "databaseName": "default", + "commandName": "drop" + } + }, + { + "commandStartedEvent": { + "command": { + "create": "enxcol_.encryptedCollection.esc", + "clusteredIndex": { + "key": { + "_id": 1 + }, + "unique": true + } + }, + "databaseName": "default", + "commandName": "create" + } + }, + { + "commandStartedEvent": { + "command": { + "create": "enxcol_.encryptedCollection.ecoc", + "clusteredIndex": { + "key": { + "_id": 1 + }, + "unique": true + } + }, + "databaseName": "default", + "commandName": "create" + } + }, + { + "commandStartedEvent": { + "command": { + "create": "encryptedCollection", + "encryptedFields": { + "fields": [ + { + "path": "firstName", + "bsonType": "string", + "keyId": { + "$binary": { + "subType": "04", + "base64": "AAAAAAAAAAAAAAAAAAAAAA==" + } + } + } + ] + } + }, + "databaseName": "default", + "commandName": "create" + } + }, + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "encryptedCollection" + } + }, + "databaseName": "default", + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "createIndexes": "encryptedCollection", + "indexes": [ + { + "name": "__safeContent___1", + "key": { + 
"__safeContent__": 1 + } + } + ] + }, + "databaseName": "default", + "commandName": "createIndexes" + } + }, + { + "commandStartedEvent": { + "command": { + "drop": "enxcol_.encryptedCollection.esc" + }, + "databaseName": "default", + "commandName": "drop" + } + }, + { + "commandStartedEvent": { + "command": { + "drop": "enxcol_.encryptedCollection.ecoc" + }, + "databaseName": "default", + "commandName": "drop" + } + }, + { + "commandStartedEvent": { + "command": { + "drop": "encryptedCollection" + }, + "databaseName": "default", + "commandName": "drop" + } + } + ] + } + ] + }, + { + "description": "DropCollection from remote encryptedFields", + "operations": [ + { + "name": "dropCollection", + "object": "db2", + "arguments": { + "collection": "encryptedCollection", + "encryptedFields": { + "fields": [ + { + "path": "firstName", + "bsonType": "string", + "keyId": { + "$binary": { + "subType": "04", + "base64": "AAAAAAAAAAAAAAAAAAAAAA==" + } + } + } + ] + } + } + }, + { + "name": "createCollection", + "object": "db2", + "arguments": { + "collection": "encryptedCollection", + "encryptedFields": { + "fields": [ + { + "path": "firstName", + "bsonType": "string", + "keyId": { + "$binary": { + "subType": "04", + "base64": "AAAAAAAAAAAAAAAAAAAAAA==" + } + } + } + ] + } + } + }, + { + "name": "assertCollectionExists", + "object": "testRunner", + "arguments": { + "databaseName": "default", + "collectionName": "enxcol_.encryptedCollection.esc" + } + }, + { + "name": "assertCollectionNotExists", + "object": "testRunner", + "arguments": { + "databaseName": "default", + "collectionName": "enxcol_.encryptedCollection.ecc" + } + }, + { + "name": "assertCollectionExists", + "object": "testRunner", + "arguments": { + "databaseName": "default", + "collectionName": "enxcol_.encryptedCollection.ecoc" + } + }, + { + "name": "assertCollectionExists", + "object": "testRunner", + "arguments": { + "databaseName": "default", + "collectionName": "encryptedCollection" + } + }, + { + "name": "assertIndexExists", + "object": "testRunner", + "arguments": { + "databaseName": "default", + "collectionName": "encryptedCollection", + "indexName": "__safeContent___1" + } + }, + { + "name": "dropCollection", + "object": "db2", + "arguments": { + "collection": "encryptedCollection" + } + }, + { + "name": "assertCollectionNotExists", + "object": "testRunner", + "arguments": { + "databaseName": "default", + "collectionName": "enxcol_.encryptedCollection.esc" + } + }, + { + "name": "assertCollectionNotExists", + "object": "testRunner", + "arguments": { + "databaseName": "default", + "collectionName": "enxcol_.encryptedCollection.ecoc" + } + }, + { + "name": "assertCollectionNotExists", + "object": "testRunner", + "arguments": { + "databaseName": "default", + "collectionName": "encryptedCollection" + } + } + ], + "expectEvents": [ + { + "client": "client2", + "events": [ + { + "commandStartedEvent": { + "command": { + "drop": "enxcol_.encryptedCollection.esc" + }, + "databaseName": "default", + "commandName": "drop" + } + }, + { + "commandStartedEvent": { + "command": { + "drop": "enxcol_.encryptedCollection.ecoc" + }, + "databaseName": "default", + "commandName": "drop" + } + }, + { + "commandStartedEvent": { + "command": { + "drop": "encryptedCollection" + }, + "databaseName": "default", + "commandName": "drop" + } + }, + { + "commandStartedEvent": { + "command": { + "create": "enxcol_.encryptedCollection.esc", + "clusteredIndex": { + "key": { + "_id": 1 + }, + "unique": true + } + }, + "databaseName": "default", + "commandName": 
"create" + } + }, + { + "commandStartedEvent": { + "command": { + "create": "enxcol_.encryptedCollection.ecoc", + "clusteredIndex": { + "key": { + "_id": 1 + }, + "unique": true + } + }, + "databaseName": "default", + "commandName": "create" + } + }, + { + "commandStartedEvent": { + "command": { + "create": "encryptedCollection", + "encryptedFields": { + "fields": [ + { + "path": "firstName", + "bsonType": "string", + "keyId": { + "$binary": { + "subType": "04", + "base64": "AAAAAAAAAAAAAAAAAAAAAA==" + } + } + } + ] + } + }, + "databaseName": "default", + "commandName": "create" + } + }, + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "encryptedCollection" + } + }, + "databaseName": "default", + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "createIndexes": "encryptedCollection", + "indexes": [ + { + "name": "__safeContent___1", + "key": { + "__safeContent__": 1 + } + } + ] + }, + "databaseName": "default", + "commandName": "createIndexes" + } + }, + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "encryptedCollection" + } + }, + "databaseName": "default", + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "drop": "enxcol_.encryptedCollection.esc" + }, + "databaseName": "default", + "commandName": "drop" + } + }, + { + "commandStartedEvent": { + "command": { + "drop": "enxcol_.encryptedCollection.ecoc" + }, + "databaseName": "default", + "commandName": "drop" + } + }, + { + "commandStartedEvent": { + "command": { + "drop": "encryptedCollection" + }, + "databaseName": "default", + "commandName": "drop" + } + } + ] + } + ] + }, + { + "description": "encryptedFields are consulted for metadata collection names", + "operations": [ + { + "name": "dropCollection", + "object": "db1", + "arguments": { + "collection": "encryptedCollection" + } + }, + { + "name": "createCollection", + "object": "db1", + "arguments": { + "collection": "encryptedCollection" + }, + "expectError": { + "errorContains": "Encrypted State Collection name should follow" + } + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-DecryptExistingData.json b/test/client-side-encryption/spec/unified/fle2v2-DecryptExistingData.json new file mode 100644 index 0000000000..b171c78c00 --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-DecryptExistingData.json @@ -0,0 +1,186 @@ +{ + "description": "fle2v2-DecryptExistingData", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "7.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [ + { + "_id": 1, + "encryptedUnindexed": { + "$binary": { + "base64": 
"BqvN76sSNJh2EjQSNFZ4kBICTQaVZPWgXp41I7mPV1rLFTtw1tXzjcdSEyxpKKqujlko5TeizkB9hHQ009dVY1+fgIiDcefh+eQrm3CkhQ==", + "subType": "06" + } + } + } + ] + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "HBk9BWihXExNDvTp1lUxOuxuZK2Pe2ZdVdlsxPEBkiO1bS4mG5NNDsQ7zVxJAH8BtdOYp72Ku4Y3nwc0BUpIKsvAKX4eYXtlhv5zUQxWdeNFhg9qK7qb8nqhnnLeT0f25jFSqzWJoT379hfwDeu0bebJHr35QrJ8myZdPMTEDYF08QYQ48ShRBli0S+QzBHHAQiM2iJNr4svg2WR8JSeWQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "FLE2 decrypt of existing data succeeds", + "operations": [ + { + "name": "find", + "arguments": { + "filter": { + "_id": 1 + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedUnindexed": "value123" + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "default", + "filter": { + "_id": 1 + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-Delete.json b/test/client-side-encryption/spec/unified/fle2v2-Delete.json new file mode 100644 index 0000000000..305f642ae1 --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-Delete.json @@ -0,0 +1,326 @@ +{ + "description": "fle2v2-Delete", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "7.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedIndexed", + "bsonType": "string", + "queries": { + "queryType": "equality", + "contention": { + "$numberLong": "0" + } + } + }, + { + "keyId": { + "$binary": { + "base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedUnindexed", + "bsonType": 
"string" + } + ] + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "Delete can query an FLE2 indexed field", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedIndexed": "value123" + } + }, + "object": "coll" + }, + { + "name": "deleteOne", + "arguments": { + "filter": { + "encryptedIndexed": "value123" + } + }, + "object": "coll", + "expectResult": { + "deletedCount": 1 + } + } + ], + "outcome": [ + { + "documents": [], + "collectionName": "default", + "databaseName": "default" + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encryptedIndexed": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedIndexed", + "bsonType": "string", + "queries": { + "queryType": "equality", + "contention": { + "$numberLong": "0" + } + } + }, + { + "keyId": { + "$binary": { + "base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedUnindexed", + "bsonType": "string" + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "delete": "default", + "deletes": [ + { + "q": { + "encryptedIndexed": { + "$eq": { + "$binary": { + "base64": "DIkAAAAFZAAgAAAAAPtVteJQAlgb2YMa/+7YWH00sbQPyt7L6Rb8OwBdMmL2BXMAIAAAAAAd44hgVKnEnTFlwNVC14oyc9OZOTspeymusqkRQj57nAVsACAAAAAAaZ9s3G+4znfxStxeOZwcZy1OhzjMGc5hjmdMN+b/w6kSY20AAAAAAAAAAAAA", + "subType": "06" + } + } + } + }, + "limit": 1 + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedIndexed", + "bsonType": "string", + "queries": { + "queryType": "equality", + "contention": { + "$numberLong": "0" + } + } + }, + { + "keyId": { + "$binary": { + 
"base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedUnindexed", + "bsonType": "string" + } + ] + } + } + } + }, + "commandName": "delete" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-EncryptedFields-vs-EncryptedFieldsMap.json b/test/client-side-encryption/spec/unified/fle2v2-EncryptedFields-vs-EncryptedFieldsMap.json new file mode 100644 index 0000000000..7a6957db0a --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-EncryptedFields-vs-EncryptedFieldsMap.json @@ -0,0 +1,258 @@ +{ + "description": "fle2v2-EncryptedFields-vs-EncryptedFieldsMap", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "7.0.0", + "serverless": "forbid", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + }, + "encryptedFieldsMap": { + "default.default": { + "fields": [] + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "encryptedDB", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "encryptedColl", + "database": "encryptedDB", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "HBk9BWihXExNDvTp1lUxOuxuZK2Pe2ZdVdlsxPEBkiO1bS4mG5NNDsQ7zVxJAH8BtdOYp72Ku4Y3nwc0BUpIKsvAKX4eYXtlhv5zUQxWdeNFhg9qK7qb8nqhnnLeT0f25jFSqzWJoT379hfwDeu0bebJHr35QrJ8myZdPMTEDYF08QYQ48ShRBli0S+QzBHHAQiM2iJNr4svg2WR8JSeWQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + }, + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedIndexed", + "bsonType": "string", + "queries": { + "queryType": "equality", + "contention": { + "$numberLong": "0" + } + } + }, + { + "keyId": { + "$binary": { + "base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedUnindexed", + "bsonType": "string" + } + ] + } + } + } + ], + "tests": [ + { + "description": "encryptedFieldsMap is preferred over remote encryptedFields", + "operations": [ + { + "object": "encryptedColl", + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedUnindexed": { + "$binary": { + "base64": "BqvN76sSNJh2EjQSNFZ4kBICTQaVZPWgXp41I7mPV1rLFTtw1tXzjcdSEyxpKKqujlko5TeizkB9hHQ009dVY1+fgIiDcefh+eQrm3CkhQ==", + "subType": "06" + } + } + } + } + }, + { + "object": "encryptedColl", + "name": "find", + "arguments": { + "filter": { + "_id": 1 + } + }, + "expectResult": [ + { + "_id": 1, + "encryptedUnindexed": "value123" + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + 
"commandStartedEvent": { + "databaseName": "default", + "commandName": "insert", + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encryptedUnindexed": { + "$binary": { + "base64": "BqvN76sSNJh2EjQSNFZ4kBICTQaVZPWgXp41I7mPV1rLFTtw1tXzjcdSEyxpKKqujlko5TeizkB9hHQ009dVY1+fgIiDcefh+eQrm3CkhQ==", + "subType": "06" + } + } + } + ], + "ordered": true + } + } + }, + { + "commandStartedEvent": { + "databaseName": "default", + "commandName": "find", + "command": { + "find": "default", + "filter": { + "_id": 1 + } + } + } + }, + { + "commandStartedEvent": { + "databaseName": "keyvault", + "commandName": "find", + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + } + } + } + ] + } + ], + "outcome": [ + { + "collectionName": "default", + "databaseName": "default", + "documents": [ + { + "_id": 1, + "encryptedUnindexed": { + "$binary": { + "base64": "BqvN76sSNJh2EjQSNFZ4kBICTQaVZPWgXp41I7mPV1rLFTtw1tXzjcdSEyxpKKqujlko5TeizkB9hHQ009dVY1+fgIiDcefh+eQrm3CkhQ==", + "subType": "06" + } + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-EncryptedFields-vs-jsonSchema.json b/test/client-side-encryption/spec/unified/fle2v2-EncryptedFields-vs-jsonSchema.json new file mode 100644 index 0000000000..af24e9b369 --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-EncryptedFields-vs-jsonSchema.json @@ -0,0 +1,367 @@ +{ + "description": "fle2v2-EncryptedFields-vs-jsonSchema", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "7.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted", + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll_unencrypted", + "database": "db_unencrypted", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedIndexed", + "bsonType": "string", + "queries": { + "queryType": "equality", + "contention": { + "$numberLong": "0" + } + } + }, + { + "keyId": { + "$binary": { + "base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedUnindexed", + "bsonType": "string" + } + ] + }, + "validator": { + "$jsonSchema": { + "properties": {}, + "bsonType": "object" + } + } + } + }, + { + "databaseName": "keyvault", + 
"collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "encryptedFields is preferred over jsonSchema", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedIndexed": "123" + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedIndexed": "123" + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedIndexed": "123" + } + ] + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": 1, + "encryptedIndexed": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "31eCYlbQoVboc5zwC8IoyJVSkag9PxREka8dkmbXJeY=", + "subType": "00" + } + } + ] + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encryptedIndexed": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedIndexed", + "bsonType": "string", + "queries": { + "queryType": "equality", + "contention": { + "$numberLong": "0" + } + } + }, + { + "keyId": { + "$binary": { + "base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedUnindexed", + "bsonType": "string" + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "default", + "filter": { + "encryptedIndexed": { + "$eq": { + "$binary": { + "base64": "DIkAAAAFZAAgAAAAAPGmZcUzdE/FPILvRSyAScGvZparGI2y9rJ/vSBxgCujBXMAIAAAAACi1RjmndKqgnXy7xb22RzUbnZl1sOZRXPOC0KcJkAxmQVsACAAAAAApJtKPW4+o9B7gAynNLL26jtlB4+hq5TXResijcYet8USY20AAAAAAAAAAAAA", + "subType": "06" + } + } + } + }, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedIndexed", + "bsonType": "string", + "queries": { 
+ "queryType": "equality", + "contention": { + "$numberLong": "0" + } + } + }, + { + "keyId": { + "$binary": { + "base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedUnindexed", + "bsonType": "string" + } + ] + } + } + } + }, + "commandName": "find" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-EncryptedFieldsMap-defaults.json b/test/client-side-encryption/spec/unified/fle2v2-EncryptedFieldsMap-defaults.json new file mode 100644 index 0000000000..3727e43147 --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-EncryptedFieldsMap-defaults.json @@ -0,0 +1,139 @@ +{ + "description": "fle2v2-EncryptedFieldsMap-defaults", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "7.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + }, + "encryptedFieldsMap": { + "default.default": { + "fields": [] + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [] + } + ], + "tests": [ + { + "description": "default state collections are applied to encryptionInformation", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "foo": { + "$binary": { + "base64": "BYkAAAAFZAAgAAAAAE8KGPgq7h3n9nH5lfHcia8wtOTLwGkZNLBesb6PULqbBXMAIAAAAACq0558QyD3c3jkR5k0Zc9UpQK8ByhXhtn2d1xVQnuJ3AVjACAAAAAA1003zUWGwD4zVZ0KeihnZOthS3V6CEHUfnJZcIYHefISY20AAAAAAAAAAAAA", + "subType": "06" + } + } + } + }, + "object": "coll" + } + ], + "outcome": [ + { + "documents": [ + { + "_id": 1, + "foo": { + "$binary": { + "base64": "BYkAAAAFZAAgAAAAAE8KGPgq7h3n9nH5lfHcia8wtOTLwGkZNLBesb6PULqbBXMAIAAAAACq0558QyD3c3jkR5k0Zc9UpQK8ByhXhtn2d1xVQnuJ3AVjACAAAAAA1003zUWGwD4zVZ0KeihnZOthS3V6CEHUfnJZcIYHefISY20AAAAAAAAAAAAA", + "subType": "06" + } + } + } + ], + "collectionName": "default", + "databaseName": "default" + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "foo": { + "$binary": { + "base64": "BYkAAAAFZAAgAAAAAE8KGPgq7h3n9nH5lfHcia8wtOTLwGkZNLBesb6PULqbBXMAIAAAAACq0558QyD3c3jkR5k0Zc9UpQK8ByhXhtn2d1xVQnuJ3AVjACAAAAAA1003zUWGwD4zVZ0KeihnZOthS3V6CEHUfnJZcIYHefISY20AAAAAAAAAAAAA", + "subType": "06" + } + } + } + ], + "encryptionInformation": { + "type": { + "$numberInt": "1" + }, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [] + } + } + }, + "ordered": true + }, + "commandName": "insert" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-FindOneAndUpdate.json b/test/client-side-encryption/spec/unified/fle2v2-FindOneAndUpdate.json new file mode 100644 index 0000000000..5131dc9fef --- /dev/null +++ 
b/test/client-side-encryption/spec/unified/fle2v2-FindOneAndUpdate.json @@ -0,0 +1,622 @@ +{ + "description": "fle2v2-FindOneAndUpdate", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "7.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted", + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll_unencrypted", + "database": "db_unencrypted", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedIndexed", + "bsonType": "string", + "queries": { + "queryType": "equality", + "contention": { + "$numberLong": "0" + } + } + }, + { + "keyId": { + "$binary": { + "base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedUnindexed", + "bsonType": "string" + } + ] + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "findOneAndUpdate can query an FLE2 indexed field", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedIndexed": "value123" + } + }, + "object": "coll" + }, + { + "name": "findOneAndUpdate", + "arguments": { + "filter": { + "encryptedIndexed": "value123" + }, + "update": { + "$set": { + "foo": "bar" + } + }, + "returnDocument": "Before" + }, + "object": "coll", + "expectResult": { + "_id": 1, + "encryptedIndexed": "value123" + } + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": 1, + "encryptedIndexed": { + "$$type": "binData" + }, + "foo": "bar", + "__safeContent__": [ + { + "$binary": { + "base64": "ThpoKfQ8AkOzkFfNC1+9PF0pY2nIzfXvRdxQgjkNbBw=", + "subType": "00" + } + } + ] + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + 
"listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encryptedIndexed": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedIndexed", + "bsonType": "string", + "queries": { + "queryType": "equality", + "contention": { + "$numberLong": "0" + } + } + }, + { + "keyId": { + "$binary": { + "base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedUnindexed", + "bsonType": "string" + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "findAndModify": "default", + "query": { + "encryptedIndexed": { + "$eq": { + "$binary": { + "base64": "DIkAAAAFZAAgAAAAAPtVteJQAlgb2YMa/+7YWH00sbQPyt7L6Rb8OwBdMmL2BXMAIAAAAAAd44hgVKnEnTFlwNVC14oyc9OZOTspeymusqkRQj57nAVsACAAAAAAaZ9s3G+4znfxStxeOZwcZy1OhzjMGc5hjmdMN+b/w6kSY20AAAAAAAAAAAAA", + "subType": "06" + } + } + } + }, + "update": { + "$set": { + "foo": "bar" + } + }, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedIndexed", + "bsonType": "string", + "queries": { + "queryType": "equality", + "contention": { + "$numberLong": "0" + } + } + }, + { + "keyId": { + "$binary": { + "base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedUnindexed", + "bsonType": "string" + } + ] + } + } + } + }, + "commandName": "findAndModify" + } + } + ] + } + ] + }, + { + "description": "findOneAndUpdate can modify an FLE2 indexed field", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedIndexed": "value123" + } + }, + "object": "coll" + }, + { + "name": "findOneAndUpdate", + "arguments": { + "filter": { + "encryptedIndexed": "value123" + }, + "update": { + "$set": { + "encryptedIndexed": "value456" + } + }, + "returnDocument": "Before" + }, + "object": "coll", + "expectResult": { + "_id": 1, + "encryptedIndexed": "value123" + } + }, + { + "name": "find", + "arguments": { + "filter": { + "_id": 1 + } + }, + "object": "coll", + "expectResult": [ + { + "encryptedIndexed": "value456" + } + ] + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": 1, + "encryptedIndexed": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "rhe7/w8Ob8Unl44rGr/moScx6m5VODQnscDhF4Nkn6g=", + "subType": "00" + } + } + ] + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + 
"name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encryptedIndexed": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedIndexed", + "bsonType": "string", + "queries": { + "queryType": "equality", + "contention": { + "$numberLong": "0" + } + } + }, + { + "keyId": { + "$binary": { + "base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedUnindexed", + "bsonType": "string" + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "findAndModify": "default", + "query": { + "encryptedIndexed": { + "$eq": { + "$binary": { + "base64": "DIkAAAAFZAAgAAAAAPtVteJQAlgb2YMa/+7YWH00sbQPyt7L6Rb8OwBdMmL2BXMAIAAAAAAd44hgVKnEnTFlwNVC14oyc9OZOTspeymusqkRQj57nAVsACAAAAAAaZ9s3G+4znfxStxeOZwcZy1OhzjMGc5hjmdMN+b/w6kSY20AAAAAAAAAAAAA", + "subType": "06" + } + } + } + }, + "update": { + "$set": { + "encryptedIndexed": { + "$$type": "binData" + } + } + }, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedIndexed", + "bsonType": "string", + "queries": { + "queryType": "equality", + "contention": { + "$numberLong": "0" + } + } + }, + { + "keyId": { + "$binary": { + "base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedUnindexed", + "bsonType": "string" + } + ] + } + } + } + }, + "commandName": "findAndModify" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "default", + "filter": { + "_id": { + "$eq": 1 + } + } + }, + "commandName": "find" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-InsertFind-Indexed.json b/test/client-side-encryption/spec/unified/fle2v2-InsertFind-Indexed.json new file mode 100644 index 0000000000..8155797583 --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-InsertFind-Indexed.json @@ -0,0 +1,361 @@ +{ + "description": "fle2v2-InsertFind-Indexed", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "7.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": 
"client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted", + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll_unencrypted", + "database": "db_unencrypted", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedIndexed", + "bsonType": "string", + "queries": { + "queryType": "equality", + "contention": { + "$numberLong": "0" + } + } + }, + { + "keyId": { + "$binary": { + "base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedUnindexed", + "bsonType": "string" + } + ] + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "Insert and find FLE2 indexed field", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedIndexed": "123" + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedIndexed": "123" + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedIndexed": "123" + } + ] + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": 1, + "encryptedIndexed": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "31eCYlbQoVboc5zwC8IoyJVSkag9PxREka8dkmbXJeY=", + "subType": "00" + } + } + ] + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encryptedIndexed": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", 
+ "subType": "04" + } + }, + "path": "encryptedIndexed", + "bsonType": "string", + "queries": { + "queryType": "equality", + "contention": { + "$numberLong": "0" + } + } + }, + { + "keyId": { + "$binary": { + "base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedUnindexed", + "bsonType": "string" + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "default", + "filter": { + "encryptedIndexed": { + "$eq": { + "$binary": { + "base64": "DIkAAAAFZAAgAAAAAPGmZcUzdE/FPILvRSyAScGvZparGI2y9rJ/vSBxgCujBXMAIAAAAACi1RjmndKqgnXy7xb22RzUbnZl1sOZRXPOC0KcJkAxmQVsACAAAAAApJtKPW4+o9B7gAynNLL26jtlB4+hq5TXResijcYet8USY20AAAAAAAAAAAAA", + "subType": "06" + } + } + } + }, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedIndexed", + "bsonType": "string", + "queries": { + "queryType": "equality", + "contention": { + "$numberLong": "0" + } + } + }, + { + "keyId": { + "$binary": { + "base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedUnindexed", + "bsonType": "string" + } + ] + } + } + } + }, + "commandName": "find" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-InsertFind-Unindexed.json b/test/client-side-encryption/spec/unified/fle2v2-InsertFind-Unindexed.json new file mode 100644 index 0000000000..a6410bb9d8 --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-InsertFind-Unindexed.json @@ -0,0 +1,301 @@ +{ + "description": "fle2v2-InsertFind-Unindexed", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "7.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted", + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll_unencrypted", + "database": "db_unencrypted", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedIndexed", + "bsonType": "string", + "queries": { + "queryType": "equality", + "contention": { + "$numberLong": "0" + } + } + }, + { + "keyId": { + "$binary": { + "base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedUnindexed", + "bsonType": "string" + } + ] + } + } + }, + { + "databaseName": "keyvault", + 
"collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "HBk9BWihXExNDvTp1lUxOuxuZK2Pe2ZdVdlsxPEBkiO1bS4mG5NNDsQ7zVxJAH8BtdOYp72Ku4Y3nwc0BUpIKsvAKX4eYXtlhv5zUQxWdeNFhg9qK7qb8nqhnnLeT0f25jFSqzWJoT379hfwDeu0bebJHr35QrJ8myZdPMTEDYF08QYQ48ShRBli0S+QzBHHAQiM2iJNr4svg2WR8JSeWQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "Insert and find FLE2 unindexed field", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedUnindexed": "value123" + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "_id": 1 + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedUnindexed": "value123" + } + ] + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": 1, + "encryptedUnindexed": { + "$$type": "binData" + } + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encryptedUnindexed": { + "$$type": "binData" + } + } + ], + "ordered": true + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "default", + "filter": { + "_id": { + "$eq": 1 + } + } + }, + "commandName": "find" + } + } + ] + } + ] + }, + { + "description": "Query with an unindexed field fails", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedUnindexed": "value123" + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedUnindexed": "value123" + } + }, + "object": "coll", + "expectError": { + "errorContains": "encrypt" + } + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-MissingKey.json b/test/client-side-encryption/spec/unified/fle2v2-MissingKey.json new file mode 100644 index 0000000000..dc8ffc57b2 --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-MissingKey.json @@ -0,0 +1,137 @@ +{ + "description": "fle2v2-MissingKey", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "7.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + 
"observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [ + { + "encryptedUnindexed": { + "$binary": { + "base64": "BqvN76sSNJh2EjQSNFZ4kBICTQaVZPWgXp41I7mPV1rLFTtw1tXzjcdSEyxpKKqujlko5TeizkB9hHQ009dVY1+fgIiDcefh+eQrm3CkhQ==", + "subType": "06" + } + } + } + ], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedIndexed", + "bsonType": "string", + "queries": { + "queryType": "equality", + "contention": { + "$numberLong": "0" + } + } + }, + { + "keyId": { + "$binary": { + "base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedUnindexed", + "bsonType": "string" + } + ] + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [] + } + ], + "tests": [ + { + "description": "FLE2 encrypt fails with missing key", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedIndexed": "123" + } + }, + "object": "coll", + "expectError": { + "errorContains": "not all keys requested were satisfied" + } + } + ] + }, + { + "description": "FLE2 decrypt fails with missing key", + "operations": [ + { + "name": "find", + "arguments": { + "filter": {} + }, + "object": "coll", + "expectError": { + "errorContains": "not all keys requested were satisfied" + } + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-NoEncryption.json b/test/client-side-encryption/spec/unified/fle2v2-NoEncryption.json new file mode 100644 index 0000000000..4036fe5edd --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-NoEncryption.json @@ -0,0 +1,123 @@ +{ + "description": "fle2v2-NoEncryption", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "7.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [] + } + } + } + ], + "tests": [ + { + "description": "insert with no encryption succeeds", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "foo": "bar" + } + }, + "object": "coll" + } + ], + "outcome": [ + { + "documents": [ + { + "_id": 1, + "foo": "bar" + } + ], + "collectionName": "default", + "databaseName": "default" + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + 
"commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "foo": "bar" + } + ], + "ordered": true + }, + "commandName": "insert" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Compact.json b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Compact.json new file mode 100644 index 0000000000..8ccbcafc24 --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Compact.json @@ -0,0 +1,358 @@ +{ + "description": "fle2v2-Rangev2-Compact", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent", + "commandSucceededEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedInt", + "bsonType": "int", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberInt": "0" + }, + "max": { + "$numberInt": "200" + } + } + } + ] + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "Compact works with 'range' fields", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedInt": { + "$numberInt": "0" + } + } + }, + "object": "coll" + }, + { + "name": "runCommand", + "object": "db", + "arguments": { + "command": { + "compactStructuredEncryptionData": "default" + }, + "commandName": "compactStructuredEncryptionData" + } + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandSucceededEvent": { + "commandName": "listCollections", + "reply": { + "ok": 1 + } + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": 
[ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandSucceededEvent": { + "commandName": "find", + "reply": { + "ok": 1 + } + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 0, + "encryptedInt": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedInt", + "bsonType": "int", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberInt": "0" + }, + "max": { + "$numberInt": "200" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandSucceededEvent": { + "commandName": "insert", + "reply": { + "ok": 1 + } + } + }, + { + "commandStartedEvent": { + "command": { + "compactStructuredEncryptionData": "default", + "compactionTokens": { + "encryptedInt": { + "ecoc": { + "$binary": { + "base64": "noN+05JsuO1oDg59yypIGj45i+eFH6HOTXOPpeZ//Mk=", + "subType": "00" + } + }, + "anchorPaddingToken": { + "$binary": { + "base64": "QxKJD2If48p0l8NAXf2Kr0aleMd/dATSjBK6hTpNMyc=", + "subType": "00" + } + } + } + }, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedInt", + "bsonType": "int", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberInt": "0" + }, + "max": { + "$numberInt": "200" + } + } + } + ] + } + } + } + }, + "commandName": "compactStructuredEncryptionData" + } + }, + { + "commandSucceededEvent": { + "commandName": "compactStructuredEncryptionData", + "reply": { + "ok": 1 + } + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Date-Aggregate.json b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Date-Aggregate.json new file mode 100644 index 0000000000..7933cc5600 --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Date-Aggregate.json @@ -0,0 +1,574 @@ +{ + "description": "fle2v2-Rangev2-Date-Aggregate", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { 
+ "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted", + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll_unencrypted", + "database": "db_unencrypted", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDate", + "bsonType": "date", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$date": { + "$numberLong": "0" + } + }, + "max": { + "$date": { + "$numberLong": "200" + } + } + } + } + ] + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "FLE2 Range Date. 
Aggregate.", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDate": { + "$date": { + "$numberLong": "0" + } + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDate": { + "$date": { + "$numberLong": "1" + } + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDate": { + "$gt": { + "$date": { + "$numberLong": "0" + } + } + } + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedDate": { + "$date": { + "$numberLong": "1" + } + } + } + ] + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": 0, + "encryptedDate": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "RjBYT2h3ZAoHxhf8DU6/dFbDkEBZp0IxREcsRTu2MXs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "x7GR49EN0t3WXQDihkrbonK7qNIBYC87tpL/XEUyIYc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "JfYUqWF+OoGjiYkRI4L5iPlF+T1Eleul7Fki22jp4Qc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "q1RyGfIgsaQHoZFRw+DD28V26rN5hweApPLwExncvT8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "L2PFeKGvLS6C+DLudR6fGlBq3ERPvjWvRyNRIA2HVb0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "CWxaNqL3iP1yCixDkcmf9bmW3E5VeN8TJkg1jJe528s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "+vC6araOEo+fpW7PSIP40/EnzBCj1d2N10Jr3rrXJJM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6SV63Mf51Z6A6p2X3rCnJKCu6ku3Oeb45mBYbz+IoAo=", + "subType": "00" + } + } + ] + }, + { + "_id": 1, + "encryptedDate": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "25j9sQXZCihCmHKvTHgaBsAVZFcGPn7JjHdrCGlwyyw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "FA74j21GUEJb1DJBOpR9nVnjaDZnd8yAQNuaW9Qi26g=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "kJv//KVkbrobIBf+QeWC5jxn20mx/P0R1N6aCSMgKM8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "zB+Whi9IUUGxfLEe+lGuIzLX4LFbIhaIAm5lRk65QTc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ybO1QU3CgvhO8JgRXH+HxKszWcpl5aGDYYVa75fHa1g=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "X3Y3eSAbbMg//JgiHHiFpYOpV61t8kkDexI+CQyitH4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SlNHXyqVFGDPrX/2ppwog6l4pwj3PKda2TkZbqgfSfA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "McjV8xwTF3xI7863DYOBdyvIv6UpzThl6v9vBRk05bI=", + "subType": "00" + } + } + ] + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 0, + "encryptedDate": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": 
"enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDate", + "bsonType": "date", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$date": { + "$numberLong": "0" + } + }, + "max": { + "$date": { + "$numberLong": "200" + } + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encryptedDate": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDate", + "bsonType": "date", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$date": { + "$numberLong": "0" + } + }, + "max": { + "$date": { + "$numberLong": "200" + } + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "aggregate": "default", + "pipeline": [ + { + "$match": { + "encryptedDate": { + "$gt": { + "$binary": { + "base64": "DXUFAAADcGF5bG9hZAAZBQAABGcABQUAAAMwAH0AAAAFZAAgAAAAALGGQ/CRD+pGLD53BZzWcCcYbuGLVEyjzXIx7b+ux/q2BXMAIAAAAACOC6mXEZL27P9hethZbtKYsTXKK+FpgQ9Axxmn9N/cCwVsACAAAAAA+MFEd8XfZSpbXKqqPC2L3TEFswkaG5Ff6aSgf8p+XVIAAzEAfQAAAAVkACAAAAAAtL3QIvnZqCF72eS6lKr8ilff7R6kiNklokiTuaU5wNsFcwAgAAAAAEtqr3/X731VB+VrbFcY8ZrJKRo2E0Fd+C8L0EMNcvcCBWwAIAAAAABNPhSriux8W8qbwnhCczE3IzlhNEnGDpUwTFDZSL+eYQADMgB9AAAABWQAIAAAAAB99ZW/7KwXKzl5M3XQsAJ3JbEef90IoxFYBArNiYzlgQVzACAAAAAAYO/qaw0+92HAryxOUG7iK6hnIy3OaUA9jIqtHdvcq8YFbAAgAAAAAHrUYj8A0hVgc6VklpDiljOnykrUSfEsjm56XO/bsfKdAAMzAH0AAAAFZAAgAAAAAOK8brUuc2onBNDRtfYMR736dHj4dQqXod8JG7tAMTsDBXMAIAAAAAAW6SrGAL6Bx0s7ZlsYULFfOAiYIGhEWu6md3r+Rk40awVsACAAAAAAIHYXP8RLcCboUmHN3+OlnEw1DxaLSnbTB9PdF228fFAAAzQAfQAAAAVkACAAAAAAV22FGF7ZDwK/EYiGNMlm/QuT3saQdyJM/Fn+ZyQug1oFcwAgAAAAACo7GwCvbcs5UHQMgds9/1QMklEVdjZigpuOFGrDmmxtBWwAIAAAAADQbYYPxlCMMGe2MulbiurApFLoeJSMvTeDU3pyEA2jNwADNQB9AAAABWQAIAAAAADFspsMG7yHjKppyllon1KqAsTrHaZ6JzNqnSz8o6iTvwVzACAAAAAAeiA5pqVIQQ9s6UY/P8v5Jjkl3I7iFNeLDYehikrINrsFbAAgAAAAAFjBTzTpNxDEkA0vSRj0jCED9KDRlboMVyilKyDz5YR4AAM2AH0AAAAFZAAgAAAAAPcLmtq+V1e+MRlZ7NHq1+mrRVBQje5zj685ZvdsfKvSBXMAIAAAAABdHz/3w2k5km97QN9m7oLFYJaVJneNlMboIlz5yUASQAVsACAAAAAAWbp8JVJnx8fEVAJFa7WMfMa7wXeP5M3C8MX20J/i9n0AAzcAfQAAAAVkACAAAAAAYfLwnoxK6XAGQrJFy8+TIJoq38ldBaO75h4zA4ZX5tQFcwAgAAAAAC2wk8UcJH5X5XGnDBYmel6srpBkzBhHtt3Jw1u5TSJ1BWwAIAAAAAA9/YU9eI3D7QbXKIw/3/gzWJ6MZrCYhG0j1wNKgRQp5wADOAB9AAAABWQAIAAAAADGvyrtKkIcaV17ynZA7b2k5Pz6OhvxdWNkDvDWJIja8wVzACAAAAAAOLypVKNxf/wR1G8OZjUUsTQzDYeNNhhITxGMSp7euS4FbAAgAAAAAA9EsxoV1B2DcQ1NJRwuxXnvVR+vkD0wbbDYEI/zFEnDAAM5AH0AAAAFZAAgAAAAAEocREw1L0g+roFUchJI2Yd0M0ME2bnErNUYnpyJP1SqBXMAIAAAAAAcE2/JK/8MoSeOchIuAkKh1X3ImoA7p8ujAZIfvIDo6QVsACAAAAAA+W0+zgLr85/PD7P9a94wk6MgNgrizx/XU9aCxAkp1IwAABJjbQAAAAAAAAAAAAAQcGF5bG9hZElkAAAAAAAQZmlyc3RPcGVyYXRvcgABAAAAEnNwAAEAAAAAAAAAEHRmAAEAAAAJbW4AAAAAAAAAAAAJbXgAyAAAAAAAAAAA", + "subType": "06" + } + } + } + } + } + ], + "cursor": {}, + "encryptionInformation": { + "type": 1, + 
"schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDate", + "bsonType": "date", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$date": { + "$numberLong": "0" + } + }, + "max": { + "$date": { + "$numberLong": "200" + } + } + } + } + ] + } + } + } + }, + "commandName": "aggregate" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Date-Correctness.json b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Date-Correctness.json new file mode 100644 index 0000000000..9ed541fa8e --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Date-Correctness.json @@ -0,0 +1,1610 @@ +{ + "description": "fle2v2-Rangev2-Date-Correctness", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDate", + "bsonType": "date", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$date": { + "$numberLong": "0" + } + }, + "max": { + "$date": { + "$numberLong": "200" + } + } + } + } + ] + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "Find with $gt", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDate": { + "$date": { + "$numberLong": "0" + } + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDate": { + "$date": { + "$numberLong": "1" + } + } + } + }, + "object": 
"coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDate": { + "$gt": { + "$date": { + "$numberLong": "0" + } + } + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedDate": { + "$date": { + "$numberLong": "1" + } + } + } + ] + } + ] + }, + { + "description": "Find with $gte", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDate": { + "$date": { + "$numberLong": "0" + } + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDate": { + "$date": { + "$numberLong": "1" + } + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDate": { + "$gte": { + "$date": { + "$numberLong": "0" + } + } + } + }, + "sort": { + "_id": 1 + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDate": { + "$date": { + "$numberLong": "0" + } + } + }, + { + "_id": 1, + "encryptedDate": { + "$date": { + "$numberLong": "1" + } + } + } + ] + } + ] + }, + { + "description": "Find with $gt with no results", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDate": { + "$date": { + "$numberLong": "0" + } + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDate": { + "$date": { + "$numberLong": "1" + } + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDate": { + "$gt": { + "$date": { + "$numberLong": "1" + } + } + } + } + }, + "object": "coll", + "expectResult": [] + } + ] + }, + { + "description": "Find with $lt", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDate": { + "$date": { + "$numberLong": "0" + } + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDate": { + "$date": { + "$numberLong": "1" + } + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDate": { + "$lt": { + "$date": { + "$numberLong": "1" + } + } + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDate": { + "$date": { + "$numberLong": "0" + } + } + } + ] + } + ] + }, + { + "description": "Find with $lte", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDate": { + "$date": { + "$numberLong": "0" + } + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDate": { + "$date": { + "$numberLong": "1" + } + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDate": { + "$lte": { + "$date": { + "$numberLong": "1" + } + } + } + }, + "sort": { + "_id": 1 + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDate": { + "$date": { + "$numberLong": "0" + } + } + }, + { + "_id": 1, + "encryptedDate": { + "$date": { + "$numberLong": "1" + } + } + } + ] + } + ] + }, + { + "description": "Find with $lt below min", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDate": { + "$date": { + "$numberLong": "0" + } + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDate": { + "$date": { + "$numberLong": "1" + } + } + } + }, + "object": "coll" + }, + { + "name": 
"find", + "arguments": { + "filter": { + "encryptedDate": { + "$lt": { + "$date": { + "$numberLong": "0" + } + } + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "must be greater than the range minimum" + } + } + ] + }, + { + "description": "Find with $gt above max", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDate": { + "$date": { + "$numberLong": "0" + } + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDate": { + "$date": { + "$numberLong": "1" + } + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDate": { + "$gt": { + "$date": { + "$numberLong": "200" + } + } + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "must be less than the range maximum" + } + } + ] + }, + { + "description": "Find with $gt and $lt", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDate": { + "$date": { + "$numberLong": "0" + } + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDate": { + "$date": { + "$numberLong": "1" + } + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDate": { + "$gt": { + "$date": { + "$numberLong": "0" + } + }, + "$lt": { + "$date": { + "$numberLong": "2" + } + } + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedDate": { + "$date": { + "$numberLong": "1" + } + } + } + ] + } + ] + }, + { + "description": "Find with equality", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDate": { + "$date": { + "$numberLong": "0" + } + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDate": { + "$date": { + "$numberLong": "1" + } + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDate": { + "$date": { + "$numberLong": "0" + } + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDate": { + "$date": { + "$numberLong": "0" + } + } + } + ] + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDate": { + "$date": { + "$numberLong": "1" + } + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedDate": { + "$date": { + "$numberLong": "1" + } + } + } + ] + } + ] + }, + { + "description": "Find with full range", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDate": { + "$date": { + "$numberLong": "0" + } + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDate": { + "$date": { + "$numberLong": "1" + } + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDate": { + "$gte": { + "$date": { + "$numberLong": "0" + } + }, + "$lte": { + "$date": { + "$numberLong": "200" + } + } + } + }, + "sort": { + "_id": 1 + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDate": { + "$date": { + "$numberLong": "0" + } + } + }, + { + "_id": 1, + "encryptedDate": { + "$date": { + "$numberLong": "1" + } + } + } + ] + } + ] + }, + { + "description": "Find with $in", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDate": { + 
"$date": { + "$numberLong": "0" + } + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDate": { + "$date": { + "$numberLong": "1" + } + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDate": { + "$in": [ + { + "$date": { + "$numberLong": "0" + } + } + ] + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDate": { + "$date": { + "$numberLong": "0" + } + } + } + ] + } + ] + }, + { + "description": "Insert out of range", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDate": { + "$date": { + "$numberLong": "-1" + } + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "value must be greater than or equal to the minimum value" + } + } + ] + }, + { + "description": "Insert min and max", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDate": { + "$date": { + "$numberLong": "0" + } + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 200, + "encryptedDate": { + "$date": { + "$numberLong": "200" + } + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": {}, + "sort": { + "_id": 1 + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDate": { + "$date": { + "$numberLong": "0" + } + } + }, + { + "_id": 200, + "encryptedDate": { + "$date": { + "$numberLong": "200" + } + } + } + ] + } + ] + }, + { + "description": "Aggregate with $gte", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDate": { + "$date": { + "$numberLong": "0" + } + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDate": { + "$date": { + "$numberLong": "1" + } + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDate": { + "$gte": { + "$date": { + "$numberLong": "0" + } + } + } + } + }, + { + "$sort": { + "_id": 1 + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDate": { + "$date": { + "$numberLong": "0" + } + } + }, + { + "_id": 1, + "encryptedDate": { + "$date": { + "$numberLong": "1" + } + } + } + ] + } + ] + }, + { + "description": "Aggregate with $gt with no results", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDate": { + "$date": { + "$numberLong": "0" + } + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDate": { + "$date": { + "$numberLong": "1" + } + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDate": { + "$gt": { + "$date": { + "$numberLong": "1" + } + } + } + } + } + ] + }, + "object": "coll", + "expectResult": [] + } + ] + }, + { + "description": "Aggregate with $lt", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDate": { + "$date": { + "$numberLong": "0" + } + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDate": { + "$date": { + "$numberLong": "1" + } + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDate": { 
+ "$lt": { + "$date": { + "$numberLong": "1" + } + } + } + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDate": { + "$date": { + "$numberLong": "0" + } + } + } + ] + } + ] + }, + { + "description": "Aggregate with $lte", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDate": { + "$date": { + "$numberLong": "0" + } + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDate": { + "$date": { + "$numberLong": "1" + } + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDate": { + "$lte": { + "$date": { + "$numberLong": "1" + } + } + } + } + }, + { + "$sort": { + "_id": 1 + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDate": { + "$date": { + "$numberLong": "0" + } + } + }, + { + "_id": 1, + "encryptedDate": { + "$date": { + "$numberLong": "1" + } + } + } + ] + } + ] + }, + { + "description": "Aggregate with $lt below min", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDate": { + "$date": { + "$numberLong": "0" + } + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDate": { + "$date": { + "$numberLong": "1" + } + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDate": { + "$lt": { + "$date": { + "$numberLong": "0" + } + } + } + } + } + ] + }, + "object": "coll", + "expectError": { + "errorContains": "must be greater than the range minimum" + } + } + ] + }, + { + "description": "Aggregate with $gt above max", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDate": { + "$date": { + "$numberLong": "0" + } + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDate": { + "$date": { + "$numberLong": "1" + } + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDate": { + "$gt": { + "$date": { + "$numberLong": "200" + } + } + } + } + } + ] + }, + "object": "coll", + "expectError": { + "errorContains": "must be less than the range maximum" + } + } + ] + }, + { + "description": "Aggregate with $gt and $lt", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDate": { + "$date": { + "$numberLong": "0" + } + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDate": { + "$date": { + "$numberLong": "1" + } + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDate": { + "$gt": { + "$date": { + "$numberLong": "0" + } + }, + "$lt": { + "$date": { + "$numberLong": "2" + } + } + } + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedDate": { + "$date": { + "$numberLong": "1" + } + } + } + ] + } + ] + }, + { + "description": "Aggregate with equality", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDate": { + "$date": { + "$numberLong": "0" + } + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDate": { + "$date": { + 
"$numberLong": "1" + } + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDate": { + "$date": { + "$numberLong": "0" + } + } + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDate": { + "$date": { + "$numberLong": "0" + } + } + } + ] + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDate": { + "$date": { + "$numberLong": "1" + } + } + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedDate": { + "$date": { + "$numberLong": "1" + } + } + } + ] + } + ] + }, + { + "description": "Aggregate with full range", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDate": { + "$date": { + "$numberLong": "0" + } + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDate": { + "$date": { + "$numberLong": "1" + } + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDate": { + "$gte": { + "$date": { + "$numberLong": "0" + } + }, + "$lte": { + "$date": { + "$numberLong": "200" + } + } + } + } + }, + { + "$sort": { + "_id": 1 + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDate": { + "$date": { + "$numberLong": "0" + } + } + }, + { + "_id": 1, + "encryptedDate": { + "$date": { + "$numberLong": "1" + } + } + } + ] + } + ] + }, + { + "description": "Aggregate with $in", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDate": { + "$date": { + "$numberLong": "0" + } + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDate": { + "$date": { + "$numberLong": "1" + } + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDate": { + "$in": [ + { + "$date": { + "$numberLong": "0" + } + } + ] + } + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDate": { + "$date": { + "$numberLong": "0" + } + } + } + ] + } + ] + }, + { + "description": "Wrong type: Insert Double", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDate": { + "$numberDouble": "0" + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "Cannot encrypt element" + } + } + ] + }, + { + "description": "Wrong type: Find Double", + "operations": [ + { + "name": "find", + "arguments": { + "filter": { + "encryptedDate": { + "$gte": { + "$numberDouble": "0" + } + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "value type is a date" + } + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Date-Delete.json b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Date-Delete.json new file mode 100644 index 0000000000..ad05dd4e17 --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Date-Delete.json @@ -0,0 +1,505 @@ +{ + "description": "fle2v2-Rangev2-Date-Delete", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": 
"keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted" + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll_unencrypted", + "database": "db_unencrypted", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDate", + "bsonType": "date", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$date": { + "$numberLong": "0" + } + }, + "max": { + "$date": { + "$numberLong": "200" + } + } + } + } + ] + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "FLE2 Range Date. 
Delete.", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDate": { + "$date": { + "$numberLong": "0" + } + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDate": { + "$date": { + "$numberLong": "1" + } + } + } + }, + "object": "coll" + }, + { + "name": "deleteOne", + "arguments": { + "filter": { + "encryptedDate": { + "$gt": { + "$date": { + "$numberLong": "0" + } + } + } + } + }, + "object": "coll", + "expectResult": { + "deletedCount": 1 + } + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": 0, + "encryptedDate": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "RjBYT2h3ZAoHxhf8DU6/dFbDkEBZp0IxREcsRTu2MXs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "x7GR49EN0t3WXQDihkrbonK7qNIBYC87tpL/XEUyIYc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "JfYUqWF+OoGjiYkRI4L5iPlF+T1Eleul7Fki22jp4Qc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "q1RyGfIgsaQHoZFRw+DD28V26rN5hweApPLwExncvT8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "L2PFeKGvLS6C+DLudR6fGlBq3ERPvjWvRyNRIA2HVb0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "CWxaNqL3iP1yCixDkcmf9bmW3E5VeN8TJkg1jJe528s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "+vC6araOEo+fpW7PSIP40/EnzBCj1d2N10Jr3rrXJJM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6SV63Mf51Z6A6p2X3rCnJKCu6ku3Oeb45mBYbz+IoAo=", + "subType": "00" + } + } + ] + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 0, + "encryptedDate": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDate", + "bsonType": "date", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$date": { + "$numberLong": "0" + } + }, + "max": { + "$date": { + "$numberLong": "200" + } + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encryptedDate": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": 
"encryptedDate", + "bsonType": "date", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$date": { + "$numberLong": "0" + } + }, + "max": { + "$date": { + "$numberLong": "200" + } + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "delete": "default", + "deletes": [ + { + "q": { + "encryptedDate": { + "$gt": { + "$binary": { + "base64": "DXUFAAADcGF5bG9hZAAZBQAABGcABQUAAAMwAH0AAAAFZAAgAAAAALGGQ/CRD+pGLD53BZzWcCcYbuGLVEyjzXIx7b+ux/q2BXMAIAAAAACOC6mXEZL27P9hethZbtKYsTXKK+FpgQ9Axxmn9N/cCwVsACAAAAAA+MFEd8XfZSpbXKqqPC2L3TEFswkaG5Ff6aSgf8p+XVIAAzEAfQAAAAVkACAAAAAAtL3QIvnZqCF72eS6lKr8ilff7R6kiNklokiTuaU5wNsFcwAgAAAAAEtqr3/X731VB+VrbFcY8ZrJKRo2E0Fd+C8L0EMNcvcCBWwAIAAAAABNPhSriux8W8qbwnhCczE3IzlhNEnGDpUwTFDZSL+eYQADMgB9AAAABWQAIAAAAAB99ZW/7KwXKzl5M3XQsAJ3JbEef90IoxFYBArNiYzlgQVzACAAAAAAYO/qaw0+92HAryxOUG7iK6hnIy3OaUA9jIqtHdvcq8YFbAAgAAAAAHrUYj8A0hVgc6VklpDiljOnykrUSfEsjm56XO/bsfKdAAMzAH0AAAAFZAAgAAAAAOK8brUuc2onBNDRtfYMR736dHj4dQqXod8JG7tAMTsDBXMAIAAAAAAW6SrGAL6Bx0s7ZlsYULFfOAiYIGhEWu6md3r+Rk40awVsACAAAAAAIHYXP8RLcCboUmHN3+OlnEw1DxaLSnbTB9PdF228fFAAAzQAfQAAAAVkACAAAAAAV22FGF7ZDwK/EYiGNMlm/QuT3saQdyJM/Fn+ZyQug1oFcwAgAAAAACo7GwCvbcs5UHQMgds9/1QMklEVdjZigpuOFGrDmmxtBWwAIAAAAADQbYYPxlCMMGe2MulbiurApFLoeJSMvTeDU3pyEA2jNwADNQB9AAAABWQAIAAAAADFspsMG7yHjKppyllon1KqAsTrHaZ6JzNqnSz8o6iTvwVzACAAAAAAeiA5pqVIQQ9s6UY/P8v5Jjkl3I7iFNeLDYehikrINrsFbAAgAAAAAFjBTzTpNxDEkA0vSRj0jCED9KDRlboMVyilKyDz5YR4AAM2AH0AAAAFZAAgAAAAAPcLmtq+V1e+MRlZ7NHq1+mrRVBQje5zj685ZvdsfKvSBXMAIAAAAABdHz/3w2k5km97QN9m7oLFYJaVJneNlMboIlz5yUASQAVsACAAAAAAWbp8JVJnx8fEVAJFa7WMfMa7wXeP5M3C8MX20J/i9n0AAzcAfQAAAAVkACAAAAAAYfLwnoxK6XAGQrJFy8+TIJoq38ldBaO75h4zA4ZX5tQFcwAgAAAAAC2wk8UcJH5X5XGnDBYmel6srpBkzBhHtt3Jw1u5TSJ1BWwAIAAAAAA9/YU9eI3D7QbXKIw/3/gzWJ6MZrCYhG0j1wNKgRQp5wADOAB9AAAABWQAIAAAAADGvyrtKkIcaV17ynZA7b2k5Pz6OhvxdWNkDvDWJIja8wVzACAAAAAAOLypVKNxf/wR1G8OZjUUsTQzDYeNNhhITxGMSp7euS4FbAAgAAAAAA9EsxoV1B2DcQ1NJRwuxXnvVR+vkD0wbbDYEI/zFEnDAAM5AH0AAAAFZAAgAAAAAEocREw1L0g+roFUchJI2Yd0M0ME2bnErNUYnpyJP1SqBXMAIAAAAAAcE2/JK/8MoSeOchIuAkKh1X3ImoA7p8ujAZIfvIDo6QVsACAAAAAA+W0+zgLr85/PD7P9a94wk6MgNgrizx/XU9aCxAkp1IwAABJjbQAAAAAAAAAAAAAQcGF5bG9hZElkAAAAAAAQZmlyc3RPcGVyYXRvcgABAAAAEnNwAAEAAAAAAAAAEHRmAAEAAAAJbW4AAAAAAAAAAAAJbXgAyAAAAAAAAAAA", + "subType": "06" + } + } + } + }, + "limit": 1 + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDate", + "bsonType": "date", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$date": { + "$numberLong": "0" + } + }, + "max": { + "$date": { + "$numberLong": "200" + } + } + } + } + ] + } + } + } + }, + "commandName": "delete" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Date-FindOneAndUpdate.json b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Date-FindOneAndUpdate.json new file mode 100644 index 0000000000..55db0279c2 --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Date-FindOneAndUpdate.json @@ -0,0 +1,577 @@ +{ + "description": 
"fle2v2-Rangev2-Date-FindOneAndUpdate", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted" + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll_unencrypted", + "database": "db_unencrypted", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDate", + "bsonType": "date", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$date": { + "$numberLong": "0" + } + }, + "max": { + "$date": { + "$numberLong": "200" + } + } + } + } + ] + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "FLE2 Range Date. 
FindOneAndUpdate.", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDate": { + "$date": { + "$numberLong": "0" + } + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDate": { + "$date": { + "$numberLong": "1" + } + } + } + }, + "object": "coll" + }, + { + "name": "findOneAndUpdate", + "arguments": { + "filter": { + "encryptedDate": { + "$gt": { + "$date": { + "$numberLong": "0" + } + } + } + }, + "update": { + "$set": { + "encryptedDate": { + "$date": { + "$numberLong": "2" + } + } + } + }, + "returnDocument": "Before" + }, + "object": "coll", + "expectResult": { + "_id": 1, + "encryptedDate": { + "$date": { + "$numberLong": "1" + } + } + } + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": 0, + "encryptedDate": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "RjBYT2h3ZAoHxhf8DU6/dFbDkEBZp0IxREcsRTu2MXs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "x7GR49EN0t3WXQDihkrbonK7qNIBYC87tpL/XEUyIYc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "JfYUqWF+OoGjiYkRI4L5iPlF+T1Eleul7Fki22jp4Qc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "q1RyGfIgsaQHoZFRw+DD28V26rN5hweApPLwExncvT8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "L2PFeKGvLS6C+DLudR6fGlBq3ERPvjWvRyNRIA2HVb0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "CWxaNqL3iP1yCixDkcmf9bmW3E5VeN8TJkg1jJe528s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "+vC6araOEo+fpW7PSIP40/EnzBCj1d2N10Jr3rrXJJM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6SV63Mf51Z6A6p2X3rCnJKCu6ku3Oeb45mBYbz+IoAo=", + "subType": "00" + } + } + ] + }, + { + "_id": 1, + "encryptedDate": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "hyDcE6QQjPrYJaIS/n7evEZFYcm31Tj89CpEYGF45cI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "F08nMDWDZc+DbWM7XCEJNNCEYyinRmrvGP7EWhmp4is=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "cXH4688amcDc8kZOJq4UP8cE3R58Zl7e+Qo/1jyspps=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "uURBxvTp3FBCVkd+LPqyuY7d6rMW6SGIJQEPY/wtkZI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "jG3hax1L3RBp9t38vUt53FsBxgr/+Si/vVISpAylYpE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "kwtIW8MhH9Ky5xNjBx8gFA/SHh2YVphie7g5FGBzals=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "FHflwFuEMu4xX0ZApHi+pdlBH+oevAtXckCUb5Wv0xU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ty4cnzJdAlbQKnh7px3GEYjBnvO+jIOaKjoTRDtmh3M=", + "subType": "00" + } + } + ] + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 0, + "encryptedDate": { + "$$type": "binData" + } + } + ], + "ordered": true, 
+ "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDate", + "bsonType": "date", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$date": { + "$numberLong": "0" + } + }, + "max": { + "$date": { + "$numberLong": "200" + } + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encryptedDate": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDate", + "bsonType": "date", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$date": { + "$numberLong": "0" + } + }, + "max": { + "$date": { + "$numberLong": "200" + } + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "findAndModify": "default", + "query": { + "encryptedDate": { + "$gt": { + "$binary": { + "base64": "DXUFAAADcGF5bG9hZAAZBQAABGcABQUAAAMwAH0AAAAFZAAgAAAAALGGQ/CRD+pGLD53BZzWcCcYbuGLVEyjzXIx7b+ux/q2BXMAIAAAAACOC6mXEZL27P9hethZbtKYsTXKK+FpgQ9Axxmn9N/cCwVsACAAAAAA+MFEd8XfZSpbXKqqPC2L3TEFswkaG5Ff6aSgf8p+XVIAAzEAfQAAAAVkACAAAAAAtL3QIvnZqCF72eS6lKr8ilff7R6kiNklokiTuaU5wNsFcwAgAAAAAEtqr3/X731VB+VrbFcY8ZrJKRo2E0Fd+C8L0EMNcvcCBWwAIAAAAABNPhSriux8W8qbwnhCczE3IzlhNEnGDpUwTFDZSL+eYQADMgB9AAAABWQAIAAAAAB99ZW/7KwXKzl5M3XQsAJ3JbEef90IoxFYBArNiYzlgQVzACAAAAAAYO/qaw0+92HAryxOUG7iK6hnIy3OaUA9jIqtHdvcq8YFbAAgAAAAAHrUYj8A0hVgc6VklpDiljOnykrUSfEsjm56XO/bsfKdAAMzAH0AAAAFZAAgAAAAAOK8brUuc2onBNDRtfYMR736dHj4dQqXod8JG7tAMTsDBXMAIAAAAAAW6SrGAL6Bx0s7ZlsYULFfOAiYIGhEWu6md3r+Rk40awVsACAAAAAAIHYXP8RLcCboUmHN3+OlnEw1DxaLSnbTB9PdF228fFAAAzQAfQAAAAVkACAAAAAAV22FGF7ZDwK/EYiGNMlm/QuT3saQdyJM/Fn+ZyQug1oFcwAgAAAAACo7GwCvbcs5UHQMgds9/1QMklEVdjZigpuOFGrDmmxtBWwAIAAAAADQbYYPxlCMMGe2MulbiurApFLoeJSMvTeDU3pyEA2jNwADNQB9AAAABWQAIAAAAADFspsMG7yHjKppyllon1KqAsTrHaZ6JzNqnSz8o6iTvwVzACAAAAAAeiA5pqVIQQ9s6UY/P8v5Jjkl3I7iFNeLDYehikrINrsFbAAgAAAAAFjBTzTpNxDEkA0vSRj0jCED9KDRlboMVyilKyDz5YR4AAM2AH0AAAAFZAAgAAAAAPcLmtq+V1e+MRlZ7NHq1+mrRVBQje5zj685ZvdsfKvSBXMAIAAAAABdHz/3w2k5km97QN9m7oLFYJaVJneNlMboIlz5yUASQAVsACAAAAAAWbp8JVJnx8fEVAJFa7WMfMa7wXeP5M3C8MX20J/i9n0AAzcAfQAAAAVkACAAAAAAYfLwnoxK6XAGQrJFy8+TIJoq38ldBaO75h4zA4ZX5tQFcwAgAAAAAC2wk8UcJH5X5XGnDBYmel6srpBkzBhHtt3Jw1u5TSJ1BWwAIAAAAAA9/YU9eI3D7QbXKIw/3/gzWJ6MZrCYhG0j1wNKgRQp5wADOAB9AAAABWQAIAAAAADGvyrtKkIcaV17ynZA7b2k5Pz6OhvxdWNkDvDWJIja8wVzACAAAAAAOLypVKNxf/wR1G8OZjUUsTQzDYeNNhhITxGMSp7euS4FbAAgAAAAAA9EsxoV1B2DcQ1NJRwuxXnvVR+vkD0wbbDYEI/zFEnDAAM5AH0AAAAFZAAgAAAAAEocREw1L0g+roFUchJI2Yd0M0ME2bnErNUYnpyJP1SqBXMAIAAAAAAcE2/JK/8MoSeOchIuAkKh1X3ImoA7p8ujAZIfvIDo6QVsACAAAAAA+W0+zgLr85/PD7P9a94wk6MgNgrizx/XU9aCxAkp1IwAABJjbQAAAAAAAAAAAAAQcGF5bG9hZElkAAAAAAAQZmlyc3RPcGVyYXRvcgABAAAAEnNwAAEAAAAAAAAAEHRmAAEAAAAJbW4AAAAAAAAAAAAJbXgAyAAAAAAAAAAA", + "subType": "06" + } + } 
+ } + }, + "update": { + "$set": { + "encryptedDate": { + "$$type": "binData" + } + } + }, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDate", + "bsonType": "date", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$date": { + "$numberLong": "0" + } + }, + "max": { + "$date": { + "$numberLong": "200" + } + } + } + } + ] + } + } + } + }, + "commandName": "findAndModify" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Date-InsertFind.json b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Date-InsertFind.json new file mode 100644 index 0000000000..1fd1edf191 --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Date-InsertFind.json @@ -0,0 +1,562 @@ +{ + "description": "fle2v2-Rangev2-Date-InsertFind", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted" + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll_unencrypted", + "database": "db_unencrypted", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDate", + "bsonType": "date", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$date": { + "$numberLong": "0" + } + }, + "max": { + "$date": { + "$numberLong": "200" + } + } + } + } + ] + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + 
"provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "FLE2 Range Date. Insert and Find.", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDate": { + "$date": { + "$numberLong": "0" + } + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDate": { + "$date": { + "$numberLong": "1" + } + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDate": { + "$gt": { + "$date": { + "$numberLong": "0" + } + } + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedDate": { + "$date": { + "$numberLong": "1" + } + } + } + ] + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": 0, + "encryptedDate": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "RjBYT2h3ZAoHxhf8DU6/dFbDkEBZp0IxREcsRTu2MXs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "x7GR49EN0t3WXQDihkrbonK7qNIBYC87tpL/XEUyIYc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "JfYUqWF+OoGjiYkRI4L5iPlF+T1Eleul7Fki22jp4Qc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "q1RyGfIgsaQHoZFRw+DD28V26rN5hweApPLwExncvT8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "L2PFeKGvLS6C+DLudR6fGlBq3ERPvjWvRyNRIA2HVb0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "CWxaNqL3iP1yCixDkcmf9bmW3E5VeN8TJkg1jJe528s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "+vC6araOEo+fpW7PSIP40/EnzBCj1d2N10Jr3rrXJJM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6SV63Mf51Z6A6p2X3rCnJKCu6ku3Oeb45mBYbz+IoAo=", + "subType": "00" + } + } + ] + }, + { + "_id": 1, + "encryptedDate": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "25j9sQXZCihCmHKvTHgaBsAVZFcGPn7JjHdrCGlwyyw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "FA74j21GUEJb1DJBOpR9nVnjaDZnd8yAQNuaW9Qi26g=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "kJv//KVkbrobIBf+QeWC5jxn20mx/P0R1N6aCSMgKM8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "zB+Whi9IUUGxfLEe+lGuIzLX4LFbIhaIAm5lRk65QTc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ybO1QU3CgvhO8JgRXH+HxKszWcpl5aGDYYVa75fHa1g=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "X3Y3eSAbbMg//JgiHHiFpYOpV61t8kkDexI+CQyitH4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SlNHXyqVFGDPrX/2ppwog6l4pwj3PKda2TkZbqgfSfA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "McjV8xwTF3xI7863DYOBdyvIv6UpzThl6v9vBRk05bI=", + "subType": "00" + } + } + ] + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 0, + "encryptedDate": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 
1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDate", + "bsonType": "date", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$date": { + "$numberLong": "0" + } + }, + "max": { + "$date": { + "$numberLong": "200" + } + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encryptedDate": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDate", + "bsonType": "date", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$date": { + "$numberLong": "0" + } + }, + "max": { + "$date": { + "$numberLong": "200" + } + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "default", + "filter": { + "encryptedDate": { + "$gt": { + "$binary": { + "base64": "DXUFAAADcGF5bG9hZAAZBQAABGcABQUAAAMwAH0AAAAFZAAgAAAAALGGQ/CRD+pGLD53BZzWcCcYbuGLVEyjzXIx7b+ux/q2BXMAIAAAAACOC6mXEZL27P9hethZbtKYsTXKK+FpgQ9Axxmn9N/cCwVsACAAAAAA+MFEd8XfZSpbXKqqPC2L3TEFswkaG5Ff6aSgf8p+XVIAAzEAfQAAAAVkACAAAAAAtL3QIvnZqCF72eS6lKr8ilff7R6kiNklokiTuaU5wNsFcwAgAAAAAEtqr3/X731VB+VrbFcY8ZrJKRo2E0Fd+C8L0EMNcvcCBWwAIAAAAABNPhSriux8W8qbwnhCczE3IzlhNEnGDpUwTFDZSL+eYQADMgB9AAAABWQAIAAAAAB99ZW/7KwXKzl5M3XQsAJ3JbEef90IoxFYBArNiYzlgQVzACAAAAAAYO/qaw0+92HAryxOUG7iK6hnIy3OaUA9jIqtHdvcq8YFbAAgAAAAAHrUYj8A0hVgc6VklpDiljOnykrUSfEsjm56XO/bsfKdAAMzAH0AAAAFZAAgAAAAAOK8brUuc2onBNDRtfYMR736dHj4dQqXod8JG7tAMTsDBXMAIAAAAAAW6SrGAL6Bx0s7ZlsYULFfOAiYIGhEWu6md3r+Rk40awVsACAAAAAAIHYXP8RLcCboUmHN3+OlnEw1DxaLSnbTB9PdF228fFAAAzQAfQAAAAVkACAAAAAAV22FGF7ZDwK/EYiGNMlm/QuT3saQdyJM/Fn+ZyQug1oFcwAgAAAAACo7GwCvbcs5UHQMgds9/1QMklEVdjZigpuOFGrDmmxtBWwAIAAAAADQbYYPxlCMMGe2MulbiurApFLoeJSMvTeDU3pyEA2jNwADNQB9AAAABWQAIAAAAADFspsMG7yHjKppyllon1KqAsTrHaZ6JzNqnSz8o6iTvwVzACAAAAAAeiA5pqVIQQ9s6UY/P8v5Jjkl3I7iFNeLDYehikrINrsFbAAgAAAAAFjBTzTpNxDEkA0vSRj0jCED9KDRlboMVyilKyDz5YR4AAM2AH0AAAAFZAAgAAAAAPcLmtq+V1e+MRlZ7NHq1+mrRVBQje5zj685ZvdsfKvSBXMAIAAAAABdHz/3w2k5km97QN9m7oLFYJaVJneNlMboIlz5yUASQAVsACAAAAAAWbp8JVJnx8fEVAJFa7WMfMa7wXeP5M3C8MX20J/i9n0AAzcAfQAAAAVkACAAAAAAYfLwnoxK6XAGQrJFy8+TIJoq38ldBaO75h4zA4ZX5tQFcwAgAAAAAC2wk8UcJH5X5XGnDBYmel6srpBkzBhHtt3Jw1u5TSJ1BWwAIAAAAAA9/YU9eI3D7QbXKIw/3/gzWJ6MZrCYhG0j1wNKgRQp5wADOAB9AAAABWQAIAAAAADGvyrtKkIcaV17ynZA7b2k5Pz6OhvxdWNkDvDWJIja8wVzACAAAAAAOLypVKNxf/wR1G8OZjUUsTQzDYeNNhhITxGMSp7euS4FbAAgAAAAAA9EsxoV1B2DcQ1NJRwuxXnvVR+vkD0wbbDYEI/zFEnDAAM5AH0AAAAFZAAgAAAAAEocREw1L0g+roFUchJI2Yd0M0ME2bnErNUYnpyJP1SqBXMAIAAAAAAcE2/JK/8MoSeOchIuAkKh1X3ImoA7p8ujAZIfvIDo6QVsACAAAAAA+W0+zgLr85/PD7P9a94wk6MgNgrizx/XU9aCxAkp1IwAABJjbQAAAAAAAAAAAAAQcGF5bG9hZElkAAAAAAAQZmlyc3RPcGVyYXRvcgABAAAAEnNwAAEAAAAAAAAAEHRmAAEAAAAJbW4AAAAAAAAAAAAJbXgAyAAAAAAAAAAA", + "subType": "06" + } + } + } + }, + "encryptionInformation": { + 
"type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDate", + "bsonType": "date", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$date": { + "$numberLong": "0" + } + }, + "max": { + "$date": { + "$numberLong": "200" + } + } + } + } + ] + } + } + } + }, + "commandName": "find" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Date-Update.json b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Date-Update.json new file mode 100644 index 0000000000..d5153270d5 --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Date-Update.json @@ -0,0 +1,581 @@ +{ + "description": "fle2v2-Rangev2-Date-Update", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted" + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll_unencrypted", + "database": "db_unencrypted", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDate", + "bsonType": "date", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$date": { + "$numberLong": "0" + } + }, + "max": { + "$date": { + "$numberLong": "200" + } + } + } + } + ] + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "FLE2 Range Date. 
Update.", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDate": { + "$date": { + "$numberLong": "0" + } + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDate": { + "$date": { + "$numberLong": "1" + } + } + } + }, + "object": "coll" + }, + { + "name": "updateOne", + "arguments": { + "filter": { + "encryptedDate": { + "$gt": { + "$date": { + "$numberLong": "0" + } + } + } + }, + "update": { + "$set": { + "encryptedDate": { + "$date": { + "$numberLong": "2" + } + } + } + } + }, + "object": "coll", + "expectResult": { + "matchedCount": 1, + "modifiedCount": 1, + "upsertedCount": 0 + } + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": 0, + "encryptedDate": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "RjBYT2h3ZAoHxhf8DU6/dFbDkEBZp0IxREcsRTu2MXs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "x7GR49EN0t3WXQDihkrbonK7qNIBYC87tpL/XEUyIYc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "JfYUqWF+OoGjiYkRI4L5iPlF+T1Eleul7Fki22jp4Qc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "q1RyGfIgsaQHoZFRw+DD28V26rN5hweApPLwExncvT8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "L2PFeKGvLS6C+DLudR6fGlBq3ERPvjWvRyNRIA2HVb0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "CWxaNqL3iP1yCixDkcmf9bmW3E5VeN8TJkg1jJe528s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "+vC6araOEo+fpW7PSIP40/EnzBCj1d2N10Jr3rrXJJM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6SV63Mf51Z6A6p2X3rCnJKCu6ku3Oeb45mBYbz+IoAo=", + "subType": "00" + } + } + ] + }, + { + "_id": 1, + "encryptedDate": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "hyDcE6QQjPrYJaIS/n7evEZFYcm31Tj89CpEYGF45cI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "F08nMDWDZc+DbWM7XCEJNNCEYyinRmrvGP7EWhmp4is=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "cXH4688amcDc8kZOJq4UP8cE3R58Zl7e+Qo/1jyspps=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "uURBxvTp3FBCVkd+LPqyuY7d6rMW6SGIJQEPY/wtkZI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "jG3hax1L3RBp9t38vUt53FsBxgr/+Si/vVISpAylYpE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "kwtIW8MhH9Ky5xNjBx8gFA/SHh2YVphie7g5FGBzals=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "FHflwFuEMu4xX0ZApHi+pdlBH+oevAtXckCUb5Wv0xU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ty4cnzJdAlbQKnh7px3GEYjBnvO+jIOaKjoTRDtmh3M=", + "subType": "00" + } + } + ] + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 0, + "encryptedDate": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + 
"default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDate", + "bsonType": "date", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$date": { + "$numberLong": "0" + } + }, + "max": { + "$date": { + "$numberLong": "200" + } + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encryptedDate": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDate", + "bsonType": "date", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$date": { + "$numberLong": "0" + } + }, + "max": { + "$date": { + "$numberLong": "200" + } + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "update": "default", + "ordered": true, + "updates": [ + { + "q": { + "encryptedDate": { + "$gt": { + "$binary": { + "base64": "DXUFAAADcGF5bG9hZAAZBQAABGcABQUAAAMwAH0AAAAFZAAgAAAAALGGQ/CRD+pGLD53BZzWcCcYbuGLVEyjzXIx7b+ux/q2BXMAIAAAAACOC6mXEZL27P9hethZbtKYsTXKK+FpgQ9Axxmn9N/cCwVsACAAAAAA+MFEd8XfZSpbXKqqPC2L3TEFswkaG5Ff6aSgf8p+XVIAAzEAfQAAAAVkACAAAAAAtL3QIvnZqCF72eS6lKr8ilff7R6kiNklokiTuaU5wNsFcwAgAAAAAEtqr3/X731VB+VrbFcY8ZrJKRo2E0Fd+C8L0EMNcvcCBWwAIAAAAABNPhSriux8W8qbwnhCczE3IzlhNEnGDpUwTFDZSL+eYQADMgB9AAAABWQAIAAAAAB99ZW/7KwXKzl5M3XQsAJ3JbEef90IoxFYBArNiYzlgQVzACAAAAAAYO/qaw0+92HAryxOUG7iK6hnIy3OaUA9jIqtHdvcq8YFbAAgAAAAAHrUYj8A0hVgc6VklpDiljOnykrUSfEsjm56XO/bsfKdAAMzAH0AAAAFZAAgAAAAAOK8brUuc2onBNDRtfYMR736dHj4dQqXod8JG7tAMTsDBXMAIAAAAAAW6SrGAL6Bx0s7ZlsYULFfOAiYIGhEWu6md3r+Rk40awVsACAAAAAAIHYXP8RLcCboUmHN3+OlnEw1DxaLSnbTB9PdF228fFAAAzQAfQAAAAVkACAAAAAAV22FGF7ZDwK/EYiGNMlm/QuT3saQdyJM/Fn+ZyQug1oFcwAgAAAAACo7GwCvbcs5UHQMgds9/1QMklEVdjZigpuOFGrDmmxtBWwAIAAAAADQbYYPxlCMMGe2MulbiurApFLoeJSMvTeDU3pyEA2jNwADNQB9AAAABWQAIAAAAADFspsMG7yHjKppyllon1KqAsTrHaZ6JzNqnSz8o6iTvwVzACAAAAAAeiA5pqVIQQ9s6UY/P8v5Jjkl3I7iFNeLDYehikrINrsFbAAgAAAAAFjBTzTpNxDEkA0vSRj0jCED9KDRlboMVyilKyDz5YR4AAM2AH0AAAAFZAAgAAAAAPcLmtq+V1e+MRlZ7NHq1+mrRVBQje5zj685ZvdsfKvSBXMAIAAAAABdHz/3w2k5km97QN9m7oLFYJaVJneNlMboIlz5yUASQAVsACAAAAAAWbp8JVJnx8fEVAJFa7WMfMa7wXeP5M3C8MX20J/i9n0AAzcAfQAAAAVkACAAAAAAYfLwnoxK6XAGQrJFy8+TIJoq38ldBaO75h4zA4ZX5tQFcwAgAAAAAC2wk8UcJH5X5XGnDBYmel6srpBkzBhHtt3Jw1u5TSJ1BWwAIAAAAAA9/YU9eI3D7QbXKIw/3/gzWJ6MZrCYhG0j1wNKgRQp5wADOAB9AAAABWQAIAAAAADGvyrtKkIcaV17ynZA7b2k5Pz6OhvxdWNkDvDWJIja8wVzACAAAAAAOLypVKNxf/wR1G8OZjUUsTQzDYeNNhhITxGMSp7euS4FbAAgAAAAAA9EsxoV1B2DcQ1NJRwuxXnvVR+vkD0wbbDYEI/zFEnDAAM5AH0AAAAFZAAgAAAAAEocREw1L0g+roFUchJI2Yd0M0ME2bnErNUYnpyJP1SqBXMAIAAAAAAcE2/JK/8MoSeOchIuAkKh1X3ImoA7p8ujAZIfvIDo6QVsACAAAAAA+W0+zgLr85/PD7P9a94wk6MgNgrizx/XU9aCxAkp1IwAABJjbQAAAAAAAAAAAAAQcGF5bG9hZElkAAAAAAAQZmlyc3RPcGVyYXRvcgABAAAAEnNwAAEAAAAAAAAAEHRmAAEAAAAJbW4AAAAAAAAAAAAJbXgAyAAAAAAAAAAA", + "subType": "06" + } + } + } + }, + "u": { + "$set": { 
+ "encryptedDate": { + "$$type": "binData" + } + } + }, + "multi": false, + "upsert": false + } + ], + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDate", + "bsonType": "date", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$date": { + "$numberLong": "0" + } + }, + "max": { + "$date": { + "$numberLong": "200" + } + } + } + } + ] + } + } + }, + "$db": "default" + }, + "commandName": "update" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Decimal-Aggregate.json b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Decimal-Aggregate.json new file mode 100644 index 0000000000..712a68be32 --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Decimal-Aggregate.json @@ -0,0 +1,1965 @@ +{ + "description": "fle2v2-Rangev2-Decimal-Aggregate", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.0.0", + "topologies": [ + "replicaset" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted" + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll_unencrypted", + "database": "db_unencrypted", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDecimalNoPrecision", + "bsonType": "decimal", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + } + } + } + ] + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "FLE2 Range Decimal. 
Aggregate.", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "1" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDecimalNoPrecision": { + "$gt": { + "$numberDecimal": "0" + } + } + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "1" + } + } + ] + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": { + "$numberInt": "0" + }, + "encryptedDecimalNoPrecision": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "rbf3AeBEv4wWFAKknqDxRW5cLNkFvbIs6iJjc6LShQY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "n+XAuFnP8Dov9TnhGFxNx0K/MnVM9WbJ7RouEu0ndO0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yRXojuVdn5GQtD97qYlaCL6cOLmZ7Cvcb3wFjkLUIdM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "DuIkdRPITRs55I4SZmgomAHCIsDQmXRhW8+MOznkzSk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SsBk+Et1lTbU+QRPx+xyJ/jMkmfG+QCvQEpip2YYrzA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "crCIzOd8KhHvvUlX7M1v9bhvU4pLdTc+X2SuqoKU5Ek=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "YOWdCw4UrqnxkAaVjqmC4sKQDMVMHEpFGnlxpxdaU6E=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "M3SShp81Ff8tQ632qKbv9MUcN6wjDaBReI0VXNu6Xh4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "gzHlSPxpM0hT75kQvWFzGlOxKvDoiKQZOr19V6l2zXI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "s3JnppOGYw9SL2Q1kMAZs948v2F5PrpXjGei/HioDWs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "cG6+3Gk/zEH68P/uuuwiAUVCuyJwa1LeV+t29FlPPAo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dupdvR3AyJtM+g9NDKiaLVOtGca387JQp8w+V03m7Ig=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "JqEQc5svj2jTvZ6LLA5ivE+kTb/0aRemSEmxk4G7Zrg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "szcXXXKnob+p3SoM4yED2R920LeJ7cVsclPMFTe4CeI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "o1QoGVXmuBdHwHm7aCtGMlMVKrjFdYvJXpoq6uhIAZ0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Jfm5wPlqqLCJRGQIqRq2NGmpn7s0Vrih2H3YAOoI2YU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "zMHLb8ARbsYo8Ld05bqnGFf1Usha6EGb8QKwdSAyps0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yQdtq9lh5pugL7/i0Bj/PuZUUBUIzf+7wj1rl5y736w=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "wGWVZdO7qIuyDg/BqDgqjgoQ02h5YYgwXQB1oCin2NE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "by9HMLj6NTEpgztZ5HSN6GxImkXPcaFINYDzgZY33X8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tWo0vbasi7bXmn/MsOx13VC1IsWtpx/nYp0uj4iMzdA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tQQpndUYd5O87lOtrGjH3wl9VsOK0ray7RMasL90sBM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "cQjXEDCMsOpKLLf+vlTgIHA+cbSJdzqhbSX9Wvh95aA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "7yMpU48IxK9SzP2cx3VnTownGEwFmeFofuuFT97SuuY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "kSOx1kz0CmBgzKQHZlo65ZUY1DIv9A99JRm+Us2y6Ew=", + "subType": "00" + } + 
}, + { + "$binary": { + "base64": "ubQpdPBe6/xvtr+AcXdfYLSvYCR4ot0tivehkCsupb4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xal+iCJ6FTefRQToyoNksc9NCZShyn04NDGi4IYrcoM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "d7jU4iOK50xHxlkSifcxlZFCM46TSgQzoYivxG3HNLY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tJvl2nsBLBVzL3pp6sKWCL4UXeh3q/roYBJjSb74ve0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "OIUCaKRvIx9t1w6Hxlz1IcQTdPNCfdRNwnnTm10W+X0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "A9tvzsiElotOUVIB4CqfQp9mAwqvTM35YkmAR170aHA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "lI8gpK7hpb7c9x4RQugsxMnQay5LZJmwslZdvMx/dcE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dNCzh40U0XvdKnSDi3HRQOWQftEsDVqc4uUvsVFGoq8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "IP+iwEBWBwVVZIdpaMu8k5+soFCz+TZkYn3drKZ9grE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "pnqyh6e0y5svHkJDShlN9CHV0WvMBE4QbtJpQw5ZCXc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "elEl42tbVDoRTLjAhZUFEtXiut4b3PVhg/1ZLZSQdtE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "vHuu2FxwclMHqyE6JBYbTYgbEkB0dqb/JuaxsvfwsmY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xTf7NCe3Gf8QpE78HR5OknlLTKfs9J+RN9UZpH6fnso=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "XiWSasRnJAulGR6+LCVD3mwRObXylqYWR9jvpywq12c=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "MZMxEQ5ikx0PG1YFIExv0UnTZogsvgeOEZTpzvBDn4w=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yZMyMZBDrWbAhvnic7vvIYhmO9m5H2iuv0c8KNZrBzY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xxM14hTPY5j0vvcK2C7YAEjzdsfUTFHozHC0hEo1bxI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "+01rqR1xVwkpGXcstbk1ItJqFVjH6Q8MGxEN3Cm9Y1A=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xOpLV0Z2VTRJ3iWtnWZcsyjXubTIkYWo31cO+HV1o1k=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "BWUOLqgLBqc5NwxVlSV5H3KFQPXbCp7mdo+jF+8cJqY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "fuQb1S6xZDGlrEbK+kI23aL53PP1PVNwqICnZNt9Yzg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SfscnoibFttahLdPVC4Ee+47ewGFKpDSU7M6HX19bKE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "rpSW2awybNVeKtat91VFxqbINoTfNhPfQAu+d73Xtf8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "9M/CP9ccOIIj2LLFmE0GFDO0Ban2wsNalEXfM6+h+1s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "WrEMG49l1ye4MhXs5ZS9tz8P6h+hDvthIg/2wW9ne1Q=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ImNhbfeyfH8qIEeA5ic0s3dAQBdzzTBS+CPsNih9vZ0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dWP33YDSn04UKJN2ogh2Rui0iW/0q2y18OCDRVcfyoo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "lYv0isAtfGh6H9tdp3cp2eHU7q2J+uk7QrgcxtK3w7Y=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "VGMoamB/+7zTOYcY/pqJc96xlv2PdW4hwsIAEIslTDQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yNeBWMF7BnD9wVwz2PgJsvWr77QiVvvWUvJF0+fqBug=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SfpvObJ+tJBXSvqeN7vlOfmhYign635lciYAJIjUtY8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dsen4NqjzVGjpjufiTMs3+gqeD09EbnuogPgxrJECwg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "pxCWVM3sn19NsFEpgHbgLa+PmYlhN3mMiP0Wk8kJhYw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": 
"q11KNvJszjYIB9n9HcC+N4uz11a3eRj1L3BH9scKMDQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "A1PmkgcEToWh1JiVWE6mI5jUu7poxWWuCUt/cgRUUDc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "qJo3Hu4PJeanL7XEaWXO/n3YsodhZyd+MJOOmB9Kpd8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "BkBKLO8URFscfRY9Bav/1+L9mLohDgNr/MkZtGiraIs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "rZq5WA3Hx3xthOyHAJXK//f8pE2qbz7YKu3TIMp9GFY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "X07a/Lm80p5xd4RFs1dNmw+90tmPDPdGiAKVZkxd4zY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6YrBn2ofIw1b5ooakrLOwF41BWrps8OO0H9WH4/rtlE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "0l86Ag5OszXpa78SlOUV3K9nff5iC1p0mRXtLg9M1s4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Hn6yuxFHodeyu7ISlhYrbSf9pTiH4TDEvbYLWjTwFO0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "zdf4y2etKBuIpkEU1zMwoCkCsdisfXZCh8QPamm+drY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "rOQ9oMdiK5xxGH+jPzOvwVqdGGnF3+HkJXxn81s6hp4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "61aKKsE3+BJHHWYvs3xSIBvlRmKswmaOo5rygQJguUg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "KuDb/GIzqDM8wv7m7m8AECiWJbae5EKKtJRugZx7kR0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Q+t8t2TmNUiCIorVr9F3AlVnX+Mpt2ZYvN+s8UGict8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tJRZIpKxUgHyL83kW8cvfjkxN3z6WoNnUg+SQw+LK+k=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "pnUsYjip8SvW0+m9mR5WWTkpK+p6uwJ6yBUAlBnFKMk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "PArHlz+yPRYDycAP/PgnI/AkP8Wgmfg++Vf4UG1Bf0E=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "wnIh53Q3jeK8jEBe1n8kJLa89/H0BxO26ZU8SRIAs9Q=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "4F8U59gzBLGhq58PEWQk2nch+R0Va7eTUoxMneReUIA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ihKagIW3uT1dm22ROr/g5QaCpxZVj2+Fs/YSdM2Noco=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "EJtUOOwjkrPUi9mavYAi+Gom9Y2DuFll7aDwo4mq0M0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dIkr8dbaVRQFskAVT6B286BbcBBt1pZPEOcTZqk4ZcI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "aYVAcZYkH/Tieoa1XOjE/zCy5AJcVTHjS0NG2QB7muA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "sBidL6y8TenseetpioIAAtn0lK/7C8MoW4JXpVYi3z8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "0Dd2klU/t4R86c2WJcJDAd57k/N7OjvYSO5Vf8KH8sw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "I3jZ92WEVmZmgaIkLbuWhBxl7EM6bEjiEttgBJunArA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "aGHoQMlgJoGvArjfIbc3nnkoc8SWBxcrN7hSmjMRzos=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "bpiWPnF/KVBQr5F6MEwc5ZZayzIRvQOLDAm4ntwOi8g=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tI7QVKbE6avWgDD9h4QKyFlnTxFCwd2iLySKakxNR/I=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "XGsge0CnoaXgE3rcpKm8AEeku5QVfokS3kcI+JKV1lk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "JQxlryW2Q5WOwfrjAnaZxDvC83Dg6sjRVP5zegf2WiM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "YFuHKJOfoqp1iGVxoFjx7bLYgVdsN4GuUFxEgO9HJ5s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Z6vUdiCR18ylKomf08uxcQHeRtmyav7/Ecvzz4av3k4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SPGo1Ib5AiP/tSllL7Z5PAypvnKdwJLzt8imfIMSEJQ=", + 
"subType": "00" + } + }, + { + "$binary": { + "base64": "m94Nh6PFFQFLIib9Cu5LAKavhXnagSHG6F5EF8lD96I=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "pfEkQI98mB+gm1+JbmVurPAODMFPJ4E8DnqfVyUWbSo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "DNj3OVRLbr43s0vd+rgWghOL3FqeO/60npdojC8Ry/M=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "kAYIQrjHVu49W8FTxyxJeiLVRWWjC9fPcBn+Hx1F+Ss=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "aCSO7UVOpoQvu/iridarxkxV1SVxU1i9HVSYXUAeXk4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Gh6hTP/yj1IKlXQ+Q69KTfMlGZjEcXoRLGbQHNFo/1s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "/gDgIFQ4tAlJk3GN48IS5Qa5IPmErwGk8CHxAbp6gs0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "PICyimwPjxpusyKxNssOOwUotAUbygpyEtORsVGXT8g=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "4lu+cBHyAUvuxC6JUNyHLzHsCogGSWFFnUCkDwfQdgI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "pSndkmoNUJwXjgkbkgOrT5f9nSvuoMEZOkwAN9ElRaE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tyW+D4i26QihNM5MuBM+wnt5AdWGSJaJ4X5ydc9iWTU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "9Syjr8RoxUgPKr+O5rsCu07AvcebA4P8IVKyS1NVLWc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "67tPfDYnK2tmrioI51fOBG0ygajcV0pLo5+Zm/rEW7U=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "y0EiPRxYTuS1eVTIaPQUQBBxwkyxNckbePvKgChwd0M=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "NWd+2veAaeXQgR3vCvzlI4R1WW67D5YsVLdoXfdb8qg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "PY5RQqKQsL2GqBBSPNOEVpojNFRX/NijCghIpxD6CZk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "lcvwTyEjFlssCJtdjRpdN6oY+C7bxZY+WA+QAqzj9zg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "CWE7XRNylvTwO/9Fv56dNqUaQWMmESNS/GNIwgBaEI0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ijwlrUeS8nRYqK1F8kiCYF0mNDolEZS+/lJO1Lg93C8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "8KzV+qYGYuIjoNj8eEpnTuHrMYuhzphl80rS6wrODuU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "wDyTLjSEFF895hSQsHvmoEQVS6KIkZOtq1c9dVogm9I=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SGrtPuMYCjUrfKF0Pq/thdaQzmGBMUvlwN3ORIu9tHU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "KySHON3hIoUk4xWcwTqk6IL0kgjzjxgMBObVIkCGvk4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "hBIdS9j0XJPeT4ot73ngELkpUoSixvRBvdOL9z48jY8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Tx6um0q9HjS5ZvlFhvukpI6ORnyrXMWVW1OoxvgqII0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "zFKlyfX5H81+d4A4J3FKn4T5JfG+OWtR06ddyX4Mxas=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "cGgCDuPV7MeMMYEDpgOupqyNP4BQ4H7rBnd2QygumgM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "IPaUoy98v11EoglTpJ4kBlEawoZ8y7BPwzjLYBpkvHQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Pfo4Am6tOWAyZNn8G9W5HWWGC3ZWmX0igI/RRB870Ro=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "fnTSjd7bC1Udoq6iM7UDnHAC/lsIXSHp/Gy332qw+/I=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "fApBgVRrTDyEumkeWs5p3ag9KB48SbU4Si0dl7Ns9rc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "QxudfBItgoCnUj5NXVnSmWH3HK76YtKkMmzn4lyyUYY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "sSOvwhKa29Wq94bZ5jGIiJQGbG1uBrKSBfOYBz/oZeI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": 
"FdaMgwwJ0NKsqmPZLC5oE+/0D74Dfpvig3LaI5yW5Fs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "sRWBy12IERN43BSZIrnBfC9+zFBUdvjTlkqIH81NGt4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "/4tIRpxKhoOwnXAiFn1Z7Xmric4USOIfKvTYQXk3QTc=", + "subType": "00" + } + } + ] + }, + { + "_id": { + "$numberInt": "1" + }, + "encryptedDecimalNoPrecision": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "RGTjNVEsNJb+DG7DpPOam8rQWD5HZAMpRyiTQaw7tk8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "I93Md7QNPGmEEGYU1+VVCqBPBEvXdqHPtTJtMOn06Yk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "GecBFQ1PemlECWZWCl7f74vmsL6eB6mzQ9n6tK6FYfs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "QpjhZl+O1ORifgtCZuWAdcP6OKL7IZ2cA46v8FJcV28=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "RlQWwhU+uVv0a+9IB5cUkEfvHBvOw3B1Sx6WfPWMqes=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ubb81XTC7U+4tcNzf1oYvOY6gR5hC2Izqx54f4GuJ0E=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6M4Q5NMQ9TqNnjzGOxIkiUIY8TEL0I3XD1QnhefQUqU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "BtInzk9t2FFMCEY6AQ7zN8jwrrZEs2irSv6q0Q4NaIw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6vxXfETu9cuBIpRBo3jUUU04mJIH/aAhLX8K6VI5Xv0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "wXPCdS+q23zi1bkPnaVG2j0PsVtxdeSLJ//h6J1x8RU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "KY3KkfBAsN2l80wbpj41G0gwBR5KmmFnZcagg7D3ENk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tI8NFAxXCX4VOnY5X73K6KI/Yspd3aR94KV39MhJlAw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "nFxH0UC3mATKA6Vboz+QX/hAjj19kF/SH6H5Cne7qC0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "q8hYqIYaIi7nOdG/7qQZYnz8Bsacfi66M1nVku4SH08=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "4saA92R4arp4anvD9xFtze+sNcQqTEhPHyl1h70A8NE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "DbIziOBRRyeQS6RtBR09E37LV+CTKrEjGoRMLSpG6eE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Fv80Plp/7w2gnVqrwawLd6qhJ10G4NCDm3re67cNq4Y=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "T/T2oiQCBBES4YN7EodzPRdabZSFlYIClHBym+bQUZE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ZQgHD3l46Ujqtbnj1VbbeM29C9wJzOhz+yZ/7XdSrxk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ltlFKzWvyZvHxDFOYDd/XXJ6kUiJj0ln2HTCEz2o4Z4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "flW8A7bltC1u8bzx0WJtxosGJdOVsJFfbx33jxnpFGg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SXO+92QbMKwUSG2t27ciunV1c3VvFkUuDmSczpRe008=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "+KioGs1GM+xRBzFE67ePTWj04KMSE5/Y6qUF7nJ5kvU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "L3xNVbh6YH+RzqABN+5Jgb7T234Efpn766DmUvxIxgg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "hPF+60mBYPjh21dEmPlBhKgyc9S2qLtTkypYvnqP2Fc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "EletRsETy2HcjaPIm2c8CkT7ch/P3pJJDC8hasepcSU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "r5bMXUaNKqLPxZ+TG9HYTG4aSDgcpim27rN8rQFkM0w=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "0Q7Erdr8+/S0wUEDDIqlS5XjBVWvhZY65K0uUDb6+Ns=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xEcnhXy35hbXNVBPOOt3TUHbxvKfQ48KjA9b6/rbMqQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": 
"T8bEpiQNgsEudXvyKE9SZlSvbpV/LUaslsdqgSFltyo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "hIoiaF2YjnxDbODfhFEB+JGZ5nf8suD3Shck5bwQ3N0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "qnA6qzejeRJ0rsZaZ0zOvKAaXyxt5lpscKQNYFZNl4k=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "anAKCL2DN/le2VaP0n2ucYSEH/DaaEH/8Sa4OqTZsRA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "JCZlBJaFm618oWYSnT9Jr1MtwFVw4BZjOzO+5yWgR90=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yxyk4n9762WzcDVGnTn4jCqUnSMIVCrLDIjCX1QVj34=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "fDI6fdKvDJwim5/CQwWZEzcrXE3LHgy7FTtffcC7tXE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Vex+gcz5T+WkzsVZQrkqUR2ryyZbnaOGuWpYvjN0zCw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "8TLEXz+Gbbp6llHpZXVjLsdlYY9f6hrKpHVpyfDe0RY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "7fTyt5BrunypS65TfOzFW2E2qdIuT4SLeDeGlbQoJCs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "8fKGrkqN0/KuSjyXgDBmRauDKrSa//JBKRWHEB9xBf4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "s4codmG7uN4ss6P357jL21lazEe90M9GOK5WrOknSV0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "RkSpua8XF+NUdxVDU90EbLUTTyZFX3tt3atBTroFaRk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "LnTCuCDyAHK5B9KXzjtwGmWB+qergQk2OCjnIx9MI2A=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "cBFh0virAX4pVXf/udIGI2951i0+0aZAdJcBVGtYnT4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "G54X6myQXWZ5fw/G31en3QbdgfXzL9+hFTtJpnWMqDI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "EdsiiuezcsFJFnYIyGjCOhnqMj1BOwTB5EFxN+ERUkg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dVH9MXLtk0WTwGQ3xmrhOqfropMUkDW3o6paNPGl3NU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "sB3HqXKWY3pKbuEH8BTbfNIGfbY+7/ZbOc3XC+JRNNI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "WHyDk62Xhqbo4/iie2aLIM4x2uuAjv6102dJSHI58oM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "pNUFuHpeNRDUZ/NrtII2c6sNc9eGR1lIUlIyXKERA+0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "UPa+pdCqnN0bfAptdzldQOSd01gidrDKy8KhWrpSKAI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "l+7dOAlo+HUffMqFYXL6pgUFeTbwOM9CjKQLxEoLtc4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SRnDXV/rN6C8xwMutv9E1luv3DOUio3VkgPr8Cpm7Ew=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "QcH6gl+gX7xZ7OWhUNQMbndJy0Piz49pDo6RsnLkVSA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "t+uL4DnfsI/Zll/KXWW1cOKX3Hu8WIkm3pt9efCVSAQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "myutHDctku/+Uug/nD8gRbYvmx/IovtoAAC2/fz2oHA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6C+cjD0e0nSCP6cPqQYbNG7SlOd6Mfvi8hyfm7Ng+D8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "zg01JSoOj9oBKT0S1ldJucXzY5AKgreS+h2xJreWTOs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "7qQ80/FjodHl1m1py/Oii0/9C/xWbLdhaRXQ+kkCP10=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "YwWMNH07vL6c5Nhg+MRnVByhzUunu8y0VLM9z/XvR5U=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Dle8bU98+fudAbc14SToZFkwvV3tcYVsjDug0NWljpc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "J+eKL1vPJmlzltvhI6Li5Fz/TJmi3Ng+ehRTcs46API=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "zB3XzfFygLwC3WHkj0up+VbEd25KKoce1vOpG/5bwK4=", + 
"subType": "00" + } + }, + { + "$binary": { + "base64": "vnVnmOnL+z2pqwE+A6cVKS0Iwy4F4/2IiElJca9bUQM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "+lG5r/Fpqry3BtFuvY67+RntmHAMDoLVOSGc6ZoXPb0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "L5MXQertqc6uj7ADe8aWKbd1sYHPCE7P1VYVg9Zc3VI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "imKONuZgopt0bhM3GMX2WVPwQYMTobuUUEdhcLfHs4c=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "eOkU1J1uVbiVFWBerbXsSIVcF2nqiicTkFy4x7kFHB8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "gI0uDhXeoH/UatDQKEf4qo8FHzWZDhb/wuWTqbq/ID4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "cOkd5Aa3btYhtojE/smsF/PJnULqQ4NNqTkU6KXTFmo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "AWNJMs1MTe294oFipp8Y6P0CjpkZ4qCZoClQF3XcHq8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6gJtlzXOFhGYrVbTuRMmvMlDTwXdNtR9aGBlHZPwIMw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "LEmwVGA/xsEG7UrcOoYLFu6KCXgijzFznenknuDacm8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "mIRFPTXRrGaPtp/Ydij2jgkRe4uoUvAKxW2d8b9zYL0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "B+Uv2u48WALOO0L311z+eryjYQzKJVMfdHMZPhOAFmY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "INXXp0wDyVCq+NtfIrrC2ciETmyW/dWB/48/u4yLEZ4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "se7DGo8XrlrQDLEcco1tZrQt9kDe+0RTyl2bw/quG4w=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "vr0m2+Zk9lbN6UgWCyn8xJWJOokU3IDYab5U5q1+CgQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "XI+eJ8Gy2JktG1gICgoj1qpsfy1tKmH0kglWbaQH6DA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "A+UCuNnuAUqnQzspA6TVqUPRmtZmpSex5HFw7THRxs0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xaH2Ehfljd19uo0Fvb3iwkdaiWEVQd2YPoitgEPkhSM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "S/iZBJGcc8+qZxyMtab65MMBoSglybwk3x58Nb86gnY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "w14ZE5qqY5YgkS4Zcs9YNbrQbY1XfGOOHNn9bOYnFVQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "0MhGd/jEF1vjkKGp+ZMn9SjLK54jkp9W4Hg+Sp/oxaI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "92QZ73e/NRTYgCm4aifaKth6aAsKnLLccBc0zx/qUTY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "WOjzemCgFJOiGIp81RSVh/tFlzSTj9eFWcBnsiv2Ycs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "DrsP9CmfKPjw5yLL8bnSeAxfNzAwlb+Z8OqCiKgBY7o=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "lMogqg8veBv6mri3/drMe9afJiKMvevkmGcw9BedfLo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "TxqwNcY8Tg2MPpNdkPBwvfpuTttSYRHU26DGECKYQ9o=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "l0u1b4b4vYACWIwfnB7PZac4oDEgjQZCzHruNPTgAIY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "iVSGQ+cCfhbWIrY/v/WBORK92elu9gfRKyGhr6r/k00=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yK1forG50diEXte8ECzjfpHeYsPyuQ/dgxbxn/nzY5k=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "gIfTLCD3VwnOwkC0zPXWTqaITxX6ZplA69PO2a6zolc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "O/Zxlgh3WqpzJ7+Sd8XWMVID4/GXJUUWaSqfgDUi3b0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ZQ6yv368zwahUqSUYH/StL0Qgz/TwS1CzlMjVDvCciI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "m2rPEYkjwyiKdonMrKlcF7hya4lFOAUwEePJ3SgrNx8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": 
"Mq0yl5iVKlq71bT/dT/fXOWf2n90bTnXFnOdGDN0JOc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6qDGMXipPLC2O6EAAMjO2F9xx4rdqZso4IkPpH2304U=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "jvQHRQQa2RIszE2LX2Hv2LbRhYawJ6qmtRt8HZzFQXg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ovJXQrkZlpeHRciKyE/WWNm5O389gRgzx1W+Dw596X4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "a4kgRNvYctGYqyQv9qScL/WkljTYVylJ9pE9KDULlxU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "qV4Q48vPiCJMTjljotzYKI/zfExWpkKOSHGcAjGyDig=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "jtI7zbBF+QW/aYYTkn90zzyHLXLgmy7l1bzgMb2oqic=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "q0KmJl9txPdn962UNvnfe6UFhdk9YaFZuTm33F+csso=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ULNdEqeZJgtmNOhN/Y9INzsE9AnxWYwOMn+pIbRXIFs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "R4oz9+wkdjpKe5tE1jpG7IURAnfvS5fLP4LrD5cZfTE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "qG5Z7VhwSu/HT/YFTgDzyAAzJKq51xPw2HeEV5btYC4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "OM/1DmIIZ5Qyhtq8TGkHTBEMVKjAnKRZMRXYtTG8ctc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "2R5vZbljLXnDFA99YfGuRB7pAdPJVKsT25zLNMC0fUk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "OMbavF2EmdAz1fHkLV3ctFEUDfriKhoT2gidwHZ9z1o=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "MWT4Zrw3/vVvTYMa1Is5Pjr3wEwnBfnEAPPUAHKQhNU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tBkRPfG9yxfKocQx5pAJX0oEHKPL0Tgtr+0UYe09InE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "lqxpnDR/H0YgH7RcfKoNoaaRhe1SIazIeMbQ1fu9y3Q=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "utT1UdR22PWOTrOkZauztX613lAplV4eh/ejTRb7ZSk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "S+Y2yFyKi/a6FXhih4yGo29X8I8OT6/zwEoX6NMKT4o=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "QSjVppg29x6oS5yBg8OFjrFt0tuTpWCuKxfIy0k8YnE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "y3r6/Xsfvsl3HksXlVYkJgHUqpQGfICxg3x9f8Zw1qM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "BSltHzEwDjFN4du9rDHAPvl22atlcTioEtt+gC5L1tk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "0arGXjSN0006UnXbrWsGqhvBair569DeFDUME3Df3rA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "s/DumaMad08S+PBUUcrS+v42K0z8HgcdiQtrFAEu2Qs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "EzJ8Y8N0OQBTlnvrK82PdevDNZZO4E6CNgYVu8Cj6Ks=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "VA4vr8jBPI5QdiPrULzzZjBMIUbG3V7Slg5zm0bFcKc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "YAOvEB2ZLtq9LQiFViBHWaxxWVVonC2rNYj9tN9s3L0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "hgaHMo9aAGS+nBwvqnTjZO+YkiQPY1c1XcIYeaYKHyI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "YvaoLt3ZpH0atB0tNzwMjpoxRYJXl0DqSjisMJiGVBE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "EMmW6CptFsiLoPOi5/uAJQ2FmeLg6mCpuVLLrRWk7Mc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "1jQsNMarSnarlYmXEuoFokeBMg/090qUD9wqo1Zn8Gs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "hupXNKhRpJxpyDAAP1TgJ5JMZh9lhbMk6s7D7dMS3C8=", + "subType": "00" + } + } + ] + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, 
+ "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 0, + "encryptedDecimalNoPrecision": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDecimalNoPrecision", + "bsonType": "decimal", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encryptedDecimalNoPrecision": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDecimalNoPrecision", + "bsonType": "decimal", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "aggregate": "default", + "pipeline": [ + { + "$match": { + "encryptedDecimalNoPrecision": { + "$gt": { + "$binary": { + "base64": 
"DR1jAAADcGF5bG9hZACxYgAABGcAnWIAAAMwAH0AAAAFZAAgAAAAAJu2KgiI8vM+kz9qD3ZQzFQY5qbgYqCqHG5R4jAlnlwXBXMAIAAAAAAAUXxFXsz764T79sGCdhxvNd5b6E/9p61FonsHyEIhogVsACAAAAAAt19RL3Oo5ni5L8kcvgOJYLgVYyXJExwP8pkuzLG7f/kAAzEAfQAAAAVkACAAAAAAPQPvL0ARjujSv2Rkm8r7spVsgeC1K3FWcskGGZ3OdDIFcwAgAAAAACgNn660GmefR8jLqzgR1u5O+Uocx9GyEHiBqVGko5FZBWwAIAAAAADflr+fsnZngm6KRWYgHa9JzK+bXogWl9evBU9sQUHPHQADMgB9AAAABWQAIAAAAAD2Zi6kcxmaD2mY3VWrP+wYJMPg6cSBIYPapxaFQxYFdQVzACAAAAAAM/cV36BLBY3xFBXsXJY8M9EHHOc/qrmdc2CJmj3M89gFbAAgAAAAAOpydOrKxx6m2gquSDV2Vv3w10GocmNCFeOo/fRhRH9JAAMzAH0AAAAFZAAgAAAAAOaNqI9srQ/mI9gwbk+VkizGBBH/PPWOVusgnfPk3tY1BXMAIAAAAAAc96O/pwKCmHCagT6T/QV/wz4vqO+R22GsZ1dse2Vg6QVsACAAAAAAgzIak+Q3UFLTHXPmJ+MuEklFtR3eLtvM+jdKkmGCV/YAAzQAfQAAAAVkACAAAAAA0XlQgy/Yu97EQOjronl9b3dcR1DFn3deuVhtTLbJZHkFcwAgAAAAACoMnpVl6EFJak8A+t5N4RFnQhkQEBnNAx8wDqmq5U/dBWwAIAAAAACR26FJif673qpwF1J1FEkQGJ1Ywcr/ZW6JQ7meGqzt1QADNQB9AAAABWQAIAAAAAAOtpNexRxfv0yRFvZO9DhlkpU4mDuAb8ykdLnE5Vf1VAVzACAAAAAAeblFKm/30orP16uQpZslvsoS8s0xfNPIBlw3VkHeekYFbAAgAAAAAPEoHj87sYE+nBut52/LPvleWQBzB/uaJFnosxp4NRO2AAM2AH0AAAAFZAAgAAAAAIr8xAFm1zPmrvW4Vy5Ct0W8FxMmyPmFzdWVzesBhAJFBXMAIAAAAABYeeXjJEzTHwxab6pUiCRiZjxgtN59a1y8Szy3hfkg+gVsACAAAAAAJuoY4rF8mbI+nKb+5XbZShJ8191o/e8ZCRHE0O4Ey8MAAzcAfQAAAAVkACAAAAAAl+ibLk0/+EwoqeC8S8cGgAtjtpQWGEZDsybMPnrrkwEFcwAgAAAAAHPPBudWgQ+HUorLDpJMqhS9VBF2VF5aLcxgrM1s+yU7BWwAIAAAAAAcCcBR2Vyv5pAFbaOU97yovuOi1+ATDnLLcAUqHecXcAADOAB9AAAABWQAIAAAAACR9erwLTb+tcWFZgJ2MEfM0PKI9uuwIjDTHADRFgD+SQVzACAAAAAAcOop8TXsGUVQoKhzUllMYWxL93xCOkwtIpV8Q6hiSYYFbAAgAAAAAKXKmh4V8veYwob1H03Q3p3PN8SRAaQwDT34KlNVUjiDAAM5AH0AAAAFZAAgAAAAALv0vCPgh7QpmM8Ug6ad5ioZJCh7pLMdT8FYyQioBQ6KBXMAIAAAAADsCPyIG8t6ApQkRk1fX/sfc1kpuWCWP8gAEpnYoBSHrQVsACAAAAAAJe/r67N6d8uTiogvfoR9rEXbIDjyLb9EVdqkayFFGaYAAzEwAH0AAAAFZAAgAAAAAIW4AxJgYoM0pcNTwk1RSbyjZGIqgKL1hcTJmNrnZmoPBXMAIAAAAAAZpfx3EFO0vY0f1eHnE0PazgqeNDTaj+pPJMUNW8lFrAVsACAAAAAAP+Um2vwW6Bj6vuz9DKz6+6aWkoKoEmFNoiz/xXm7lOsAAzExAH0AAAAFZAAgAAAAAKliO6L9zgeuufjj174hvmQGNRbmYYs9yAirL7OxwEW3BXMAIAAAAAAqU7vs3DWUQ95Eq8OejwWnD0GuXd+ASi/uD6S0l8MM1QVsACAAAAAAb9legYzsfctBPpHyl7YWpPmLr5QiNZFND/50N1vv2MUAAzEyAH0AAAAFZAAgAAAAAOGQcCBkk+j/Kzjt/Cs6g3BZPJG81wIHBS8JewHGpgk+BXMAIAAAAABjrxZXWCkdzrExwCgyHaafuPSQ4V4x2k9kUCAqUaYKDQVsACAAAAAADBU6KefT0v8zSmseaMNmQxKjJar72y7MojLFhkEHqrUAAzEzAH0AAAAFZAAgAAAAAPmCNEt4t97waOSd5hNi2fNCdWEkmcFJ37LI9k4Az4/5BXMAIAAAAABX7DuDPNg+duvELf3NbLWkPMFw2HGLgWGHyVWcPvSNCAVsACAAAAAAS7El1FtZ5STh8Q1FguvieyYX9b2DF1DFVsb9hzxXYRsAAzE0AH0AAAAFZAAgAAAAAD4vtVUYRNB+FD9yoQ2FVJH3nMeJeKbi6eZfth638YqbBXMAIAAAAAANCuUB4OdmuD6LaDK2f3vaqfgYYvg40wDXOBbcFjTqLwVsACAAAAAA9hqC2VoJBjwR7hcQ45xO8ZVojwC83jiRacCaDj6Px2gAAzE1AH0AAAAFZAAgAAAAAJPIRzjmTjbdIvshG6UslbEOd797ZSIdjGAhGWxVQvK1BXMAIAAAAABgmJ0Jh8WLs9IYs/a7DBjDWd8J3thW/AGJK7zDnMeYOAVsACAAAAAAi9zAsyAuou2oiCUHGc6QefLUkACa9IgeBhGu9W/r0X8AAzE2AH0AAAAFZAAgAAAAAABQyKQPoW8wGPIqnsTv69+DzIdRkohRhOhDmyVHkw9WBXMAIAAAAAAqWA2X4tB/h3O1Xlawtz6ndI6WaTwgU1QYflL35opu5gVsACAAAAAAWI/Gj5aZMwDIxztqmVL0g5LBcI8EdKEc2UA28pnekQoAAzE3AH0AAAAFZAAgAAAAACB7NOyGQ1Id3MYnxtBXqyZ5Ul/lHH6p1b10U63DfT6bBXMAIAAAAADpOryIcndxztkHSfLN3Kzq29sD8djS0PspDSqERMqokQVsACAAAAAADatsMW4ezgnyi1PiP7xk+gA4AFIN/fb5uJqfVkjg4UoAAzE4AH0AAAAFZAAgAAAAAKVfXLfs8XA14CRTB56oZwV+bFJN5BHraTXbqEXZDmTkBXMAIAAAAAASRWTsfGOpqdffiOodoqIgBzG/yzFyjR5CfUsIUIWGpgVsACAAAAAAkgCHbCwyX640/0Ni8+MoYxeHUiC+FSU4Mn9jTLYtgZgAAzE5AH0AAAAFZAAgAAAAAH/aZr4EuS0/noQR9rcF8vwoaxnxrwgOsSJ0ys8PkHhGBXMAIAAAAACd7ObGQW7qfddcvyxRTkPuvq/PHu7+6I5dxwS1Lzy5XAVsACAAAAAA3q0eKdV7KeU3pc+CtfypKR7BPxwaf30yu0j9FXeOOboAAzIwAH0AAAAFZAAgAAAAAKvlcpFFNq0oA+urq3w6d80PK1HHHw0H0yVWvU9aHijXBXMAIAAAAADWnAHQ5Fhlcjawki7kWz
dqjM2f6IdGJblojrYElWjsZgVsACAAAAAAO0wvY66l24gx8nRxyVGC0QcTztIi81Kx3ndRhuZr6W4AAzIxAH0AAAAFZAAgAAAAAH/2aMezEOddrq+dNOkDrdqf13h2ttOnexZsJxG1G6PNBXMAIAAAAABNtgnibjC4VKy5poYjvdsBBnVvDTF/4mmEAxsXVgZVKgVsACAAAAAAqvadzJFLqQbs8WxgZ2D2X+XnaPSDMLCVVgWxx5jnLcYAAzIyAH0AAAAFZAAgAAAAAF2wZoDL6/V59QqO8vdRZWDpXpkV4h4KOCSn5e7x7nmzBXMAIAAAAADLZBu7LCYjbThaVUqMK14H/elrVOYIKJQCx4C9Yjw37gVsACAAAAAAEh6Vs81jLU204aGpL90fmYTm5i5R8/RT1uIbg6VU3HwAAzIzAH0AAAAFZAAgAAAAAH27yYaLn9zh2CpvaoomUPercSfJRUmBY6XFqmhcXi9QBXMAIAAAAAAUwumVlIYIs9JhDhSj0R0+59psCMsFk94E62VxkPt42QVsACAAAAAAT5x2hCCd2bpmpnyWaxas8nSxTc8e4C9DfKaqr0ABEysAAzI0AH0AAAAFZAAgAAAAALMg2kNAO4AFFs/mW3In04yFeN4AP6Vo0klyUoT06RquBXMAIAAAAAAgGWJbeIdwlpqXCyVIYSs0dt54Rfc8JF4b8uYc+YUj0AVsACAAAAAAWHeWxIkyvXTOWvfZzqtPXjfGaWWKjGSIQENTU3zBCrsAAzI1AH0AAAAFZAAgAAAAALas/i1T2DFCEmrrLEi7O2ngJZyFHialOoedVXS+OjenBXMAIAAAAAA1kK0QxY4REcGxHeMkgumyF7iwlsRFtw9MlbSSoQY7uAVsACAAAAAAUNlpMJZs1p4HfsD4Q4WZ4TBEi6Oc2fX34rzyynqWCdwAAzI2AH0AAAAFZAAgAAAAAP1TejmWg1CEuNSMt6NUgeQ5lT+oBoeyF7d2l5xQrbXWBXMAIAAAAABPX0kj6obggdJShmqtVfueKHplH4ZrXusiwrRDHMOKeQVsACAAAAAAIYOsNwC3DA7fLcOzqdr0bOFdHCfmK8tLwPoaE9uKOosAAzI3AH0AAAAFZAAgAAAAAMrKn+QPa/NxYezNhlOX9nyEkN1kE/gW7EuZkVqYl0b8BXMAIAAAAABUoZMSPUywRGfX2EEencJEKH5x/P9ySUVrhStAwgR/LgVsACAAAAAAMgZFH6lQIIDrgHnFeslv3ld20ynwQjQJt3cAp4GgrFkAAzI4AH0AAAAFZAAgAAAAAMmD1+a+oVbiUZd1HuZqdgtdVsVKwuWAn3/M1B6QGBM3BXMAIAAAAACLyytOYuZ9WEsIrrtJbXUx4QgipbaAbmlJvSZVkGi0CAVsACAAAAAA4v1lSp5H9BB+HYJ4bH43tC8aeuPZMf78Ng1JOhJh190AAzI5AH0AAAAFZAAgAAAAAOVKV7IuFwmYP1qVv8h0NvJmfPICu8yQhzjG7oJdTLDoBXMAIAAAAABL70XLfQLKRsw1deJ2MUvxSWKxpF/Ez73jqtbLvqbuogVsACAAAAAAvfgzIorXxE91dDt4nQxYfntTsx0M8Gzdsao5naQqcRUAAzMwAH0AAAAFZAAgAAAAAKS/1RSAQma+xV9rz04IcdzmavtrBDjOKPM+Z2NEyYfPBXMAIAAAAAAOJDWGORDgfRv8+w5nunh41wXb2hCA0MRzwnLnQtIqPgVsACAAAAAAf42C1+T7xdHEFF83+c2mF5S8PuuL22ogXXELnRAZ4boAAzMxAH0AAAAFZAAgAAAAAFeq8o82uNY1X8cH6OhdTzHNBUnCChsEDs5tm0kPBz3qBXMAIAAAAABaxMBbsaeEj/EDtr8nZfrhhhirBRPJwVamDo5WwbgvTQVsACAAAAAAMbH453A+BYAaDOTo5kdhV1VdND1avNwvshEG/4MIJjQAAzMyAH0AAAAFZAAgAAAAAI8IKIfDrohHh2cjspJHCovqroSr5N3QyVtNzFvT5+FzBXMAIAAAAABXHXteKG0DoOMmECKp6ro1MZNQvXGzqTDdZ0DUc8QfFAVsACAAAAAA/w5s++XYmO+9TWTbtGc3n3ndV4T9JUribIbF4jmDLSMAAzMzAH0AAAAFZAAgAAAAAJkHvm15kIu1OtAiaByj5ieWqzxiu/epK6c/9+KYIrB0BXMAIAAAAACzg5TcyANk0nes/wCJudd1BwlkWWF6zw3nGclq5v3SJQVsACAAAAAAvruXHTT3irPJLyWpI1j/Xwf2FeIE/IV+6Z49pqRzISoAAzM0AH0AAAAFZAAgAAAAAAYSOvEWWuSg1Aym7EssNLR+xsY7e9BcwsX4JKlnSHJcBXMAIAAAAABT48eY3PXVDOjw7JpNjOe1j2JyI3LjDnQoqZ8Je5B2KgVsACAAAAAAU2815RR57TQ9uDg0XjWjBkAKvf8yssxDMzrM4+FqP6AAAzM1AH0AAAAFZAAgAAAAAGQxC9L1e9DfO5XZvX1yvc3hTLtQEdKO9FPMkyg0Y9ZABXMAIAAAAADtmcMNJwdWLxQEArMGZQyzpnu+Z5yMmPAkvgq4eAKwNQVsACAAAAAAJ88zt4Y/Hoqh+zrf6KCOiUwHbOzCxSfp6k/qsZaYGEgAAzM2AH0AAAAFZAAgAAAAADLHK2LNCNRO0pv8n4fAsxwtUqCNnVK8rRgNiQfXpHSdBXMAIAAAAACf16EBIHRKD3SzjRW+LMOl+47QXA3CJhMzlcqyFRW22AVsACAAAAAAMGz4fAOa0EoVv90fUffwLjBrQhHATf+NdlgCR65vujAAAzM3AH0AAAAFZAAgAAAAAHiZJiXKNF8bbukQGsdYkEi95I+FSBHy1I5/hK2uEZruBXMAIAAAAADE+lZBa8HDUJPN+bF6xI9x4N7GF9pj3vBR7y0BcfFhBAVsACAAAAAAGIEN6sfqq30nyxW4dxDgXr/jz5HmvA9T1jx/pKCn4zgAAzM4AH0AAAAFZAAgAAAAAI1oa2OIw5TvhT14tYCGmhanUoYcCZtNbrVbeoMldHNZBXMAIAAAAAAx2nS0Ipblf2XOgBiUOuJFBupBhe7nb6QPLZlA4aMPCgVsACAAAAAA9xu828hugIgo0E3de9dZD+gTpVUGlwtDba+tw/WcbUoAAzM5AH0AAAAFZAAgAAAAABgTWS3Yap7Q59hii/uPPimHWXsr+DUmsqfwt/X73qsOBXMAIAAAAACKK05liW5KrmEAvtpCB1WUltruzUylDDpjea//UlWoOAVsACAAAAAAcgN4P/wakJ5aJK5c1bvJBqpVGND221dli2YicPFfuAYAAzQwAH0AAAAFZAAgAAAAABOAnBPXDp6i9TISQXvcNKwGDLepZTu3cKrB4vKnSCjBBXMAIAAAAADjjzZO7UowAAvpwyG8BNOVqLCccMFk3aDK4unUeft5ywVsACAAAAAA4zkCd4k9gvfXoD1C7vwTjNcdVJwEARh8h/cxZ4PNMfgAAzQxAH0AAAAFZAAgAAAAAHN8hyvT1lYrAsdiV
5GBdd5jhtrAYE/KnSjw2Ka9hjz9BXMAIAAAAAD794JK7EeXBs+D7yOVK7nWF8SbZ/7U8gZ7nnT9JFNwTAVsACAAAAAAg8Wt1HO3NhByq2ggux2a4Lo6Gryr24rEFIqh2acrwWMAAzQyAH0AAAAFZAAgAAAAAO93bPrq8bsnp1AtNd9ETnXIz0lH/2HYN/vuw9wA3fyFBXMAIAAAAABHlls5fbaF2oAGqptC481XQ4eYxInTC29aElfmVZgDUgVsACAAAAAANoQXEWpXJpgrSNK/cKi/m7oYhuSRlp1IZBF0bqTEATcAAzQzAH0AAAAFZAAgAAAAAL1YsAZm1SA0ztU6ySIrQgCCA74V6rr0/4iIygCcaJL6BXMAIAAAAADTXWTHWovGmUR1Zg9l/Aqq9H5mOCJQQrb/Dfae7e3wKAVsACAAAAAA5dunyJK6/SVfDD0t9QlNBcFqoZnf9legRjHaLSKAoQMAAzQ0AH0AAAAFZAAgAAAAAEoFAeHk0RZ9kD+cJRD3j7PcE5gzWKnyBrF1I/MDNp5mBXMAIAAAAACgHtc2hMBRSZjKw8RAdDHK+Pi1HeyjiBuAslGVNcW5tAVsACAAAAAAXzBLfq+GxRtX4Wa9fazA49DBLG6AjZm2XODStJKH8D0AAzQ1AH0AAAAFZAAgAAAAAAW+7DmSN/LX+/0uBVJDHIc2dhxAGz4+ehyyz8fAnNGoBXMAIAAAAAA6Ilw42EvvfLJ3Eq8Afd+FjPoPcQutZO6ltmCLEr8kxQVsACAAAAAAbbZalyo07BbFjPFlYmbmv0z023eT9eLkHqeVUnfUAUAAAzQ2AH0AAAAFZAAgAAAAANBdV7M7kuYO3EMoQItAbXv4t2cIhfaT9V6+s4cg9djlBXMAIAAAAABvz4MIvZWxxrcJCL5qxLfFhXiUYB1OLHdKEjco94SgDgVsACAAAAAAK2GVGvyPIKolF/ECcmfmkVcf1/IZNcaTv96N92yGrkEAAzQ3AH0AAAAFZAAgAAAAAMoAoiAn1kc79j5oPZtlMWHMhhgwNhLUnvqkqIFvcH1NBXMAIAAAAADcJTW7WiCyW0Z9YDUYwppXhLj4Ac1povpJvcAq+i48MQVsACAAAAAAIGxGDzoeB3PTmudl4+j6piQB++e33EEzuzAiXcqGxvUAAzQ4AH0AAAAFZAAgAAAAACI3j5QP7dWHpcT6WO/OhsWwRJNASBYqIBDNzW8IorEyBXMAIAAAAABxUpBSjXwCKDdGP9hYU+RvyR+96kChfvyyRC4jZmztqAVsACAAAAAAvBCHguWswb4X0xdcAryCvZgQuthXzt7597bJ5VxAMdgAAzQ5AH0AAAAFZAAgAAAAAKsbycEuQSeNrF8Qnxqw3x3og8JmQabwGqnDbqzFRVrrBXMAIAAAAACno/3ef2JZJS93SVVzmOZSN+jjJHT8s0XYq2M46d2sLAVsACAAAAAAAt5zLJG+/j4K8rnkFtAn8IvdUVNefe6utJ3rdzgwudIAAzUwAH0AAAAFZAAgAAAAAPXIcoO8TiULqlxzb74NFg+I8kWX5uXIDUPnh2DobIoMBXMAIAAAAADR6/drkdTpnr9g1XNvKDwtBRBdKn7c2c4ZNUVK5CThdQVsACAAAAAAJqOA1c6KVog3F4Hb/GfDb3jCxXDRTqpXWSbMH4ePIJsAAzUxAH0AAAAFZAAgAAAAAEa03ZOJmfHT6/nVadvIw71jVxEuIloyvxXraYEW7u7pBXMAIAAAAADzRlBJK75FLiKjz3djqcgjCLo/e3yntI3MnPS48OORhgVsACAAAAAAnQhx4Rnyj081XrLRLD5NLpWmRWCsd0M9Hl7Jl19R0h8AAzUyAH0AAAAFZAAgAAAAAKx8NLSZUU04pSSGmHa5fh2oLHsEN5mmNMNHL95/tuC9BXMAIAAAAAA59hcXVaN3MNdHoo11OcH1aPRzHCwpVjO9mGfMz4xh3QVsACAAAAAAYIPdjV2XbPj7dBeHPwnwhVU7zMuJ+xtMUW5mIOYtmdAAAzUzAH0AAAAFZAAgAAAAAHNKAUxUqBFNS9Ea9NgCZoXMWgwhP4x0/OvoaPRWMquXBXMAIAAAAABUZ551mnP4ZjX+PXU9ttomzuOpo427MVynpkyq+nsYCQVsACAAAAAALnVK5p2tTTeZEh1zYt4iqKIQT9Z0si//Hy1L85oF+5IAAzU0AH0AAAAFZAAgAAAAALfGXDlyDVcGaqtyHkLT0qpuRhJQLgCxtznazhFtuyn/BXMAIAAAAABipxlXDq14C62pXhwAeen5+syA+/C6bN4rtZYcO4zKwAVsACAAAAAAXUf0pzUq0NhLYagWDap4uEiwq5rLpcx29rWbt1NYMsMAAzU1AH0AAAAFZAAgAAAAANoEr8sheJjg4UCfBkuUzarU9NFoy1xwbXjs5ifVDeA9BXMAIAAAAABPoyTf6M+xeZVGES4aNzVlq7LgjqZXJ/QunjYVusGUEAVsACAAAAAA1hA2gMeZZPUNytk9K+lB1RCqWRudRr7GtadJlExJf8oAAzU2AH0AAAAFZAAgAAAAAKvDiK+xjlBe1uQ3SZTNQl2lClIIvpP/5CHwY6Kb3WlgBXMAIAAAAAANnxImq5MFbWaRBHdJp+yD09bVlcFtiFDYsy1eDZj+iQVsACAAAAAAWtsyO+FxMPSIezwsV1TJD8ZrXAdRnQM6DJ+f+1V3qEkAAzU3AH0AAAAFZAAgAAAAAF49IlFH9RmSUSvUQpEPUedEksrQUcjsOv44nMkwXhjzBXMAIAAAAADJtWGbk0bZzmk20obz+mNsp86UCu/nLLlbg7ppxYn7PgVsACAAAAAA3k0Tj/XgPQtcYijH8cIlQoe/VXf15q1nrZNmg7yWYEgAAzU4AH0AAAAFZAAgAAAAAOuSJyuvz50lp3BzXlFKnq62QkN2quNU1Gq1IDsnFoJCBXMAIAAAAAAqavH1d93XV3IzshWlMnzznucadBF0ND092/2ApI1AcAVsACAAAAAAzUrK4kpoKCmcpdZlZNI13fddjdoAseVe67jaX1LobIIAAzU5AH0AAAAFZAAgAAAAALtgC4Whb4ZdkCiI30zY6fwlsxSa7lEaOAU3SfUXr02XBXMAIAAAAACgdZ6U1ZVgUaZZwbIaCdlANpCw6TZV0bwg3DS1NC/mnAVsACAAAAAAzI49hdpp0PbO7S2KexISxC16sE73EUAEyuqUFAC/J48AAzYwAH0AAAAFZAAgAAAAAF6PfplcGp6vek1ThwenMHVkbZgrc/dHgdsgx1VdPqZ5BXMAIAAAAACha3qhWkqmuwJSEXPozDO8y1ZdRLyzt9Crt2vjGnT7AAVsACAAAAAA7nvcU59+LwxGupSF21jAeAE0x7JE94tjRkJfgM1yKU8AAzYxAH0AAAAFZAAgAAAAAKoLEhLvLjKc7lhOJfx+VrGJCx9tXlOSa9bxQzGR6rfbBXMAIAAAAAAIDK5wNnjRMBzET7x/KAMExL/zi1IumJM92XTgXfoPoAVsACAAAAAAFkUYWFwNr815dEdFqp+TiIoz
Dcq5IBNVkyMoDjharDQAAzYyAH0AAAAFZAAgAAAAADoQv6lutRmh5scQFvIW6K5JBquLxszuygM1tzBiGknIBXMAIAAAAADAD+JjW7FoBQ76/rsECmmcL76bmyfXpUU/awqIsZdO+wVsACAAAAAAPFHdLw3jssmEXsgtvl/RBNaUCRA1kgSwsofG364VOvQAAzYzAH0AAAAFZAAgAAAAAJNHUGAgn56KekghO19d11nai3lAh0JAlWfeP+6w4lJBBXMAIAAAAAD9XGJlvz59msJvA6St9fKW9CG4JoHV61rlWWnkdBRLzwVsACAAAAAAxwP/X/InJJHmrjznvahIMgj6pQR30B62UtHCthSjrP0AAzY0AH0AAAAFZAAgAAAAAHgYoMGjEE6fAlAhICv0+doHcVX8CmMVxyq7+jlyGrvmBXMAIAAAAAC/5MQZgTHuIr/O5Z3mXPvqrom5JTQ8IeSpQGhO9sB+8gVsACAAAAAAuPSXVmJUAUpTQg/A9Bu1hYczZF58KEhVofakygbsvJQAAzY1AH0AAAAFZAAgAAAAANpIljbxHOM7pydY877gpRQvYY2TGK7igqgGsavqGPBABXMAIAAAAAAqHyEu9gpurPOulApPnr0x9wrygY/7mXe9rAC+tPK80wVsACAAAAAA7gkPzNsS3gCxdFBWbSW9tkBjoR5ib+saDvpGSB3A3ogAAzY2AH0AAAAFZAAgAAAAAGR+gEaZTeGNgG9BuM1bX2R9ed4FCxBA9F9QvdQDAjZwBXMAIAAAAABSkrYFQ6pf8MZ1flgmeIRkxaSh/Eep4Btdx4QYnGGnwAVsACAAAAAApRovMiV00hm/pEcT4XBsyPNw0eo8RLAX/fuabjdU+uwAAzY3AH0AAAAFZAAgAAAAAFNprhQ3ZwIcYbuzLolAT5n/vc14P9kUUQComDu6eFyKBXMAIAAAAAAcx9z9pk32YbPV/sfPZl9ALIEVsqoLXgqWLVK/tP+heAVsACAAAAAA/qxvuvJbAHwwhfrPVpmCFzNvg2cU/NXaWgqgYUZpgXwAAzY4AH0AAAAFZAAgAAAAADgyPqQdqQrgfmJjRFAILTHzXbdw5kpKyfeoEcy6YYG/BXMAIAAAAAAE+3XsBQ8VAxAkN81au+f3FDeCD/s7KoZD+fnM1MJSSAVsACAAAAAAhRnjrXecwV0yeCWKJ5J/x12Xx4qVJahsCEVHB/1U2rcAAzY5AH0AAAAFZAAgAAAAAI0CT7JNngTCTUSei1Arw7eHWCD0jumv2rb7imjWIlWABXMAIAAAAABSP8t6ya0SyCphXMwnru6ZUDXWElN0NfBvEOhDvW9bJQVsACAAAAAAGWeGmBNDRaMtvm7Rv+8TJ2sJ4WNXKcp3tqpv5Se9Ut4AAzcwAH0AAAAFZAAgAAAAAD/FIrGYFDjyYmVb7oTMVwweWP7A6F9LnyIuNO4MjBnXBXMAIAAAAACIZgJCQRZu7NhuNMyOqCn1tf+DfU1qm10TPCfj5JYV3wVsACAAAAAA5hmY4ptuNxULGf87SUFXQWGAONsL9U29duh8xqsHtxoAAzcxAH0AAAAFZAAgAAAAAHIkVuNDkSS1cHIThKc/O0r2/ubaABTOi8Q1r/dvBAsEBXMAIAAAAADdHYqchEiJLM340c3Q4vJABmmth3+MKzwLYlsG6GS7sQVsACAAAAAADa+KP/pdTiG22l+ZWd30P1iHjnBF4zSNRdFm0oEK82kAAzcyAH0AAAAFZAAgAAAAAJmoDILNhC6kn3masElfnjIjP1VjsjRavGk1gSUIjh1NBXMAIAAAAAD97Ilvp3XF8T6MmVVcxMPcdL80RgQ09UoC6PnoOvZ1IQVsACAAAAAA2RK3Xng6v8kpvfVW9tkVXjpE+BSnx9/+Fw85Evs+kUEAAzczAH0AAAAFZAAgAAAAAI5bm3YO0Xgf0VT+qjVTTfvckecM3Cwqj7DTKZXf8/NXBXMAIAAAAAD/m+h8fBhWaHm6Ykuz0WX1xL4Eme3ErLObyEVJf8NCywVsACAAAAAAfb1VZZCqs2ivYbRzX4p5CtaCkKW+g20Pr57FWXzEZi8AAzc0AH0AAAAFZAAgAAAAANqo4+p6qdtCzcB4BX1wQ6llU7eFBnuu4MtZwp4B6mDlBXMAIAAAAAAGiz+VaukMZ+6IH4jtn4KWWdKK4/W+O+gRioQDrfzpMgVsACAAAAAAG4YYkTp80EKo59mlHExDodRQFR7njhR5dmISwUJ6ukAAAzc1AH0AAAAFZAAgAAAAAPrFXmHP2Y4YAm7b/aqsdn/DPoDkv7B8egWkfe23XsM1BXMAIAAAAAAGhwpKAr7skeqHm3oseSbO7qKNhmYsuUrECBxJ5k+D2AVsACAAAAAAAqPQi9luYAu3GrFCEsVjd9z2zIDcp6SPTR2w6KQEr+IAAzc2AH0AAAAFZAAgAAAAABzjYxwAjXxXc0Uxv18rH8I3my0Aguow0kTwKyxbrm+cBXMAIAAAAADVbqJVr6IdokuhXkEtXF0C2gINLiAjMVN20lE20Vmp2QVsACAAAAAAD7K1Fx4gFaaizkIUrf+EGXQeG7QX1jadhGc6Ji471H8AAzc3AH0AAAAFZAAgAAAAAFMm2feF2fFCm/UC6AfIyepX/xJDSmnnolQIBnHcPmb5BXMAIAAAAABLI11kFrQoaNVZFmq/38aRNImPOjdJh0Lo6irI8M/AaAVsACAAAAAAOWul0oVqJ9CejD2RqphhTC98DJeRQy5EwbNerU2+4l8AAzc4AH0AAAAFZAAgAAAAAJvXB3KyNiNtQko4SSzo/9b2qmM2zU9CQTTDfLSBWMgRBXMAIAAAAAAvjuVP7KsLRDeqVqRziTKpBrjVyqKiIbO9Gw8Wl2wFTAVsACAAAAAADlE+oc1ins+paNcaOZJhBlKlObDJ4VQORWjFYocM4LgAAzc5AH0AAAAFZAAgAAAAAPGdcxDiid8z8XYnfdDivNMYVPgBKdGOUw6UStU+48CdBXMAIAAAAAARj6g1Ap0eEfuCZ4X2TsEw+Djrhto3fA5nLwPaY0vCTgVsACAAAAAAoHqiwGOUkBu8SX5U1yHho+UIFdSN2MdQN5s6bQ0EsJYAAzgwAH0AAAAFZAAgAAAAAP5rGPrYGt3aKob5f/ldP0qrW7bmWvqnKY4QwdDWz400BXMAIAAAAADTQkW2ymaaf/bhteOOGmSrIR97bAnJx+yN3yMj1bTeewVsACAAAAAADyQnHGH2gF4w4L8axUsSTf6Ubk7L5/eoFOJk12MtZAoAAzgxAH0AAAAFZAAgAAAAAAlz6wJze5UkIxKpJOZFGCOf3v2KByWyI6NB6JM9wNcBBXMAIAAAAABUC7P/neUIHHoZtq0jFVBHY75tSFYr1Y5S16YN5XxC1QVsACAAAAAAgvxRbXDisNnLY3pfsjDdnFLtkvYUC4lhA68eBXc7KAwAAzgyAH0AAAAFZAAgAAAAAFJ8AtHcjia/9Y5pLEc3qVgH5xKiXw12G9Kn2A1EY8McBXMAIAAAAAAxe7Bdw7eUSBk/oAawa7u
icTEDgXLymRNhBy1LAxhDvwVsACAAAAAAxKPaIBKVx3jTA+R/el7P7AZ7efrmTGjJs3Hj/YdMddwAAzgzAH0AAAAFZAAgAAAAAO8uwQUaKFb6vqR3Sv3Wn4QAonC2exOC9lGG1juqP5DtBXMAIAAAAABZf1KyJgQg8/Rf5c02DgDK2aQu0rNCOvaL60ohDHyY+gVsACAAAAAAqyEjfKC8lYoIfoXYHUqHZPoaA6EK5BAZy5dxXZmay4kAAzg0AH0AAAAFZAAgAAAAAE8YtqyRsGCeiR6hhiyisR/hccmK4nZqIMzO4lUBmEFzBXMAIAAAAAC1UYOSKqAeG1UJiKjWFVskRhuFKpj9Ezy+lICZvFlN5AVsACAAAAAA6Ct9nNMKyRazn1OKnRKagm746CGu+jyhbL1qJnZxGi0AAzg1AH0AAAAFZAAgAAAAAPhCrMausDx1QUIEqp9rUdRKyM6a9AAx7jQ3ILIu8wNIBXMAIAAAAACmH8lotGCiF2q9VQxhsS+7LAZv79VUAsOUALaGxE/EpAVsACAAAAAAnc1xCKfdvbUEc8F7XZqlNn1C+hZTtC0I9I3LL06iaNkAAzg2AH0AAAAFZAAgAAAAAOBi/GAYFcstMSJPgp3VkMiuuUUCrZytvqYaU8dwm8v2BXMAIAAAAACEZSZVyD3pKzGlbdwlYmWQhHHTV5SnNLknl2Gw8IaUTQVsACAAAAAAfsLZsEDcWSuNsIo/TD1ReyQW75HPMgmuKZuWFOLKRLoAAzg3AH0AAAAFZAAgAAAAAIQuup+YGfH3mflzWopN8J1X8o8a0d9CSGIvrA5HOzraBXMAIAAAAADYvNLURXsC2ITMqK14LABQBI+hZZ5wNf24JMcKLW+84AVsACAAAAAACzfjbTBH7IwDU91OqLAz94RFkoqBOkzKAqQb55gT4/MAAzg4AH0AAAAFZAAgAAAAAKsh0ADyOnVocFrOrf6MpTrNvAj8iaiE923DPryu124gBXMAIAAAAADg24a8NVE1GyScc6tmnTbmu5ulzO+896fE92lN08MeswVsACAAAAAAaPxcOIxnU7But88/yadOuDJDMcCywwrRitaxMODT4msAAzg5AH0AAAAFZAAgAAAAAKkVC2Y6HtRmv72tDnPUSjJBvse7SxLqnr09/Uuj9sVVBXMAIAAAAABYNFUkH7ylPMN+Bc3HWX1e0flGYNbtJNCY9SltJCW/UAVsACAAAAAAZYK/f9H4OeihmpiFMH7Wm7uLvs2s92zNA8wyrNZTsuMAAzkwAH0AAAAFZAAgAAAAADDggcwcb/Yn1Kk39sOHsv7BO/MfP3m/AJzjGH506Wf9BXMAIAAAAAAYZIsdjICS0+BDyRUPnrSAZfPrwtuMaEDEn0/ijLNQmAVsACAAAAAAGPnYVvo2ulO9z4LGd/69NAklfIcZqZvFX2KK0s+FcTUAAzkxAH0AAAAFZAAgAAAAAEWY7dEUOJBgjOoWVht1wLehsWAzB3rSOBtLgTuM2HC8BXMAIAAAAAAAoswiHRROurjwUW8u8D5EUT+67yvrgpB/j6PzBDAfVwVsACAAAAAA6NhRTYFL/Sz4tao7vpPjLNgAJ0FX6P/IyMW65qT6YsMAAzkyAH0AAAAFZAAgAAAAAPZaapeAUUFPA7JTCMOWHJa9lnPFh0/gXfAPjA1ezm4ZBXMAIAAAAACmJvLY2nivw7/b3DOKH/X7bBXjJwoowqb1GtEFO3OYgAVsACAAAAAA/JcUoyKacCB1NfmH8vYqC1f7rd13KShrQqV2r9QBP44AAzkzAH0AAAAFZAAgAAAAAK00u6jadxCZAiA+fTsPVDsnW5p5LCr4+kZZZOTDuZlfBXMAIAAAAAAote4zTEYMDgaaQbAdN8Dzv93ljPLdGjJzvnRn3KXgtQVsACAAAAAAxXd9Mh6R3mnJy8m7UfqMKi6oD5DlZpkaOz6bEjMOdiwAAzk0AH0AAAAFZAAgAAAAAFbgabdyymiEVYYwtJSWa7lfl/oYuj/SukzJeDOR6wPVBXMAIAAAAADAFGFjS1vPbN6mQEhkDYTD6V2V23Ys9gUEUMGNvMPkaAVsACAAAAAAL/D5Sze/ZoEanZLK0IeEkhgVkxEjMWVCfmJaD3a8uNIAAzk1AH0AAAAFZAAgAAAAABNMR6UBv2E627CqLtQ/eDYx7OEwQ7JrR4mSHFa1N8tLBXMAIAAAAAAxH4gucI4UmNVB7625C6hFSVCuIpJO3lusJlPuL8H5EQVsACAAAAAAVLHNg0OUVqZ7WGOP53BkTap9FOw9dr1P4J8HxqFqU04AAzk2AH0AAAAFZAAgAAAAAG8cd6WBneNunlqrQ2EmNf35W7OGObGq9WL4ePX+LUDmBXMAIAAAAAAjJ2+sX87NSis9hBsgb1QprVRnO7Bf+GObCGoUqyPE4wVsACAAAAAAs9c9SM49/pWmyUQKslpt3RTMBNSRppfNO0JBvUqHPg0AAzk3AH0AAAAFZAAgAAAAAFWOUGkUpy8yf6gB3dio/aOfRKh7XuhvsUj48iESFJrGBXMAIAAAAAAY7sCDMcrUXvNuL6dO0m11WyijzXZvPIcOKob6IpC4PQVsACAAAAAAJOP+EHz6awDb1qK2bZQ3kTV7wsj5Daj/IGAWh4g7omAAAzk4AH0AAAAFZAAgAAAAAGUrIdKxOihwNmo6B+aG+Ag1qa0+iqdksHOjQj+Oy9bZBXMAIAAAAABwa5dbI2KmzBDNBTQBEkjZv4sPaeRkRNejcjdVymRFKQVsACAAAAAA4ml/nm0gJNTcJ4vuD+T2Qfq2fQZlibJp/j6MOGDrbHMAAzk5AH0AAAAFZAAgAAAAAOx89xV/hRk64/CkM9N2EMK6aldII0c8smdcsZ46NbP8BXMAIAAAAADBF6tfQ+7q9kTuLyuyrSnDgmrdmrXkdhl980i1KHuGHgVsACAAAAAACUqiFqHZdGbwAA+hN0YUE5zFg+H+dabIB4dj5/75W/YAAzEwMAB9AAAABWQAIAAAAADJDdC9aEFl4Y8J/awHbnXGHjfP+VXQilPHJg7ewaJI7AVzACAAAAAAE+tqRl6EcBMXvbr4GDiNIYObTsYpa1n6BJk9EjIJVicFbAAgAAAAAJVc+HYYqa0m1Hq6OiRX8c0iRnJYOt6AJAJoG0sG3GMSAAMxMDEAfQAAAAVkACAAAAAA3F9rjEKhpoHuTULVGgfUsGGwJs3bISrXkFP1v6KoQLgFcwAgAAAAAIBf0tXw96Z/Ds0XSIHX/zk3MzUR/7WZR/J6FpxRWChtBWwAIAAAAABWrjGlvKYuTS2s8L9rYy8Hf0juFGJfwQmxVIjkTmFIGQADMTAyAH0AAAAFZAAgAAAAAOYIYoWkX7dGuyKfi3XssUlc7u/gWzqrR9KMkikKVdmSBXMAIAAAAABVF2OYjRTGi9Tw8XCAwZWLpX35Yl271TlNWp6N/nROhAVsACAAAAAA0nWwYzXQ1+EkDvnGq+SMlq20z+j32Su+i/A95SggPb4AAzEwMwB9AAAABWQAIAAAAACMtPm12YtdEA
vqu6Eji1yuRXnu1RJP6h0l7pH3lSH4MwVzACAAAAAAENyCFfyUAh1veQBGx+cxiB7Sasrj41jzCGflZkB5cRMFbAAgAAAAAKdI2LMqISr/T5vuJPg6ZRBm5fVi2aQCc4ra3A4+AjbDAAMxMDQAfQAAAAVkACAAAAAAvlI4lDcs6GB1cnm/Tzo014CXWqidCdyE5t2lknWQd4QFcwAgAAAAAD60SpNc4O2KT7J0llKdSpcX1/Xxs97N715a1HsTFkmBBWwAIAAAAABuuRkJWAH1CynggBt1/5sPh9PoGiqTlS24D/OE2uHXLQADMTA1AH0AAAAFZAAgAAAAAKl8zcHJRDjSjJeV/WvMxulW1zrTFtaeBy/aKKhadc6UBXMAIAAAAADBdWQl5SBIvtZZLIHszePwkO14W1mQ0izUk2Ov21cPNAVsACAAAAAAHErCYycpqiIcCZHdmPL1hi+ovLQk4TAvENpfLdTRamQAAzEwNgB9AAAABWQAIAAAAABb6LXDWqCp1beQgQjj8I3sRTtFhlrmiBi+h/+ikmrvugVzACAAAAAA9stpgTecT7uTyaGNs3K9Bp0A7R0QaIAOfscyMXHBPX8FbAAgAAAAAHUt+McyXrJ1H8SwnHNVO181Ki8vDAM1f7XI26mg95ZDAAMxMDcAfQAAAAVkACAAAAAA97NTT+81PhDhgptNtp4epzA0tP4iNb9j1AWkiiiKGM8FcwAgAAAAAKPbHg7ise16vxmdPCzksA/2Mn/qST0L9Xe8vnQugVkcBWwAIAAAAABB0EMXfvju4JU/mUH/OvxWbPEl9NJkcEp4iCbkXI41fAADMTA4AH0AAAAFZAAgAAAAAMqpayM2XotEFmm0gwQd9rIzApy0X+7HfOhNk6VU7F5lBXMAIAAAAACJR9+q5T9qFHXFNgGbZnPubG8rkO6cwWhzITQTmd6VgwVsACAAAAAAOZLQ6o7e4mVfDzbpQioa4d3RoTvqwgnbmc5Qh2wsZuoAAzEwOQB9AAAABWQAIAAAAADQnslvt6Hm2kJPmqsTVYQHE/wWeZ4bE1XSkt7TKy0r1gVzACAAAAAA8URTA4ZMrhHPvlp53TH6FDCzS+0+61qHm5XK6UiOrKEFbAAgAAAAAHQbgTCdZcbdA0avaTmZXUKnIS7Nwf1tNrcXDCw+PdBRAAMxMTAAfQAAAAVkACAAAAAAhujlgFPFczsdCGXtQ/002Ck8YWQHHzvWvUHrkbjv4rwFcwAgAAAAALbV0lLGcSGfE7mDM3n/fgEvi+ifjl7WZ5b3aqjDNvx9BWwAIAAAAACbceTZy8E3QA1pHmPN5kTlOx3EO8kJM5PUjTVftw1VpgADMTExAH0AAAAFZAAgAAAAABm/6pF96j26Jm7z5KkY1y33zcAEXLx2n0DwC03bs/ixBXMAIAAAAAD01OMvTZI/mqMgxIhA5nLs068mW+GKl3OW3ilf2D8+LgVsACAAAAAAaLvJDrqBESTNZSdcXsd+8GXPl8ZkUsGpeYuyYVv/kygAAzExMgB9AAAABWQAIAAAAACfw9/te4GkHZAapC9sDMHHHZgmlTrccyJDPFciOMSOcwVzACAAAAAAIIC1ZpHObvmMwUfqDRPl4C1aeuHwujM1G/yJbvybMNAFbAAgAAAAAAs9x1SnVpMfNv5Bm1aXGwHmbbI9keWa9HRD35XuCBK5AAMxMTMAfQAAAAVkACAAAAAAkxHJRbnShpPOylLoDdNShfILeA1hChKFQY9qQyZ5VmsFcwAgAAAAAKidrY+rC3hTY+YWu2a7fuMH2RD/XaiTIBW1hrxNCQOJBWwAIAAAAACW0kkqMIzIFMn7g+R0MI8l15fr3k/w/mHtY5n6SYTEwAADMTE0AH0AAAAFZAAgAAAAAByuYl8dBvfaZ0LO/81JW4hYypeNmvLMaxsIdvqMPrWoBXMAIAAAAABNddwobOUJzm9HOUD8BMZJqkNCUCqstHZkC76FIdNg9AVsACAAAAAAQQOkIQtkyNavqCnhQbNg3HfqrJdsAGaoxSJePJl1qXsAAzExNQB9AAAABWQAIAAAAABxMy7X5hf7AXGDz3Y/POu1ZpkMlNcSvSP92NOO/Gs7wAVzACAAAAAAHJshWo2T5wU2zvqCyJzcJQKQaHFHpCpMc9oWBXkpUPoFbAAgAAAAAGeiJKzlUXAvL0gOlW+Hz1mSa2HsV4RGmyLmCHlzbAkoAAMxMTYAfQAAAAVkACAAAAAAlqbslixl7Zw3bRlibZbe/WmKw23k8uKeIzPKYEtbIy0FcwAgAAAAAHEKwpUxkxOfef5HYvulXPmdbzTivwdwrSYIHDeNRcpcBWwAIAAAAADuPckac21Hrg/h0kt5ShJwVEZ9rx6SOHd2+HDjqxEWTQADMTE3AH0AAAAFZAAgAAAAAMXrXx0saZ+5gORmwM2FLuZG6iuO2YS+1IGPoAtDKoKBBXMAIAAAAADIQsxCr8CfFKaBcx8kIeSywnGh7JHjKRJ9vJd9x79y7wVsACAAAAAAcvBV+SykDYhmRFyVYwFYB9oBKBSHr55Jdz2cXeowsUQAAzExOAB9AAAABWQAIAAAAAAm83FA9yDUpwkbKTihe7m53u+DivS9BU2b4vQMtCVQ2AVzACAAAAAAz3m1UB/AbZPa4QSKFDnUgHaT78+6iGOFAtouiBorEgEFbAAgAAAAAIgbpyYtJj5513Z5XYqviH/HXG/5+mqR52iBbfqMmDtZAAMxMTkAfQAAAAVkACAAAAAAJRzYK0PUwr9RPG2/7yID0WgcTJPB2Xjccp5LAPDYunkFcwAgAAAAAIIh24h3DrltAzNFhF+MEmPrZtzr1PhCofhChZqfCW+jBWwAIAAAAAAzRNXtL5o9VXMk5D5ylI0odPDJDSZZry1wfN+TedH70gADMTIwAH0AAAAFZAAgAAAAAHSaHWs/dnmI9sc7nB50VB2Bzs0kHapMHCQdyVEYY30TBXMAIAAAAACkV22lhEjWv/9/DubfHBAcwJggKI5mIbSK5L2nyqloqQVsACAAAAAAS19m7DccQxgryOsBJ3GsCs37yfQqNi1G+S6fCXpEhn4AAzEyMQB9AAAABWQAIAAAAAAC/I4TQRtCl12YZmdGz17X4GqSQgfwCPgRBwdHmdwu+QVzACAAAAAAx8f3z2ut/RAZhleari4vCEE+tNIn4ikjoUwzitfQ588FbAAgAAAAAJci0w1ZB8W2spJQ+kMpod6HSCtSR2jrabOH+B0fj3A4AAMxMjIAfQAAAAVkACAAAAAADGB5yU2XT0fse/MPWgvBvZikVxrl5pf3S5K1hceKWooFcwAgAAAAAIxTmlLHMjNaVDEfJbXvRez0SEPWFREBJCT6qTHsrljoBWwAIAAAAAAlswzAl81+0DteibwHD+CG5mZJrfHXa9NnEFRtXybzzwADMTIzAH0AAAAFZAAgAAAAABmO7QD9vxWMmFjIHz13lyOeV6vHT6mYCsWxF7hb/yOjBXMAIAAAAACT9lmgkiqzuWG24afuzYiCeK9gmJqacmxAruIukd0xEAVsA
CAAAAAAZa0/FI/GkZR7CtX18Xg9Tn9zfxkD0UoaSt+pIO5t1t4AAzEyNAB9AAAABWQAIAAAAAAfPUoy7QyZKhIIURso+mkP9qr1izbjETqF5s22GwjCjAVzACAAAAAAvLMsIDQ/go4VUxeh50UHmsvMvfx51cwyONnRD2odvC0FbAAgAAAAAKMb+1CodEalAFnDrEL1Ndt8ztamZ+9134m9Kp3GQgd+AAMxMjUAfQAAAAVkACAAAAAAE3ZqUar0Bq2zWbARE0bAv98jBlK9UJ73/xcwdMWWlSkFcwAgAAAAAK4M+MmC+9sFiFsumMyJZQKxWmmJiuG9H7IzKw083xxkBWwAIAAAAAAqkAONzhvMhkyL1D/6h7QQxEkdhC3p2WjXH+VGq5qCqQADMTI2AH0AAAAFZAAgAAAAAMo8FJiOq63cAmyk2O7eI7GcbQh/1j4RrMTqly3rexftBXMAIAAAAADjVmpd0WiRGTw/gAqEgGolt2EI7Csv14vKdmYoMD0aAgVsACAAAAAA07XQBzBUQMNw7F2/YxJjZNuPVpHTTgbLd1oGk77+bygAAzEyNwB9AAAABWQAIAAAAACu5IGaIx7A3Jvly/kzlCsSA4s3iJwuIl8jEdRH0k93NwVzACAAAAAA9NRUyxYE+t0Xyosyt6vIfMFW/vBoYg6sR+jBNs4JAxIFbAAgAAAAAAzyZ91dx+0oMlOVAjRGiMrPySikY/U9eMEB4WJb3uWtAAMxMjgAfQAAAAVkACAAAAAALkRy0GJInXYLA+cgjs6Myb0a+Gu9hgXhHvhLNoGWfckFcwAgAAAAANbALyt9zCSvwnLaWCd2/y2eoB7qkWTvv1Ldu8r40JPuBWwAIAAAAAD4Fl5bV5sz4isIE9bX+lmAp+aAKaZgVYVZeVfrItkCZAADMTI5AH0AAAAFZAAgAAAAAGoUK/DSWhT8LZhszSUqDbTrp8cSA7rdqmADKL+MILtTBXMAIAAAAABHnEE9bVa6lvhfhEMkkV2kzSSxH/sMW/FIJuw3CzWs6wVsACAAAAAAanavcBdqZxgRGKvEK95wTmeL1K1CeDSXZsXUAs81uOgAAzEzMAB9AAAABWQAIAAAAAC922ZDQE3h2fQKibGMZ9hV0WNlmrPYYSdtaSyYxsWYqgVzACAAAAAAagMovciKK6WVjIc2cCj8nK5O/gVOFFVeVAJpRp89tmQFbAAgAAAAAKcTFfPQzaFiAtSFhqbN02sCE1BKWJSrRfGN5L6oZwzkAAMxMzEAfQAAAAVkACAAAAAAtK+JqX3K/z2txjAU15DgX4y90DS2YLfIJFolCOkJJJwFcwAgAAAAAMnR5V7gfX7MNqqUdL5AkWlkhyFXaBRVNej+Rcn8lrQkBWwAIAAAAAA2cDNRXZuiC241TGRvdFyctJnrNcdbZOP9zHio81tkngADMTMyAH0AAAAFZAAgAAAAAAeGrIMK/bac6kPczxbvRYqKMkcpeI2FjdMpD91FDWIvBXMAIAAAAAAix62z1LeS8yvSXCl5gHSIomjyx76fF3S1lp9k900hygVsACAAAAAAiYwzf2m71aWFD5ajcXyW2JX2EzQOkBroTGMg29nLPYIAAzEzMwB9AAAABWQAIAAAAACphf298InM0Us4HT8o1W1MGw0D/02vd7Jh+U0h7qaFaQVzACAAAAAAFXtk7YpqsOJxsqGWSIL+YcBE96G3Zz9D31gPqDW94y8FbAAgAAAAAAOrS1KVA94rjB1jZ1pPocpCeBG+B14RzWoHqVDpp7JbAAMxMzQAfQAAAAVkACAAAAAATLDS2cuDVM3yDMuWNgk2iGKBTzPpfJMbvxVOSY39ZfcFcwAgAAAAAPT5wRi2cLHIUflXzm6EQB/m7xdThP80ir1VV/JBBqvxBWwAIAAAAAB9lEtZS0aXCFbCtSbhnis27S5IPcfWGygHW8AHn3QqzwADMTM1AH0AAAAFZAAgAAAAAJNjExiZVX7jfFGfYpQu16qxLN0YPqVU/5CQ/Y67YSinBXMAIAAAAABMpm2+6KrkRUlXzQoMPHrQmIO6dkQz66tYdfTeA3dKqQVsACAAAAAAFXobHiMLvNZuEPr8jtewCX2J93EZG3JNeyVg92fue6YAAzEzNgB9AAAABWQAIAAAAABlFkYtLCx901X6QVVMkSn6Z7k30UF4xHaA0OZJJ9bdyQVzACAAAAAATez+F9GHcGzTp7jjv4feboUNb8JCkIp4EqcPFisnq7MFbAAgAAAAACE7JvOpBgMoZ7kRd4QbxIhxukPTUxXpzhjnBHiR7XoRAAMxMzcAfQAAAAVkACAAAAAA8NJKN0IxZnruhswGQkiruv8Ih0EMwDcSZx/Xasup9dkFcwAgAAAAAKaJZRxzA+Igeydvuk6cSwUHXcrmT4PjhuPu//FslpdnBWwAIAAAAAD53Rok1Vq/PMAnXmarqoHJ0PEyYUBmVESa9hIpCv/G9QADMTM4AH0AAAAFZAAgAAAAABHxHdEClz7hbSSgE58+dWLlSMJnoPz+jFxp4bB1GmLQBXMAIAAAAAD3nSvT6aGD+A110J/NwEfp0nPutlmuB5B+wA3CC3noGAVsACAAAAAA3Apjd+TapONB7k5wBVwTWgn8t+Sq2oyyU5/+as109RcAAzEzOQB9AAAABWQAIAAAAAC/o8qW/ifk3KuJ01VFkyNLgQafxB5/bGs2G5VyyVafOwVzACAAAAAA1bMqAFGDHSl6BYNLbxApvkAv2K1/oafywiX0MDz1dGUFbAAgAAAAAHJXLlId3edFoniLD/9K2A5973MeP2Ro31flDyqm3l5QAAMxNDAAfQAAAAVkACAAAAAAY2V8I1bz3a1AxTtmED6UhdhA09huFkuuEX8R+d/WDPUFcwAgAAAAAPTVoNRiI76tcRKqd+JBBVyy4+YcKST42p0QX2BtmQ2VBWwAIAAAAACcxt9hg14WqPNiDv1MkqVljM2e2KJEv53lA17LhV6ZigADMTQxAH0AAAAFZAAgAAAAAO2kSsW0WGN9AOtK4xK2SHrGhWiaAbMEKT4iZkRpaDN/BXMAIAAAAABKGzQcPM8LT2dwOggxoWjv/1imYWabbG/G4kBw8OWaxAVsACAAAAAAC9hLK1dScQTAqg+YAG3ObdPzg2Xet57HmOFpGmyUR9UAAzE0MgB9AAAABWQAIAAAAAAiCwzNEEaH/mDam68IdDftnhthyUFdb+ZCNSBQ91WlHQVzACAAAAAA7tHyHcxCzmbJeFYZyPm4mEgkTGKOvwY4MX82OvH0Jn8FbAAgAAAAAAb5IAbZ1hXCNegQ+S+C9i/Z8y6sS8KeU04V6hXa2ml6AAMxNDMAfQAAAAVkACAAAAAAGuCHVNJSuoVkpPOnS5s89GuA+BLi2IPBUr2Bg1sWEPIFcwAgAAAAAEl1gncS5/xO7bQ/KQSstRV3rOT2SW6nV92ZANeG2SR6BWwAIAAAAAA9LOcKmhek8F2wAh8yvT/vjp2gaouuO+Hmv10lwAeWPAADMTQ0AH0AAAAFZAAgAAAAAMfxz7gEaoCdPvXr
ubDhCZUS0ARLZc1svgbXgMDlVBPgBXMAIAAAAAB6a5dDA3fuT5Vz2KvAcbUEFX/+B7Nw2p1QqbPoQ5TTuAVsACAAAAAAcf/y75UOuI62A6vWH7bYr/5Jz+nirZVYK/81trN6XOQAAzE0NQB9AAAABWQAIAAAAACnYsqF/VzmjIImC9+dqrHO1TM6lJ6fRwM0mM6Wf6paOwVzACAAAAAA5tgZzch8uDCR1ky3SllVaKVpxAlbrhvlNDTazZZRZOAFbAAgAAAAALeGiLJS4z2zhgVpxzyPdRYyACP9QzQBOob34YrIZumCAAMxNDYAfQAAAAVkACAAAAAAEC0sIVmadtW4YMuRXH7RpAhXclsd+3bmqGXCMeaT014FcwAgAAAAABPpXh0uzpsJJB+IRUNajmMB9WGwswfpw5T9xk3Xj6ANBWwAIAAAAAAmf+NYh9TZ/QRu3w/GQz66n7DtfbJijN3G7KzeL8lstAADMTQ3AH0AAAAFZAAgAAAAABaIB3n49Xm9cOafSrQsE0WCcYp8rMIO/qVwIlMF5YLRBXMAIAAAAAC9EyWJV3xOu9bzgdJ/yX+ko7qLf1u3AxNMataW2C9EzQVsACAAAAAAvVbDkLxXx2DcMLifIQ3K0IIJcLcAG9DUrNfI6aoUjNcAAzE0OAB9AAAABWQAIAAAAAA5rZItA/cocRnngYqcJ3nBXQ+l688aKz3EQyLbYYunPAVzACAAAAAAwKyA+L7TgxztPClLrIMk2JXR+w7c04N3ZOqPgjvrIvsFbAAgAAAAACzvZ33h6aWEe8hmo+1f6OXJ72FY5hvWaUuha64ZV3KFAAMxNDkAfQAAAAVkACAAAAAA3htn7oHJ0YYpIrs+Mzyh85Ys67HwAdv5LQl1mCdoMWkFcwAgAAAAAEHjCtNNLenHuSIYux6ezAHsXDaj2DlTF67ToDhDDe6HBWwAIAAAAAD+P4H0sk9jOd+7vOANt2/1Ectb+4ZRGPE8GkHWNXW3MgADMTUwAH0AAAAFZAAgAAAAAEnt18Km/nqggfIJWxzTr9r3hnXNaueG6XO9A5G11LnGBXMAIAAAAAD7QxzGMN/ard5TfFLecE6uusMmXG2+RBsBR+/NCQHUwAVsACAAAAAAQEZ1ZZ8GC8rdbg7s87OM5Gr9qkTXS9+P5DuAZxj5Gl4AAzE1MQB9AAAABWQAIAAAAAAVAKK/GoY8AACu/hyMpO4hdLq6JnEyWNzkyci9sbaD/wVzACAAAAAA2HmeqpMlvvBpV2zQTYIRmsc4MFlfHRwLof0ycJgMg/MFbAAgAAAAACdltCeWi5E/q1Li1eXLChpM2D9QQSGLBZ82NklQSc0oAAMxNTIAfQAAAAVkACAAAAAAhHyq1GQC/GiMwpYjcsfkNxolJ10ARKjIjfkW1Wipzi0FcwAgAAAAAD/uaGWxTDq87F8XZ6CrFI+RNa8yMqfSZdqK00Kj833BBWwAIAAAAAD6aEdOO0CsQGagioOCvANPCEHSpJ8BSixlPBq5ERhB7AADMTUzAH0AAAAFZAAgAAAAABAJJxHoZD+MQBWqm9UM9Dd3z5ZohIZGWRaRVRsMptKQBXMAIAAAAADrE/ca+gqj/SH4oao4wE4qn2ovoTydzcMbDbrfnUs3zAVsACAAAAAAeNCIQN6hVnGJinytQRFGlQ2ocoprXNqpia+BSxzl+uwAAzE1NAB9AAAABWQAIAAAAAAv01wz7VG9mTepjXQi6Zma+7b/OVBaKVkWNbgDLr1mFgVzACAAAAAA0I5sxz8r6wkCp5Tgvr+iL4p6MxSOq5d3e1kZG+0b7NkFbAAgAAAAAIA32v6oGkAOS96HexGouNTex+tLahtx9QF2dgGClk6WAAMxNTUAfQAAAAVkACAAAAAAWXecRwxSon68xaa9THXnRDw5ZfzARKnvvjTjtbae6T0FcwAgAAAAAPh0UfUMEo7eILCMv2tiJQe1bF9qtXq7GJtC6H5Va4fIBWwAIAAAAADqFr1ThRrTXNgIOrJWScO9mk86Ufi95IDu5gi4vP+HWQADMTU2AH0AAAAFZAAgAAAAAEY5WL8/LpX36iAB1wlQrMO/xHVjoO9BePVzbUlBYo+bBXMAIAAAAABoKcpadDXUARedDvTmzUzWPe1jTuvD0z9oIcZmKuiSXwVsACAAAAAAJuJbwuaMrAFoI+jU/IYr+k4RzAqITrOjAd3HWCpJHqEAAzE1NwB9AAAABWQAIAAAAADnJnWqsfx0xqNnqfFGCxIplVu8mXjaHTViJT9+y2RuTgVzACAAAAAAWAaSCwIXDwdYxWf2NZTly/iKVfG/KDjHUcA1BokN5sMFbAAgAAAAAJVxavipE0H4/JQvhagdytXBZ8qGooeXpkbPQ1RfYMVHAAMxNTgAfQAAAAVkACAAAAAAsPG7LaIpJvcwqcbtfFUpIjj+vpNj70Zjaw3eV9T+QYsFcwAgAAAAAJQ71zi0NlCyY8ZQs3IasJ4gB1PmWx57HpnlCf3+hmhqBWwAIAAAAACD58TO6d+71GaOoS+r73rAxliAO9GMs4Uc8JbOTmC0OwADMTU5AH0AAAAFZAAgAAAAAAGiSqKaQDakMi1W87rFAhkogfRAevnwQ41onWNUJKtuBXMAIAAAAAASgiDpXfGh7E47KkOD8MAcX8+BnDShlnU5JAGdnPdqOAVsACAAAAAAI+2TTQIgbFq4Yr3lkzGwhG/tqChP7hRAx2W0fNaH6jcAAzE2MAB9AAAABWQAIAAAAAB7L4EnhjKA5xJD3ORhH2wOA1BvpnQ+7IjRYi+jjVEaJAVzACAAAAAAuhBIm0nL3FJnVJId+7CKDASEo+l2E89Z9/5aWSITK4AFbAAgAAAAALtSICOzQDfV9d+gZuYxpEj6cCeHnKTT+2G3ceP2H65kAAMxNjEAfQAAAAVkACAAAAAAaROn1NaDZFOGEWw724dsXBAm6bgmL5i0cki6QZQNrOoFcwAgAAAAANVT8R6UvhrAlyqYlxtmnvkR4uYK/hlvyQmBu/LP6/3ZBWwAIAAAAAD+aHNMP/X+jcRHyUtrCNkk1KfMtoD3GTmShS8pWGLt+AADMTYyAH0AAAAFZAAgAAAAADqSR5e0/Th59LrauDA7OnGD1Xr3H3NokfVxzDWOFaN7BXMAIAAAAACt30faNwTWRbvmykDpiDYUOCwA6QDbBBYBFWS7rdOB4AVsACAAAAAAF7SvnjjRk5v2flFOKaBAEDvjXaL1cpjsQLtK2fv9zdQAAzE2MwB9AAAABWQAIAAAAADmtb1ZgpZjSeodPG/hIVlsnS8hoRRwRbrTVx89VwL62AVzACAAAAAAi38e1g6sEyVfSDkzZbaZXGxKI/zKNbMasOl2LYoWrq8FbAAgAAAAAALACk0KcCDN/Kv8WuazY8ORtUGkOZ5Dsm0ys1oOppp/AAMxNjQAfQAAAAVkACAAAAAAf/f7AWVgBxoKjr7YsEQ4w/fqSvuQWV2HMiA3rQ7ur0sFcwAgAAAAADkkeJozP6FFhUdRIN74H4UhIHue+eVbOs1NvbdWYFQrBWwAIAA
AAAB55FlHAkmTzAYj/TWrGkRJw2EhrVWUnZXDoMYjyfB/ZwADMTY1AH0AAAAFZAAgAAAAAI2WEOymtuFpdKi4ctanPLnlQud+yMKKb8p/nfKmIy56BXMAIAAAAADVKrJmhjr1rfF3p+T+tl7UFd1B7+BfJRk0e7a4im7ozgVsACAAAAAA5E7Ti3PnFiBQoCcb/DN7V1uM3Xd6VKiexPKntssFL7kAAzE2NgB9AAAABWQAIAAAAAAuHU9Qd79hjyvKOujGanSGDIQlxzsql8JytTZhEnPw+AVzACAAAAAAjF2gV/4+sOHVgDd/oR5wDi9zL7NGpGD+NsEpGXy/a4QFbAAgAAAAAJzMoyojYV6Ed/LpVN5zge93Odv3U7JgP7wxeRaJZGTdAAMxNjcAfQAAAAVkACAAAAAA7dQDkt3iyWYCT94d7yqUtPPwp4qkC0ddu+HFdHgVKEkFcwAgAAAAANuYvtvZBTEq4Rm9+5eb7VuFopowkrAuv86PGP8Q8/QvBWwAIAAAAACeqXoAOQOE4j0zRMlkVd8plaW0RX1npsFvB38Xmzv7sAADMTY4AH0AAAAFZAAgAAAAAAwnZSDhL4tNGYxlHPhKYB8s28dY5ScSwiKZm3UhT8U3BXMAIAAAAABDoY6dhivufTURQExyC9Gx3ocpl09bgbbQLChj3qVGbgVsACAAAAAAF+1nS7O0v85s3CCy+9HkdeoEfm2C6ZiNbPMMnSfsMHUAAzE2OQB9AAAABWQAIAAAAAC2VuRdaC4ZJmLdNOvD6R2tnvkyARteqXouJmI46V306QVzACAAAAAAMn1Z6B35wFTX9mEYAPM+IiJ5hauEwfD0CyIvBrxHg7IFbAAgAAAAAOG6DvDZkT9B/xZWmjao2AevN7MMbs3Oh9YJeSd/hZ+hAAMxNzAAfQAAAAVkACAAAAAAVerb7qVNy457rNOHOgDSKyWl5ojun7iWrv1uHPXrIZQFcwAgAAAAAIDcYS9j5z+gx0xdJj09L7876r/vjvKTi/d3bXDE3PhyBWwAIAAAAADuhVLqb1Bkrx8aNymS+bx2cL8GvLFNH4SAi690DUgnWQADMTcxAH0AAAAFZAAgAAAAAH/E44yLxKCJjuSmU9A8SEhbmkDOx1PqqtYcZtgOzJdrBXMAIAAAAABgLh9v2HjBbogrRoQ82LS6KjZQnzjxyJH4PH+F3jupSAVsACAAAAAAIlO46ehXp4TqpDV0t6op++KO+uWBFh8iFORZjmx2IjkAAzE3MgB9AAAABWQAIAAAAAAlNUdDL+f/SSQ5074mrq0JNh7CTXwTbbhsQyDwWeDVMwVzACAAAAAANIH2IlSNG0kUw4qz0budjcWn8mNR9cJlYUqPYdonucAFbAAgAAAAAJMrOUOyiu5Y3sV76zwEFct8L7+i8WGlQI2+8z2W2kzaAAMxNzMAfQAAAAVkACAAAAAASZ+CvUDtlk/R4HAQ3a+PHrKeY/8ifAfh0oXYFqliu80FcwAgAAAAAJelpzPgM65OZFt/mvGGpwibclQ49wH+1gbUGzd9OindBWwAIAAAAAD9qeDchteEpVXWcycmD9kl9449C1dOw0r60TBm5jK+cQADMTc0AH0AAAAFZAAgAAAAAN9fkoUVbvFV2vMNMAkak4gYfEnzwKI3eDM3pnDK5q3lBXMAIAAAAACnDkgVNVNUlbQ9RhR6Aot2nVy+U4km6+GHPkLr631jEAVsACAAAAAANzg/BnkvkmvOr8nS4omF+q9EG/4oisB+ul4YHi938hwAAzE3NQB9AAAABWQAIAAAAAASyK3b1nmNCMptVEGOjwoxYLLS9fYWm/Zxilqea0jpEQVzACAAAAAADDHsGrbqlKGEpxlvfyqOJKQJjwJrzsrB7k3HG0AUJbkFbAAgAAAAAKwx3S4XfDZh4+LuI9jf7XgUh5qiefNv87JD4qvVRfPSAAMxNzYAfQAAAAVkACAAAAAAlSP9iK31GlcG9MKGbLmq+VXMslURr+As736rrVNXcsUFcwAgAAAAAAvbj0zfq9zzi8XReheKFbCB+h9IsOLgXPPpI5vrEJNZBWwAIAAAAABXvoZhaQE7ogWjeBjceVkp03N20cKYP3TA8vuNsgpfAgADMTc3AH0AAAAFZAAgAAAAAOJNORH8Bev97gVU7y6bznOxJ+E6Qoykur1QP76hG1/7BXMAIAAAAAC+C1PtOOrSZgzBAGhr+dPe/kR0JUw9GTwLVNr61xC1aAVsACAAAAAAeA/L8MQIXkamaObtMPLpoDoi5FypA5WAPtMeMrgi0eQAAzE3OAB9AAAABWQAIAAAAAAKcHzLUomavInN6upPkyWhAqYQACP/vdVCIYpiy6U6HgVzACAAAAAATsR4KItY6R2+U7Gg6sJdaEcf58gjd1OulyWovIqfxKcFbAAgAAAAAFbm10ko67ahboAejQdAV0U2uA5OhZYdb8XUFJ8OL46LAAMxNzkAfQAAAAVkACAAAAAAqTOLiMpCdR59tLZzzIPqJvbCNvz2XQL9ust0qYaehtcFcwAgAAAAAArefox/3k5xGOeiw2m6NUdzuGxmPwcu5IFcj+jMwHgHBWwAIAAAAADLZGFJ7MQd5JXMgMXjqZO5LDLxcFClcXPlnRMWRn+1oAADMTgwAH0AAAAFZAAgAAAAAIPSqSeVzSRgNVNmrPYHmUMgykCY27NbdDUNhE5kx/SgBXMAIAAAAAAhX90nNfxyXmZe/+btZ7q6xMX4PFyj0paM1ccJ/5IUUQVsACAAAAAA419oHmD2W0SYoOMwhrhrp8jf68fg9hTkaRdCuVd3CN0AAzE4MQB9AAAABWQAIAAAAACLn5DxiqAosHGXIAY96FwFKjeqrzXWf3VJIQMwx1fl4gVzACAAAAAAindvU27nveutopdvuHmzdENBbeGFtI3Qcsr07jxmvm8FbAAgAAAAAPvl9pBStQvP4OGkN5v0MghUY6djm9n7XdKKfrW0l1sMAAMxODIAfQAAAAVkACAAAAAA7i2S6rHRSPBwZEn59yxaS7HiYBOmObIkeyCcFU42kf8FcwAgAAAAAGb3RSEyBmgarkTvyLWtOLJcPwCKbCRkESG4RZjVmY4iBWwAIAAAAADB2/wo5CSHR4ANtifY6ZRXNTO5+O8qP82DfAiAeanpZwADMTgzAH0AAAAFZAAgAAAAAFz+M+H/Z94mdPW5oP51B4HWptp1rxcMWAjnlHvWJDWrBXMAIAAAAACBFEOQyL7ZHu4Cq33QvXkmKuH5ibG/Md3RaED9CtG5HwVsACAAAAAAfggtJTprQ/yZzj7y5z9KvXsdeXMWP0yUXMMJqpOwI88AAzE4NAB9AAAABWQAIAAAAAAE7c2x3Z3aM1XGfLNk/XQ9jCazNRbGhVm7H8c2NjS5ywVzACAAAAAARJ9h8fdcwA19velF3L/Wcvi2rCzewlKZ2nA0p8bT9uwFbAAgAAAAAJtWe6b4wK2Hae2dZm/OEpYQnvoZjz4Sz5IgJC2wInecAAMxODUAfQAAAAVkACAAAAAAVoRt9B9dNVvIMGN+ea
5TzRzQC+lqSZ8dd/170zU5o9cFcwAgAAAAAEwM95XZin5mv2yhCI8+ugtKuvRVmNgzzIQN0yi1+9aIBWwAIAAAAAAMGBq72n00rox3uqhxSB98mkenTGCdbbUF1gXrgottzgADMTg2AH0AAAAFZAAgAAAAAKRDkjyWv/etlYT4GyoXrmBED2FgZHnhc+l9Wsl06cH2BXMAIAAAAABohlpm3K850Vndf3NmNE0hHqDlNbSR8/IvMidQ3LnIZAVsACAAAAAAW42nGHa6q2MCAaaPVwaIDfr8QLyQwjKq23onZJYsqVsAAzE4NwB9AAAABWQAIAAAAAC3DFh5oklLCNLY90bgWm68dFXz65JpAZSp1K99MBTPAQVzACAAAAAAQgZecmxEUZVHoptEQClDwAf8smI3WynQ/i+JBP0g+kQFbAAgAAAAAEUSQGVnAPISD6voD0DiBUqyWKgt2rta0tjmoe+LNt6IAAMxODgAfQAAAAVkACAAAAAAQ5WKvWSB503qeNlOI2Tpjd5blheNr6OBO8pfJfPNstcFcwAgAAAAAKwHgQLSDJ5NwLBQbY5OnblQIsVDpGV7q3RCbFLD1U4/BWwAIAAAAACQ5nED99LnpbqXZuUOUjnO2HTphEAFBjLD4OZeDEYybgADMTg5AH0AAAAFZAAgAAAAAGfhFY3RGRm5ZgWRQef1tXxHBq5Y6fXaLAR4yJhrTBplBXMAIAAAAACKEF0ApLoB6lP2UqTFsTQYNc9OdDrs/vziPGzttGVLKQVsACAAAAAArOO6FyfNRyBi0sPT5iye7M8d16MTLcwRfodZq4uCYKEAAzE5MAB9AAAABWQAIAAAAAAIM73gPcgzgotYHLeMa2zAU4mFsr7CbILUZWfnuKSwagVzACAAAAAAJCSu98uV8xv88f2BIOWzt6p+6EjQStMBdkGPUkgN79cFbAAgAAAAAMGqPGMPxXbmYbVfSa/japvUljht1zZT33TY7ZjAiuPfAAMxOTEAfQAAAAVkACAAAAAAkWmHCUsiMy1pwZTHxVPBzPTrWFBUDqHNrVqcyyt7nO8FcwAgAAAAAMv2CebFRG/br7USELR98sIdgE9OQCRBGV5JZCO+uPMgBWwAIAAAAABt7qSmn3gxJu7aswsbUiwvO+G6lXj/Xhx+J/zQyZxzLAADMTkyAH0AAAAFZAAgAAAAAGInUYv0lP/rK7McM8taEHXRefk8Q2AunrvWqdfSV7UaBXMAIAAAAACE+WPxJ3gan7iRTbIxXXx+bKVcaf8kP4JD8DcwU0aL7wVsACAAAAAAUC4eTprX4DUZn2X+UXYU6QjtiXk+u57yoOPBbPQUmDkAAzE5MwB9AAAABWQAIAAAAACmHlg2ud3cplXlTsNTpvNnY6Qm1Fce0m899COamoDjaQVzACAAAAAArtJQeJIlepBWRU2aYar7+YGYVQ7dfDc1oxgTmA8r9q0FbAAgAAAAAOk45vg5VqZHAFCO3i0Z52SZi5RADf8NXwf68T5yad/DAAMxOTQAfQAAAAVkACAAAAAApzcWSAbZWV/Rq+ylRNqqlJqNVR4fhXrz4633/MQOQgcFcwAgAAAAAN/jz/bsEleiuCl+li83EWlG6UMHA8CyaOMRKCkXkSCPBWwAIAAAAAC3Sd+Qg+uFDKpGZHbrQgokXHQ1az1aFl4YK343OB6hcQAAEmNtAAAAAAAAAAAAABBwYXlsb2FkSWQAAAAAABBmaXJzdE9wZXJhdG9yAAEAAAASc3AAAQAAAAAAAAAQdGYAAQAAABNtbgD/////Y46NN8CHrb4J7f/fE214AP////9jjo03wIetvgnt/18A", + "subType": "06" + } + } + } + } + } + ], + "cursor": {}, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDecimalNoPrecision", + "bsonType": "decimal", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + } + } + } + ] + } + } + } + }, + "commandName": "aggregate" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Decimal-Correctness.json b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Decimal-Correctness.json new file mode 100644 index 0000000000..edca7724a7 --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Decimal-Correctness.json @@ -0,0 +1,1016 @@ +{ + "description": "fle2v2-Rangev2-Decimal-Correctness", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.0.0", + "topologies": [ + "replicaset" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + 
}, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDecimalNoPrecision", + "bsonType": "decimal", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + } + } + } + ] + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "Find with $gt", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDecimalNoPrecision": { + "$gt": { + "$numberDecimal": "0.0" + } + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "1.0" + } + } + ] + } + ] + }, + { + "description": "Find with $gte", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDecimalNoPrecision": { + "$gte": { + "$numberDecimal": "0.0" + } + } + }, + "sort": { + "_id": 1 + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "0.0" + } + }, + { + "_id": 1, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "1.0" + } + } + ] + } + ] + }, + { + "description": "Find with $gt with no results", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDecimalNoPrecision": { + "$gt": { + "$numberDecimal": "1.0" + } + } + } + }, + "object": "coll", + "expectResult": [] + } + ] + }, + { + "description": "Find with $lt", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { 
+ "_id": 0, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDecimalNoPrecision": { + "$lt": { + "$numberDecimal": "1.0" + } + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "0.0" + } + } + ] + } + ] + }, + { + "description": "Find with $lte", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDecimalNoPrecision": { + "$lte": { + "$numberDecimal": "1.0" + } + } + }, + "sort": { + "_id": 1 + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "0.0" + } + }, + { + "_id": 1, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "1.0" + } + } + ] + } + ] + }, + { + "description": "Find with $gt and $lt", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDecimalNoPrecision": { + "$gt": { + "$numberDecimal": "0.0" + }, + "$lt": { + "$numberDecimal": "2.0" + } + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "1.0" + } + } + ] + } + ] + }, + { + "description": "Find with equality", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDecimalNoPrecision": { + "$numberDecimal": "0.0" + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "0.0" + } + } + ] + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDecimalNoPrecision": { + "$numberDecimal": "1.0" + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "1.0" + } + } + ] + } + ] + }, + { + "description": "Find with $in", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDecimalNoPrecision": { + "$in": [ + { + "$numberDecimal": "0.0" + } + ] 
+ } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "0.0" + } + } + ] + } + ] + }, + { + "description": "Aggregate with $gte", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDecimalNoPrecision": { + "$gte": { + "$numberDecimal": "0.0" + } + } + } + }, + { + "$sort": { + "_id": 1 + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "0.0" + } + }, + { + "_id": 1, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "1.0" + } + } + ] + } + ] + }, + { + "description": "Aggregate with $gt with no results", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDecimalNoPrecision": { + "$gt": { + "$numberDecimal": "1.0" + } + } + } + } + ] + }, + "object": "coll", + "expectResult": [] + } + ] + }, + { + "description": "Aggregate with $lt", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDecimalNoPrecision": { + "$lt": { + "$numberDecimal": "1.0" + } + } + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "0.0" + } + } + ] + } + ] + }, + { + "description": "Aggregate with $lte", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDecimalNoPrecision": { + "$lte": { + "$numberDecimal": "1.0" + } + } + } + }, + { + "$sort": { + "_id": 1 + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "0.0" + } + }, + { + "_id": 1, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "1.0" + } + } + ] + } + ] + }, + { + "description": "Aggregate with $gt and $lt", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDecimalNoPrecision": { + 
"$numberDecimal": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDecimalNoPrecision": { + "$gt": { + "$numberDecimal": "0.0" + }, + "$lt": { + "$numberDecimal": "2.0" + } + } + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "1.0" + } + } + ] + } + ] + }, + { + "description": "Aggregate with equality", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDecimalNoPrecision": { + "$numberDecimal": "0.0" + } + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "0.0" + } + } + ] + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDecimalNoPrecision": { + "$numberDecimal": "1.0" + } + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "1.0" + } + } + ] + } + ] + }, + { + "description": "Aggregate with $in", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDecimalNoPrecision": { + "$in": [ + { + "$numberDecimal": "0.0" + } + ] + } + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "0.0" + } + } + ] + } + ] + }, + { + "description": "Wrong type: Insert Int", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDecimalNoPrecision": { + "$numberInt": "0" + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "Cannot encrypt element" + } + } + ] + }, + { + "description": "Wrong type: Find Int", + "operations": [ + { + "name": "find", + "arguments": { + "filter": { + "encryptedDecimalNoPrecision": { + "$gte": { + "$numberInt": "0" + } + } + }, + "sort": { + "_id": 1 + } + }, + "object": "coll", + "expectError": { + "errorContains": "field type is not supported" + } + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Decimal-Delete.json b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Decimal-Delete.json new file mode 100644 index 0000000000..4b0121ac22 --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Decimal-Delete.json @@ -0,0 +1,1179 @@ +{ + "description": "fle2v2-Rangev2-Decimal-Delete", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.0.0", + "topologies": [ + "replicaset" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": 
"Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted" + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll_unencrypted", + "database": "db_unencrypted", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDecimalNoPrecision", + "bsonType": "decimal", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + } + } + } + ] + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "FLE2 Range Decimal. 
Delete.", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "1" + } + } + }, + "object": "coll" + }, + { + "name": "deleteOne", + "arguments": { + "filter": { + "encryptedDecimalNoPrecision": { + "$gt": { + "$numberDecimal": "0" + } + } + } + }, + "object": "coll", + "expectResult": { + "deletedCount": 1 + } + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": { + "$numberInt": "0" + }, + "encryptedDecimalNoPrecision": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "rbf3AeBEv4wWFAKknqDxRW5cLNkFvbIs6iJjc6LShQY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "n+XAuFnP8Dov9TnhGFxNx0K/MnVM9WbJ7RouEu0ndO0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yRXojuVdn5GQtD97qYlaCL6cOLmZ7Cvcb3wFjkLUIdM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "DuIkdRPITRs55I4SZmgomAHCIsDQmXRhW8+MOznkzSk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SsBk+Et1lTbU+QRPx+xyJ/jMkmfG+QCvQEpip2YYrzA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "crCIzOd8KhHvvUlX7M1v9bhvU4pLdTc+X2SuqoKU5Ek=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "YOWdCw4UrqnxkAaVjqmC4sKQDMVMHEpFGnlxpxdaU6E=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "M3SShp81Ff8tQ632qKbv9MUcN6wjDaBReI0VXNu6Xh4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "gzHlSPxpM0hT75kQvWFzGlOxKvDoiKQZOr19V6l2zXI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "s3JnppOGYw9SL2Q1kMAZs948v2F5PrpXjGei/HioDWs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "cG6+3Gk/zEH68P/uuuwiAUVCuyJwa1LeV+t29FlPPAo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dupdvR3AyJtM+g9NDKiaLVOtGca387JQp8w+V03m7Ig=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "JqEQc5svj2jTvZ6LLA5ivE+kTb/0aRemSEmxk4G7Zrg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "szcXXXKnob+p3SoM4yED2R920LeJ7cVsclPMFTe4CeI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "o1QoGVXmuBdHwHm7aCtGMlMVKrjFdYvJXpoq6uhIAZ0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Jfm5wPlqqLCJRGQIqRq2NGmpn7s0Vrih2H3YAOoI2YU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "zMHLb8ARbsYo8Ld05bqnGFf1Usha6EGb8QKwdSAyps0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yQdtq9lh5pugL7/i0Bj/PuZUUBUIzf+7wj1rl5y736w=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "wGWVZdO7qIuyDg/BqDgqjgoQ02h5YYgwXQB1oCin2NE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "by9HMLj6NTEpgztZ5HSN6GxImkXPcaFINYDzgZY33X8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tWo0vbasi7bXmn/MsOx13VC1IsWtpx/nYp0uj4iMzdA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tQQpndUYd5O87lOtrGjH3wl9VsOK0ray7RMasL90sBM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "cQjXEDCMsOpKLLf+vlTgIHA+cbSJdzqhbSX9Wvh95aA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "7yMpU48IxK9SzP2cx3VnTownGEwFmeFofuuFT97SuuY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "kSOx1kz0CmBgzKQHZlo65ZUY1DIv9A99JRm+Us2y6Ew=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ubQpdPBe6/xvtr+AcXdfYLSvYCR4ot0tivehkCsupb4=", + 
"subType": "00" + } + }, + { + "$binary": { + "base64": "xal+iCJ6FTefRQToyoNksc9NCZShyn04NDGi4IYrcoM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "d7jU4iOK50xHxlkSifcxlZFCM46TSgQzoYivxG3HNLY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tJvl2nsBLBVzL3pp6sKWCL4UXeh3q/roYBJjSb74ve0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "OIUCaKRvIx9t1w6Hxlz1IcQTdPNCfdRNwnnTm10W+X0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "A9tvzsiElotOUVIB4CqfQp9mAwqvTM35YkmAR170aHA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "lI8gpK7hpb7c9x4RQugsxMnQay5LZJmwslZdvMx/dcE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dNCzh40U0XvdKnSDi3HRQOWQftEsDVqc4uUvsVFGoq8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "IP+iwEBWBwVVZIdpaMu8k5+soFCz+TZkYn3drKZ9grE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "pnqyh6e0y5svHkJDShlN9CHV0WvMBE4QbtJpQw5ZCXc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "elEl42tbVDoRTLjAhZUFEtXiut4b3PVhg/1ZLZSQdtE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "vHuu2FxwclMHqyE6JBYbTYgbEkB0dqb/JuaxsvfwsmY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xTf7NCe3Gf8QpE78HR5OknlLTKfs9J+RN9UZpH6fnso=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "XiWSasRnJAulGR6+LCVD3mwRObXylqYWR9jvpywq12c=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "MZMxEQ5ikx0PG1YFIExv0UnTZogsvgeOEZTpzvBDn4w=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yZMyMZBDrWbAhvnic7vvIYhmO9m5H2iuv0c8KNZrBzY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xxM14hTPY5j0vvcK2C7YAEjzdsfUTFHozHC0hEo1bxI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "+01rqR1xVwkpGXcstbk1ItJqFVjH6Q8MGxEN3Cm9Y1A=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xOpLV0Z2VTRJ3iWtnWZcsyjXubTIkYWo31cO+HV1o1k=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "BWUOLqgLBqc5NwxVlSV5H3KFQPXbCp7mdo+jF+8cJqY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "fuQb1S6xZDGlrEbK+kI23aL53PP1PVNwqICnZNt9Yzg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SfscnoibFttahLdPVC4Ee+47ewGFKpDSU7M6HX19bKE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "rpSW2awybNVeKtat91VFxqbINoTfNhPfQAu+d73Xtf8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "9M/CP9ccOIIj2LLFmE0GFDO0Ban2wsNalEXfM6+h+1s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "WrEMG49l1ye4MhXs5ZS9tz8P6h+hDvthIg/2wW9ne1Q=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ImNhbfeyfH8qIEeA5ic0s3dAQBdzzTBS+CPsNih9vZ0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dWP33YDSn04UKJN2ogh2Rui0iW/0q2y18OCDRVcfyoo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "lYv0isAtfGh6H9tdp3cp2eHU7q2J+uk7QrgcxtK3w7Y=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "VGMoamB/+7zTOYcY/pqJc96xlv2PdW4hwsIAEIslTDQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yNeBWMF7BnD9wVwz2PgJsvWr77QiVvvWUvJF0+fqBug=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SfpvObJ+tJBXSvqeN7vlOfmhYign635lciYAJIjUtY8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dsen4NqjzVGjpjufiTMs3+gqeD09EbnuogPgxrJECwg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "pxCWVM3sn19NsFEpgHbgLa+PmYlhN3mMiP0Wk8kJhYw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "q11KNvJszjYIB9n9HcC+N4uz11a3eRj1L3BH9scKMDQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": 
"A1PmkgcEToWh1JiVWE6mI5jUu7poxWWuCUt/cgRUUDc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "qJo3Hu4PJeanL7XEaWXO/n3YsodhZyd+MJOOmB9Kpd8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "BkBKLO8URFscfRY9Bav/1+L9mLohDgNr/MkZtGiraIs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "rZq5WA3Hx3xthOyHAJXK//f8pE2qbz7YKu3TIMp9GFY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "X07a/Lm80p5xd4RFs1dNmw+90tmPDPdGiAKVZkxd4zY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6YrBn2ofIw1b5ooakrLOwF41BWrps8OO0H9WH4/rtlE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "0l86Ag5OszXpa78SlOUV3K9nff5iC1p0mRXtLg9M1s4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Hn6yuxFHodeyu7ISlhYrbSf9pTiH4TDEvbYLWjTwFO0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "zdf4y2etKBuIpkEU1zMwoCkCsdisfXZCh8QPamm+drY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "rOQ9oMdiK5xxGH+jPzOvwVqdGGnF3+HkJXxn81s6hp4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "61aKKsE3+BJHHWYvs3xSIBvlRmKswmaOo5rygQJguUg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "KuDb/GIzqDM8wv7m7m8AECiWJbae5EKKtJRugZx7kR0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Q+t8t2TmNUiCIorVr9F3AlVnX+Mpt2ZYvN+s8UGict8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tJRZIpKxUgHyL83kW8cvfjkxN3z6WoNnUg+SQw+LK+k=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "pnUsYjip8SvW0+m9mR5WWTkpK+p6uwJ6yBUAlBnFKMk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "PArHlz+yPRYDycAP/PgnI/AkP8Wgmfg++Vf4UG1Bf0E=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "wnIh53Q3jeK8jEBe1n8kJLa89/H0BxO26ZU8SRIAs9Q=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "4F8U59gzBLGhq58PEWQk2nch+R0Va7eTUoxMneReUIA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ihKagIW3uT1dm22ROr/g5QaCpxZVj2+Fs/YSdM2Noco=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "EJtUOOwjkrPUi9mavYAi+Gom9Y2DuFll7aDwo4mq0M0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dIkr8dbaVRQFskAVT6B286BbcBBt1pZPEOcTZqk4ZcI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "aYVAcZYkH/Tieoa1XOjE/zCy5AJcVTHjS0NG2QB7muA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "sBidL6y8TenseetpioIAAtn0lK/7C8MoW4JXpVYi3z8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "0Dd2klU/t4R86c2WJcJDAd57k/N7OjvYSO5Vf8KH8sw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "I3jZ92WEVmZmgaIkLbuWhBxl7EM6bEjiEttgBJunArA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "aGHoQMlgJoGvArjfIbc3nnkoc8SWBxcrN7hSmjMRzos=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "bpiWPnF/KVBQr5F6MEwc5ZZayzIRvQOLDAm4ntwOi8g=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tI7QVKbE6avWgDD9h4QKyFlnTxFCwd2iLySKakxNR/I=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "XGsge0CnoaXgE3rcpKm8AEeku5QVfokS3kcI+JKV1lk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "JQxlryW2Q5WOwfrjAnaZxDvC83Dg6sjRVP5zegf2WiM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "YFuHKJOfoqp1iGVxoFjx7bLYgVdsN4GuUFxEgO9HJ5s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Z6vUdiCR18ylKomf08uxcQHeRtmyav7/Ecvzz4av3k4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SPGo1Ib5AiP/tSllL7Z5PAypvnKdwJLzt8imfIMSEJQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "m94Nh6PFFQFLIib9Cu5LAKavhXnagSHG6F5EF8lD96I=", + 
"subType": "00" + } + }, + { + "$binary": { + "base64": "pfEkQI98mB+gm1+JbmVurPAODMFPJ4E8DnqfVyUWbSo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "DNj3OVRLbr43s0vd+rgWghOL3FqeO/60npdojC8Ry/M=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "kAYIQrjHVu49W8FTxyxJeiLVRWWjC9fPcBn+Hx1F+Ss=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "aCSO7UVOpoQvu/iridarxkxV1SVxU1i9HVSYXUAeXk4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Gh6hTP/yj1IKlXQ+Q69KTfMlGZjEcXoRLGbQHNFo/1s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "/gDgIFQ4tAlJk3GN48IS5Qa5IPmErwGk8CHxAbp6gs0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "PICyimwPjxpusyKxNssOOwUotAUbygpyEtORsVGXT8g=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "4lu+cBHyAUvuxC6JUNyHLzHsCogGSWFFnUCkDwfQdgI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "pSndkmoNUJwXjgkbkgOrT5f9nSvuoMEZOkwAN9ElRaE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tyW+D4i26QihNM5MuBM+wnt5AdWGSJaJ4X5ydc9iWTU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "9Syjr8RoxUgPKr+O5rsCu07AvcebA4P8IVKyS1NVLWc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "67tPfDYnK2tmrioI51fOBG0ygajcV0pLo5+Zm/rEW7U=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "y0EiPRxYTuS1eVTIaPQUQBBxwkyxNckbePvKgChwd0M=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "NWd+2veAaeXQgR3vCvzlI4R1WW67D5YsVLdoXfdb8qg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "PY5RQqKQsL2GqBBSPNOEVpojNFRX/NijCghIpxD6CZk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "lcvwTyEjFlssCJtdjRpdN6oY+C7bxZY+WA+QAqzj9zg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "CWE7XRNylvTwO/9Fv56dNqUaQWMmESNS/GNIwgBaEI0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ijwlrUeS8nRYqK1F8kiCYF0mNDolEZS+/lJO1Lg93C8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "8KzV+qYGYuIjoNj8eEpnTuHrMYuhzphl80rS6wrODuU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "wDyTLjSEFF895hSQsHvmoEQVS6KIkZOtq1c9dVogm9I=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SGrtPuMYCjUrfKF0Pq/thdaQzmGBMUvlwN3ORIu9tHU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "KySHON3hIoUk4xWcwTqk6IL0kgjzjxgMBObVIkCGvk4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "hBIdS9j0XJPeT4ot73ngELkpUoSixvRBvdOL9z48jY8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Tx6um0q9HjS5ZvlFhvukpI6ORnyrXMWVW1OoxvgqII0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "zFKlyfX5H81+d4A4J3FKn4T5JfG+OWtR06ddyX4Mxas=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "cGgCDuPV7MeMMYEDpgOupqyNP4BQ4H7rBnd2QygumgM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "IPaUoy98v11EoglTpJ4kBlEawoZ8y7BPwzjLYBpkvHQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Pfo4Am6tOWAyZNn8G9W5HWWGC3ZWmX0igI/RRB870Ro=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "fnTSjd7bC1Udoq6iM7UDnHAC/lsIXSHp/Gy332qw+/I=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "fApBgVRrTDyEumkeWs5p3ag9KB48SbU4Si0dl7Ns9rc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "QxudfBItgoCnUj5NXVnSmWH3HK76YtKkMmzn4lyyUYY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "sSOvwhKa29Wq94bZ5jGIiJQGbG1uBrKSBfOYBz/oZeI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "FdaMgwwJ0NKsqmPZLC5oE+/0D74Dfpvig3LaI5yW5Fs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": 
"sRWBy12IERN43BSZIrnBfC9+zFBUdvjTlkqIH81NGt4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "/4tIRpxKhoOwnXAiFn1Z7Xmric4USOIfKvTYQXk3QTc=", + "subType": "00" + } + } + ] + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 0, + "encryptedDecimalNoPrecision": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDecimalNoPrecision", + "bsonType": "decimal", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encryptedDecimalNoPrecision": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDecimalNoPrecision", + "bsonType": "decimal", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "delete": "default", + "deletes": [ + { + "q": { + "encryptedDecimalNoPrecision": { + "$gt": { + "$binary": { + "base64": 
"DR1jAAADcGF5bG9hZACxYgAABGcAnWIAAAMwAH0AAAAFZAAgAAAAAJu2KgiI8vM+kz9qD3ZQzFQY5qbgYqCqHG5R4jAlnlwXBXMAIAAAAAAAUXxFXsz764T79sGCdhxvNd5b6E/9p61FonsHyEIhogVsACAAAAAAt19RL3Oo5ni5L8kcvgOJYLgVYyXJExwP8pkuzLG7f/kAAzEAfQAAAAVkACAAAAAAPQPvL0ARjujSv2Rkm8r7spVsgeC1K3FWcskGGZ3OdDIFcwAgAAAAACgNn660GmefR8jLqzgR1u5O+Uocx9GyEHiBqVGko5FZBWwAIAAAAADflr+fsnZngm6KRWYgHa9JzK+bXogWl9evBU9sQUHPHQADMgB9AAAABWQAIAAAAAD2Zi6kcxmaD2mY3VWrP+wYJMPg6cSBIYPapxaFQxYFdQVzACAAAAAAM/cV36BLBY3xFBXsXJY8M9EHHOc/qrmdc2CJmj3M89gFbAAgAAAAAOpydOrKxx6m2gquSDV2Vv3w10GocmNCFeOo/fRhRH9JAAMzAH0AAAAFZAAgAAAAAOaNqI9srQ/mI9gwbk+VkizGBBH/PPWOVusgnfPk3tY1BXMAIAAAAAAc96O/pwKCmHCagT6T/QV/wz4vqO+R22GsZ1dse2Vg6QVsACAAAAAAgzIak+Q3UFLTHXPmJ+MuEklFtR3eLtvM+jdKkmGCV/YAAzQAfQAAAAVkACAAAAAA0XlQgy/Yu97EQOjronl9b3dcR1DFn3deuVhtTLbJZHkFcwAgAAAAACoMnpVl6EFJak8A+t5N4RFnQhkQEBnNAx8wDqmq5U/dBWwAIAAAAACR26FJif673qpwF1J1FEkQGJ1Ywcr/ZW6JQ7meGqzt1QADNQB9AAAABWQAIAAAAAAOtpNexRxfv0yRFvZO9DhlkpU4mDuAb8ykdLnE5Vf1VAVzACAAAAAAeblFKm/30orP16uQpZslvsoS8s0xfNPIBlw3VkHeekYFbAAgAAAAAPEoHj87sYE+nBut52/LPvleWQBzB/uaJFnosxp4NRO2AAM2AH0AAAAFZAAgAAAAAIr8xAFm1zPmrvW4Vy5Ct0W8FxMmyPmFzdWVzesBhAJFBXMAIAAAAABYeeXjJEzTHwxab6pUiCRiZjxgtN59a1y8Szy3hfkg+gVsACAAAAAAJuoY4rF8mbI+nKb+5XbZShJ8191o/e8ZCRHE0O4Ey8MAAzcAfQAAAAVkACAAAAAAl+ibLk0/+EwoqeC8S8cGgAtjtpQWGEZDsybMPnrrkwEFcwAgAAAAAHPPBudWgQ+HUorLDpJMqhS9VBF2VF5aLcxgrM1s+yU7BWwAIAAAAAAcCcBR2Vyv5pAFbaOU97yovuOi1+ATDnLLcAUqHecXcAADOAB9AAAABWQAIAAAAACR9erwLTb+tcWFZgJ2MEfM0PKI9uuwIjDTHADRFgD+SQVzACAAAAAAcOop8TXsGUVQoKhzUllMYWxL93xCOkwtIpV8Q6hiSYYFbAAgAAAAAKXKmh4V8veYwob1H03Q3p3PN8SRAaQwDT34KlNVUjiDAAM5AH0AAAAFZAAgAAAAALv0vCPgh7QpmM8Ug6ad5ioZJCh7pLMdT8FYyQioBQ6KBXMAIAAAAADsCPyIG8t6ApQkRk1fX/sfc1kpuWCWP8gAEpnYoBSHrQVsACAAAAAAJe/r67N6d8uTiogvfoR9rEXbIDjyLb9EVdqkayFFGaYAAzEwAH0AAAAFZAAgAAAAAIW4AxJgYoM0pcNTwk1RSbyjZGIqgKL1hcTJmNrnZmoPBXMAIAAAAAAZpfx3EFO0vY0f1eHnE0PazgqeNDTaj+pPJMUNW8lFrAVsACAAAAAAP+Um2vwW6Bj6vuz9DKz6+6aWkoKoEmFNoiz/xXm7lOsAAzExAH0AAAAFZAAgAAAAAKliO6L9zgeuufjj174hvmQGNRbmYYs9yAirL7OxwEW3BXMAIAAAAAAqU7vs3DWUQ95Eq8OejwWnD0GuXd+ASi/uD6S0l8MM1QVsACAAAAAAb9legYzsfctBPpHyl7YWpPmLr5QiNZFND/50N1vv2MUAAzEyAH0AAAAFZAAgAAAAAOGQcCBkk+j/Kzjt/Cs6g3BZPJG81wIHBS8JewHGpgk+BXMAIAAAAABjrxZXWCkdzrExwCgyHaafuPSQ4V4x2k9kUCAqUaYKDQVsACAAAAAADBU6KefT0v8zSmseaMNmQxKjJar72y7MojLFhkEHqrUAAzEzAH0AAAAFZAAgAAAAAPmCNEt4t97waOSd5hNi2fNCdWEkmcFJ37LI9k4Az4/5BXMAIAAAAABX7DuDPNg+duvELf3NbLWkPMFw2HGLgWGHyVWcPvSNCAVsACAAAAAAS7El1FtZ5STh8Q1FguvieyYX9b2DF1DFVsb9hzxXYRsAAzE0AH0AAAAFZAAgAAAAAD4vtVUYRNB+FD9yoQ2FVJH3nMeJeKbi6eZfth638YqbBXMAIAAAAAANCuUB4OdmuD6LaDK2f3vaqfgYYvg40wDXOBbcFjTqLwVsACAAAAAA9hqC2VoJBjwR7hcQ45xO8ZVojwC83jiRacCaDj6Px2gAAzE1AH0AAAAFZAAgAAAAAJPIRzjmTjbdIvshG6UslbEOd797ZSIdjGAhGWxVQvK1BXMAIAAAAABgmJ0Jh8WLs9IYs/a7DBjDWd8J3thW/AGJK7zDnMeYOAVsACAAAAAAi9zAsyAuou2oiCUHGc6QefLUkACa9IgeBhGu9W/r0X8AAzE2AH0AAAAFZAAgAAAAAABQyKQPoW8wGPIqnsTv69+DzIdRkohRhOhDmyVHkw9WBXMAIAAAAAAqWA2X4tB/h3O1Xlawtz6ndI6WaTwgU1QYflL35opu5gVsACAAAAAAWI/Gj5aZMwDIxztqmVL0g5LBcI8EdKEc2UA28pnekQoAAzE3AH0AAAAFZAAgAAAAACB7NOyGQ1Id3MYnxtBXqyZ5Ul/lHH6p1b10U63DfT6bBXMAIAAAAADpOryIcndxztkHSfLN3Kzq29sD8djS0PspDSqERMqokQVsACAAAAAADatsMW4ezgnyi1PiP7xk+gA4AFIN/fb5uJqfVkjg4UoAAzE4AH0AAAAFZAAgAAAAAKVfXLfs8XA14CRTB56oZwV+bFJN5BHraTXbqEXZDmTkBXMAIAAAAAASRWTsfGOpqdffiOodoqIgBzG/yzFyjR5CfUsIUIWGpgVsACAAAAAAkgCHbCwyX640/0Ni8+MoYxeHUiC+FSU4Mn9jTLYtgZgAAzE5AH0AAAAFZAAgAAAAAH/aZr4EuS0/noQR9rcF8vwoaxnxrwgOsSJ0ys8PkHhGBXMAIAAAAACd7ObGQW7qfddcvyxRTkPuvq/PHu7+6I5dxwS1Lzy5XAVsACAAAAAA3q0eKdV7KeU3pc+CtfypKR7BPxwaf30yu0j9FXeOOboAAzIwAH0AAAAFZAAgAAAAAKvlcpFFNq0oA+urq3w6d80PK1HHHw0H0yVWvU9aHijXBXMAIAAAAADWnAHQ5Fhlcjawki7kWz
dqjM2f6IdGJblojrYElWjsZgVsACAAAAAAO0wvY66l24gx8nRxyVGC0QcTztIi81Kx3ndRhuZr6W4AAzIxAH0AAAAFZAAgAAAAAH/2aMezEOddrq+dNOkDrdqf13h2ttOnexZsJxG1G6PNBXMAIAAAAABNtgnibjC4VKy5poYjvdsBBnVvDTF/4mmEAxsXVgZVKgVsACAAAAAAqvadzJFLqQbs8WxgZ2D2X+XnaPSDMLCVVgWxx5jnLcYAAzIyAH0AAAAFZAAgAAAAAF2wZoDL6/V59QqO8vdRZWDpXpkV4h4KOCSn5e7x7nmzBXMAIAAAAADLZBu7LCYjbThaVUqMK14H/elrVOYIKJQCx4C9Yjw37gVsACAAAAAAEh6Vs81jLU204aGpL90fmYTm5i5R8/RT1uIbg6VU3HwAAzIzAH0AAAAFZAAgAAAAAH27yYaLn9zh2CpvaoomUPercSfJRUmBY6XFqmhcXi9QBXMAIAAAAAAUwumVlIYIs9JhDhSj0R0+59psCMsFk94E62VxkPt42QVsACAAAAAAT5x2hCCd2bpmpnyWaxas8nSxTc8e4C9DfKaqr0ABEysAAzI0AH0AAAAFZAAgAAAAALMg2kNAO4AFFs/mW3In04yFeN4AP6Vo0klyUoT06RquBXMAIAAAAAAgGWJbeIdwlpqXCyVIYSs0dt54Rfc8JF4b8uYc+YUj0AVsACAAAAAAWHeWxIkyvXTOWvfZzqtPXjfGaWWKjGSIQENTU3zBCrsAAzI1AH0AAAAFZAAgAAAAALas/i1T2DFCEmrrLEi7O2ngJZyFHialOoedVXS+OjenBXMAIAAAAAA1kK0QxY4REcGxHeMkgumyF7iwlsRFtw9MlbSSoQY7uAVsACAAAAAAUNlpMJZs1p4HfsD4Q4WZ4TBEi6Oc2fX34rzyynqWCdwAAzI2AH0AAAAFZAAgAAAAAP1TejmWg1CEuNSMt6NUgeQ5lT+oBoeyF7d2l5xQrbXWBXMAIAAAAABPX0kj6obggdJShmqtVfueKHplH4ZrXusiwrRDHMOKeQVsACAAAAAAIYOsNwC3DA7fLcOzqdr0bOFdHCfmK8tLwPoaE9uKOosAAzI3AH0AAAAFZAAgAAAAAMrKn+QPa/NxYezNhlOX9nyEkN1kE/gW7EuZkVqYl0b8BXMAIAAAAABUoZMSPUywRGfX2EEencJEKH5x/P9ySUVrhStAwgR/LgVsACAAAAAAMgZFH6lQIIDrgHnFeslv3ld20ynwQjQJt3cAp4GgrFkAAzI4AH0AAAAFZAAgAAAAAMmD1+a+oVbiUZd1HuZqdgtdVsVKwuWAn3/M1B6QGBM3BXMAIAAAAACLyytOYuZ9WEsIrrtJbXUx4QgipbaAbmlJvSZVkGi0CAVsACAAAAAA4v1lSp5H9BB+HYJ4bH43tC8aeuPZMf78Ng1JOhJh190AAzI5AH0AAAAFZAAgAAAAAOVKV7IuFwmYP1qVv8h0NvJmfPICu8yQhzjG7oJdTLDoBXMAIAAAAABL70XLfQLKRsw1deJ2MUvxSWKxpF/Ez73jqtbLvqbuogVsACAAAAAAvfgzIorXxE91dDt4nQxYfntTsx0M8Gzdsao5naQqcRUAAzMwAH0AAAAFZAAgAAAAAKS/1RSAQma+xV9rz04IcdzmavtrBDjOKPM+Z2NEyYfPBXMAIAAAAAAOJDWGORDgfRv8+w5nunh41wXb2hCA0MRzwnLnQtIqPgVsACAAAAAAf42C1+T7xdHEFF83+c2mF5S8PuuL22ogXXELnRAZ4boAAzMxAH0AAAAFZAAgAAAAAFeq8o82uNY1X8cH6OhdTzHNBUnCChsEDs5tm0kPBz3qBXMAIAAAAABaxMBbsaeEj/EDtr8nZfrhhhirBRPJwVamDo5WwbgvTQVsACAAAAAAMbH453A+BYAaDOTo5kdhV1VdND1avNwvshEG/4MIJjQAAzMyAH0AAAAFZAAgAAAAAI8IKIfDrohHh2cjspJHCovqroSr5N3QyVtNzFvT5+FzBXMAIAAAAABXHXteKG0DoOMmECKp6ro1MZNQvXGzqTDdZ0DUc8QfFAVsACAAAAAA/w5s++XYmO+9TWTbtGc3n3ndV4T9JUribIbF4jmDLSMAAzMzAH0AAAAFZAAgAAAAAJkHvm15kIu1OtAiaByj5ieWqzxiu/epK6c/9+KYIrB0BXMAIAAAAACzg5TcyANk0nes/wCJudd1BwlkWWF6zw3nGclq5v3SJQVsACAAAAAAvruXHTT3irPJLyWpI1j/Xwf2FeIE/IV+6Z49pqRzISoAAzM0AH0AAAAFZAAgAAAAAAYSOvEWWuSg1Aym7EssNLR+xsY7e9BcwsX4JKlnSHJcBXMAIAAAAABT48eY3PXVDOjw7JpNjOe1j2JyI3LjDnQoqZ8Je5B2KgVsACAAAAAAU2815RR57TQ9uDg0XjWjBkAKvf8yssxDMzrM4+FqP6AAAzM1AH0AAAAFZAAgAAAAAGQxC9L1e9DfO5XZvX1yvc3hTLtQEdKO9FPMkyg0Y9ZABXMAIAAAAADtmcMNJwdWLxQEArMGZQyzpnu+Z5yMmPAkvgq4eAKwNQVsACAAAAAAJ88zt4Y/Hoqh+zrf6KCOiUwHbOzCxSfp6k/qsZaYGEgAAzM2AH0AAAAFZAAgAAAAADLHK2LNCNRO0pv8n4fAsxwtUqCNnVK8rRgNiQfXpHSdBXMAIAAAAACf16EBIHRKD3SzjRW+LMOl+47QXA3CJhMzlcqyFRW22AVsACAAAAAAMGz4fAOa0EoVv90fUffwLjBrQhHATf+NdlgCR65vujAAAzM3AH0AAAAFZAAgAAAAAHiZJiXKNF8bbukQGsdYkEi95I+FSBHy1I5/hK2uEZruBXMAIAAAAADE+lZBa8HDUJPN+bF6xI9x4N7GF9pj3vBR7y0BcfFhBAVsACAAAAAAGIEN6sfqq30nyxW4dxDgXr/jz5HmvA9T1jx/pKCn4zgAAzM4AH0AAAAFZAAgAAAAAI1oa2OIw5TvhT14tYCGmhanUoYcCZtNbrVbeoMldHNZBXMAIAAAAAAx2nS0Ipblf2XOgBiUOuJFBupBhe7nb6QPLZlA4aMPCgVsACAAAAAA9xu828hugIgo0E3de9dZD+gTpVUGlwtDba+tw/WcbUoAAzM5AH0AAAAFZAAgAAAAABgTWS3Yap7Q59hii/uPPimHWXsr+DUmsqfwt/X73qsOBXMAIAAAAACKK05liW5KrmEAvtpCB1WUltruzUylDDpjea//UlWoOAVsACAAAAAAcgN4P/wakJ5aJK5c1bvJBqpVGND221dli2YicPFfuAYAAzQwAH0AAAAFZAAgAAAAABOAnBPXDp6i9TISQXvcNKwGDLepZTu3cKrB4vKnSCjBBXMAIAAAAADjjzZO7UowAAvpwyG8BNOVqLCccMFk3aDK4unUeft5ywVsACAAAAAA4zkCd4k9gvfXoD1C7vwTjNcdVJwEARh8h/cxZ4PNMfgAAzQxAH0AAAAFZAAgAAAAAHN8hyvT1lYrAsdiV
5GBdd5jhtrAYE/KnSjw2Ka9hjz9BXMAIAAAAAD794JK7EeXBs+D7yOVK7nWF8SbZ/7U8gZ7nnT9JFNwTAVsACAAAAAAg8Wt1HO3NhByq2ggux2a4Lo6Gryr24rEFIqh2acrwWMAAzQyAH0AAAAFZAAgAAAAAO93bPrq8bsnp1AtNd9ETnXIz0lH/2HYN/vuw9wA3fyFBXMAIAAAAABHlls5fbaF2oAGqptC481XQ4eYxInTC29aElfmVZgDUgVsACAAAAAANoQXEWpXJpgrSNK/cKi/m7oYhuSRlp1IZBF0bqTEATcAAzQzAH0AAAAFZAAgAAAAAL1YsAZm1SA0ztU6ySIrQgCCA74V6rr0/4iIygCcaJL6BXMAIAAAAADTXWTHWovGmUR1Zg9l/Aqq9H5mOCJQQrb/Dfae7e3wKAVsACAAAAAA5dunyJK6/SVfDD0t9QlNBcFqoZnf9legRjHaLSKAoQMAAzQ0AH0AAAAFZAAgAAAAAEoFAeHk0RZ9kD+cJRD3j7PcE5gzWKnyBrF1I/MDNp5mBXMAIAAAAACgHtc2hMBRSZjKw8RAdDHK+Pi1HeyjiBuAslGVNcW5tAVsACAAAAAAXzBLfq+GxRtX4Wa9fazA49DBLG6AjZm2XODStJKH8D0AAzQ1AH0AAAAFZAAgAAAAAAW+7DmSN/LX+/0uBVJDHIc2dhxAGz4+ehyyz8fAnNGoBXMAIAAAAAA6Ilw42EvvfLJ3Eq8Afd+FjPoPcQutZO6ltmCLEr8kxQVsACAAAAAAbbZalyo07BbFjPFlYmbmv0z023eT9eLkHqeVUnfUAUAAAzQ2AH0AAAAFZAAgAAAAANBdV7M7kuYO3EMoQItAbXv4t2cIhfaT9V6+s4cg9djlBXMAIAAAAABvz4MIvZWxxrcJCL5qxLfFhXiUYB1OLHdKEjco94SgDgVsACAAAAAAK2GVGvyPIKolF/ECcmfmkVcf1/IZNcaTv96N92yGrkEAAzQ3AH0AAAAFZAAgAAAAAMoAoiAn1kc79j5oPZtlMWHMhhgwNhLUnvqkqIFvcH1NBXMAIAAAAADcJTW7WiCyW0Z9YDUYwppXhLj4Ac1povpJvcAq+i48MQVsACAAAAAAIGxGDzoeB3PTmudl4+j6piQB++e33EEzuzAiXcqGxvUAAzQ4AH0AAAAFZAAgAAAAACI3j5QP7dWHpcT6WO/OhsWwRJNASBYqIBDNzW8IorEyBXMAIAAAAABxUpBSjXwCKDdGP9hYU+RvyR+96kChfvyyRC4jZmztqAVsACAAAAAAvBCHguWswb4X0xdcAryCvZgQuthXzt7597bJ5VxAMdgAAzQ5AH0AAAAFZAAgAAAAAKsbycEuQSeNrF8Qnxqw3x3og8JmQabwGqnDbqzFRVrrBXMAIAAAAACno/3ef2JZJS93SVVzmOZSN+jjJHT8s0XYq2M46d2sLAVsACAAAAAAAt5zLJG+/j4K8rnkFtAn8IvdUVNefe6utJ3rdzgwudIAAzUwAH0AAAAFZAAgAAAAAPXIcoO8TiULqlxzb74NFg+I8kWX5uXIDUPnh2DobIoMBXMAIAAAAADR6/drkdTpnr9g1XNvKDwtBRBdKn7c2c4ZNUVK5CThdQVsACAAAAAAJqOA1c6KVog3F4Hb/GfDb3jCxXDRTqpXWSbMH4ePIJsAAzUxAH0AAAAFZAAgAAAAAEa03ZOJmfHT6/nVadvIw71jVxEuIloyvxXraYEW7u7pBXMAIAAAAADzRlBJK75FLiKjz3djqcgjCLo/e3yntI3MnPS48OORhgVsACAAAAAAnQhx4Rnyj081XrLRLD5NLpWmRWCsd0M9Hl7Jl19R0h8AAzUyAH0AAAAFZAAgAAAAAKx8NLSZUU04pSSGmHa5fh2oLHsEN5mmNMNHL95/tuC9BXMAIAAAAAA59hcXVaN3MNdHoo11OcH1aPRzHCwpVjO9mGfMz4xh3QVsACAAAAAAYIPdjV2XbPj7dBeHPwnwhVU7zMuJ+xtMUW5mIOYtmdAAAzUzAH0AAAAFZAAgAAAAAHNKAUxUqBFNS9Ea9NgCZoXMWgwhP4x0/OvoaPRWMquXBXMAIAAAAABUZ551mnP4ZjX+PXU9ttomzuOpo427MVynpkyq+nsYCQVsACAAAAAALnVK5p2tTTeZEh1zYt4iqKIQT9Z0si//Hy1L85oF+5IAAzU0AH0AAAAFZAAgAAAAALfGXDlyDVcGaqtyHkLT0qpuRhJQLgCxtznazhFtuyn/BXMAIAAAAABipxlXDq14C62pXhwAeen5+syA+/C6bN4rtZYcO4zKwAVsACAAAAAAXUf0pzUq0NhLYagWDap4uEiwq5rLpcx29rWbt1NYMsMAAzU1AH0AAAAFZAAgAAAAANoEr8sheJjg4UCfBkuUzarU9NFoy1xwbXjs5ifVDeA9BXMAIAAAAABPoyTf6M+xeZVGES4aNzVlq7LgjqZXJ/QunjYVusGUEAVsACAAAAAA1hA2gMeZZPUNytk9K+lB1RCqWRudRr7GtadJlExJf8oAAzU2AH0AAAAFZAAgAAAAAKvDiK+xjlBe1uQ3SZTNQl2lClIIvpP/5CHwY6Kb3WlgBXMAIAAAAAANnxImq5MFbWaRBHdJp+yD09bVlcFtiFDYsy1eDZj+iQVsACAAAAAAWtsyO+FxMPSIezwsV1TJD8ZrXAdRnQM6DJ+f+1V3qEkAAzU3AH0AAAAFZAAgAAAAAF49IlFH9RmSUSvUQpEPUedEksrQUcjsOv44nMkwXhjzBXMAIAAAAADJtWGbk0bZzmk20obz+mNsp86UCu/nLLlbg7ppxYn7PgVsACAAAAAA3k0Tj/XgPQtcYijH8cIlQoe/VXf15q1nrZNmg7yWYEgAAzU4AH0AAAAFZAAgAAAAAOuSJyuvz50lp3BzXlFKnq62QkN2quNU1Gq1IDsnFoJCBXMAIAAAAAAqavH1d93XV3IzshWlMnzznucadBF0ND092/2ApI1AcAVsACAAAAAAzUrK4kpoKCmcpdZlZNI13fddjdoAseVe67jaX1LobIIAAzU5AH0AAAAFZAAgAAAAALtgC4Whb4ZdkCiI30zY6fwlsxSa7lEaOAU3SfUXr02XBXMAIAAAAACgdZ6U1ZVgUaZZwbIaCdlANpCw6TZV0bwg3DS1NC/mnAVsACAAAAAAzI49hdpp0PbO7S2KexISxC16sE73EUAEyuqUFAC/J48AAzYwAH0AAAAFZAAgAAAAAF6PfplcGp6vek1ThwenMHVkbZgrc/dHgdsgx1VdPqZ5BXMAIAAAAACha3qhWkqmuwJSEXPozDO8y1ZdRLyzt9Crt2vjGnT7AAVsACAAAAAA7nvcU59+LwxGupSF21jAeAE0x7JE94tjRkJfgM1yKU8AAzYxAH0AAAAFZAAgAAAAAKoLEhLvLjKc7lhOJfx+VrGJCx9tXlOSa9bxQzGR6rfbBXMAIAAAAAAIDK5wNnjRMBzET7x/KAMExL/zi1IumJM92XTgXfoPoAVsACAAAAAAFkUYWFwNr815dEdFqp+TiIoz
Dcq5IBNVkyMoDjharDQAAzYyAH0AAAAFZAAgAAAAADoQv6lutRmh5scQFvIW6K5JBquLxszuygM1tzBiGknIBXMAIAAAAADAD+JjW7FoBQ76/rsECmmcL76bmyfXpUU/awqIsZdO+wVsACAAAAAAPFHdLw3jssmEXsgtvl/RBNaUCRA1kgSwsofG364VOvQAAzYzAH0AAAAFZAAgAAAAAJNHUGAgn56KekghO19d11nai3lAh0JAlWfeP+6w4lJBBXMAIAAAAAD9XGJlvz59msJvA6St9fKW9CG4JoHV61rlWWnkdBRLzwVsACAAAAAAxwP/X/InJJHmrjznvahIMgj6pQR30B62UtHCthSjrP0AAzY0AH0AAAAFZAAgAAAAAHgYoMGjEE6fAlAhICv0+doHcVX8CmMVxyq7+jlyGrvmBXMAIAAAAAC/5MQZgTHuIr/O5Z3mXPvqrom5JTQ8IeSpQGhO9sB+8gVsACAAAAAAuPSXVmJUAUpTQg/A9Bu1hYczZF58KEhVofakygbsvJQAAzY1AH0AAAAFZAAgAAAAANpIljbxHOM7pydY877gpRQvYY2TGK7igqgGsavqGPBABXMAIAAAAAAqHyEu9gpurPOulApPnr0x9wrygY/7mXe9rAC+tPK80wVsACAAAAAA7gkPzNsS3gCxdFBWbSW9tkBjoR5ib+saDvpGSB3A3ogAAzY2AH0AAAAFZAAgAAAAAGR+gEaZTeGNgG9BuM1bX2R9ed4FCxBA9F9QvdQDAjZwBXMAIAAAAABSkrYFQ6pf8MZ1flgmeIRkxaSh/Eep4Btdx4QYnGGnwAVsACAAAAAApRovMiV00hm/pEcT4XBsyPNw0eo8RLAX/fuabjdU+uwAAzY3AH0AAAAFZAAgAAAAAFNprhQ3ZwIcYbuzLolAT5n/vc14P9kUUQComDu6eFyKBXMAIAAAAAAcx9z9pk32YbPV/sfPZl9ALIEVsqoLXgqWLVK/tP+heAVsACAAAAAA/qxvuvJbAHwwhfrPVpmCFzNvg2cU/NXaWgqgYUZpgXwAAzY4AH0AAAAFZAAgAAAAADgyPqQdqQrgfmJjRFAILTHzXbdw5kpKyfeoEcy6YYG/BXMAIAAAAAAE+3XsBQ8VAxAkN81au+f3FDeCD/s7KoZD+fnM1MJSSAVsACAAAAAAhRnjrXecwV0yeCWKJ5J/x12Xx4qVJahsCEVHB/1U2rcAAzY5AH0AAAAFZAAgAAAAAI0CT7JNngTCTUSei1Arw7eHWCD0jumv2rb7imjWIlWABXMAIAAAAABSP8t6ya0SyCphXMwnru6ZUDXWElN0NfBvEOhDvW9bJQVsACAAAAAAGWeGmBNDRaMtvm7Rv+8TJ2sJ4WNXKcp3tqpv5Se9Ut4AAzcwAH0AAAAFZAAgAAAAAD/FIrGYFDjyYmVb7oTMVwweWP7A6F9LnyIuNO4MjBnXBXMAIAAAAACIZgJCQRZu7NhuNMyOqCn1tf+DfU1qm10TPCfj5JYV3wVsACAAAAAA5hmY4ptuNxULGf87SUFXQWGAONsL9U29duh8xqsHtxoAAzcxAH0AAAAFZAAgAAAAAHIkVuNDkSS1cHIThKc/O0r2/ubaABTOi8Q1r/dvBAsEBXMAIAAAAADdHYqchEiJLM340c3Q4vJABmmth3+MKzwLYlsG6GS7sQVsACAAAAAADa+KP/pdTiG22l+ZWd30P1iHjnBF4zSNRdFm0oEK82kAAzcyAH0AAAAFZAAgAAAAAJmoDILNhC6kn3masElfnjIjP1VjsjRavGk1gSUIjh1NBXMAIAAAAAD97Ilvp3XF8T6MmVVcxMPcdL80RgQ09UoC6PnoOvZ1IQVsACAAAAAA2RK3Xng6v8kpvfVW9tkVXjpE+BSnx9/+Fw85Evs+kUEAAzczAH0AAAAFZAAgAAAAAI5bm3YO0Xgf0VT+qjVTTfvckecM3Cwqj7DTKZXf8/NXBXMAIAAAAAD/m+h8fBhWaHm6Ykuz0WX1xL4Eme3ErLObyEVJf8NCywVsACAAAAAAfb1VZZCqs2ivYbRzX4p5CtaCkKW+g20Pr57FWXzEZi8AAzc0AH0AAAAFZAAgAAAAANqo4+p6qdtCzcB4BX1wQ6llU7eFBnuu4MtZwp4B6mDlBXMAIAAAAAAGiz+VaukMZ+6IH4jtn4KWWdKK4/W+O+gRioQDrfzpMgVsACAAAAAAG4YYkTp80EKo59mlHExDodRQFR7njhR5dmISwUJ6ukAAAzc1AH0AAAAFZAAgAAAAAPrFXmHP2Y4YAm7b/aqsdn/DPoDkv7B8egWkfe23XsM1BXMAIAAAAAAGhwpKAr7skeqHm3oseSbO7qKNhmYsuUrECBxJ5k+D2AVsACAAAAAAAqPQi9luYAu3GrFCEsVjd9z2zIDcp6SPTR2w6KQEr+IAAzc2AH0AAAAFZAAgAAAAABzjYxwAjXxXc0Uxv18rH8I3my0Aguow0kTwKyxbrm+cBXMAIAAAAADVbqJVr6IdokuhXkEtXF0C2gINLiAjMVN20lE20Vmp2QVsACAAAAAAD7K1Fx4gFaaizkIUrf+EGXQeG7QX1jadhGc6Ji471H8AAzc3AH0AAAAFZAAgAAAAAFMm2feF2fFCm/UC6AfIyepX/xJDSmnnolQIBnHcPmb5BXMAIAAAAABLI11kFrQoaNVZFmq/38aRNImPOjdJh0Lo6irI8M/AaAVsACAAAAAAOWul0oVqJ9CejD2RqphhTC98DJeRQy5EwbNerU2+4l8AAzc4AH0AAAAFZAAgAAAAAJvXB3KyNiNtQko4SSzo/9b2qmM2zU9CQTTDfLSBWMgRBXMAIAAAAAAvjuVP7KsLRDeqVqRziTKpBrjVyqKiIbO9Gw8Wl2wFTAVsACAAAAAADlE+oc1ins+paNcaOZJhBlKlObDJ4VQORWjFYocM4LgAAzc5AH0AAAAFZAAgAAAAAPGdcxDiid8z8XYnfdDivNMYVPgBKdGOUw6UStU+48CdBXMAIAAAAAARj6g1Ap0eEfuCZ4X2TsEw+Djrhto3fA5nLwPaY0vCTgVsACAAAAAAoHqiwGOUkBu8SX5U1yHho+UIFdSN2MdQN5s6bQ0EsJYAAzgwAH0AAAAFZAAgAAAAAP5rGPrYGt3aKob5f/ldP0qrW7bmWvqnKY4QwdDWz400BXMAIAAAAADTQkW2ymaaf/bhteOOGmSrIR97bAnJx+yN3yMj1bTeewVsACAAAAAADyQnHGH2gF4w4L8axUsSTf6Ubk7L5/eoFOJk12MtZAoAAzgxAH0AAAAFZAAgAAAAAAlz6wJze5UkIxKpJOZFGCOf3v2KByWyI6NB6JM9wNcBBXMAIAAAAABUC7P/neUIHHoZtq0jFVBHY75tSFYr1Y5S16YN5XxC1QVsACAAAAAAgvxRbXDisNnLY3pfsjDdnFLtkvYUC4lhA68eBXc7KAwAAzgyAH0AAAAFZAAgAAAAAFJ8AtHcjia/9Y5pLEc3qVgH5xKiXw12G9Kn2A1EY8McBXMAIAAAAAAxe7Bdw7eUSBk/oAawa7u
icTEDgXLymRNhBy1LAxhDvwVsACAAAAAAxKPaIBKVx3jTA+R/el7P7AZ7efrmTGjJs3Hj/YdMddwAAzgzAH0AAAAFZAAgAAAAAO8uwQUaKFb6vqR3Sv3Wn4QAonC2exOC9lGG1juqP5DtBXMAIAAAAABZf1KyJgQg8/Rf5c02DgDK2aQu0rNCOvaL60ohDHyY+gVsACAAAAAAqyEjfKC8lYoIfoXYHUqHZPoaA6EK5BAZy5dxXZmay4kAAzg0AH0AAAAFZAAgAAAAAE8YtqyRsGCeiR6hhiyisR/hccmK4nZqIMzO4lUBmEFzBXMAIAAAAAC1UYOSKqAeG1UJiKjWFVskRhuFKpj9Ezy+lICZvFlN5AVsACAAAAAA6Ct9nNMKyRazn1OKnRKagm746CGu+jyhbL1qJnZxGi0AAzg1AH0AAAAFZAAgAAAAAPhCrMausDx1QUIEqp9rUdRKyM6a9AAx7jQ3ILIu8wNIBXMAIAAAAACmH8lotGCiF2q9VQxhsS+7LAZv79VUAsOUALaGxE/EpAVsACAAAAAAnc1xCKfdvbUEc8F7XZqlNn1C+hZTtC0I9I3LL06iaNkAAzg2AH0AAAAFZAAgAAAAAOBi/GAYFcstMSJPgp3VkMiuuUUCrZytvqYaU8dwm8v2BXMAIAAAAACEZSZVyD3pKzGlbdwlYmWQhHHTV5SnNLknl2Gw8IaUTQVsACAAAAAAfsLZsEDcWSuNsIo/TD1ReyQW75HPMgmuKZuWFOLKRLoAAzg3AH0AAAAFZAAgAAAAAIQuup+YGfH3mflzWopN8J1X8o8a0d9CSGIvrA5HOzraBXMAIAAAAADYvNLURXsC2ITMqK14LABQBI+hZZ5wNf24JMcKLW+84AVsACAAAAAACzfjbTBH7IwDU91OqLAz94RFkoqBOkzKAqQb55gT4/MAAzg4AH0AAAAFZAAgAAAAAKsh0ADyOnVocFrOrf6MpTrNvAj8iaiE923DPryu124gBXMAIAAAAADg24a8NVE1GyScc6tmnTbmu5ulzO+896fE92lN08MeswVsACAAAAAAaPxcOIxnU7But88/yadOuDJDMcCywwrRitaxMODT4msAAzg5AH0AAAAFZAAgAAAAAKkVC2Y6HtRmv72tDnPUSjJBvse7SxLqnr09/Uuj9sVVBXMAIAAAAABYNFUkH7ylPMN+Bc3HWX1e0flGYNbtJNCY9SltJCW/UAVsACAAAAAAZYK/f9H4OeihmpiFMH7Wm7uLvs2s92zNA8wyrNZTsuMAAzkwAH0AAAAFZAAgAAAAADDggcwcb/Yn1Kk39sOHsv7BO/MfP3m/AJzjGH506Wf9BXMAIAAAAAAYZIsdjICS0+BDyRUPnrSAZfPrwtuMaEDEn0/ijLNQmAVsACAAAAAAGPnYVvo2ulO9z4LGd/69NAklfIcZqZvFX2KK0s+FcTUAAzkxAH0AAAAFZAAgAAAAAEWY7dEUOJBgjOoWVht1wLehsWAzB3rSOBtLgTuM2HC8BXMAIAAAAAAAoswiHRROurjwUW8u8D5EUT+67yvrgpB/j6PzBDAfVwVsACAAAAAA6NhRTYFL/Sz4tao7vpPjLNgAJ0FX6P/IyMW65qT6YsMAAzkyAH0AAAAFZAAgAAAAAPZaapeAUUFPA7JTCMOWHJa9lnPFh0/gXfAPjA1ezm4ZBXMAIAAAAACmJvLY2nivw7/b3DOKH/X7bBXjJwoowqb1GtEFO3OYgAVsACAAAAAA/JcUoyKacCB1NfmH8vYqC1f7rd13KShrQqV2r9QBP44AAzkzAH0AAAAFZAAgAAAAAK00u6jadxCZAiA+fTsPVDsnW5p5LCr4+kZZZOTDuZlfBXMAIAAAAAAote4zTEYMDgaaQbAdN8Dzv93ljPLdGjJzvnRn3KXgtQVsACAAAAAAxXd9Mh6R3mnJy8m7UfqMKi6oD5DlZpkaOz6bEjMOdiwAAzk0AH0AAAAFZAAgAAAAAFbgabdyymiEVYYwtJSWa7lfl/oYuj/SukzJeDOR6wPVBXMAIAAAAADAFGFjS1vPbN6mQEhkDYTD6V2V23Ys9gUEUMGNvMPkaAVsACAAAAAAL/D5Sze/ZoEanZLK0IeEkhgVkxEjMWVCfmJaD3a8uNIAAzk1AH0AAAAFZAAgAAAAABNMR6UBv2E627CqLtQ/eDYx7OEwQ7JrR4mSHFa1N8tLBXMAIAAAAAAxH4gucI4UmNVB7625C6hFSVCuIpJO3lusJlPuL8H5EQVsACAAAAAAVLHNg0OUVqZ7WGOP53BkTap9FOw9dr1P4J8HxqFqU04AAzk2AH0AAAAFZAAgAAAAAG8cd6WBneNunlqrQ2EmNf35W7OGObGq9WL4ePX+LUDmBXMAIAAAAAAjJ2+sX87NSis9hBsgb1QprVRnO7Bf+GObCGoUqyPE4wVsACAAAAAAs9c9SM49/pWmyUQKslpt3RTMBNSRppfNO0JBvUqHPg0AAzk3AH0AAAAFZAAgAAAAAFWOUGkUpy8yf6gB3dio/aOfRKh7XuhvsUj48iESFJrGBXMAIAAAAAAY7sCDMcrUXvNuL6dO0m11WyijzXZvPIcOKob6IpC4PQVsACAAAAAAJOP+EHz6awDb1qK2bZQ3kTV7wsj5Daj/IGAWh4g7omAAAzk4AH0AAAAFZAAgAAAAAGUrIdKxOihwNmo6B+aG+Ag1qa0+iqdksHOjQj+Oy9bZBXMAIAAAAABwa5dbI2KmzBDNBTQBEkjZv4sPaeRkRNejcjdVymRFKQVsACAAAAAA4ml/nm0gJNTcJ4vuD+T2Qfq2fQZlibJp/j6MOGDrbHMAAzk5AH0AAAAFZAAgAAAAAOx89xV/hRk64/CkM9N2EMK6aldII0c8smdcsZ46NbP8BXMAIAAAAADBF6tfQ+7q9kTuLyuyrSnDgmrdmrXkdhl980i1KHuGHgVsACAAAAAACUqiFqHZdGbwAA+hN0YUE5zFg+H+dabIB4dj5/75W/YAAzEwMAB9AAAABWQAIAAAAADJDdC9aEFl4Y8J/awHbnXGHjfP+VXQilPHJg7ewaJI7AVzACAAAAAAE+tqRl6EcBMXvbr4GDiNIYObTsYpa1n6BJk9EjIJVicFbAAgAAAAAJVc+HYYqa0m1Hq6OiRX8c0iRnJYOt6AJAJoG0sG3GMSAAMxMDEAfQAAAAVkACAAAAAA3F9rjEKhpoHuTULVGgfUsGGwJs3bISrXkFP1v6KoQLgFcwAgAAAAAIBf0tXw96Z/Ds0XSIHX/zk3MzUR/7WZR/J6FpxRWChtBWwAIAAAAABWrjGlvKYuTS2s8L9rYy8Hf0juFGJfwQmxVIjkTmFIGQADMTAyAH0AAAAFZAAgAAAAAOYIYoWkX7dGuyKfi3XssUlc7u/gWzqrR9KMkikKVdmSBXMAIAAAAABVF2OYjRTGi9Tw8XCAwZWLpX35Yl271TlNWp6N/nROhAVsACAAAAAA0nWwYzXQ1+EkDvnGq+SMlq20z+j32Su+i/A95SggPb4AAzEwMwB9AAAABWQAIAAAAACMtPm12YtdEA
vqu6Eji1yuRXnu1RJP6h0l7pH3lSH4MwVzACAAAAAAENyCFfyUAh1veQBGx+cxiB7Sasrj41jzCGflZkB5cRMFbAAgAAAAAKdI2LMqISr/T5vuJPg6ZRBm5fVi2aQCc4ra3A4+AjbDAAMxMDQAfQAAAAVkACAAAAAAvlI4lDcs6GB1cnm/Tzo014CXWqidCdyE5t2lknWQd4QFcwAgAAAAAD60SpNc4O2KT7J0llKdSpcX1/Xxs97N715a1HsTFkmBBWwAIAAAAABuuRkJWAH1CynggBt1/5sPh9PoGiqTlS24D/OE2uHXLQADMTA1AH0AAAAFZAAgAAAAAKl8zcHJRDjSjJeV/WvMxulW1zrTFtaeBy/aKKhadc6UBXMAIAAAAADBdWQl5SBIvtZZLIHszePwkO14W1mQ0izUk2Ov21cPNAVsACAAAAAAHErCYycpqiIcCZHdmPL1hi+ovLQk4TAvENpfLdTRamQAAzEwNgB9AAAABWQAIAAAAABb6LXDWqCp1beQgQjj8I3sRTtFhlrmiBi+h/+ikmrvugVzACAAAAAA9stpgTecT7uTyaGNs3K9Bp0A7R0QaIAOfscyMXHBPX8FbAAgAAAAAHUt+McyXrJ1H8SwnHNVO181Ki8vDAM1f7XI26mg95ZDAAMxMDcAfQAAAAVkACAAAAAA97NTT+81PhDhgptNtp4epzA0tP4iNb9j1AWkiiiKGM8FcwAgAAAAAKPbHg7ise16vxmdPCzksA/2Mn/qST0L9Xe8vnQugVkcBWwAIAAAAABB0EMXfvju4JU/mUH/OvxWbPEl9NJkcEp4iCbkXI41fAADMTA4AH0AAAAFZAAgAAAAAMqpayM2XotEFmm0gwQd9rIzApy0X+7HfOhNk6VU7F5lBXMAIAAAAACJR9+q5T9qFHXFNgGbZnPubG8rkO6cwWhzITQTmd6VgwVsACAAAAAAOZLQ6o7e4mVfDzbpQioa4d3RoTvqwgnbmc5Qh2wsZuoAAzEwOQB9AAAABWQAIAAAAADQnslvt6Hm2kJPmqsTVYQHE/wWeZ4bE1XSkt7TKy0r1gVzACAAAAAA8URTA4ZMrhHPvlp53TH6FDCzS+0+61qHm5XK6UiOrKEFbAAgAAAAAHQbgTCdZcbdA0avaTmZXUKnIS7Nwf1tNrcXDCw+PdBRAAMxMTAAfQAAAAVkACAAAAAAhujlgFPFczsdCGXtQ/002Ck8YWQHHzvWvUHrkbjv4rwFcwAgAAAAALbV0lLGcSGfE7mDM3n/fgEvi+ifjl7WZ5b3aqjDNvx9BWwAIAAAAACbceTZy8E3QA1pHmPN5kTlOx3EO8kJM5PUjTVftw1VpgADMTExAH0AAAAFZAAgAAAAABm/6pF96j26Jm7z5KkY1y33zcAEXLx2n0DwC03bs/ixBXMAIAAAAAD01OMvTZI/mqMgxIhA5nLs068mW+GKl3OW3ilf2D8+LgVsACAAAAAAaLvJDrqBESTNZSdcXsd+8GXPl8ZkUsGpeYuyYVv/kygAAzExMgB9AAAABWQAIAAAAACfw9/te4GkHZAapC9sDMHHHZgmlTrccyJDPFciOMSOcwVzACAAAAAAIIC1ZpHObvmMwUfqDRPl4C1aeuHwujM1G/yJbvybMNAFbAAgAAAAAAs9x1SnVpMfNv5Bm1aXGwHmbbI9keWa9HRD35XuCBK5AAMxMTMAfQAAAAVkACAAAAAAkxHJRbnShpPOylLoDdNShfILeA1hChKFQY9qQyZ5VmsFcwAgAAAAAKidrY+rC3hTY+YWu2a7fuMH2RD/XaiTIBW1hrxNCQOJBWwAIAAAAACW0kkqMIzIFMn7g+R0MI8l15fr3k/w/mHtY5n6SYTEwAADMTE0AH0AAAAFZAAgAAAAAByuYl8dBvfaZ0LO/81JW4hYypeNmvLMaxsIdvqMPrWoBXMAIAAAAABNddwobOUJzm9HOUD8BMZJqkNCUCqstHZkC76FIdNg9AVsACAAAAAAQQOkIQtkyNavqCnhQbNg3HfqrJdsAGaoxSJePJl1qXsAAzExNQB9AAAABWQAIAAAAABxMy7X5hf7AXGDz3Y/POu1ZpkMlNcSvSP92NOO/Gs7wAVzACAAAAAAHJshWo2T5wU2zvqCyJzcJQKQaHFHpCpMc9oWBXkpUPoFbAAgAAAAAGeiJKzlUXAvL0gOlW+Hz1mSa2HsV4RGmyLmCHlzbAkoAAMxMTYAfQAAAAVkACAAAAAAlqbslixl7Zw3bRlibZbe/WmKw23k8uKeIzPKYEtbIy0FcwAgAAAAAHEKwpUxkxOfef5HYvulXPmdbzTivwdwrSYIHDeNRcpcBWwAIAAAAADuPckac21Hrg/h0kt5ShJwVEZ9rx6SOHd2+HDjqxEWTQADMTE3AH0AAAAFZAAgAAAAAMXrXx0saZ+5gORmwM2FLuZG6iuO2YS+1IGPoAtDKoKBBXMAIAAAAADIQsxCr8CfFKaBcx8kIeSywnGh7JHjKRJ9vJd9x79y7wVsACAAAAAAcvBV+SykDYhmRFyVYwFYB9oBKBSHr55Jdz2cXeowsUQAAzExOAB9AAAABWQAIAAAAAAm83FA9yDUpwkbKTihe7m53u+DivS9BU2b4vQMtCVQ2AVzACAAAAAAz3m1UB/AbZPa4QSKFDnUgHaT78+6iGOFAtouiBorEgEFbAAgAAAAAIgbpyYtJj5513Z5XYqviH/HXG/5+mqR52iBbfqMmDtZAAMxMTkAfQAAAAVkACAAAAAAJRzYK0PUwr9RPG2/7yID0WgcTJPB2Xjccp5LAPDYunkFcwAgAAAAAIIh24h3DrltAzNFhF+MEmPrZtzr1PhCofhChZqfCW+jBWwAIAAAAAAzRNXtL5o9VXMk5D5ylI0odPDJDSZZry1wfN+TedH70gADMTIwAH0AAAAFZAAgAAAAAHSaHWs/dnmI9sc7nB50VB2Bzs0kHapMHCQdyVEYY30TBXMAIAAAAACkV22lhEjWv/9/DubfHBAcwJggKI5mIbSK5L2nyqloqQVsACAAAAAAS19m7DccQxgryOsBJ3GsCs37yfQqNi1G+S6fCXpEhn4AAzEyMQB9AAAABWQAIAAAAAAC/I4TQRtCl12YZmdGz17X4GqSQgfwCPgRBwdHmdwu+QVzACAAAAAAx8f3z2ut/RAZhleari4vCEE+tNIn4ikjoUwzitfQ588FbAAgAAAAAJci0w1ZB8W2spJQ+kMpod6HSCtSR2jrabOH+B0fj3A4AAMxMjIAfQAAAAVkACAAAAAADGB5yU2XT0fse/MPWgvBvZikVxrl5pf3S5K1hceKWooFcwAgAAAAAIxTmlLHMjNaVDEfJbXvRez0SEPWFREBJCT6qTHsrljoBWwAIAAAAAAlswzAl81+0DteibwHD+CG5mZJrfHXa9NnEFRtXybzzwADMTIzAH0AAAAFZAAgAAAAABmO7QD9vxWMmFjIHz13lyOeV6vHT6mYCsWxF7hb/yOjBXMAIAAAAACT9lmgkiqzuWG24afuzYiCeK9gmJqacmxAruIukd0xEAVsA
CAAAAAAZa0/FI/GkZR7CtX18Xg9Tn9zfxkD0UoaSt+pIO5t1t4AAzEyNAB9AAAABWQAIAAAAAAfPUoy7QyZKhIIURso+mkP9qr1izbjETqF5s22GwjCjAVzACAAAAAAvLMsIDQ/go4VUxeh50UHmsvMvfx51cwyONnRD2odvC0FbAAgAAAAAKMb+1CodEalAFnDrEL1Ndt8ztamZ+9134m9Kp3GQgd+AAMxMjUAfQAAAAVkACAAAAAAE3ZqUar0Bq2zWbARE0bAv98jBlK9UJ73/xcwdMWWlSkFcwAgAAAAAK4M+MmC+9sFiFsumMyJZQKxWmmJiuG9H7IzKw083xxkBWwAIAAAAAAqkAONzhvMhkyL1D/6h7QQxEkdhC3p2WjXH+VGq5qCqQADMTI2AH0AAAAFZAAgAAAAAMo8FJiOq63cAmyk2O7eI7GcbQh/1j4RrMTqly3rexftBXMAIAAAAADjVmpd0WiRGTw/gAqEgGolt2EI7Csv14vKdmYoMD0aAgVsACAAAAAA07XQBzBUQMNw7F2/YxJjZNuPVpHTTgbLd1oGk77+bygAAzEyNwB9AAAABWQAIAAAAACu5IGaIx7A3Jvly/kzlCsSA4s3iJwuIl8jEdRH0k93NwVzACAAAAAA9NRUyxYE+t0Xyosyt6vIfMFW/vBoYg6sR+jBNs4JAxIFbAAgAAAAAAzyZ91dx+0oMlOVAjRGiMrPySikY/U9eMEB4WJb3uWtAAMxMjgAfQAAAAVkACAAAAAALkRy0GJInXYLA+cgjs6Myb0a+Gu9hgXhHvhLNoGWfckFcwAgAAAAANbALyt9zCSvwnLaWCd2/y2eoB7qkWTvv1Ldu8r40JPuBWwAIAAAAAD4Fl5bV5sz4isIE9bX+lmAp+aAKaZgVYVZeVfrItkCZAADMTI5AH0AAAAFZAAgAAAAAGoUK/DSWhT8LZhszSUqDbTrp8cSA7rdqmADKL+MILtTBXMAIAAAAABHnEE9bVa6lvhfhEMkkV2kzSSxH/sMW/FIJuw3CzWs6wVsACAAAAAAanavcBdqZxgRGKvEK95wTmeL1K1CeDSXZsXUAs81uOgAAzEzMAB9AAAABWQAIAAAAAC922ZDQE3h2fQKibGMZ9hV0WNlmrPYYSdtaSyYxsWYqgVzACAAAAAAagMovciKK6WVjIc2cCj8nK5O/gVOFFVeVAJpRp89tmQFbAAgAAAAAKcTFfPQzaFiAtSFhqbN02sCE1BKWJSrRfGN5L6oZwzkAAMxMzEAfQAAAAVkACAAAAAAtK+JqX3K/z2txjAU15DgX4y90DS2YLfIJFolCOkJJJwFcwAgAAAAAMnR5V7gfX7MNqqUdL5AkWlkhyFXaBRVNej+Rcn8lrQkBWwAIAAAAAA2cDNRXZuiC241TGRvdFyctJnrNcdbZOP9zHio81tkngADMTMyAH0AAAAFZAAgAAAAAAeGrIMK/bac6kPczxbvRYqKMkcpeI2FjdMpD91FDWIvBXMAIAAAAAAix62z1LeS8yvSXCl5gHSIomjyx76fF3S1lp9k900hygVsACAAAAAAiYwzf2m71aWFD5ajcXyW2JX2EzQOkBroTGMg29nLPYIAAzEzMwB9AAAABWQAIAAAAACphf298InM0Us4HT8o1W1MGw0D/02vd7Jh+U0h7qaFaQVzACAAAAAAFXtk7YpqsOJxsqGWSIL+YcBE96G3Zz9D31gPqDW94y8FbAAgAAAAAAOrS1KVA94rjB1jZ1pPocpCeBG+B14RzWoHqVDpp7JbAAMxMzQAfQAAAAVkACAAAAAATLDS2cuDVM3yDMuWNgk2iGKBTzPpfJMbvxVOSY39ZfcFcwAgAAAAAPT5wRi2cLHIUflXzm6EQB/m7xdThP80ir1VV/JBBqvxBWwAIAAAAAB9lEtZS0aXCFbCtSbhnis27S5IPcfWGygHW8AHn3QqzwADMTM1AH0AAAAFZAAgAAAAAJNjExiZVX7jfFGfYpQu16qxLN0YPqVU/5CQ/Y67YSinBXMAIAAAAABMpm2+6KrkRUlXzQoMPHrQmIO6dkQz66tYdfTeA3dKqQVsACAAAAAAFXobHiMLvNZuEPr8jtewCX2J93EZG3JNeyVg92fue6YAAzEzNgB9AAAABWQAIAAAAABlFkYtLCx901X6QVVMkSn6Z7k30UF4xHaA0OZJJ9bdyQVzACAAAAAATez+F9GHcGzTp7jjv4feboUNb8JCkIp4EqcPFisnq7MFbAAgAAAAACE7JvOpBgMoZ7kRd4QbxIhxukPTUxXpzhjnBHiR7XoRAAMxMzcAfQAAAAVkACAAAAAA8NJKN0IxZnruhswGQkiruv8Ih0EMwDcSZx/Xasup9dkFcwAgAAAAAKaJZRxzA+Igeydvuk6cSwUHXcrmT4PjhuPu//FslpdnBWwAIAAAAAD53Rok1Vq/PMAnXmarqoHJ0PEyYUBmVESa9hIpCv/G9QADMTM4AH0AAAAFZAAgAAAAABHxHdEClz7hbSSgE58+dWLlSMJnoPz+jFxp4bB1GmLQBXMAIAAAAAD3nSvT6aGD+A110J/NwEfp0nPutlmuB5B+wA3CC3noGAVsACAAAAAA3Apjd+TapONB7k5wBVwTWgn8t+Sq2oyyU5/+as109RcAAzEzOQB9AAAABWQAIAAAAAC/o8qW/ifk3KuJ01VFkyNLgQafxB5/bGs2G5VyyVafOwVzACAAAAAA1bMqAFGDHSl6BYNLbxApvkAv2K1/oafywiX0MDz1dGUFbAAgAAAAAHJXLlId3edFoniLD/9K2A5973MeP2Ro31flDyqm3l5QAAMxNDAAfQAAAAVkACAAAAAAY2V8I1bz3a1AxTtmED6UhdhA09huFkuuEX8R+d/WDPUFcwAgAAAAAPTVoNRiI76tcRKqd+JBBVyy4+YcKST42p0QX2BtmQ2VBWwAIAAAAACcxt9hg14WqPNiDv1MkqVljM2e2KJEv53lA17LhV6ZigADMTQxAH0AAAAFZAAgAAAAAO2kSsW0WGN9AOtK4xK2SHrGhWiaAbMEKT4iZkRpaDN/BXMAIAAAAABKGzQcPM8LT2dwOggxoWjv/1imYWabbG/G4kBw8OWaxAVsACAAAAAAC9hLK1dScQTAqg+YAG3ObdPzg2Xet57HmOFpGmyUR9UAAzE0MgB9AAAABWQAIAAAAAAiCwzNEEaH/mDam68IdDftnhthyUFdb+ZCNSBQ91WlHQVzACAAAAAA7tHyHcxCzmbJeFYZyPm4mEgkTGKOvwY4MX82OvH0Jn8FbAAgAAAAAAb5IAbZ1hXCNegQ+S+C9i/Z8y6sS8KeU04V6hXa2ml6AAMxNDMAfQAAAAVkACAAAAAAGuCHVNJSuoVkpPOnS5s89GuA+BLi2IPBUr2Bg1sWEPIFcwAgAAAAAEl1gncS5/xO7bQ/KQSstRV3rOT2SW6nV92ZANeG2SR6BWwAIAAAAAA9LOcKmhek8F2wAh8yvT/vjp2gaouuO+Hmv10lwAeWPAADMTQ0AH0AAAAFZAAgAAAAAMfxz7gEaoCdPvXr
ubDhCZUS0ARLZc1svgbXgMDlVBPgBXMAIAAAAAB6a5dDA3fuT5Vz2KvAcbUEFX/+B7Nw2p1QqbPoQ5TTuAVsACAAAAAAcf/y75UOuI62A6vWH7bYr/5Jz+nirZVYK/81trN6XOQAAzE0NQB9AAAABWQAIAAAAACnYsqF/VzmjIImC9+dqrHO1TM6lJ6fRwM0mM6Wf6paOwVzACAAAAAA5tgZzch8uDCR1ky3SllVaKVpxAlbrhvlNDTazZZRZOAFbAAgAAAAALeGiLJS4z2zhgVpxzyPdRYyACP9QzQBOob34YrIZumCAAMxNDYAfQAAAAVkACAAAAAAEC0sIVmadtW4YMuRXH7RpAhXclsd+3bmqGXCMeaT014FcwAgAAAAABPpXh0uzpsJJB+IRUNajmMB9WGwswfpw5T9xk3Xj6ANBWwAIAAAAAAmf+NYh9TZ/QRu3w/GQz66n7DtfbJijN3G7KzeL8lstAADMTQ3AH0AAAAFZAAgAAAAABaIB3n49Xm9cOafSrQsE0WCcYp8rMIO/qVwIlMF5YLRBXMAIAAAAAC9EyWJV3xOu9bzgdJ/yX+ko7qLf1u3AxNMataW2C9EzQVsACAAAAAAvVbDkLxXx2DcMLifIQ3K0IIJcLcAG9DUrNfI6aoUjNcAAzE0OAB9AAAABWQAIAAAAAA5rZItA/cocRnngYqcJ3nBXQ+l688aKz3EQyLbYYunPAVzACAAAAAAwKyA+L7TgxztPClLrIMk2JXR+w7c04N3ZOqPgjvrIvsFbAAgAAAAACzvZ33h6aWEe8hmo+1f6OXJ72FY5hvWaUuha64ZV3KFAAMxNDkAfQAAAAVkACAAAAAA3htn7oHJ0YYpIrs+Mzyh85Ys67HwAdv5LQl1mCdoMWkFcwAgAAAAAEHjCtNNLenHuSIYux6ezAHsXDaj2DlTF67ToDhDDe6HBWwAIAAAAAD+P4H0sk9jOd+7vOANt2/1Ectb+4ZRGPE8GkHWNXW3MgADMTUwAH0AAAAFZAAgAAAAAEnt18Km/nqggfIJWxzTr9r3hnXNaueG6XO9A5G11LnGBXMAIAAAAAD7QxzGMN/ard5TfFLecE6uusMmXG2+RBsBR+/NCQHUwAVsACAAAAAAQEZ1ZZ8GC8rdbg7s87OM5Gr9qkTXS9+P5DuAZxj5Gl4AAzE1MQB9AAAABWQAIAAAAAAVAKK/GoY8AACu/hyMpO4hdLq6JnEyWNzkyci9sbaD/wVzACAAAAAA2HmeqpMlvvBpV2zQTYIRmsc4MFlfHRwLof0ycJgMg/MFbAAgAAAAACdltCeWi5E/q1Li1eXLChpM2D9QQSGLBZ82NklQSc0oAAMxNTIAfQAAAAVkACAAAAAAhHyq1GQC/GiMwpYjcsfkNxolJ10ARKjIjfkW1Wipzi0FcwAgAAAAAD/uaGWxTDq87F8XZ6CrFI+RNa8yMqfSZdqK00Kj833BBWwAIAAAAAD6aEdOO0CsQGagioOCvANPCEHSpJ8BSixlPBq5ERhB7AADMTUzAH0AAAAFZAAgAAAAABAJJxHoZD+MQBWqm9UM9Dd3z5ZohIZGWRaRVRsMptKQBXMAIAAAAADrE/ca+gqj/SH4oao4wE4qn2ovoTydzcMbDbrfnUs3zAVsACAAAAAAeNCIQN6hVnGJinytQRFGlQ2ocoprXNqpia+BSxzl+uwAAzE1NAB9AAAABWQAIAAAAAAv01wz7VG9mTepjXQi6Zma+7b/OVBaKVkWNbgDLr1mFgVzACAAAAAA0I5sxz8r6wkCp5Tgvr+iL4p6MxSOq5d3e1kZG+0b7NkFbAAgAAAAAIA32v6oGkAOS96HexGouNTex+tLahtx9QF2dgGClk6WAAMxNTUAfQAAAAVkACAAAAAAWXecRwxSon68xaa9THXnRDw5ZfzARKnvvjTjtbae6T0FcwAgAAAAAPh0UfUMEo7eILCMv2tiJQe1bF9qtXq7GJtC6H5Va4fIBWwAIAAAAADqFr1ThRrTXNgIOrJWScO9mk86Ufi95IDu5gi4vP+HWQADMTU2AH0AAAAFZAAgAAAAAEY5WL8/LpX36iAB1wlQrMO/xHVjoO9BePVzbUlBYo+bBXMAIAAAAABoKcpadDXUARedDvTmzUzWPe1jTuvD0z9oIcZmKuiSXwVsACAAAAAAJuJbwuaMrAFoI+jU/IYr+k4RzAqITrOjAd3HWCpJHqEAAzE1NwB9AAAABWQAIAAAAADnJnWqsfx0xqNnqfFGCxIplVu8mXjaHTViJT9+y2RuTgVzACAAAAAAWAaSCwIXDwdYxWf2NZTly/iKVfG/KDjHUcA1BokN5sMFbAAgAAAAAJVxavipE0H4/JQvhagdytXBZ8qGooeXpkbPQ1RfYMVHAAMxNTgAfQAAAAVkACAAAAAAsPG7LaIpJvcwqcbtfFUpIjj+vpNj70Zjaw3eV9T+QYsFcwAgAAAAAJQ71zi0NlCyY8ZQs3IasJ4gB1PmWx57HpnlCf3+hmhqBWwAIAAAAACD58TO6d+71GaOoS+r73rAxliAO9GMs4Uc8JbOTmC0OwADMTU5AH0AAAAFZAAgAAAAAAGiSqKaQDakMi1W87rFAhkogfRAevnwQ41onWNUJKtuBXMAIAAAAAASgiDpXfGh7E47KkOD8MAcX8+BnDShlnU5JAGdnPdqOAVsACAAAAAAI+2TTQIgbFq4Yr3lkzGwhG/tqChP7hRAx2W0fNaH6jcAAzE2MAB9AAAABWQAIAAAAAB7L4EnhjKA5xJD3ORhH2wOA1BvpnQ+7IjRYi+jjVEaJAVzACAAAAAAuhBIm0nL3FJnVJId+7CKDASEo+l2E89Z9/5aWSITK4AFbAAgAAAAALtSICOzQDfV9d+gZuYxpEj6cCeHnKTT+2G3ceP2H65kAAMxNjEAfQAAAAVkACAAAAAAaROn1NaDZFOGEWw724dsXBAm6bgmL5i0cki6QZQNrOoFcwAgAAAAANVT8R6UvhrAlyqYlxtmnvkR4uYK/hlvyQmBu/LP6/3ZBWwAIAAAAAD+aHNMP/X+jcRHyUtrCNkk1KfMtoD3GTmShS8pWGLt+AADMTYyAH0AAAAFZAAgAAAAADqSR5e0/Th59LrauDA7OnGD1Xr3H3NokfVxzDWOFaN7BXMAIAAAAACt30faNwTWRbvmykDpiDYUOCwA6QDbBBYBFWS7rdOB4AVsACAAAAAAF7SvnjjRk5v2flFOKaBAEDvjXaL1cpjsQLtK2fv9zdQAAzE2MwB9AAAABWQAIAAAAADmtb1ZgpZjSeodPG/hIVlsnS8hoRRwRbrTVx89VwL62AVzACAAAAAAi38e1g6sEyVfSDkzZbaZXGxKI/zKNbMasOl2LYoWrq8FbAAgAAAAAALACk0KcCDN/Kv8WuazY8ORtUGkOZ5Dsm0ys1oOppp/AAMxNjQAfQAAAAVkACAAAAAAf/f7AWVgBxoKjr7YsEQ4w/fqSvuQWV2HMiA3rQ7ur0sFcwAgAAAAADkkeJozP6FFhUdRIN74H4UhIHue+eVbOs1NvbdWYFQrBWwAIAA
AAAB55FlHAkmTzAYj/TWrGkRJw2EhrVWUnZXDoMYjyfB/ZwADMTY1AH0AAAAFZAAgAAAAAI2WEOymtuFpdKi4ctanPLnlQud+yMKKb8p/nfKmIy56BXMAIAAAAADVKrJmhjr1rfF3p+T+tl7UFd1B7+BfJRk0e7a4im7ozgVsACAAAAAA5E7Ti3PnFiBQoCcb/DN7V1uM3Xd6VKiexPKntssFL7kAAzE2NgB9AAAABWQAIAAAAAAuHU9Qd79hjyvKOujGanSGDIQlxzsql8JytTZhEnPw+AVzACAAAAAAjF2gV/4+sOHVgDd/oR5wDi9zL7NGpGD+NsEpGXy/a4QFbAAgAAAAAJzMoyojYV6Ed/LpVN5zge93Odv3U7JgP7wxeRaJZGTdAAMxNjcAfQAAAAVkACAAAAAA7dQDkt3iyWYCT94d7yqUtPPwp4qkC0ddu+HFdHgVKEkFcwAgAAAAANuYvtvZBTEq4Rm9+5eb7VuFopowkrAuv86PGP8Q8/QvBWwAIAAAAACeqXoAOQOE4j0zRMlkVd8plaW0RX1npsFvB38Xmzv7sAADMTY4AH0AAAAFZAAgAAAAAAwnZSDhL4tNGYxlHPhKYB8s28dY5ScSwiKZm3UhT8U3BXMAIAAAAABDoY6dhivufTURQExyC9Gx3ocpl09bgbbQLChj3qVGbgVsACAAAAAAF+1nS7O0v85s3CCy+9HkdeoEfm2C6ZiNbPMMnSfsMHUAAzE2OQB9AAAABWQAIAAAAAC2VuRdaC4ZJmLdNOvD6R2tnvkyARteqXouJmI46V306QVzACAAAAAAMn1Z6B35wFTX9mEYAPM+IiJ5hauEwfD0CyIvBrxHg7IFbAAgAAAAAOG6DvDZkT9B/xZWmjao2AevN7MMbs3Oh9YJeSd/hZ+hAAMxNzAAfQAAAAVkACAAAAAAVerb7qVNy457rNOHOgDSKyWl5ojun7iWrv1uHPXrIZQFcwAgAAAAAIDcYS9j5z+gx0xdJj09L7876r/vjvKTi/d3bXDE3PhyBWwAIAAAAADuhVLqb1Bkrx8aNymS+bx2cL8GvLFNH4SAi690DUgnWQADMTcxAH0AAAAFZAAgAAAAAH/E44yLxKCJjuSmU9A8SEhbmkDOx1PqqtYcZtgOzJdrBXMAIAAAAABgLh9v2HjBbogrRoQ82LS6KjZQnzjxyJH4PH+F3jupSAVsACAAAAAAIlO46ehXp4TqpDV0t6op++KO+uWBFh8iFORZjmx2IjkAAzE3MgB9AAAABWQAIAAAAAAlNUdDL+f/SSQ5074mrq0JNh7CTXwTbbhsQyDwWeDVMwVzACAAAAAANIH2IlSNG0kUw4qz0budjcWn8mNR9cJlYUqPYdonucAFbAAgAAAAAJMrOUOyiu5Y3sV76zwEFct8L7+i8WGlQI2+8z2W2kzaAAMxNzMAfQAAAAVkACAAAAAASZ+CvUDtlk/R4HAQ3a+PHrKeY/8ifAfh0oXYFqliu80FcwAgAAAAAJelpzPgM65OZFt/mvGGpwibclQ49wH+1gbUGzd9OindBWwAIAAAAAD9qeDchteEpVXWcycmD9kl9449C1dOw0r60TBm5jK+cQADMTc0AH0AAAAFZAAgAAAAAN9fkoUVbvFV2vMNMAkak4gYfEnzwKI3eDM3pnDK5q3lBXMAIAAAAACnDkgVNVNUlbQ9RhR6Aot2nVy+U4km6+GHPkLr631jEAVsACAAAAAANzg/BnkvkmvOr8nS4omF+q9EG/4oisB+ul4YHi938hwAAzE3NQB9AAAABWQAIAAAAAASyK3b1nmNCMptVEGOjwoxYLLS9fYWm/Zxilqea0jpEQVzACAAAAAADDHsGrbqlKGEpxlvfyqOJKQJjwJrzsrB7k3HG0AUJbkFbAAgAAAAAKwx3S4XfDZh4+LuI9jf7XgUh5qiefNv87JD4qvVRfPSAAMxNzYAfQAAAAVkACAAAAAAlSP9iK31GlcG9MKGbLmq+VXMslURr+As736rrVNXcsUFcwAgAAAAAAvbj0zfq9zzi8XReheKFbCB+h9IsOLgXPPpI5vrEJNZBWwAIAAAAABXvoZhaQE7ogWjeBjceVkp03N20cKYP3TA8vuNsgpfAgADMTc3AH0AAAAFZAAgAAAAAOJNORH8Bev97gVU7y6bznOxJ+E6Qoykur1QP76hG1/7BXMAIAAAAAC+C1PtOOrSZgzBAGhr+dPe/kR0JUw9GTwLVNr61xC1aAVsACAAAAAAeA/L8MQIXkamaObtMPLpoDoi5FypA5WAPtMeMrgi0eQAAzE3OAB9AAAABWQAIAAAAAAKcHzLUomavInN6upPkyWhAqYQACP/vdVCIYpiy6U6HgVzACAAAAAATsR4KItY6R2+U7Gg6sJdaEcf58gjd1OulyWovIqfxKcFbAAgAAAAAFbm10ko67ahboAejQdAV0U2uA5OhZYdb8XUFJ8OL46LAAMxNzkAfQAAAAVkACAAAAAAqTOLiMpCdR59tLZzzIPqJvbCNvz2XQL9ust0qYaehtcFcwAgAAAAAArefox/3k5xGOeiw2m6NUdzuGxmPwcu5IFcj+jMwHgHBWwAIAAAAADLZGFJ7MQd5JXMgMXjqZO5LDLxcFClcXPlnRMWRn+1oAADMTgwAH0AAAAFZAAgAAAAAIPSqSeVzSRgNVNmrPYHmUMgykCY27NbdDUNhE5kx/SgBXMAIAAAAAAhX90nNfxyXmZe/+btZ7q6xMX4PFyj0paM1ccJ/5IUUQVsACAAAAAA419oHmD2W0SYoOMwhrhrp8jf68fg9hTkaRdCuVd3CN0AAzE4MQB9AAAABWQAIAAAAACLn5DxiqAosHGXIAY96FwFKjeqrzXWf3VJIQMwx1fl4gVzACAAAAAAindvU27nveutopdvuHmzdENBbeGFtI3Qcsr07jxmvm8FbAAgAAAAAPvl9pBStQvP4OGkN5v0MghUY6djm9n7XdKKfrW0l1sMAAMxODIAfQAAAAVkACAAAAAA7i2S6rHRSPBwZEn59yxaS7HiYBOmObIkeyCcFU42kf8FcwAgAAAAAGb3RSEyBmgarkTvyLWtOLJcPwCKbCRkESG4RZjVmY4iBWwAIAAAAADB2/wo5CSHR4ANtifY6ZRXNTO5+O8qP82DfAiAeanpZwADMTgzAH0AAAAFZAAgAAAAAFz+M+H/Z94mdPW5oP51B4HWptp1rxcMWAjnlHvWJDWrBXMAIAAAAACBFEOQyL7ZHu4Cq33QvXkmKuH5ibG/Md3RaED9CtG5HwVsACAAAAAAfggtJTprQ/yZzj7y5z9KvXsdeXMWP0yUXMMJqpOwI88AAzE4NAB9AAAABWQAIAAAAAAE7c2x3Z3aM1XGfLNk/XQ9jCazNRbGhVm7H8c2NjS5ywVzACAAAAAARJ9h8fdcwA19velF3L/Wcvi2rCzewlKZ2nA0p8bT9uwFbAAgAAAAAJtWe6b4wK2Hae2dZm/OEpYQnvoZjz4Sz5IgJC2wInecAAMxODUAfQAAAAVkACAAAAAAVoRt9B9dNVvIMGN+ea
5TzRzQC+lqSZ8dd/170zU5o9cFcwAgAAAAAEwM95XZin5mv2yhCI8+ugtKuvRVmNgzzIQN0yi1+9aIBWwAIAAAAAAMGBq72n00rox3uqhxSB98mkenTGCdbbUF1gXrgottzgADMTg2AH0AAAAFZAAgAAAAAKRDkjyWv/etlYT4GyoXrmBED2FgZHnhc+l9Wsl06cH2BXMAIAAAAABohlpm3K850Vndf3NmNE0hHqDlNbSR8/IvMidQ3LnIZAVsACAAAAAAW42nGHa6q2MCAaaPVwaIDfr8QLyQwjKq23onZJYsqVsAAzE4NwB9AAAABWQAIAAAAAC3DFh5oklLCNLY90bgWm68dFXz65JpAZSp1K99MBTPAQVzACAAAAAAQgZecmxEUZVHoptEQClDwAf8smI3WynQ/i+JBP0g+kQFbAAgAAAAAEUSQGVnAPISD6voD0DiBUqyWKgt2rta0tjmoe+LNt6IAAMxODgAfQAAAAVkACAAAAAAQ5WKvWSB503qeNlOI2Tpjd5blheNr6OBO8pfJfPNstcFcwAgAAAAAKwHgQLSDJ5NwLBQbY5OnblQIsVDpGV7q3RCbFLD1U4/BWwAIAAAAACQ5nED99LnpbqXZuUOUjnO2HTphEAFBjLD4OZeDEYybgADMTg5AH0AAAAFZAAgAAAAAGfhFY3RGRm5ZgWRQef1tXxHBq5Y6fXaLAR4yJhrTBplBXMAIAAAAACKEF0ApLoB6lP2UqTFsTQYNc9OdDrs/vziPGzttGVLKQVsACAAAAAArOO6FyfNRyBi0sPT5iye7M8d16MTLcwRfodZq4uCYKEAAzE5MAB9AAAABWQAIAAAAAAIM73gPcgzgotYHLeMa2zAU4mFsr7CbILUZWfnuKSwagVzACAAAAAAJCSu98uV8xv88f2BIOWzt6p+6EjQStMBdkGPUkgN79cFbAAgAAAAAMGqPGMPxXbmYbVfSa/japvUljht1zZT33TY7ZjAiuPfAAMxOTEAfQAAAAVkACAAAAAAkWmHCUsiMy1pwZTHxVPBzPTrWFBUDqHNrVqcyyt7nO8FcwAgAAAAAMv2CebFRG/br7USELR98sIdgE9OQCRBGV5JZCO+uPMgBWwAIAAAAABt7qSmn3gxJu7aswsbUiwvO+G6lXj/Xhx+J/zQyZxzLAADMTkyAH0AAAAFZAAgAAAAAGInUYv0lP/rK7McM8taEHXRefk8Q2AunrvWqdfSV7UaBXMAIAAAAACE+WPxJ3gan7iRTbIxXXx+bKVcaf8kP4JD8DcwU0aL7wVsACAAAAAAUC4eTprX4DUZn2X+UXYU6QjtiXk+u57yoOPBbPQUmDkAAzE5MwB9AAAABWQAIAAAAACmHlg2ud3cplXlTsNTpvNnY6Qm1Fce0m899COamoDjaQVzACAAAAAArtJQeJIlepBWRU2aYar7+YGYVQ7dfDc1oxgTmA8r9q0FbAAgAAAAAOk45vg5VqZHAFCO3i0Z52SZi5RADf8NXwf68T5yad/DAAMxOTQAfQAAAAVkACAAAAAApzcWSAbZWV/Rq+ylRNqqlJqNVR4fhXrz4633/MQOQgcFcwAgAAAAAN/jz/bsEleiuCl+li83EWlG6UMHA8CyaOMRKCkXkSCPBWwAIAAAAAC3Sd+Qg+uFDKpGZHbrQgokXHQ1az1aFl4YK343OB6hcQAAEmNtAAAAAAAAAAAAABBwYXlsb2FkSWQAAAAAABBmaXJzdE9wZXJhdG9yAAEAAAASc3AAAQAAAAAAAAAQdGYAAQAAABNtbgD/////Y46NN8CHrb4J7f/fE214AP////9jjo03wIetvgnt/18A", + "subType": "06" + } + } + } + }, + "limit": 1 + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDecimalNoPrecision", + "bsonType": "decimal", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + } + } + } + ] + } + } + } + }, + "commandName": "delete" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Decimal-FindOneAndUpdate.json b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Decimal-FindOneAndUpdate.json new file mode 100644 index 0000000000..2697549f6a --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Decimal-FindOneAndUpdate.json @@ -0,0 +1,1969 @@ +{ + "description": "fle2v2-Rangev2-Decimal-FindOneAndUpdate", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.0.0", + "topologies": [ + "replicaset" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", 
+ "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted" + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll_unencrypted", + "database": "db_unencrypted", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDecimalNoPrecision", + "bsonType": "decimal", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + } + } + } + ] + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "FLE2 Range Decimal. FindOneAndUpdate.", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "1" + } + } + }, + "object": "coll" + }, + { + "name": "findOneAndUpdate", + "arguments": { + "filter": { + "encryptedDecimalNoPrecision": { + "$gt": { + "$numberDecimal": "0" + } + } + }, + "update": { + "$set": { + "encryptedDecimalNoPrecision": { + "$numberDecimal": "2" + } + } + }, + "returnDocument": "Before" + }, + "object": "coll", + "expectResult": { + "_id": 1, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "1" + } + } + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": { + "$numberInt": "0" + }, + "encryptedDecimalNoPrecision": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "rbf3AeBEv4wWFAKknqDxRW5cLNkFvbIs6iJjc6LShQY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "n+XAuFnP8Dov9TnhGFxNx0K/MnVM9WbJ7RouEu0ndO0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yRXojuVdn5GQtD97qYlaCL6cOLmZ7Cvcb3wFjkLUIdM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "DuIkdRPITRs55I4SZmgomAHCIsDQmXRhW8+MOznkzSk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SsBk+Et1lTbU+QRPx+xyJ/jMkmfG+QCvQEpip2YYrzA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "crCIzOd8KhHvvUlX7M1v9bhvU4pLdTc+X2SuqoKU5Ek=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "YOWdCw4UrqnxkAaVjqmC4sKQDMVMHEpFGnlxpxdaU6E=", + "subType": "00" + } + }, + { + "$binary": { + "base64": 
"M3SShp81Ff8tQ632qKbv9MUcN6wjDaBReI0VXNu6Xh4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "gzHlSPxpM0hT75kQvWFzGlOxKvDoiKQZOr19V6l2zXI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "s3JnppOGYw9SL2Q1kMAZs948v2F5PrpXjGei/HioDWs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "cG6+3Gk/zEH68P/uuuwiAUVCuyJwa1LeV+t29FlPPAo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dupdvR3AyJtM+g9NDKiaLVOtGca387JQp8w+V03m7Ig=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "JqEQc5svj2jTvZ6LLA5ivE+kTb/0aRemSEmxk4G7Zrg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "szcXXXKnob+p3SoM4yED2R920LeJ7cVsclPMFTe4CeI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "o1QoGVXmuBdHwHm7aCtGMlMVKrjFdYvJXpoq6uhIAZ0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Jfm5wPlqqLCJRGQIqRq2NGmpn7s0Vrih2H3YAOoI2YU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "zMHLb8ARbsYo8Ld05bqnGFf1Usha6EGb8QKwdSAyps0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yQdtq9lh5pugL7/i0Bj/PuZUUBUIzf+7wj1rl5y736w=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "wGWVZdO7qIuyDg/BqDgqjgoQ02h5YYgwXQB1oCin2NE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "by9HMLj6NTEpgztZ5HSN6GxImkXPcaFINYDzgZY33X8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tWo0vbasi7bXmn/MsOx13VC1IsWtpx/nYp0uj4iMzdA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tQQpndUYd5O87lOtrGjH3wl9VsOK0ray7RMasL90sBM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "cQjXEDCMsOpKLLf+vlTgIHA+cbSJdzqhbSX9Wvh95aA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "7yMpU48IxK9SzP2cx3VnTownGEwFmeFofuuFT97SuuY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "kSOx1kz0CmBgzKQHZlo65ZUY1DIv9A99JRm+Us2y6Ew=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ubQpdPBe6/xvtr+AcXdfYLSvYCR4ot0tivehkCsupb4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xal+iCJ6FTefRQToyoNksc9NCZShyn04NDGi4IYrcoM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "d7jU4iOK50xHxlkSifcxlZFCM46TSgQzoYivxG3HNLY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tJvl2nsBLBVzL3pp6sKWCL4UXeh3q/roYBJjSb74ve0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "OIUCaKRvIx9t1w6Hxlz1IcQTdPNCfdRNwnnTm10W+X0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "A9tvzsiElotOUVIB4CqfQp9mAwqvTM35YkmAR170aHA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "lI8gpK7hpb7c9x4RQugsxMnQay5LZJmwslZdvMx/dcE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dNCzh40U0XvdKnSDi3HRQOWQftEsDVqc4uUvsVFGoq8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "IP+iwEBWBwVVZIdpaMu8k5+soFCz+TZkYn3drKZ9grE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "pnqyh6e0y5svHkJDShlN9CHV0WvMBE4QbtJpQw5ZCXc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "elEl42tbVDoRTLjAhZUFEtXiut4b3PVhg/1ZLZSQdtE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "vHuu2FxwclMHqyE6JBYbTYgbEkB0dqb/JuaxsvfwsmY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xTf7NCe3Gf8QpE78HR5OknlLTKfs9J+RN9UZpH6fnso=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "XiWSasRnJAulGR6+LCVD3mwRObXylqYWR9jvpywq12c=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "MZMxEQ5ikx0PG1YFIExv0UnTZogsvgeOEZTpzvBDn4w=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yZMyMZBDrWbAhvnic7vvIYhmO9m5H2iuv0c8KNZrBzY=", + 
"subType": "00" + } + }, + { + "$binary": { + "base64": "xxM14hTPY5j0vvcK2C7YAEjzdsfUTFHozHC0hEo1bxI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "+01rqR1xVwkpGXcstbk1ItJqFVjH6Q8MGxEN3Cm9Y1A=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xOpLV0Z2VTRJ3iWtnWZcsyjXubTIkYWo31cO+HV1o1k=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "BWUOLqgLBqc5NwxVlSV5H3KFQPXbCp7mdo+jF+8cJqY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "fuQb1S6xZDGlrEbK+kI23aL53PP1PVNwqICnZNt9Yzg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SfscnoibFttahLdPVC4Ee+47ewGFKpDSU7M6HX19bKE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "rpSW2awybNVeKtat91VFxqbINoTfNhPfQAu+d73Xtf8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "9M/CP9ccOIIj2LLFmE0GFDO0Ban2wsNalEXfM6+h+1s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "WrEMG49l1ye4MhXs5ZS9tz8P6h+hDvthIg/2wW9ne1Q=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ImNhbfeyfH8qIEeA5ic0s3dAQBdzzTBS+CPsNih9vZ0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dWP33YDSn04UKJN2ogh2Rui0iW/0q2y18OCDRVcfyoo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "lYv0isAtfGh6H9tdp3cp2eHU7q2J+uk7QrgcxtK3w7Y=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "VGMoamB/+7zTOYcY/pqJc96xlv2PdW4hwsIAEIslTDQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yNeBWMF7BnD9wVwz2PgJsvWr77QiVvvWUvJF0+fqBug=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SfpvObJ+tJBXSvqeN7vlOfmhYign635lciYAJIjUtY8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dsen4NqjzVGjpjufiTMs3+gqeD09EbnuogPgxrJECwg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "pxCWVM3sn19NsFEpgHbgLa+PmYlhN3mMiP0Wk8kJhYw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "q11KNvJszjYIB9n9HcC+N4uz11a3eRj1L3BH9scKMDQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "A1PmkgcEToWh1JiVWE6mI5jUu7poxWWuCUt/cgRUUDc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "qJo3Hu4PJeanL7XEaWXO/n3YsodhZyd+MJOOmB9Kpd8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "BkBKLO8URFscfRY9Bav/1+L9mLohDgNr/MkZtGiraIs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "rZq5WA3Hx3xthOyHAJXK//f8pE2qbz7YKu3TIMp9GFY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "X07a/Lm80p5xd4RFs1dNmw+90tmPDPdGiAKVZkxd4zY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6YrBn2ofIw1b5ooakrLOwF41BWrps8OO0H9WH4/rtlE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "0l86Ag5OszXpa78SlOUV3K9nff5iC1p0mRXtLg9M1s4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Hn6yuxFHodeyu7ISlhYrbSf9pTiH4TDEvbYLWjTwFO0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "zdf4y2etKBuIpkEU1zMwoCkCsdisfXZCh8QPamm+drY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "rOQ9oMdiK5xxGH+jPzOvwVqdGGnF3+HkJXxn81s6hp4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "61aKKsE3+BJHHWYvs3xSIBvlRmKswmaOo5rygQJguUg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "KuDb/GIzqDM8wv7m7m8AECiWJbae5EKKtJRugZx7kR0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Q+t8t2TmNUiCIorVr9F3AlVnX+Mpt2ZYvN+s8UGict8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tJRZIpKxUgHyL83kW8cvfjkxN3z6WoNnUg+SQw+LK+k=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "pnUsYjip8SvW0+m9mR5WWTkpK+p6uwJ6yBUAlBnFKMk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": 
"PArHlz+yPRYDycAP/PgnI/AkP8Wgmfg++Vf4UG1Bf0E=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "wnIh53Q3jeK8jEBe1n8kJLa89/H0BxO26ZU8SRIAs9Q=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "4F8U59gzBLGhq58PEWQk2nch+R0Va7eTUoxMneReUIA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ihKagIW3uT1dm22ROr/g5QaCpxZVj2+Fs/YSdM2Noco=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "EJtUOOwjkrPUi9mavYAi+Gom9Y2DuFll7aDwo4mq0M0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dIkr8dbaVRQFskAVT6B286BbcBBt1pZPEOcTZqk4ZcI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "aYVAcZYkH/Tieoa1XOjE/zCy5AJcVTHjS0NG2QB7muA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "sBidL6y8TenseetpioIAAtn0lK/7C8MoW4JXpVYi3z8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "0Dd2klU/t4R86c2WJcJDAd57k/N7OjvYSO5Vf8KH8sw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "I3jZ92WEVmZmgaIkLbuWhBxl7EM6bEjiEttgBJunArA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "aGHoQMlgJoGvArjfIbc3nnkoc8SWBxcrN7hSmjMRzos=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "bpiWPnF/KVBQr5F6MEwc5ZZayzIRvQOLDAm4ntwOi8g=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tI7QVKbE6avWgDD9h4QKyFlnTxFCwd2iLySKakxNR/I=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "XGsge0CnoaXgE3rcpKm8AEeku5QVfokS3kcI+JKV1lk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "JQxlryW2Q5WOwfrjAnaZxDvC83Dg6sjRVP5zegf2WiM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "YFuHKJOfoqp1iGVxoFjx7bLYgVdsN4GuUFxEgO9HJ5s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Z6vUdiCR18ylKomf08uxcQHeRtmyav7/Ecvzz4av3k4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SPGo1Ib5AiP/tSllL7Z5PAypvnKdwJLzt8imfIMSEJQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "m94Nh6PFFQFLIib9Cu5LAKavhXnagSHG6F5EF8lD96I=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "pfEkQI98mB+gm1+JbmVurPAODMFPJ4E8DnqfVyUWbSo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "DNj3OVRLbr43s0vd+rgWghOL3FqeO/60npdojC8Ry/M=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "kAYIQrjHVu49W8FTxyxJeiLVRWWjC9fPcBn+Hx1F+Ss=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "aCSO7UVOpoQvu/iridarxkxV1SVxU1i9HVSYXUAeXk4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Gh6hTP/yj1IKlXQ+Q69KTfMlGZjEcXoRLGbQHNFo/1s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "/gDgIFQ4tAlJk3GN48IS5Qa5IPmErwGk8CHxAbp6gs0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "PICyimwPjxpusyKxNssOOwUotAUbygpyEtORsVGXT8g=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "4lu+cBHyAUvuxC6JUNyHLzHsCogGSWFFnUCkDwfQdgI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "pSndkmoNUJwXjgkbkgOrT5f9nSvuoMEZOkwAN9ElRaE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tyW+D4i26QihNM5MuBM+wnt5AdWGSJaJ4X5ydc9iWTU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "9Syjr8RoxUgPKr+O5rsCu07AvcebA4P8IVKyS1NVLWc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "67tPfDYnK2tmrioI51fOBG0ygajcV0pLo5+Zm/rEW7U=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "y0EiPRxYTuS1eVTIaPQUQBBxwkyxNckbePvKgChwd0M=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "NWd+2veAaeXQgR3vCvzlI4R1WW67D5YsVLdoXfdb8qg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "PY5RQqKQsL2GqBBSPNOEVpojNFRX/NijCghIpxD6CZk=", + 
"subType": "00" + } + }, + { + "$binary": { + "base64": "lcvwTyEjFlssCJtdjRpdN6oY+C7bxZY+WA+QAqzj9zg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "CWE7XRNylvTwO/9Fv56dNqUaQWMmESNS/GNIwgBaEI0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ijwlrUeS8nRYqK1F8kiCYF0mNDolEZS+/lJO1Lg93C8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "8KzV+qYGYuIjoNj8eEpnTuHrMYuhzphl80rS6wrODuU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "wDyTLjSEFF895hSQsHvmoEQVS6KIkZOtq1c9dVogm9I=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SGrtPuMYCjUrfKF0Pq/thdaQzmGBMUvlwN3ORIu9tHU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "KySHON3hIoUk4xWcwTqk6IL0kgjzjxgMBObVIkCGvk4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "hBIdS9j0XJPeT4ot73ngELkpUoSixvRBvdOL9z48jY8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Tx6um0q9HjS5ZvlFhvukpI6ORnyrXMWVW1OoxvgqII0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "zFKlyfX5H81+d4A4J3FKn4T5JfG+OWtR06ddyX4Mxas=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "cGgCDuPV7MeMMYEDpgOupqyNP4BQ4H7rBnd2QygumgM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "IPaUoy98v11EoglTpJ4kBlEawoZ8y7BPwzjLYBpkvHQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Pfo4Am6tOWAyZNn8G9W5HWWGC3ZWmX0igI/RRB870Ro=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "fnTSjd7bC1Udoq6iM7UDnHAC/lsIXSHp/Gy332qw+/I=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "fApBgVRrTDyEumkeWs5p3ag9KB48SbU4Si0dl7Ns9rc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "QxudfBItgoCnUj5NXVnSmWH3HK76YtKkMmzn4lyyUYY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "sSOvwhKa29Wq94bZ5jGIiJQGbG1uBrKSBfOYBz/oZeI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "FdaMgwwJ0NKsqmPZLC5oE+/0D74Dfpvig3LaI5yW5Fs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "sRWBy12IERN43BSZIrnBfC9+zFBUdvjTlkqIH81NGt4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "/4tIRpxKhoOwnXAiFn1Z7Xmric4USOIfKvTYQXk3QTc=", + "subType": "00" + } + } + ] + }, + { + "_id": { + "$numberInt": "1" + }, + "encryptedDecimalNoPrecision": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "Mr/laWHUijZT5VT3x2a7crb7wgd/UXOGz8jr8BVqBpM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "wXVD/HSbBljko0jJcaxJ1nrzs2+pchLQqYR3vywS8SU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "VDCpBYsJIxTfcI6Zgf7FTmKMxUffQv+Ys8zt5dlK76I=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "zYDslUwOUVNwTYkETfjceH/PU3bac9X3UuQyYJ19qK0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "rAOmHSz18Jx107xpbv9fYcPOmh/KPAqge0PAtuhIRnc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "BFOB1OGVUen7VsOuS0g8Ti7oDsTt2Yj/k/7ta8YAdGM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "2fckE5SPs0GU+akDkUEM6mm0EtcV3WDE/sQsnTtodlk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "mi9+aNjuwIvaMpSHENvKzKRAmX9cYguo2mXLvOoftHQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "K6TWn4VcWWkz/gkUkLmbtwkG7SNeABICmLDnoYJFlLU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Z+2/cEtGU0Fq7QJFNGA/0y4aWAsw0ncG6X0LYRqwS3c=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "rrSIf+lgcNZFbbUkS9BmE045jRWBpcBJXHzfMVEFuzE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "KlHL3Kyje1/LMIfgbCqw1SolxffJvvgsYBV5y77wxuA=", + "subType": "00" + } + }, + { 
+ "$binary": { + "base64": "hzJ1YBoETmYeCh352dBmG8d8Wse/bUcqojTWpWQlgsc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "lSdcllDXx8MA+s0GULjDA1lQkcV0L8/aHtZ6dM2pZ2c=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "HGr7JLTTA7ksAnlmjSIwwdBVvgr3fv46/FTdiCPYpos=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "mMr25v1VwOEVZ8xaNUTHJCcsYqV+kwK6RzGYilxPtJ4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "129hJbziPJzNo0IoTU3bECdge0FtaPW8dm4dyNVNwYU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "doiLJ96qoo+v7NqIAZLq6BI5axV8Id8gT5vyJ1ZZ0PM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "cW/Lcul3xYmfyvI/0x/+ybN78aQmBK1XIGs1EEU09N8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "1aVIwzu9N5EJV9yEES+/g6hOTH7cA2NTcLIc59cu0wU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "kw5tyl7Ew0r1wFyrN1mB9FiVW2hK2BxxxUuJDNWjyjQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ADAY2YBrm6RJBDY/eLLcfNxmSJku+mefz74gH66oyco=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "8gkqB1LojzPrstpFG7RHYmWxXpIlPDTqWnNsXH7XDRU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "TESfVQMDQjfTZmHmUeYUE2XrokJ6CcrsKx/GmypGjOw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "qFM+HFVQ539S0Ouynd1fBHoemFxtU9PRxE5+Dq7Ljy4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "jPiFgUZteSmOg4wf3bsEKCZzcnxmMoILsgp/GaZD+dM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "YaWUgJhYgPNN7TkFK16H8SsQS226JguaVhOIQxZwQNQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "x90/Qk3AgyaFsvWf2KUCu5XF3j76WFSjt/GrnG01060=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ZGWybWL/xlEdMYRFCZDUoz10sywTf7U/7wufsb78lH0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "8l4ganN66jIcdxfHAdYLaym/mdzUUQ8TViw3MDRySPc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "c8p5XEGTqxqvRGVlR+nkxw9uUdoqDqTB0jlYQ361qMA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "1ZGFLlpQBcU3zIUg8MmgWwFKVz/SaA7eSYFrfe3Hb70=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "34529174M77rHr3Ftn9r8jU4a5ztYtyVhMn1wryZSkU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "YkQ4pxFWzc49MS0vZM6S8mNo4wAwo21rePBeF3C+9mI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "MhOf4mYY00KKVhptOcXf0bXB7WfuuM801MRJg4vXPgc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "7pbbD8ihNIYIBJ3tAUPGzHpFPpIeCTAk5L88qCB0/9w=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "C9Q5PoNJTQo6pmNzXEEXUEqH22//UUWY1gqILcIywec=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "AqGVk1QjDNDLYWGRBX/nv9QdGR2SEgXZEhF0EWBAiSE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "/sGI3VCbJUKATULJmhTayPOeVW+5MjWSvVCqS77sRbU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yOtbL0ih7gsuoxVtRrACMz+4N5uo7jIR7zzmtih2Beo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "uA6dkb2Iyg9Su8UNDvZzkPx33kPZtWr/CCuEY+XgzUM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "1DoSFPdHIplqZk+DyWAmEPckWwXw/GdB25NLmzeEZhk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "OfDVS0T3ZuIXI/LNbTp6C9UbPIWLKiMy6Wx+9tqNl+g=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "3PZjHXbmG6GtPz+iapKtQ3yY4PoFFgjIy+fV2xQv1YU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "kaoLN0BoBWsmqE7kKkJQejATmLShd8qffcAmlhsxsGY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": 
"vpiw9KgQdegGmp7IJnSGX2miujRLU0xzs0ITTqbPW7c=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "NuXFf7xGUefYjIUTuMxNUTCfVHrF8oL0AT7dPv5Plk4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "8Tz53LxtfEBJ9eR+d2690kwNsqPV6XyKo2PlqZCbUrc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "e6zsOmHSyV8tyQtSX6BSwui6wK9v1xG3giY/IILJQ2w=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "2fedFMCxa2DzmIpfbDKGXhQg0PPwbUv6vIWdwwlvhms=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yEJKMFnWXTC8tJUfzCInzQRByNEPjHxpw4L4m8No91Q=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "YbFuWwOiFuQyOzIJXDbOkCWC2DyrG+248TBuVCa1pXU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "w7IkwGdrguwDrar5+w0Z3va5wXyZ4VXJkDMISyRjPGo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "YmJUoILTRJPhyIyWyXJTsQ6KSZHHbEpwPVup6Ldm/Ko=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "FvMjcwVZJmfh6FP/yBg2wgskK+KHD8YVUY6WtrE8xbg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "h4HCtD4HyYz0nci49IVAa10Z4NJD/FHnRMV4sRX6qro=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "nC7BpXCmym+a0Is2kReM9cYN2M1Eh5rVo8fjms14Oiw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "1qtVWaeVo649ZZZtN8gXbwLgMWGLhz8beODbvru0I7Q=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Ej+mC0QFyMNIiSjR939S+iGBm7dm+1xObu5IcF/OpbU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "UQ8LbUG3cMegbr9yKfKanAPQE1EfPkFciVDrNqZ5GHY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "4iI3mXIDjnX+ralk1HhJY43mZx2uTJM7hsv9MQzTX7E=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "0WQCcs3rvsasgohERHHCaBM4Iy6yomS4qJ5To3/yYiw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "qDCTVPoue1/DOAGNAlUstdA9Sid8MgEY4e5EzHcVHRk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "9F9Mus0UnlzHb8E8ImxgXtz6SU98YXD0JqswOKw/Bzs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "pctHpHKVBBcsahQ6TNh6/1V1ZrqOtKSAPtATV6BJqh0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "vfR3C/4cPkVdxtNaqtF/v635ONbhTf5WbwJM6s4EXNE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ejP43xUBIex6szDcqExAFpx1IE/Ksi5ywJ84GKDFRrs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "jbP4AWYd3S2f3ejmMG7dS5IbrFol48UUoT+ve3JLN6U=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "CiDifI7958sUjNqJUBQULeyF7x0Up3loPWvYKw9uAuw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "e2dQFsiHqd2BFHNhlSxocjd+cPs4wkcUW/CnCz4KNuM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "PJFckVmzBipqaEqsuP2mkjhJE4qhw36NhfQ9DcOHyEU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "S3MeuJhET/B8VcfZYDR9fvX0nscDj416jdDekhmK11s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "CGVHZRXpuNtQviDB2Kj03Q8uvs4w3RwTgV847R7GwPw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yUGgmgyLrxbEpDVy89XN3c2cmFpZXWWmuJ/35zVZ+Jw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "inb6Q97mL1a9onfNTT8v9wsoi/fz7KXKq3p8j90AU9c=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "CCyYx/4npq9xGO1lsCo8ZJhFO9/tN7DB+/DTE778rYg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "LNnYw4fwbiAZu0kBdAHPEm/OFnreS+oArdB5O/l/I98=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "P006SxmUS/RjiQJVYPdMFnNo3827GIEmSzagggkg05Q=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "oyvwY+WsnYV6UHuPki1o0ILJ2jN4uyXf9yaUNtZJyBA=", + 
"subType": "00" + } + }, + { + "$binary": { + "base64": "36Lk3RHWh1wmtCWC/Yj6jNIo17U5y6SofAgQjzjVxD8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "vOOo8FqeHnuO9mqOYjIb4vgwIwVyXZ5Y+bY5d9tGFUM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "bJiDJjwQRNxqxlGjRm5lLziFhcfTDCnQ/qU1V85qcRg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "2Qgrm1n0wUELAQnpkEiIHB856yv76q8jLbpiucetcm0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "5ciPOYxTK0WDwwYyfs7yiVymwtYQXDELLxmM4JLl4/o=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "31dC2WUSIOKQc4jwT6PikfeYTwi80mTlh7P31T5KNQU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "YluTV2Mu53EGCKLcWfHZb0BM/IPW2xJdG3vYlDMEsM4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dh/8lGo2Ek6KukSwutH6Q35iy8TgV0FN0SJqe0ZVHN8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "EVw6HpIs3BKen2qY2gz4y5dw1JpXilfh07msZfQqJpc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "FYolLla9L8EZMROEdWetozroU40Dnmwwx2jIMrr7c1A=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "8M6k4QIutSIj6CM41vvkQtuFsaGrjoR9SZJVSLbfGKQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "9LM0VoddDNHway442MqY+Z7vohB2UHau/cddshhzf40=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "66i8Ytco4Yq/FMl6pIRZazz3CZlu8fO2OI6Pne0pvHU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "2a/HgX+MjZxjXtSvHgF1yEpHMJBkl8Caee8XrJtn0WM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "frhBM662c4ZVG7mWP8K/HhRjd01lydW/cPcHnDjifqc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6k1T7Q1t668PBqv6fwpVnT1HWh7Am5LtbKvwPJKcpGU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "UlJ5Edfusp8S/Pyhw6KTglIejmbr1HO0zUeHn/qFETA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "jsxsB+1ECB3assUdoC333do9tYH+LglHmVSJHy4N8Hg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "2nzIQxGYF7j3bGsIesECEOqhObKs/9ywknPHeJ3yges=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xJYKtuWrX90JrJVoYtnwP7Ce59XQGFYoalxpNfBXEH0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "NLI5lriBTleGCELcHBtNnmnvwSRkHHaLOX4cKboMgTw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "hUOQV0RmE5aJdJww1AR9rirJG4zOYPo+6cCkgn/BGvQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "h4G2Of76AgxcUziBwCyH+ayMOpdBWzg4yFrTfehSC2c=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "VuamM75RzGfQpj2/Y1jSVuQLrhy6OAwlZxjuQLB/9Ss=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "kn9+hLq7hvw02xr9vrplOCDXKBTuFhfbX7d5v/l85Pg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "fAiGqKyLZpGngBYFbtYUYt8LUrJ49vYafiboifTDjxs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "BxRILymgfVJCczqjUIWXcfrfSgrrYkxTM5VTg0HkZLY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "CrFY/PzfPU2zsFkGLu/dI6mEeizZzCR+uYgjZBAHro0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "AEbrIuwvXLTtYgMjOqnGQ8y8axUn5Ukrn7UZRSyfQVw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ouWeVH3PEFg+dKWlXc6BmqirJOaVWjJbMzZbCsce4dA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "+hd6xFB+EG+kVP7WH4uMd1CLaWMnt5xJRaY/Guuga9Q=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "zmpGalfAOL3gmcUMJYcLYIRT/2VDO/1Dw4KdYZoNcng=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "2PbHAoM/46J2UIZ/vyksKzmVVfxA7YUyIxWeL/N/vBk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": 
"7fD9x+zk5MVFesb59Klqiwwmve7P5ON/5COURXj5smE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tlrNQ4jaq051iaWonuv1sSrYhKkL1LtNZuHsvATha3s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "fBodm28iClNpvlRyVq0dOdXQ08S7/N3aDwid+PdWvRo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "O+/nnRqT3Zv7yMMGug8GhKHaWy6u7BfRGtZoj0sdN1c=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "5AZZ/RTMY4Photnm/cpXZr/HnFRi3eljacMsipkJLHA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "oFVyo/kgoMxBIk2VE52ySSimeyU+Gr0EfCwapXnTpKA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Z8v59DfcnviA0mzvnUk+URVO0UuqAWvtarEgJva/n1c=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "P64GOntZ+zBJEHkigoh9FSxSO+rJTqR20z5aiGQ9an4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xMbSuDPfWuO/Dm7wuVl06GnzG9uzTlJJX9vFy7boGlY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "kXPB19mRClxdH2UsHwlttS6lLU2uHvzuZgZz7kC45jU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "NDVjVYXAw4k0w4tFzvs7QDq39aaU3HQor4I2XMKKnCk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "uKw/+ErVfpTO1dGUfd3T/eWfZW3nUxXCdBGdjvHtZ88=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "av0uxEzWkizYWm0QUM/MN1hLibnxPvCWJKwjOV4yVQY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ERwUC47dvgOBzIsEESMIioLYbFOxOe8PtJTnmDkKuHM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "2gseKlG5Le12fS/vj4eaED4lturF16kAgJ1TpW3HxEE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "7Cvg0Y3j/5i2F1TeXxlMmU7xwif5dCmwkZAOrVC5K2Y=", + "subType": "00" + } + } + ] + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 0, + "encryptedDecimalNoPrecision": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDecimalNoPrecision", + "bsonType": "decimal", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encryptedDecimalNoPrecision": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDecimalNoPrecision", + 
"bsonType": "decimal", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "findAndModify": "default", + "query": { + "encryptedDecimalNoPrecision": { + "$gt": { + "$binary": { + "base64": "DR1jAAADcGF5bG9hZACxYgAABGcAnWIAAAMwAH0AAAAFZAAgAAAAAJu2KgiI8vM+kz9qD3ZQzFQY5qbgYqCqHG5R4jAlnlwXBXMAIAAAAAAAUXxFXsz764T79sGCdhxvNd5b6E/9p61FonsHyEIhogVsACAAAAAAt19RL3Oo5ni5L8kcvgOJYLgVYyXJExwP8pkuzLG7f/kAAzEAfQAAAAVkACAAAAAAPQPvL0ARjujSv2Rkm8r7spVsgeC1K3FWcskGGZ3OdDIFcwAgAAAAACgNn660GmefR8jLqzgR1u5O+Uocx9GyEHiBqVGko5FZBWwAIAAAAADflr+fsnZngm6KRWYgHa9JzK+bXogWl9evBU9sQUHPHQADMgB9AAAABWQAIAAAAAD2Zi6kcxmaD2mY3VWrP+wYJMPg6cSBIYPapxaFQxYFdQVzACAAAAAAM/cV36BLBY3xFBXsXJY8M9EHHOc/qrmdc2CJmj3M89gFbAAgAAAAAOpydOrKxx6m2gquSDV2Vv3w10GocmNCFeOo/fRhRH9JAAMzAH0AAAAFZAAgAAAAAOaNqI9srQ/mI9gwbk+VkizGBBH/PPWOVusgnfPk3tY1BXMAIAAAAAAc96O/pwKCmHCagT6T/QV/wz4vqO+R22GsZ1dse2Vg6QVsACAAAAAAgzIak+Q3UFLTHXPmJ+MuEklFtR3eLtvM+jdKkmGCV/YAAzQAfQAAAAVkACAAAAAA0XlQgy/Yu97EQOjronl9b3dcR1DFn3deuVhtTLbJZHkFcwAgAAAAACoMnpVl6EFJak8A+t5N4RFnQhkQEBnNAx8wDqmq5U/dBWwAIAAAAACR26FJif673qpwF1J1FEkQGJ1Ywcr/ZW6JQ7meGqzt1QADNQB9AAAABWQAIAAAAAAOtpNexRxfv0yRFvZO9DhlkpU4mDuAb8ykdLnE5Vf1VAVzACAAAAAAeblFKm/30orP16uQpZslvsoS8s0xfNPIBlw3VkHeekYFbAAgAAAAAPEoHj87sYE+nBut52/LPvleWQBzB/uaJFnosxp4NRO2AAM2AH0AAAAFZAAgAAAAAIr8xAFm1zPmrvW4Vy5Ct0W8FxMmyPmFzdWVzesBhAJFBXMAIAAAAABYeeXjJEzTHwxab6pUiCRiZjxgtN59a1y8Szy3hfkg+gVsACAAAAAAJuoY4rF8mbI+nKb+5XbZShJ8191o/e8ZCRHE0O4Ey8MAAzcAfQAAAAVkACAAAAAAl+ibLk0/+EwoqeC8S8cGgAtjtpQWGEZDsybMPnrrkwEFcwAgAAAAAHPPBudWgQ+HUorLDpJMqhS9VBF2VF5aLcxgrM1s+yU7BWwAIAAAAAAcCcBR2Vyv5pAFbaOU97yovuOi1+ATDnLLcAUqHecXcAADOAB9AAAABWQAIAAAAACR9erwLTb+tcWFZgJ2MEfM0PKI9uuwIjDTHADRFgD+SQVzACAAAAAAcOop8TXsGUVQoKhzUllMYWxL93xCOkwtIpV8Q6hiSYYFbAAgAAAAAKXKmh4V8veYwob1H03Q3p3PN8SRAaQwDT34KlNVUjiDAAM5AH0AAAAFZAAgAAAAALv0vCPgh7QpmM8Ug6ad5ioZJCh7pLMdT8FYyQioBQ6KBXMAIAAAAADsCPyIG8t6ApQkRk1fX/sfc1kpuWCWP8gAEpnYoBSHrQVsACAAAAAAJe/r67N6d8uTiogvfoR9rEXbIDjyLb9EVdqkayFFGaYAAzEwAH0AAAAFZAAgAAAAAIW4AxJgYoM0pcNTwk1RSbyjZGIqgKL1hcTJmNrnZmoPBXMAIAAAAAAZpfx3EFO0vY0f1eHnE0PazgqeNDTaj+pPJMUNW8lFrAVsACAAAAAAP+Um2vwW6Bj6vuz9DKz6+6aWkoKoEmFNoiz/xXm7lOsAAzExAH0AAAAFZAAgAAAAAKliO6L9zgeuufjj174hvmQGNRbmYYs9yAirL7OxwEW3BXMAIAAAAAAqU7vs3DWUQ95Eq8OejwWnD0GuXd+ASi/uD6S0l8MM1QVsACAAAAAAb9legYzsfctBPpHyl7YWpPmLr5QiNZFND/50N1vv2MUAAzEyAH0AAAAFZAAgAAAAAOGQcCBkk+j/Kzjt/Cs6g3BZPJG81wIHBS8JewHGpgk+BXMAIAAAAABjrxZXWCkdzrExwCgyHaafuPSQ4V4x2k9kUCAqUaYKDQVsACAAAAAADBU6KefT0v8zSmseaMNmQxKjJar72y7MojLFhkEHqrUAAzEzAH0AAAAFZAAgAAAAAPmCNEt4t97waOSd5hNi2fNCdWEkmcFJ37LI9k4Az4/5BXMAIAAAAABX7DuDPNg+duvELf3NbLWkPMFw2HGLgWGHyVWcPvSNCAVsACAAAAAAS7El1FtZ5STh8Q1FguvieyYX9b2DF1DFVsb9hzxXYRsAAzE0AH0AAAAFZAAgAAAAAD4vtVUYRNB+FD9yoQ2FVJH3nMeJeKbi6eZfth638YqbBXMAIAAAAAANCuUB4OdmuD6LaDK2f3vaqfgYYvg40wDXOBbcFjTqLwVsACAAAAAA9hqC2VoJBjwR7hcQ45xO8ZVojwC83jiRacCaDj6Px2gAAzE1AH0AAAAFZAAgAAAAAJPIRzjmTjbdIvshG6UslbEOd797ZSIdjGAhGWxVQvK1BXMAIAAAAABgmJ0Jh8WLs9IYs/a7DBjDWd8J3thW/AGJK7zDnMeYOAVsACAAAAAAi9zAsyAuou2oiCUHGc6QefLUkACa9IgeBhGu9W/r0X8AAzE2AH0AAAAFZAAgAAAAAABQyKQPoW8wGPIqnsTv69+DzIdRkohRhOhDmyVHkw9WBXMAIAAAAAAqWA2X4tB/h3O1Xlawtz6ndI6WaTwgU1QYflL35opu5gVsACAAAAAAWI/Gj5aZMwDIxztqmVL0g5LBcI8EdKEc2UA28pnekQoAAzE3AH0AAAAFZAAgAAAAACB7NOyGQ1Id3MYnxtBXqyZ5Ul/lHH6p1b10U63DfT6bBXMAIAAAAADpOryIcndxztkHSfLN3Kzq29sD8djS0PspDSqERMqokQVsACAAAAAADatsMW4ezgnyi1PiP7xk+gA4AFIN/fb5uJqfVkjg4UoAAzE4AH0AAAAFZAAgAAAAAK
VfXLfs8XA14CRTB56oZwV+bFJN5BHraTXbqEXZDmTkBXMAIAAAAAASRWTsfGOpqdffiOodoqIgBzG/yzFyjR5CfUsIUIWGpgVsACAAAAAAkgCHbCwyX640/0Ni8+MoYxeHUiC+FSU4Mn9jTLYtgZgAAzE5AH0AAAAFZAAgAAAAAH/aZr4EuS0/noQR9rcF8vwoaxnxrwgOsSJ0ys8PkHhGBXMAIAAAAACd7ObGQW7qfddcvyxRTkPuvq/PHu7+6I5dxwS1Lzy5XAVsACAAAAAA3q0eKdV7KeU3pc+CtfypKR7BPxwaf30yu0j9FXeOOboAAzIwAH0AAAAFZAAgAAAAAKvlcpFFNq0oA+urq3w6d80PK1HHHw0H0yVWvU9aHijXBXMAIAAAAADWnAHQ5Fhlcjawki7kWzdqjM2f6IdGJblojrYElWjsZgVsACAAAAAAO0wvY66l24gx8nRxyVGC0QcTztIi81Kx3ndRhuZr6W4AAzIxAH0AAAAFZAAgAAAAAH/2aMezEOddrq+dNOkDrdqf13h2ttOnexZsJxG1G6PNBXMAIAAAAABNtgnibjC4VKy5poYjvdsBBnVvDTF/4mmEAxsXVgZVKgVsACAAAAAAqvadzJFLqQbs8WxgZ2D2X+XnaPSDMLCVVgWxx5jnLcYAAzIyAH0AAAAFZAAgAAAAAF2wZoDL6/V59QqO8vdRZWDpXpkV4h4KOCSn5e7x7nmzBXMAIAAAAADLZBu7LCYjbThaVUqMK14H/elrVOYIKJQCx4C9Yjw37gVsACAAAAAAEh6Vs81jLU204aGpL90fmYTm5i5R8/RT1uIbg6VU3HwAAzIzAH0AAAAFZAAgAAAAAH27yYaLn9zh2CpvaoomUPercSfJRUmBY6XFqmhcXi9QBXMAIAAAAAAUwumVlIYIs9JhDhSj0R0+59psCMsFk94E62VxkPt42QVsACAAAAAAT5x2hCCd2bpmpnyWaxas8nSxTc8e4C9DfKaqr0ABEysAAzI0AH0AAAAFZAAgAAAAALMg2kNAO4AFFs/mW3In04yFeN4AP6Vo0klyUoT06RquBXMAIAAAAAAgGWJbeIdwlpqXCyVIYSs0dt54Rfc8JF4b8uYc+YUj0AVsACAAAAAAWHeWxIkyvXTOWvfZzqtPXjfGaWWKjGSIQENTU3zBCrsAAzI1AH0AAAAFZAAgAAAAALas/i1T2DFCEmrrLEi7O2ngJZyFHialOoedVXS+OjenBXMAIAAAAAA1kK0QxY4REcGxHeMkgumyF7iwlsRFtw9MlbSSoQY7uAVsACAAAAAAUNlpMJZs1p4HfsD4Q4WZ4TBEi6Oc2fX34rzyynqWCdwAAzI2AH0AAAAFZAAgAAAAAP1TejmWg1CEuNSMt6NUgeQ5lT+oBoeyF7d2l5xQrbXWBXMAIAAAAABPX0kj6obggdJShmqtVfueKHplH4ZrXusiwrRDHMOKeQVsACAAAAAAIYOsNwC3DA7fLcOzqdr0bOFdHCfmK8tLwPoaE9uKOosAAzI3AH0AAAAFZAAgAAAAAMrKn+QPa/NxYezNhlOX9nyEkN1kE/gW7EuZkVqYl0b8BXMAIAAAAABUoZMSPUywRGfX2EEencJEKH5x/P9ySUVrhStAwgR/LgVsACAAAAAAMgZFH6lQIIDrgHnFeslv3ld20ynwQjQJt3cAp4GgrFkAAzI4AH0AAAAFZAAgAAAAAMmD1+a+oVbiUZd1HuZqdgtdVsVKwuWAn3/M1B6QGBM3BXMAIAAAAACLyytOYuZ9WEsIrrtJbXUx4QgipbaAbmlJvSZVkGi0CAVsACAAAAAA4v1lSp5H9BB+HYJ4bH43tC8aeuPZMf78Ng1JOhJh190AAzI5AH0AAAAFZAAgAAAAAOVKV7IuFwmYP1qVv8h0NvJmfPICu8yQhzjG7oJdTLDoBXMAIAAAAABL70XLfQLKRsw1deJ2MUvxSWKxpF/Ez73jqtbLvqbuogVsACAAAAAAvfgzIorXxE91dDt4nQxYfntTsx0M8Gzdsao5naQqcRUAAzMwAH0AAAAFZAAgAAAAAKS/1RSAQma+xV9rz04IcdzmavtrBDjOKPM+Z2NEyYfPBXMAIAAAAAAOJDWGORDgfRv8+w5nunh41wXb2hCA0MRzwnLnQtIqPgVsACAAAAAAf42C1+T7xdHEFF83+c2mF5S8PuuL22ogXXELnRAZ4boAAzMxAH0AAAAFZAAgAAAAAFeq8o82uNY1X8cH6OhdTzHNBUnCChsEDs5tm0kPBz3qBXMAIAAAAABaxMBbsaeEj/EDtr8nZfrhhhirBRPJwVamDo5WwbgvTQVsACAAAAAAMbH453A+BYAaDOTo5kdhV1VdND1avNwvshEG/4MIJjQAAzMyAH0AAAAFZAAgAAAAAI8IKIfDrohHh2cjspJHCovqroSr5N3QyVtNzFvT5+FzBXMAIAAAAABXHXteKG0DoOMmECKp6ro1MZNQvXGzqTDdZ0DUc8QfFAVsACAAAAAA/w5s++XYmO+9TWTbtGc3n3ndV4T9JUribIbF4jmDLSMAAzMzAH0AAAAFZAAgAAAAAJkHvm15kIu1OtAiaByj5ieWqzxiu/epK6c/9+KYIrB0BXMAIAAAAACzg5TcyANk0nes/wCJudd1BwlkWWF6zw3nGclq5v3SJQVsACAAAAAAvruXHTT3irPJLyWpI1j/Xwf2FeIE/IV+6Z49pqRzISoAAzM0AH0AAAAFZAAgAAAAAAYSOvEWWuSg1Aym7EssNLR+xsY7e9BcwsX4JKlnSHJcBXMAIAAAAABT48eY3PXVDOjw7JpNjOe1j2JyI3LjDnQoqZ8Je5B2KgVsACAAAAAAU2815RR57TQ9uDg0XjWjBkAKvf8yssxDMzrM4+FqP6AAAzM1AH0AAAAFZAAgAAAAAGQxC9L1e9DfO5XZvX1yvc3hTLtQEdKO9FPMkyg0Y9ZABXMAIAAAAADtmcMNJwdWLxQEArMGZQyzpnu+Z5yMmPAkvgq4eAKwNQVsACAAAAAAJ88zt4Y/Hoqh+zrf6KCOiUwHbOzCxSfp6k/qsZaYGEgAAzM2AH0AAAAFZAAgAAAAADLHK2LNCNRO0pv8n4fAsxwtUqCNnVK8rRgNiQfXpHSdBXMAIAAAAACf16EBIHRKD3SzjRW+LMOl+47QXA3CJhMzlcqyFRW22AVsACAAAAAAMGz4fAOa0EoVv90fUffwLjBrQhHATf+NdlgCR65vujAAAzM3AH0AAAAFZAAgAAAAAHiZJiXKNF8bbukQGsdYkEi95I+FSBHy1I5/hK2uEZruBXMAIAAAAADE+lZBa8HDUJPN+bF6xI9x4N7GF9pj3vBR7y0BcfFhBAVsACAAAAAAGIEN6sfqq30nyxW4dxDgXr/jz5HmvA9T1jx/pKCn4zgAAzM4AH0AAAAFZAAgAAAAAI1oa2OIw5TvhT14tYCGmhanUoYcCZtNbrVbeoMldHNZBXMAIAAAAAAx2nS0Ipblf2XOgBiUOuJFBupBhe7nb6QPLZlA4aMPCgVsACAAAAAA9xu828hug
Igo0E3de9dZD+gTpVUGlwtDba+tw/WcbUoAAzM5AH0AAAAFZAAgAAAAABgTWS3Yap7Q59hii/uPPimHWXsr+DUmsqfwt/X73qsOBXMAIAAAAACKK05liW5KrmEAvtpCB1WUltruzUylDDpjea//UlWoOAVsACAAAAAAcgN4P/wakJ5aJK5c1bvJBqpVGND221dli2YicPFfuAYAAzQwAH0AAAAFZAAgAAAAABOAnBPXDp6i9TISQXvcNKwGDLepZTu3cKrB4vKnSCjBBXMAIAAAAADjjzZO7UowAAvpwyG8BNOVqLCccMFk3aDK4unUeft5ywVsACAAAAAA4zkCd4k9gvfXoD1C7vwTjNcdVJwEARh8h/cxZ4PNMfgAAzQxAH0AAAAFZAAgAAAAAHN8hyvT1lYrAsdiV5GBdd5jhtrAYE/KnSjw2Ka9hjz9BXMAIAAAAAD794JK7EeXBs+D7yOVK7nWF8SbZ/7U8gZ7nnT9JFNwTAVsACAAAAAAg8Wt1HO3NhByq2ggux2a4Lo6Gryr24rEFIqh2acrwWMAAzQyAH0AAAAFZAAgAAAAAO93bPrq8bsnp1AtNd9ETnXIz0lH/2HYN/vuw9wA3fyFBXMAIAAAAABHlls5fbaF2oAGqptC481XQ4eYxInTC29aElfmVZgDUgVsACAAAAAANoQXEWpXJpgrSNK/cKi/m7oYhuSRlp1IZBF0bqTEATcAAzQzAH0AAAAFZAAgAAAAAL1YsAZm1SA0ztU6ySIrQgCCA74V6rr0/4iIygCcaJL6BXMAIAAAAADTXWTHWovGmUR1Zg9l/Aqq9H5mOCJQQrb/Dfae7e3wKAVsACAAAAAA5dunyJK6/SVfDD0t9QlNBcFqoZnf9legRjHaLSKAoQMAAzQ0AH0AAAAFZAAgAAAAAEoFAeHk0RZ9kD+cJRD3j7PcE5gzWKnyBrF1I/MDNp5mBXMAIAAAAACgHtc2hMBRSZjKw8RAdDHK+Pi1HeyjiBuAslGVNcW5tAVsACAAAAAAXzBLfq+GxRtX4Wa9fazA49DBLG6AjZm2XODStJKH8D0AAzQ1AH0AAAAFZAAgAAAAAAW+7DmSN/LX+/0uBVJDHIc2dhxAGz4+ehyyz8fAnNGoBXMAIAAAAAA6Ilw42EvvfLJ3Eq8Afd+FjPoPcQutZO6ltmCLEr8kxQVsACAAAAAAbbZalyo07BbFjPFlYmbmv0z023eT9eLkHqeVUnfUAUAAAzQ2AH0AAAAFZAAgAAAAANBdV7M7kuYO3EMoQItAbXv4t2cIhfaT9V6+s4cg9djlBXMAIAAAAABvz4MIvZWxxrcJCL5qxLfFhXiUYB1OLHdKEjco94SgDgVsACAAAAAAK2GVGvyPIKolF/ECcmfmkVcf1/IZNcaTv96N92yGrkEAAzQ3AH0AAAAFZAAgAAAAAMoAoiAn1kc79j5oPZtlMWHMhhgwNhLUnvqkqIFvcH1NBXMAIAAAAADcJTW7WiCyW0Z9YDUYwppXhLj4Ac1povpJvcAq+i48MQVsACAAAAAAIGxGDzoeB3PTmudl4+j6piQB++e33EEzuzAiXcqGxvUAAzQ4AH0AAAAFZAAgAAAAACI3j5QP7dWHpcT6WO/OhsWwRJNASBYqIBDNzW8IorEyBXMAIAAAAABxUpBSjXwCKDdGP9hYU+RvyR+96kChfvyyRC4jZmztqAVsACAAAAAAvBCHguWswb4X0xdcAryCvZgQuthXzt7597bJ5VxAMdgAAzQ5AH0AAAAFZAAgAAAAAKsbycEuQSeNrF8Qnxqw3x3og8JmQabwGqnDbqzFRVrrBXMAIAAAAACno/3ef2JZJS93SVVzmOZSN+jjJHT8s0XYq2M46d2sLAVsACAAAAAAAt5zLJG+/j4K8rnkFtAn8IvdUVNefe6utJ3rdzgwudIAAzUwAH0AAAAFZAAgAAAAAPXIcoO8TiULqlxzb74NFg+I8kWX5uXIDUPnh2DobIoMBXMAIAAAAADR6/drkdTpnr9g1XNvKDwtBRBdKn7c2c4ZNUVK5CThdQVsACAAAAAAJqOA1c6KVog3F4Hb/GfDb3jCxXDRTqpXWSbMH4ePIJsAAzUxAH0AAAAFZAAgAAAAAEa03ZOJmfHT6/nVadvIw71jVxEuIloyvxXraYEW7u7pBXMAIAAAAADzRlBJK75FLiKjz3djqcgjCLo/e3yntI3MnPS48OORhgVsACAAAAAAnQhx4Rnyj081XrLRLD5NLpWmRWCsd0M9Hl7Jl19R0h8AAzUyAH0AAAAFZAAgAAAAAKx8NLSZUU04pSSGmHa5fh2oLHsEN5mmNMNHL95/tuC9BXMAIAAAAAA59hcXVaN3MNdHoo11OcH1aPRzHCwpVjO9mGfMz4xh3QVsACAAAAAAYIPdjV2XbPj7dBeHPwnwhVU7zMuJ+xtMUW5mIOYtmdAAAzUzAH0AAAAFZAAgAAAAAHNKAUxUqBFNS9Ea9NgCZoXMWgwhP4x0/OvoaPRWMquXBXMAIAAAAABUZ551mnP4ZjX+PXU9ttomzuOpo427MVynpkyq+nsYCQVsACAAAAAALnVK5p2tTTeZEh1zYt4iqKIQT9Z0si//Hy1L85oF+5IAAzU0AH0AAAAFZAAgAAAAALfGXDlyDVcGaqtyHkLT0qpuRhJQLgCxtznazhFtuyn/BXMAIAAAAABipxlXDq14C62pXhwAeen5+syA+/C6bN4rtZYcO4zKwAVsACAAAAAAXUf0pzUq0NhLYagWDap4uEiwq5rLpcx29rWbt1NYMsMAAzU1AH0AAAAFZAAgAAAAANoEr8sheJjg4UCfBkuUzarU9NFoy1xwbXjs5ifVDeA9BXMAIAAAAABPoyTf6M+xeZVGES4aNzVlq7LgjqZXJ/QunjYVusGUEAVsACAAAAAA1hA2gMeZZPUNytk9K+lB1RCqWRudRr7GtadJlExJf8oAAzU2AH0AAAAFZAAgAAAAAKvDiK+xjlBe1uQ3SZTNQl2lClIIvpP/5CHwY6Kb3WlgBXMAIAAAAAANnxImq5MFbWaRBHdJp+yD09bVlcFtiFDYsy1eDZj+iQVsACAAAAAAWtsyO+FxMPSIezwsV1TJD8ZrXAdRnQM6DJ+f+1V3qEkAAzU3AH0AAAAFZAAgAAAAAF49IlFH9RmSUSvUQpEPUedEksrQUcjsOv44nMkwXhjzBXMAIAAAAADJtWGbk0bZzmk20obz+mNsp86UCu/nLLlbg7ppxYn7PgVsACAAAAAA3k0Tj/XgPQtcYijH8cIlQoe/VXf15q1nrZNmg7yWYEgAAzU4AH0AAAAFZAAgAAAAAOuSJyuvz50lp3BzXlFKnq62QkN2quNU1Gq1IDsnFoJCBXMAIAAAAAAqavH1d93XV3IzshWlMnzznucadBF0ND092/2ApI1AcAVsACAAAAAAzUrK4kpoKCmcpdZlZNI13fddjdoAseVe67jaX1LobIIAAzU5AH0AAAAFZAAgAAAAALtgC4Whb4ZdkCiI30zY6fwlsxSa7lEaOAU3SfUXr02XBXMAIAAAAACgdZ6U
1ZVgUaZZwbIaCdlANpCw6TZV0bwg3DS1NC/mnAVsACAAAAAAzI49hdpp0PbO7S2KexISxC16sE73EUAEyuqUFAC/J48AAzYwAH0AAAAFZAAgAAAAAF6PfplcGp6vek1ThwenMHVkbZgrc/dHgdsgx1VdPqZ5BXMAIAAAAACha3qhWkqmuwJSEXPozDO8y1ZdRLyzt9Crt2vjGnT7AAVsACAAAAAA7nvcU59+LwxGupSF21jAeAE0x7JE94tjRkJfgM1yKU8AAzYxAH0AAAAFZAAgAAAAAKoLEhLvLjKc7lhOJfx+VrGJCx9tXlOSa9bxQzGR6rfbBXMAIAAAAAAIDK5wNnjRMBzET7x/KAMExL/zi1IumJM92XTgXfoPoAVsACAAAAAAFkUYWFwNr815dEdFqp+TiIozDcq5IBNVkyMoDjharDQAAzYyAH0AAAAFZAAgAAAAADoQv6lutRmh5scQFvIW6K5JBquLxszuygM1tzBiGknIBXMAIAAAAADAD+JjW7FoBQ76/rsECmmcL76bmyfXpUU/awqIsZdO+wVsACAAAAAAPFHdLw3jssmEXsgtvl/RBNaUCRA1kgSwsofG364VOvQAAzYzAH0AAAAFZAAgAAAAAJNHUGAgn56KekghO19d11nai3lAh0JAlWfeP+6w4lJBBXMAIAAAAAD9XGJlvz59msJvA6St9fKW9CG4JoHV61rlWWnkdBRLzwVsACAAAAAAxwP/X/InJJHmrjznvahIMgj6pQR30B62UtHCthSjrP0AAzY0AH0AAAAFZAAgAAAAAHgYoMGjEE6fAlAhICv0+doHcVX8CmMVxyq7+jlyGrvmBXMAIAAAAAC/5MQZgTHuIr/O5Z3mXPvqrom5JTQ8IeSpQGhO9sB+8gVsACAAAAAAuPSXVmJUAUpTQg/A9Bu1hYczZF58KEhVofakygbsvJQAAzY1AH0AAAAFZAAgAAAAANpIljbxHOM7pydY877gpRQvYY2TGK7igqgGsavqGPBABXMAIAAAAAAqHyEu9gpurPOulApPnr0x9wrygY/7mXe9rAC+tPK80wVsACAAAAAA7gkPzNsS3gCxdFBWbSW9tkBjoR5ib+saDvpGSB3A3ogAAzY2AH0AAAAFZAAgAAAAAGR+gEaZTeGNgG9BuM1bX2R9ed4FCxBA9F9QvdQDAjZwBXMAIAAAAABSkrYFQ6pf8MZ1flgmeIRkxaSh/Eep4Btdx4QYnGGnwAVsACAAAAAApRovMiV00hm/pEcT4XBsyPNw0eo8RLAX/fuabjdU+uwAAzY3AH0AAAAFZAAgAAAAAFNprhQ3ZwIcYbuzLolAT5n/vc14P9kUUQComDu6eFyKBXMAIAAAAAAcx9z9pk32YbPV/sfPZl9ALIEVsqoLXgqWLVK/tP+heAVsACAAAAAA/qxvuvJbAHwwhfrPVpmCFzNvg2cU/NXaWgqgYUZpgXwAAzY4AH0AAAAFZAAgAAAAADgyPqQdqQrgfmJjRFAILTHzXbdw5kpKyfeoEcy6YYG/BXMAIAAAAAAE+3XsBQ8VAxAkN81au+f3FDeCD/s7KoZD+fnM1MJSSAVsACAAAAAAhRnjrXecwV0yeCWKJ5J/x12Xx4qVJahsCEVHB/1U2rcAAzY5AH0AAAAFZAAgAAAAAI0CT7JNngTCTUSei1Arw7eHWCD0jumv2rb7imjWIlWABXMAIAAAAABSP8t6ya0SyCphXMwnru6ZUDXWElN0NfBvEOhDvW9bJQVsACAAAAAAGWeGmBNDRaMtvm7Rv+8TJ2sJ4WNXKcp3tqpv5Se9Ut4AAzcwAH0AAAAFZAAgAAAAAD/FIrGYFDjyYmVb7oTMVwweWP7A6F9LnyIuNO4MjBnXBXMAIAAAAACIZgJCQRZu7NhuNMyOqCn1tf+DfU1qm10TPCfj5JYV3wVsACAAAAAA5hmY4ptuNxULGf87SUFXQWGAONsL9U29duh8xqsHtxoAAzcxAH0AAAAFZAAgAAAAAHIkVuNDkSS1cHIThKc/O0r2/ubaABTOi8Q1r/dvBAsEBXMAIAAAAADdHYqchEiJLM340c3Q4vJABmmth3+MKzwLYlsG6GS7sQVsACAAAAAADa+KP/pdTiG22l+ZWd30P1iHjnBF4zSNRdFm0oEK82kAAzcyAH0AAAAFZAAgAAAAAJmoDILNhC6kn3masElfnjIjP1VjsjRavGk1gSUIjh1NBXMAIAAAAAD97Ilvp3XF8T6MmVVcxMPcdL80RgQ09UoC6PnoOvZ1IQVsACAAAAAA2RK3Xng6v8kpvfVW9tkVXjpE+BSnx9/+Fw85Evs+kUEAAzczAH0AAAAFZAAgAAAAAI5bm3YO0Xgf0VT+qjVTTfvckecM3Cwqj7DTKZXf8/NXBXMAIAAAAAD/m+h8fBhWaHm6Ykuz0WX1xL4Eme3ErLObyEVJf8NCywVsACAAAAAAfb1VZZCqs2ivYbRzX4p5CtaCkKW+g20Pr57FWXzEZi8AAzc0AH0AAAAFZAAgAAAAANqo4+p6qdtCzcB4BX1wQ6llU7eFBnuu4MtZwp4B6mDlBXMAIAAAAAAGiz+VaukMZ+6IH4jtn4KWWdKK4/W+O+gRioQDrfzpMgVsACAAAAAAG4YYkTp80EKo59mlHExDodRQFR7njhR5dmISwUJ6ukAAAzc1AH0AAAAFZAAgAAAAAPrFXmHP2Y4YAm7b/aqsdn/DPoDkv7B8egWkfe23XsM1BXMAIAAAAAAGhwpKAr7skeqHm3oseSbO7qKNhmYsuUrECBxJ5k+D2AVsACAAAAAAAqPQi9luYAu3GrFCEsVjd9z2zIDcp6SPTR2w6KQEr+IAAzc2AH0AAAAFZAAgAAAAABzjYxwAjXxXc0Uxv18rH8I3my0Aguow0kTwKyxbrm+cBXMAIAAAAADVbqJVr6IdokuhXkEtXF0C2gINLiAjMVN20lE20Vmp2QVsACAAAAAAD7K1Fx4gFaaizkIUrf+EGXQeG7QX1jadhGc6Ji471H8AAzc3AH0AAAAFZAAgAAAAAFMm2feF2fFCm/UC6AfIyepX/xJDSmnnolQIBnHcPmb5BXMAIAAAAABLI11kFrQoaNVZFmq/38aRNImPOjdJh0Lo6irI8M/AaAVsACAAAAAAOWul0oVqJ9CejD2RqphhTC98DJeRQy5EwbNerU2+4l8AAzc4AH0AAAAFZAAgAAAAAJvXB3KyNiNtQko4SSzo/9b2qmM2zU9CQTTDfLSBWMgRBXMAIAAAAAAvjuVP7KsLRDeqVqRziTKpBrjVyqKiIbO9Gw8Wl2wFTAVsACAAAAAADlE+oc1ins+paNcaOZJhBlKlObDJ4VQORWjFYocM4LgAAzc5AH0AAAAFZAAgAAAAAPGdcxDiid8z8XYnfdDivNMYVPgBKdGOUw6UStU+48CdBXMAIAAAAAARj6g1Ap0eEfuCZ4X2TsEw+Djrhto3fA5nLwPaY0vCTgVsACAAAAAAoHqiwGOUkBu8SX5U1yHho+UIFdSN2MdQN5s6bQ0EsJYAAzgwAH0AAAAFZAAgAAAAAP5
rGPrYGt3aKob5f/ldP0qrW7bmWvqnKY4QwdDWz400BXMAIAAAAADTQkW2ymaaf/bhteOOGmSrIR97bAnJx+yN3yMj1bTeewVsACAAAAAADyQnHGH2gF4w4L8axUsSTf6Ubk7L5/eoFOJk12MtZAoAAzgxAH0AAAAFZAAgAAAAAAlz6wJze5UkIxKpJOZFGCOf3v2KByWyI6NB6JM9wNcBBXMAIAAAAABUC7P/neUIHHoZtq0jFVBHY75tSFYr1Y5S16YN5XxC1QVsACAAAAAAgvxRbXDisNnLY3pfsjDdnFLtkvYUC4lhA68eBXc7KAwAAzgyAH0AAAAFZAAgAAAAAFJ8AtHcjia/9Y5pLEc3qVgH5xKiXw12G9Kn2A1EY8McBXMAIAAAAAAxe7Bdw7eUSBk/oAawa7uicTEDgXLymRNhBy1LAxhDvwVsACAAAAAAxKPaIBKVx3jTA+R/el7P7AZ7efrmTGjJs3Hj/YdMddwAAzgzAH0AAAAFZAAgAAAAAO8uwQUaKFb6vqR3Sv3Wn4QAonC2exOC9lGG1juqP5DtBXMAIAAAAABZf1KyJgQg8/Rf5c02DgDK2aQu0rNCOvaL60ohDHyY+gVsACAAAAAAqyEjfKC8lYoIfoXYHUqHZPoaA6EK5BAZy5dxXZmay4kAAzg0AH0AAAAFZAAgAAAAAE8YtqyRsGCeiR6hhiyisR/hccmK4nZqIMzO4lUBmEFzBXMAIAAAAAC1UYOSKqAeG1UJiKjWFVskRhuFKpj9Ezy+lICZvFlN5AVsACAAAAAA6Ct9nNMKyRazn1OKnRKagm746CGu+jyhbL1qJnZxGi0AAzg1AH0AAAAFZAAgAAAAAPhCrMausDx1QUIEqp9rUdRKyM6a9AAx7jQ3ILIu8wNIBXMAIAAAAACmH8lotGCiF2q9VQxhsS+7LAZv79VUAsOUALaGxE/EpAVsACAAAAAAnc1xCKfdvbUEc8F7XZqlNn1C+hZTtC0I9I3LL06iaNkAAzg2AH0AAAAFZAAgAAAAAOBi/GAYFcstMSJPgp3VkMiuuUUCrZytvqYaU8dwm8v2BXMAIAAAAACEZSZVyD3pKzGlbdwlYmWQhHHTV5SnNLknl2Gw8IaUTQVsACAAAAAAfsLZsEDcWSuNsIo/TD1ReyQW75HPMgmuKZuWFOLKRLoAAzg3AH0AAAAFZAAgAAAAAIQuup+YGfH3mflzWopN8J1X8o8a0d9CSGIvrA5HOzraBXMAIAAAAADYvNLURXsC2ITMqK14LABQBI+hZZ5wNf24JMcKLW+84AVsACAAAAAACzfjbTBH7IwDU91OqLAz94RFkoqBOkzKAqQb55gT4/MAAzg4AH0AAAAFZAAgAAAAAKsh0ADyOnVocFrOrf6MpTrNvAj8iaiE923DPryu124gBXMAIAAAAADg24a8NVE1GyScc6tmnTbmu5ulzO+896fE92lN08MeswVsACAAAAAAaPxcOIxnU7But88/yadOuDJDMcCywwrRitaxMODT4msAAzg5AH0AAAAFZAAgAAAAAKkVC2Y6HtRmv72tDnPUSjJBvse7SxLqnr09/Uuj9sVVBXMAIAAAAABYNFUkH7ylPMN+Bc3HWX1e0flGYNbtJNCY9SltJCW/UAVsACAAAAAAZYK/f9H4OeihmpiFMH7Wm7uLvs2s92zNA8wyrNZTsuMAAzkwAH0AAAAFZAAgAAAAADDggcwcb/Yn1Kk39sOHsv7BO/MfP3m/AJzjGH506Wf9BXMAIAAAAAAYZIsdjICS0+BDyRUPnrSAZfPrwtuMaEDEn0/ijLNQmAVsACAAAAAAGPnYVvo2ulO9z4LGd/69NAklfIcZqZvFX2KK0s+FcTUAAzkxAH0AAAAFZAAgAAAAAEWY7dEUOJBgjOoWVht1wLehsWAzB3rSOBtLgTuM2HC8BXMAIAAAAAAAoswiHRROurjwUW8u8D5EUT+67yvrgpB/j6PzBDAfVwVsACAAAAAA6NhRTYFL/Sz4tao7vpPjLNgAJ0FX6P/IyMW65qT6YsMAAzkyAH0AAAAFZAAgAAAAAPZaapeAUUFPA7JTCMOWHJa9lnPFh0/gXfAPjA1ezm4ZBXMAIAAAAACmJvLY2nivw7/b3DOKH/X7bBXjJwoowqb1GtEFO3OYgAVsACAAAAAA/JcUoyKacCB1NfmH8vYqC1f7rd13KShrQqV2r9QBP44AAzkzAH0AAAAFZAAgAAAAAK00u6jadxCZAiA+fTsPVDsnW5p5LCr4+kZZZOTDuZlfBXMAIAAAAAAote4zTEYMDgaaQbAdN8Dzv93ljPLdGjJzvnRn3KXgtQVsACAAAAAAxXd9Mh6R3mnJy8m7UfqMKi6oD5DlZpkaOz6bEjMOdiwAAzk0AH0AAAAFZAAgAAAAAFbgabdyymiEVYYwtJSWa7lfl/oYuj/SukzJeDOR6wPVBXMAIAAAAADAFGFjS1vPbN6mQEhkDYTD6V2V23Ys9gUEUMGNvMPkaAVsACAAAAAAL/D5Sze/ZoEanZLK0IeEkhgVkxEjMWVCfmJaD3a8uNIAAzk1AH0AAAAFZAAgAAAAABNMR6UBv2E627CqLtQ/eDYx7OEwQ7JrR4mSHFa1N8tLBXMAIAAAAAAxH4gucI4UmNVB7625C6hFSVCuIpJO3lusJlPuL8H5EQVsACAAAAAAVLHNg0OUVqZ7WGOP53BkTap9FOw9dr1P4J8HxqFqU04AAzk2AH0AAAAFZAAgAAAAAG8cd6WBneNunlqrQ2EmNf35W7OGObGq9WL4ePX+LUDmBXMAIAAAAAAjJ2+sX87NSis9hBsgb1QprVRnO7Bf+GObCGoUqyPE4wVsACAAAAAAs9c9SM49/pWmyUQKslpt3RTMBNSRppfNO0JBvUqHPg0AAzk3AH0AAAAFZAAgAAAAAFWOUGkUpy8yf6gB3dio/aOfRKh7XuhvsUj48iESFJrGBXMAIAAAAAAY7sCDMcrUXvNuL6dO0m11WyijzXZvPIcOKob6IpC4PQVsACAAAAAAJOP+EHz6awDb1qK2bZQ3kTV7wsj5Daj/IGAWh4g7omAAAzk4AH0AAAAFZAAgAAAAAGUrIdKxOihwNmo6B+aG+Ag1qa0+iqdksHOjQj+Oy9bZBXMAIAAAAABwa5dbI2KmzBDNBTQBEkjZv4sPaeRkRNejcjdVymRFKQVsACAAAAAA4ml/nm0gJNTcJ4vuD+T2Qfq2fQZlibJp/j6MOGDrbHMAAzk5AH0AAAAFZAAgAAAAAOx89xV/hRk64/CkM9N2EMK6aldII0c8smdcsZ46NbP8BXMAIAAAAADBF6tfQ+7q9kTuLyuyrSnDgmrdmrXkdhl980i1KHuGHgVsACAAAAAACUqiFqHZdGbwAA+hN0YUE5zFg+H+dabIB4dj5/75W/YAAzEwMAB9AAAABWQAIAAAAADJDdC9aEFl4Y8J/awHbnXGHjfP+VXQilPHJg7ewaJI7AVzACAAAAAAE+tqRl6EcBMXvbr4GDiNIYObTsYpa1n6BJk9EjIJVicFbAAgAAAAAJVc+HYYqa
0m1Hq6OiRX8c0iRnJYOt6AJAJoG0sG3GMSAAMxMDEAfQAAAAVkACAAAAAA3F9rjEKhpoHuTULVGgfUsGGwJs3bISrXkFP1v6KoQLgFcwAgAAAAAIBf0tXw96Z/Ds0XSIHX/zk3MzUR/7WZR/J6FpxRWChtBWwAIAAAAABWrjGlvKYuTS2s8L9rYy8Hf0juFGJfwQmxVIjkTmFIGQADMTAyAH0AAAAFZAAgAAAAAOYIYoWkX7dGuyKfi3XssUlc7u/gWzqrR9KMkikKVdmSBXMAIAAAAABVF2OYjRTGi9Tw8XCAwZWLpX35Yl271TlNWp6N/nROhAVsACAAAAAA0nWwYzXQ1+EkDvnGq+SMlq20z+j32Su+i/A95SggPb4AAzEwMwB9AAAABWQAIAAAAACMtPm12YtdEAvqu6Eji1yuRXnu1RJP6h0l7pH3lSH4MwVzACAAAAAAENyCFfyUAh1veQBGx+cxiB7Sasrj41jzCGflZkB5cRMFbAAgAAAAAKdI2LMqISr/T5vuJPg6ZRBm5fVi2aQCc4ra3A4+AjbDAAMxMDQAfQAAAAVkACAAAAAAvlI4lDcs6GB1cnm/Tzo014CXWqidCdyE5t2lknWQd4QFcwAgAAAAAD60SpNc4O2KT7J0llKdSpcX1/Xxs97N715a1HsTFkmBBWwAIAAAAABuuRkJWAH1CynggBt1/5sPh9PoGiqTlS24D/OE2uHXLQADMTA1AH0AAAAFZAAgAAAAAKl8zcHJRDjSjJeV/WvMxulW1zrTFtaeBy/aKKhadc6UBXMAIAAAAADBdWQl5SBIvtZZLIHszePwkO14W1mQ0izUk2Ov21cPNAVsACAAAAAAHErCYycpqiIcCZHdmPL1hi+ovLQk4TAvENpfLdTRamQAAzEwNgB9AAAABWQAIAAAAABb6LXDWqCp1beQgQjj8I3sRTtFhlrmiBi+h/+ikmrvugVzACAAAAAA9stpgTecT7uTyaGNs3K9Bp0A7R0QaIAOfscyMXHBPX8FbAAgAAAAAHUt+McyXrJ1H8SwnHNVO181Ki8vDAM1f7XI26mg95ZDAAMxMDcAfQAAAAVkACAAAAAA97NTT+81PhDhgptNtp4epzA0tP4iNb9j1AWkiiiKGM8FcwAgAAAAAKPbHg7ise16vxmdPCzksA/2Mn/qST0L9Xe8vnQugVkcBWwAIAAAAABB0EMXfvju4JU/mUH/OvxWbPEl9NJkcEp4iCbkXI41fAADMTA4AH0AAAAFZAAgAAAAAMqpayM2XotEFmm0gwQd9rIzApy0X+7HfOhNk6VU7F5lBXMAIAAAAACJR9+q5T9qFHXFNgGbZnPubG8rkO6cwWhzITQTmd6VgwVsACAAAAAAOZLQ6o7e4mVfDzbpQioa4d3RoTvqwgnbmc5Qh2wsZuoAAzEwOQB9AAAABWQAIAAAAADQnslvt6Hm2kJPmqsTVYQHE/wWeZ4bE1XSkt7TKy0r1gVzACAAAAAA8URTA4ZMrhHPvlp53TH6FDCzS+0+61qHm5XK6UiOrKEFbAAgAAAAAHQbgTCdZcbdA0avaTmZXUKnIS7Nwf1tNrcXDCw+PdBRAAMxMTAAfQAAAAVkACAAAAAAhujlgFPFczsdCGXtQ/002Ck8YWQHHzvWvUHrkbjv4rwFcwAgAAAAALbV0lLGcSGfE7mDM3n/fgEvi+ifjl7WZ5b3aqjDNvx9BWwAIAAAAACbceTZy8E3QA1pHmPN5kTlOx3EO8kJM5PUjTVftw1VpgADMTExAH0AAAAFZAAgAAAAABm/6pF96j26Jm7z5KkY1y33zcAEXLx2n0DwC03bs/ixBXMAIAAAAAD01OMvTZI/mqMgxIhA5nLs068mW+GKl3OW3ilf2D8+LgVsACAAAAAAaLvJDrqBESTNZSdcXsd+8GXPl8ZkUsGpeYuyYVv/kygAAzExMgB9AAAABWQAIAAAAACfw9/te4GkHZAapC9sDMHHHZgmlTrccyJDPFciOMSOcwVzACAAAAAAIIC1ZpHObvmMwUfqDRPl4C1aeuHwujM1G/yJbvybMNAFbAAgAAAAAAs9x1SnVpMfNv5Bm1aXGwHmbbI9keWa9HRD35XuCBK5AAMxMTMAfQAAAAVkACAAAAAAkxHJRbnShpPOylLoDdNShfILeA1hChKFQY9qQyZ5VmsFcwAgAAAAAKidrY+rC3hTY+YWu2a7fuMH2RD/XaiTIBW1hrxNCQOJBWwAIAAAAACW0kkqMIzIFMn7g+R0MI8l15fr3k/w/mHtY5n6SYTEwAADMTE0AH0AAAAFZAAgAAAAAByuYl8dBvfaZ0LO/81JW4hYypeNmvLMaxsIdvqMPrWoBXMAIAAAAABNddwobOUJzm9HOUD8BMZJqkNCUCqstHZkC76FIdNg9AVsACAAAAAAQQOkIQtkyNavqCnhQbNg3HfqrJdsAGaoxSJePJl1qXsAAzExNQB9AAAABWQAIAAAAABxMy7X5hf7AXGDz3Y/POu1ZpkMlNcSvSP92NOO/Gs7wAVzACAAAAAAHJshWo2T5wU2zvqCyJzcJQKQaHFHpCpMc9oWBXkpUPoFbAAgAAAAAGeiJKzlUXAvL0gOlW+Hz1mSa2HsV4RGmyLmCHlzbAkoAAMxMTYAfQAAAAVkACAAAAAAlqbslixl7Zw3bRlibZbe/WmKw23k8uKeIzPKYEtbIy0FcwAgAAAAAHEKwpUxkxOfef5HYvulXPmdbzTivwdwrSYIHDeNRcpcBWwAIAAAAADuPckac21Hrg/h0kt5ShJwVEZ9rx6SOHd2+HDjqxEWTQADMTE3AH0AAAAFZAAgAAAAAMXrXx0saZ+5gORmwM2FLuZG6iuO2YS+1IGPoAtDKoKBBXMAIAAAAADIQsxCr8CfFKaBcx8kIeSywnGh7JHjKRJ9vJd9x79y7wVsACAAAAAAcvBV+SykDYhmRFyVYwFYB9oBKBSHr55Jdz2cXeowsUQAAzExOAB9AAAABWQAIAAAAAAm83FA9yDUpwkbKTihe7m53u+DivS9BU2b4vQMtCVQ2AVzACAAAAAAz3m1UB/AbZPa4QSKFDnUgHaT78+6iGOFAtouiBorEgEFbAAgAAAAAIgbpyYtJj5513Z5XYqviH/HXG/5+mqR52iBbfqMmDtZAAMxMTkAfQAAAAVkACAAAAAAJRzYK0PUwr9RPG2/7yID0WgcTJPB2Xjccp5LAPDYunkFcwAgAAAAAIIh24h3DrltAzNFhF+MEmPrZtzr1PhCofhChZqfCW+jBWwAIAAAAAAzRNXtL5o9VXMk5D5ylI0odPDJDSZZry1wfN+TedH70gADMTIwAH0AAAAFZAAgAAAAAHSaHWs/dnmI9sc7nB50VB2Bzs0kHapMHCQdyVEYY30TBXMAIAAAAACkV22lhEjWv/9/DubfHBAcwJggKI5mIbSK5L2nyqloqQVsACAAAAAAS19m7DccQxgryOsBJ3GsCs37yfQqNi1G+S6fCXpEhn4AAzEyMQB9AAAABWQAIAAAAAAC/I4TQRtCl12YZmdGz17X4GqSQgfwC
PgRBwdHmdwu+QVzACAAAAAAx8f3z2ut/RAZhleari4vCEE+tNIn4ikjoUwzitfQ588FbAAgAAAAAJci0w1ZB8W2spJQ+kMpod6HSCtSR2jrabOH+B0fj3A4AAMxMjIAfQAAAAVkACAAAAAADGB5yU2XT0fse/MPWgvBvZikVxrl5pf3S5K1hceKWooFcwAgAAAAAIxTmlLHMjNaVDEfJbXvRez0SEPWFREBJCT6qTHsrljoBWwAIAAAAAAlswzAl81+0DteibwHD+CG5mZJrfHXa9NnEFRtXybzzwADMTIzAH0AAAAFZAAgAAAAABmO7QD9vxWMmFjIHz13lyOeV6vHT6mYCsWxF7hb/yOjBXMAIAAAAACT9lmgkiqzuWG24afuzYiCeK9gmJqacmxAruIukd0xEAVsACAAAAAAZa0/FI/GkZR7CtX18Xg9Tn9zfxkD0UoaSt+pIO5t1t4AAzEyNAB9AAAABWQAIAAAAAAfPUoy7QyZKhIIURso+mkP9qr1izbjETqF5s22GwjCjAVzACAAAAAAvLMsIDQ/go4VUxeh50UHmsvMvfx51cwyONnRD2odvC0FbAAgAAAAAKMb+1CodEalAFnDrEL1Ndt8ztamZ+9134m9Kp3GQgd+AAMxMjUAfQAAAAVkACAAAAAAE3ZqUar0Bq2zWbARE0bAv98jBlK9UJ73/xcwdMWWlSkFcwAgAAAAAK4M+MmC+9sFiFsumMyJZQKxWmmJiuG9H7IzKw083xxkBWwAIAAAAAAqkAONzhvMhkyL1D/6h7QQxEkdhC3p2WjXH+VGq5qCqQADMTI2AH0AAAAFZAAgAAAAAMo8FJiOq63cAmyk2O7eI7GcbQh/1j4RrMTqly3rexftBXMAIAAAAADjVmpd0WiRGTw/gAqEgGolt2EI7Csv14vKdmYoMD0aAgVsACAAAAAA07XQBzBUQMNw7F2/YxJjZNuPVpHTTgbLd1oGk77+bygAAzEyNwB9AAAABWQAIAAAAACu5IGaIx7A3Jvly/kzlCsSA4s3iJwuIl8jEdRH0k93NwVzACAAAAAA9NRUyxYE+t0Xyosyt6vIfMFW/vBoYg6sR+jBNs4JAxIFbAAgAAAAAAzyZ91dx+0oMlOVAjRGiMrPySikY/U9eMEB4WJb3uWtAAMxMjgAfQAAAAVkACAAAAAALkRy0GJInXYLA+cgjs6Myb0a+Gu9hgXhHvhLNoGWfckFcwAgAAAAANbALyt9zCSvwnLaWCd2/y2eoB7qkWTvv1Ldu8r40JPuBWwAIAAAAAD4Fl5bV5sz4isIE9bX+lmAp+aAKaZgVYVZeVfrItkCZAADMTI5AH0AAAAFZAAgAAAAAGoUK/DSWhT8LZhszSUqDbTrp8cSA7rdqmADKL+MILtTBXMAIAAAAABHnEE9bVa6lvhfhEMkkV2kzSSxH/sMW/FIJuw3CzWs6wVsACAAAAAAanavcBdqZxgRGKvEK95wTmeL1K1CeDSXZsXUAs81uOgAAzEzMAB9AAAABWQAIAAAAAC922ZDQE3h2fQKibGMZ9hV0WNlmrPYYSdtaSyYxsWYqgVzACAAAAAAagMovciKK6WVjIc2cCj8nK5O/gVOFFVeVAJpRp89tmQFbAAgAAAAAKcTFfPQzaFiAtSFhqbN02sCE1BKWJSrRfGN5L6oZwzkAAMxMzEAfQAAAAVkACAAAAAAtK+JqX3K/z2txjAU15DgX4y90DS2YLfIJFolCOkJJJwFcwAgAAAAAMnR5V7gfX7MNqqUdL5AkWlkhyFXaBRVNej+Rcn8lrQkBWwAIAAAAAA2cDNRXZuiC241TGRvdFyctJnrNcdbZOP9zHio81tkngADMTMyAH0AAAAFZAAgAAAAAAeGrIMK/bac6kPczxbvRYqKMkcpeI2FjdMpD91FDWIvBXMAIAAAAAAix62z1LeS8yvSXCl5gHSIomjyx76fF3S1lp9k900hygVsACAAAAAAiYwzf2m71aWFD5ajcXyW2JX2EzQOkBroTGMg29nLPYIAAzEzMwB9AAAABWQAIAAAAACphf298InM0Us4HT8o1W1MGw0D/02vd7Jh+U0h7qaFaQVzACAAAAAAFXtk7YpqsOJxsqGWSIL+YcBE96G3Zz9D31gPqDW94y8FbAAgAAAAAAOrS1KVA94rjB1jZ1pPocpCeBG+B14RzWoHqVDpp7JbAAMxMzQAfQAAAAVkACAAAAAATLDS2cuDVM3yDMuWNgk2iGKBTzPpfJMbvxVOSY39ZfcFcwAgAAAAAPT5wRi2cLHIUflXzm6EQB/m7xdThP80ir1VV/JBBqvxBWwAIAAAAAB9lEtZS0aXCFbCtSbhnis27S5IPcfWGygHW8AHn3QqzwADMTM1AH0AAAAFZAAgAAAAAJNjExiZVX7jfFGfYpQu16qxLN0YPqVU/5CQ/Y67YSinBXMAIAAAAABMpm2+6KrkRUlXzQoMPHrQmIO6dkQz66tYdfTeA3dKqQVsACAAAAAAFXobHiMLvNZuEPr8jtewCX2J93EZG3JNeyVg92fue6YAAzEzNgB9AAAABWQAIAAAAABlFkYtLCx901X6QVVMkSn6Z7k30UF4xHaA0OZJJ9bdyQVzACAAAAAATez+F9GHcGzTp7jjv4feboUNb8JCkIp4EqcPFisnq7MFbAAgAAAAACE7JvOpBgMoZ7kRd4QbxIhxukPTUxXpzhjnBHiR7XoRAAMxMzcAfQAAAAVkACAAAAAA8NJKN0IxZnruhswGQkiruv8Ih0EMwDcSZx/Xasup9dkFcwAgAAAAAKaJZRxzA+Igeydvuk6cSwUHXcrmT4PjhuPu//FslpdnBWwAIAAAAAD53Rok1Vq/PMAnXmarqoHJ0PEyYUBmVESa9hIpCv/G9QADMTM4AH0AAAAFZAAgAAAAABHxHdEClz7hbSSgE58+dWLlSMJnoPz+jFxp4bB1GmLQBXMAIAAAAAD3nSvT6aGD+A110J/NwEfp0nPutlmuB5B+wA3CC3noGAVsACAAAAAA3Apjd+TapONB7k5wBVwTWgn8t+Sq2oyyU5/+as109RcAAzEzOQB9AAAABWQAIAAAAAC/o8qW/ifk3KuJ01VFkyNLgQafxB5/bGs2G5VyyVafOwVzACAAAAAA1bMqAFGDHSl6BYNLbxApvkAv2K1/oafywiX0MDz1dGUFbAAgAAAAAHJXLlId3edFoniLD/9K2A5973MeP2Ro31flDyqm3l5QAAMxNDAAfQAAAAVkACAAAAAAY2V8I1bz3a1AxTtmED6UhdhA09huFkuuEX8R+d/WDPUFcwAgAAAAAPTVoNRiI76tcRKqd+JBBVyy4+YcKST42p0QX2BtmQ2VBWwAIAAAAACcxt9hg14WqPNiDv1MkqVljM2e2KJEv53lA17LhV6ZigADMTQxAH0AAAAFZAAgAAAAAO2kSsW0WGN9AOtK4xK2SHrGhWiaAbMEKT4iZkRpaDN/BXMAIAAAAABKGzQcPM8LT2dwOggxoWjv/1imYWabbG/G4kBw8OWaxAVsACAAAAAAC9hLK1dScQTA
qg+YAG3ObdPzg2Xet57HmOFpGmyUR9UAAzE0MgB9AAAABWQAIAAAAAAiCwzNEEaH/mDam68IdDftnhthyUFdb+ZCNSBQ91WlHQVzACAAAAAA7tHyHcxCzmbJeFYZyPm4mEgkTGKOvwY4MX82OvH0Jn8FbAAgAAAAAAb5IAbZ1hXCNegQ+S+C9i/Z8y6sS8KeU04V6hXa2ml6AAMxNDMAfQAAAAVkACAAAAAAGuCHVNJSuoVkpPOnS5s89GuA+BLi2IPBUr2Bg1sWEPIFcwAgAAAAAEl1gncS5/xO7bQ/KQSstRV3rOT2SW6nV92ZANeG2SR6BWwAIAAAAAA9LOcKmhek8F2wAh8yvT/vjp2gaouuO+Hmv10lwAeWPAADMTQ0AH0AAAAFZAAgAAAAAMfxz7gEaoCdPvXrubDhCZUS0ARLZc1svgbXgMDlVBPgBXMAIAAAAAB6a5dDA3fuT5Vz2KvAcbUEFX/+B7Nw2p1QqbPoQ5TTuAVsACAAAAAAcf/y75UOuI62A6vWH7bYr/5Jz+nirZVYK/81trN6XOQAAzE0NQB9AAAABWQAIAAAAACnYsqF/VzmjIImC9+dqrHO1TM6lJ6fRwM0mM6Wf6paOwVzACAAAAAA5tgZzch8uDCR1ky3SllVaKVpxAlbrhvlNDTazZZRZOAFbAAgAAAAALeGiLJS4z2zhgVpxzyPdRYyACP9QzQBOob34YrIZumCAAMxNDYAfQAAAAVkACAAAAAAEC0sIVmadtW4YMuRXH7RpAhXclsd+3bmqGXCMeaT014FcwAgAAAAABPpXh0uzpsJJB+IRUNajmMB9WGwswfpw5T9xk3Xj6ANBWwAIAAAAAAmf+NYh9TZ/QRu3w/GQz66n7DtfbJijN3G7KzeL8lstAADMTQ3AH0AAAAFZAAgAAAAABaIB3n49Xm9cOafSrQsE0WCcYp8rMIO/qVwIlMF5YLRBXMAIAAAAAC9EyWJV3xOu9bzgdJ/yX+ko7qLf1u3AxNMataW2C9EzQVsACAAAAAAvVbDkLxXx2DcMLifIQ3K0IIJcLcAG9DUrNfI6aoUjNcAAzE0OAB9AAAABWQAIAAAAAA5rZItA/cocRnngYqcJ3nBXQ+l688aKz3EQyLbYYunPAVzACAAAAAAwKyA+L7TgxztPClLrIMk2JXR+w7c04N3ZOqPgjvrIvsFbAAgAAAAACzvZ33h6aWEe8hmo+1f6OXJ72FY5hvWaUuha64ZV3KFAAMxNDkAfQAAAAVkACAAAAAA3htn7oHJ0YYpIrs+Mzyh85Ys67HwAdv5LQl1mCdoMWkFcwAgAAAAAEHjCtNNLenHuSIYux6ezAHsXDaj2DlTF67ToDhDDe6HBWwAIAAAAAD+P4H0sk9jOd+7vOANt2/1Ectb+4ZRGPE8GkHWNXW3MgADMTUwAH0AAAAFZAAgAAAAAEnt18Km/nqggfIJWxzTr9r3hnXNaueG6XO9A5G11LnGBXMAIAAAAAD7QxzGMN/ard5TfFLecE6uusMmXG2+RBsBR+/NCQHUwAVsACAAAAAAQEZ1ZZ8GC8rdbg7s87OM5Gr9qkTXS9+P5DuAZxj5Gl4AAzE1MQB9AAAABWQAIAAAAAAVAKK/GoY8AACu/hyMpO4hdLq6JnEyWNzkyci9sbaD/wVzACAAAAAA2HmeqpMlvvBpV2zQTYIRmsc4MFlfHRwLof0ycJgMg/MFbAAgAAAAACdltCeWi5E/q1Li1eXLChpM2D9QQSGLBZ82NklQSc0oAAMxNTIAfQAAAAVkACAAAAAAhHyq1GQC/GiMwpYjcsfkNxolJ10ARKjIjfkW1Wipzi0FcwAgAAAAAD/uaGWxTDq87F8XZ6CrFI+RNa8yMqfSZdqK00Kj833BBWwAIAAAAAD6aEdOO0CsQGagioOCvANPCEHSpJ8BSixlPBq5ERhB7AADMTUzAH0AAAAFZAAgAAAAABAJJxHoZD+MQBWqm9UM9Dd3z5ZohIZGWRaRVRsMptKQBXMAIAAAAADrE/ca+gqj/SH4oao4wE4qn2ovoTydzcMbDbrfnUs3zAVsACAAAAAAeNCIQN6hVnGJinytQRFGlQ2ocoprXNqpia+BSxzl+uwAAzE1NAB9AAAABWQAIAAAAAAv01wz7VG9mTepjXQi6Zma+7b/OVBaKVkWNbgDLr1mFgVzACAAAAAA0I5sxz8r6wkCp5Tgvr+iL4p6MxSOq5d3e1kZG+0b7NkFbAAgAAAAAIA32v6oGkAOS96HexGouNTex+tLahtx9QF2dgGClk6WAAMxNTUAfQAAAAVkACAAAAAAWXecRwxSon68xaa9THXnRDw5ZfzARKnvvjTjtbae6T0FcwAgAAAAAPh0UfUMEo7eILCMv2tiJQe1bF9qtXq7GJtC6H5Va4fIBWwAIAAAAADqFr1ThRrTXNgIOrJWScO9mk86Ufi95IDu5gi4vP+HWQADMTU2AH0AAAAFZAAgAAAAAEY5WL8/LpX36iAB1wlQrMO/xHVjoO9BePVzbUlBYo+bBXMAIAAAAABoKcpadDXUARedDvTmzUzWPe1jTuvD0z9oIcZmKuiSXwVsACAAAAAAJuJbwuaMrAFoI+jU/IYr+k4RzAqITrOjAd3HWCpJHqEAAzE1NwB9AAAABWQAIAAAAADnJnWqsfx0xqNnqfFGCxIplVu8mXjaHTViJT9+y2RuTgVzACAAAAAAWAaSCwIXDwdYxWf2NZTly/iKVfG/KDjHUcA1BokN5sMFbAAgAAAAAJVxavipE0H4/JQvhagdytXBZ8qGooeXpkbPQ1RfYMVHAAMxNTgAfQAAAAVkACAAAAAAsPG7LaIpJvcwqcbtfFUpIjj+vpNj70Zjaw3eV9T+QYsFcwAgAAAAAJQ71zi0NlCyY8ZQs3IasJ4gB1PmWx57HpnlCf3+hmhqBWwAIAAAAACD58TO6d+71GaOoS+r73rAxliAO9GMs4Uc8JbOTmC0OwADMTU5AH0AAAAFZAAgAAAAAAGiSqKaQDakMi1W87rFAhkogfRAevnwQ41onWNUJKtuBXMAIAAAAAASgiDpXfGh7E47KkOD8MAcX8+BnDShlnU5JAGdnPdqOAVsACAAAAAAI+2TTQIgbFq4Yr3lkzGwhG/tqChP7hRAx2W0fNaH6jcAAzE2MAB9AAAABWQAIAAAAAB7L4EnhjKA5xJD3ORhH2wOA1BvpnQ+7IjRYi+jjVEaJAVzACAAAAAAuhBIm0nL3FJnVJId+7CKDASEo+l2E89Z9/5aWSITK4AFbAAgAAAAALtSICOzQDfV9d+gZuYxpEj6cCeHnKTT+2G3ceP2H65kAAMxNjEAfQAAAAVkACAAAAAAaROn1NaDZFOGEWw724dsXBAm6bgmL5i0cki6QZQNrOoFcwAgAAAAANVT8R6UvhrAlyqYlxtmnvkR4uYK/hlvyQmBu/LP6/3ZBWwAIAAAAAD+aHNMP/X+jcRHyUtrCNkk1KfMtoD3GTmShS8pWGLt+AADMTYyAH0AAAAFZAAgAAAAADqSR5e0/Th59LrauDA7OnGD1Xr3H3NokfV
xzDWOFaN7BXMAIAAAAACt30faNwTWRbvmykDpiDYUOCwA6QDbBBYBFWS7rdOB4AVsACAAAAAAF7SvnjjRk5v2flFOKaBAEDvjXaL1cpjsQLtK2fv9zdQAAzE2MwB9AAAABWQAIAAAAADmtb1ZgpZjSeodPG/hIVlsnS8hoRRwRbrTVx89VwL62AVzACAAAAAAi38e1g6sEyVfSDkzZbaZXGxKI/zKNbMasOl2LYoWrq8FbAAgAAAAAALACk0KcCDN/Kv8WuazY8ORtUGkOZ5Dsm0ys1oOppp/AAMxNjQAfQAAAAVkACAAAAAAf/f7AWVgBxoKjr7YsEQ4w/fqSvuQWV2HMiA3rQ7ur0sFcwAgAAAAADkkeJozP6FFhUdRIN74H4UhIHue+eVbOs1NvbdWYFQrBWwAIAAAAAB55FlHAkmTzAYj/TWrGkRJw2EhrVWUnZXDoMYjyfB/ZwADMTY1AH0AAAAFZAAgAAAAAI2WEOymtuFpdKi4ctanPLnlQud+yMKKb8p/nfKmIy56BXMAIAAAAADVKrJmhjr1rfF3p+T+tl7UFd1B7+BfJRk0e7a4im7ozgVsACAAAAAA5E7Ti3PnFiBQoCcb/DN7V1uM3Xd6VKiexPKntssFL7kAAzE2NgB9AAAABWQAIAAAAAAuHU9Qd79hjyvKOujGanSGDIQlxzsql8JytTZhEnPw+AVzACAAAAAAjF2gV/4+sOHVgDd/oR5wDi9zL7NGpGD+NsEpGXy/a4QFbAAgAAAAAJzMoyojYV6Ed/LpVN5zge93Odv3U7JgP7wxeRaJZGTdAAMxNjcAfQAAAAVkACAAAAAA7dQDkt3iyWYCT94d7yqUtPPwp4qkC0ddu+HFdHgVKEkFcwAgAAAAANuYvtvZBTEq4Rm9+5eb7VuFopowkrAuv86PGP8Q8/QvBWwAIAAAAACeqXoAOQOE4j0zRMlkVd8plaW0RX1npsFvB38Xmzv7sAADMTY4AH0AAAAFZAAgAAAAAAwnZSDhL4tNGYxlHPhKYB8s28dY5ScSwiKZm3UhT8U3BXMAIAAAAABDoY6dhivufTURQExyC9Gx3ocpl09bgbbQLChj3qVGbgVsACAAAAAAF+1nS7O0v85s3CCy+9HkdeoEfm2C6ZiNbPMMnSfsMHUAAzE2OQB9AAAABWQAIAAAAAC2VuRdaC4ZJmLdNOvD6R2tnvkyARteqXouJmI46V306QVzACAAAAAAMn1Z6B35wFTX9mEYAPM+IiJ5hauEwfD0CyIvBrxHg7IFbAAgAAAAAOG6DvDZkT9B/xZWmjao2AevN7MMbs3Oh9YJeSd/hZ+hAAMxNzAAfQAAAAVkACAAAAAAVerb7qVNy457rNOHOgDSKyWl5ojun7iWrv1uHPXrIZQFcwAgAAAAAIDcYS9j5z+gx0xdJj09L7876r/vjvKTi/d3bXDE3PhyBWwAIAAAAADuhVLqb1Bkrx8aNymS+bx2cL8GvLFNH4SAi690DUgnWQADMTcxAH0AAAAFZAAgAAAAAH/E44yLxKCJjuSmU9A8SEhbmkDOx1PqqtYcZtgOzJdrBXMAIAAAAABgLh9v2HjBbogrRoQ82LS6KjZQnzjxyJH4PH+F3jupSAVsACAAAAAAIlO46ehXp4TqpDV0t6op++KO+uWBFh8iFORZjmx2IjkAAzE3MgB9AAAABWQAIAAAAAAlNUdDL+f/SSQ5074mrq0JNh7CTXwTbbhsQyDwWeDVMwVzACAAAAAANIH2IlSNG0kUw4qz0budjcWn8mNR9cJlYUqPYdonucAFbAAgAAAAAJMrOUOyiu5Y3sV76zwEFct8L7+i8WGlQI2+8z2W2kzaAAMxNzMAfQAAAAVkACAAAAAASZ+CvUDtlk/R4HAQ3a+PHrKeY/8ifAfh0oXYFqliu80FcwAgAAAAAJelpzPgM65OZFt/mvGGpwibclQ49wH+1gbUGzd9OindBWwAIAAAAAD9qeDchteEpVXWcycmD9kl9449C1dOw0r60TBm5jK+cQADMTc0AH0AAAAFZAAgAAAAAN9fkoUVbvFV2vMNMAkak4gYfEnzwKI3eDM3pnDK5q3lBXMAIAAAAACnDkgVNVNUlbQ9RhR6Aot2nVy+U4km6+GHPkLr631jEAVsACAAAAAANzg/BnkvkmvOr8nS4omF+q9EG/4oisB+ul4YHi938hwAAzE3NQB9AAAABWQAIAAAAAASyK3b1nmNCMptVEGOjwoxYLLS9fYWm/Zxilqea0jpEQVzACAAAAAADDHsGrbqlKGEpxlvfyqOJKQJjwJrzsrB7k3HG0AUJbkFbAAgAAAAAKwx3S4XfDZh4+LuI9jf7XgUh5qiefNv87JD4qvVRfPSAAMxNzYAfQAAAAVkACAAAAAAlSP9iK31GlcG9MKGbLmq+VXMslURr+As736rrVNXcsUFcwAgAAAAAAvbj0zfq9zzi8XReheKFbCB+h9IsOLgXPPpI5vrEJNZBWwAIAAAAABXvoZhaQE7ogWjeBjceVkp03N20cKYP3TA8vuNsgpfAgADMTc3AH0AAAAFZAAgAAAAAOJNORH8Bev97gVU7y6bznOxJ+E6Qoykur1QP76hG1/7BXMAIAAAAAC+C1PtOOrSZgzBAGhr+dPe/kR0JUw9GTwLVNr61xC1aAVsACAAAAAAeA/L8MQIXkamaObtMPLpoDoi5FypA5WAPtMeMrgi0eQAAzE3OAB9AAAABWQAIAAAAAAKcHzLUomavInN6upPkyWhAqYQACP/vdVCIYpiy6U6HgVzACAAAAAATsR4KItY6R2+U7Gg6sJdaEcf58gjd1OulyWovIqfxKcFbAAgAAAAAFbm10ko67ahboAejQdAV0U2uA5OhZYdb8XUFJ8OL46LAAMxNzkAfQAAAAVkACAAAAAAqTOLiMpCdR59tLZzzIPqJvbCNvz2XQL9ust0qYaehtcFcwAgAAAAAArefox/3k5xGOeiw2m6NUdzuGxmPwcu5IFcj+jMwHgHBWwAIAAAAADLZGFJ7MQd5JXMgMXjqZO5LDLxcFClcXPlnRMWRn+1oAADMTgwAH0AAAAFZAAgAAAAAIPSqSeVzSRgNVNmrPYHmUMgykCY27NbdDUNhE5kx/SgBXMAIAAAAAAhX90nNfxyXmZe/+btZ7q6xMX4PFyj0paM1ccJ/5IUUQVsACAAAAAA419oHmD2W0SYoOMwhrhrp8jf68fg9hTkaRdCuVd3CN0AAzE4MQB9AAAABWQAIAAAAACLn5DxiqAosHGXIAY96FwFKjeqrzXWf3VJIQMwx1fl4gVzACAAAAAAindvU27nveutopdvuHmzdENBbeGFtI3Qcsr07jxmvm8FbAAgAAAAAPvl9pBStQvP4OGkN5v0MghUY6djm9n7XdKKfrW0l1sMAAMxODIAfQAAAAVkACAAAAAA7i2S6rHRSPBwZEn59yxaS7HiYBOmObIkeyCcFU42kf8FcwAgAAAAAGb3RSEyBmgarkTvyLWtOLJcPwCKbCRkESG4RZjVmY4iBWwAIAAAAADB2/wo5CSHR4ANti
fY6ZRXNTO5+O8qP82DfAiAeanpZwADMTgzAH0AAAAFZAAgAAAAAFz+M+H/Z94mdPW5oP51B4HWptp1rxcMWAjnlHvWJDWrBXMAIAAAAACBFEOQyL7ZHu4Cq33QvXkmKuH5ibG/Md3RaED9CtG5HwVsACAAAAAAfggtJTprQ/yZzj7y5z9KvXsdeXMWP0yUXMMJqpOwI88AAzE4NAB9AAAABWQAIAAAAAAE7c2x3Z3aM1XGfLNk/XQ9jCazNRbGhVm7H8c2NjS5ywVzACAAAAAARJ9h8fdcwA19velF3L/Wcvi2rCzewlKZ2nA0p8bT9uwFbAAgAAAAAJtWe6b4wK2Hae2dZm/OEpYQnvoZjz4Sz5IgJC2wInecAAMxODUAfQAAAAVkACAAAAAAVoRt9B9dNVvIMGN+ea5TzRzQC+lqSZ8dd/170zU5o9cFcwAgAAAAAEwM95XZin5mv2yhCI8+ugtKuvRVmNgzzIQN0yi1+9aIBWwAIAAAAAAMGBq72n00rox3uqhxSB98mkenTGCdbbUF1gXrgottzgADMTg2AH0AAAAFZAAgAAAAAKRDkjyWv/etlYT4GyoXrmBED2FgZHnhc+l9Wsl06cH2BXMAIAAAAABohlpm3K850Vndf3NmNE0hHqDlNbSR8/IvMidQ3LnIZAVsACAAAAAAW42nGHa6q2MCAaaPVwaIDfr8QLyQwjKq23onZJYsqVsAAzE4NwB9AAAABWQAIAAAAAC3DFh5oklLCNLY90bgWm68dFXz65JpAZSp1K99MBTPAQVzACAAAAAAQgZecmxEUZVHoptEQClDwAf8smI3WynQ/i+JBP0g+kQFbAAgAAAAAEUSQGVnAPISD6voD0DiBUqyWKgt2rta0tjmoe+LNt6IAAMxODgAfQAAAAVkACAAAAAAQ5WKvWSB503qeNlOI2Tpjd5blheNr6OBO8pfJfPNstcFcwAgAAAAAKwHgQLSDJ5NwLBQbY5OnblQIsVDpGV7q3RCbFLD1U4/BWwAIAAAAACQ5nED99LnpbqXZuUOUjnO2HTphEAFBjLD4OZeDEYybgADMTg5AH0AAAAFZAAgAAAAAGfhFY3RGRm5ZgWRQef1tXxHBq5Y6fXaLAR4yJhrTBplBXMAIAAAAACKEF0ApLoB6lP2UqTFsTQYNc9OdDrs/vziPGzttGVLKQVsACAAAAAArOO6FyfNRyBi0sPT5iye7M8d16MTLcwRfodZq4uCYKEAAzE5MAB9AAAABWQAIAAAAAAIM73gPcgzgotYHLeMa2zAU4mFsr7CbILUZWfnuKSwagVzACAAAAAAJCSu98uV8xv88f2BIOWzt6p+6EjQStMBdkGPUkgN79cFbAAgAAAAAMGqPGMPxXbmYbVfSa/japvUljht1zZT33TY7ZjAiuPfAAMxOTEAfQAAAAVkACAAAAAAkWmHCUsiMy1pwZTHxVPBzPTrWFBUDqHNrVqcyyt7nO8FcwAgAAAAAMv2CebFRG/br7USELR98sIdgE9OQCRBGV5JZCO+uPMgBWwAIAAAAABt7qSmn3gxJu7aswsbUiwvO+G6lXj/Xhx+J/zQyZxzLAADMTkyAH0AAAAFZAAgAAAAAGInUYv0lP/rK7McM8taEHXRefk8Q2AunrvWqdfSV7UaBXMAIAAAAACE+WPxJ3gan7iRTbIxXXx+bKVcaf8kP4JD8DcwU0aL7wVsACAAAAAAUC4eTprX4DUZn2X+UXYU6QjtiXk+u57yoOPBbPQUmDkAAzE5MwB9AAAABWQAIAAAAACmHlg2ud3cplXlTsNTpvNnY6Qm1Fce0m899COamoDjaQVzACAAAAAArtJQeJIlepBWRU2aYar7+YGYVQ7dfDc1oxgTmA8r9q0FbAAgAAAAAOk45vg5VqZHAFCO3i0Z52SZi5RADf8NXwf68T5yad/DAAMxOTQAfQAAAAVkACAAAAAApzcWSAbZWV/Rq+ylRNqqlJqNVR4fhXrz4633/MQOQgcFcwAgAAAAAN/jz/bsEleiuCl+li83EWlG6UMHA8CyaOMRKCkXkSCPBWwAIAAAAAC3Sd+Qg+uFDKpGZHbrQgokXHQ1az1aFl4YK343OB6hcQAAEmNtAAAAAAAAAAAAABBwYXlsb2FkSWQAAAAAABBmaXJzdE9wZXJhdG9yAAEAAAASc3AAAQAAAAAAAAAQdGYAAQAAABNtbgD/////Y46NN8CHrb4J7f/fE214AP////9jjo03wIetvgnt/18A", + "subType": "06" + } + } + } + }, + "update": { + "$set": { + "encryptedDecimalNoPrecision": { + "$$type": "binData" + } + } + }, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDecimalNoPrecision", + "bsonType": "decimal", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + } + } + } + ] + } + } + } + }, + "commandName": "findAndModify" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Decimal-InsertFind.json b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Decimal-InsertFind.json new file mode 100644 index 0000000000..e3d52f5d04 --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Decimal-InsertFind.json @@ -0,0 +1,1956 @@ +{ + "description": "fle2v2-Rangev2-Decimal-InsertFind", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.0.0", + "topologies": [ + "replicaset" + ], + "csfle": { + 
"minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted" + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll_unencrypted", + "database": "db_unencrypted", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDecimalNoPrecision", + "bsonType": "decimal", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + } + } + } + ] + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "FLE2 Range Decimal. 
Insert and Find.", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "1" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDecimalNoPrecision": { + "$gt": { + "$numberDecimal": "0" + } + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "1" + } + } + ] + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": { + "$numberInt": "0" + }, + "encryptedDecimalNoPrecision": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "rbf3AeBEv4wWFAKknqDxRW5cLNkFvbIs6iJjc6LShQY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "n+XAuFnP8Dov9TnhGFxNx0K/MnVM9WbJ7RouEu0ndO0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yRXojuVdn5GQtD97qYlaCL6cOLmZ7Cvcb3wFjkLUIdM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "DuIkdRPITRs55I4SZmgomAHCIsDQmXRhW8+MOznkzSk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SsBk+Et1lTbU+QRPx+xyJ/jMkmfG+QCvQEpip2YYrzA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "crCIzOd8KhHvvUlX7M1v9bhvU4pLdTc+X2SuqoKU5Ek=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "YOWdCw4UrqnxkAaVjqmC4sKQDMVMHEpFGnlxpxdaU6E=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "M3SShp81Ff8tQ632qKbv9MUcN6wjDaBReI0VXNu6Xh4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "gzHlSPxpM0hT75kQvWFzGlOxKvDoiKQZOr19V6l2zXI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "s3JnppOGYw9SL2Q1kMAZs948v2F5PrpXjGei/HioDWs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "cG6+3Gk/zEH68P/uuuwiAUVCuyJwa1LeV+t29FlPPAo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dupdvR3AyJtM+g9NDKiaLVOtGca387JQp8w+V03m7Ig=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "JqEQc5svj2jTvZ6LLA5ivE+kTb/0aRemSEmxk4G7Zrg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "szcXXXKnob+p3SoM4yED2R920LeJ7cVsclPMFTe4CeI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "o1QoGVXmuBdHwHm7aCtGMlMVKrjFdYvJXpoq6uhIAZ0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Jfm5wPlqqLCJRGQIqRq2NGmpn7s0Vrih2H3YAOoI2YU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "zMHLb8ARbsYo8Ld05bqnGFf1Usha6EGb8QKwdSAyps0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yQdtq9lh5pugL7/i0Bj/PuZUUBUIzf+7wj1rl5y736w=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "wGWVZdO7qIuyDg/BqDgqjgoQ02h5YYgwXQB1oCin2NE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "by9HMLj6NTEpgztZ5HSN6GxImkXPcaFINYDzgZY33X8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tWo0vbasi7bXmn/MsOx13VC1IsWtpx/nYp0uj4iMzdA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tQQpndUYd5O87lOtrGjH3wl9VsOK0ray7RMasL90sBM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "cQjXEDCMsOpKLLf+vlTgIHA+cbSJdzqhbSX9Wvh95aA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "7yMpU48IxK9SzP2cx3VnTownGEwFmeFofuuFT97SuuY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "kSOx1kz0CmBgzKQHZlo65ZUY1DIv9A99JRm+Us2y6Ew=", + "subType": "00" + } + }, + { + "$binary": { + 
"base64": "ubQpdPBe6/xvtr+AcXdfYLSvYCR4ot0tivehkCsupb4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xal+iCJ6FTefRQToyoNksc9NCZShyn04NDGi4IYrcoM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "d7jU4iOK50xHxlkSifcxlZFCM46TSgQzoYivxG3HNLY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tJvl2nsBLBVzL3pp6sKWCL4UXeh3q/roYBJjSb74ve0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "OIUCaKRvIx9t1w6Hxlz1IcQTdPNCfdRNwnnTm10W+X0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "A9tvzsiElotOUVIB4CqfQp9mAwqvTM35YkmAR170aHA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "lI8gpK7hpb7c9x4RQugsxMnQay5LZJmwslZdvMx/dcE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dNCzh40U0XvdKnSDi3HRQOWQftEsDVqc4uUvsVFGoq8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "IP+iwEBWBwVVZIdpaMu8k5+soFCz+TZkYn3drKZ9grE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "pnqyh6e0y5svHkJDShlN9CHV0WvMBE4QbtJpQw5ZCXc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "elEl42tbVDoRTLjAhZUFEtXiut4b3PVhg/1ZLZSQdtE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "vHuu2FxwclMHqyE6JBYbTYgbEkB0dqb/JuaxsvfwsmY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xTf7NCe3Gf8QpE78HR5OknlLTKfs9J+RN9UZpH6fnso=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "XiWSasRnJAulGR6+LCVD3mwRObXylqYWR9jvpywq12c=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "MZMxEQ5ikx0PG1YFIExv0UnTZogsvgeOEZTpzvBDn4w=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yZMyMZBDrWbAhvnic7vvIYhmO9m5H2iuv0c8KNZrBzY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xxM14hTPY5j0vvcK2C7YAEjzdsfUTFHozHC0hEo1bxI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "+01rqR1xVwkpGXcstbk1ItJqFVjH6Q8MGxEN3Cm9Y1A=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xOpLV0Z2VTRJ3iWtnWZcsyjXubTIkYWo31cO+HV1o1k=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "BWUOLqgLBqc5NwxVlSV5H3KFQPXbCp7mdo+jF+8cJqY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "fuQb1S6xZDGlrEbK+kI23aL53PP1PVNwqICnZNt9Yzg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SfscnoibFttahLdPVC4Ee+47ewGFKpDSU7M6HX19bKE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "rpSW2awybNVeKtat91VFxqbINoTfNhPfQAu+d73Xtf8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "9M/CP9ccOIIj2LLFmE0GFDO0Ban2wsNalEXfM6+h+1s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "WrEMG49l1ye4MhXs5ZS9tz8P6h+hDvthIg/2wW9ne1Q=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ImNhbfeyfH8qIEeA5ic0s3dAQBdzzTBS+CPsNih9vZ0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dWP33YDSn04UKJN2ogh2Rui0iW/0q2y18OCDRVcfyoo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "lYv0isAtfGh6H9tdp3cp2eHU7q2J+uk7QrgcxtK3w7Y=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "VGMoamB/+7zTOYcY/pqJc96xlv2PdW4hwsIAEIslTDQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yNeBWMF7BnD9wVwz2PgJsvWr77QiVvvWUvJF0+fqBug=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SfpvObJ+tJBXSvqeN7vlOfmhYign635lciYAJIjUtY8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dsen4NqjzVGjpjufiTMs3+gqeD09EbnuogPgxrJECwg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "pxCWVM3sn19NsFEpgHbgLa+PmYlhN3mMiP0Wk8kJhYw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": 
"q11KNvJszjYIB9n9HcC+N4uz11a3eRj1L3BH9scKMDQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "A1PmkgcEToWh1JiVWE6mI5jUu7poxWWuCUt/cgRUUDc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "qJo3Hu4PJeanL7XEaWXO/n3YsodhZyd+MJOOmB9Kpd8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "BkBKLO8URFscfRY9Bav/1+L9mLohDgNr/MkZtGiraIs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "rZq5WA3Hx3xthOyHAJXK//f8pE2qbz7YKu3TIMp9GFY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "X07a/Lm80p5xd4RFs1dNmw+90tmPDPdGiAKVZkxd4zY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6YrBn2ofIw1b5ooakrLOwF41BWrps8OO0H9WH4/rtlE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "0l86Ag5OszXpa78SlOUV3K9nff5iC1p0mRXtLg9M1s4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Hn6yuxFHodeyu7ISlhYrbSf9pTiH4TDEvbYLWjTwFO0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "zdf4y2etKBuIpkEU1zMwoCkCsdisfXZCh8QPamm+drY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "rOQ9oMdiK5xxGH+jPzOvwVqdGGnF3+HkJXxn81s6hp4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "61aKKsE3+BJHHWYvs3xSIBvlRmKswmaOo5rygQJguUg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "KuDb/GIzqDM8wv7m7m8AECiWJbae5EKKtJRugZx7kR0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Q+t8t2TmNUiCIorVr9F3AlVnX+Mpt2ZYvN+s8UGict8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tJRZIpKxUgHyL83kW8cvfjkxN3z6WoNnUg+SQw+LK+k=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "pnUsYjip8SvW0+m9mR5WWTkpK+p6uwJ6yBUAlBnFKMk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "PArHlz+yPRYDycAP/PgnI/AkP8Wgmfg++Vf4UG1Bf0E=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "wnIh53Q3jeK8jEBe1n8kJLa89/H0BxO26ZU8SRIAs9Q=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "4F8U59gzBLGhq58PEWQk2nch+R0Va7eTUoxMneReUIA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ihKagIW3uT1dm22ROr/g5QaCpxZVj2+Fs/YSdM2Noco=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "EJtUOOwjkrPUi9mavYAi+Gom9Y2DuFll7aDwo4mq0M0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dIkr8dbaVRQFskAVT6B286BbcBBt1pZPEOcTZqk4ZcI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "aYVAcZYkH/Tieoa1XOjE/zCy5AJcVTHjS0NG2QB7muA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "sBidL6y8TenseetpioIAAtn0lK/7C8MoW4JXpVYi3z8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "0Dd2klU/t4R86c2WJcJDAd57k/N7OjvYSO5Vf8KH8sw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "I3jZ92WEVmZmgaIkLbuWhBxl7EM6bEjiEttgBJunArA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "aGHoQMlgJoGvArjfIbc3nnkoc8SWBxcrN7hSmjMRzos=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "bpiWPnF/KVBQr5F6MEwc5ZZayzIRvQOLDAm4ntwOi8g=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tI7QVKbE6avWgDD9h4QKyFlnTxFCwd2iLySKakxNR/I=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "XGsge0CnoaXgE3rcpKm8AEeku5QVfokS3kcI+JKV1lk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "JQxlryW2Q5WOwfrjAnaZxDvC83Dg6sjRVP5zegf2WiM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "YFuHKJOfoqp1iGVxoFjx7bLYgVdsN4GuUFxEgO9HJ5s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Z6vUdiCR18ylKomf08uxcQHeRtmyav7/Ecvzz4av3k4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SPGo1Ib5AiP/tSllL7Z5PAypvnKdwJLzt8imfIMSEJQ=", + 
"subType": "00" + } + }, + { + "$binary": { + "base64": "m94Nh6PFFQFLIib9Cu5LAKavhXnagSHG6F5EF8lD96I=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "pfEkQI98mB+gm1+JbmVurPAODMFPJ4E8DnqfVyUWbSo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "DNj3OVRLbr43s0vd+rgWghOL3FqeO/60npdojC8Ry/M=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "kAYIQrjHVu49W8FTxyxJeiLVRWWjC9fPcBn+Hx1F+Ss=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "aCSO7UVOpoQvu/iridarxkxV1SVxU1i9HVSYXUAeXk4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Gh6hTP/yj1IKlXQ+Q69KTfMlGZjEcXoRLGbQHNFo/1s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "/gDgIFQ4tAlJk3GN48IS5Qa5IPmErwGk8CHxAbp6gs0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "PICyimwPjxpusyKxNssOOwUotAUbygpyEtORsVGXT8g=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "4lu+cBHyAUvuxC6JUNyHLzHsCogGSWFFnUCkDwfQdgI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "pSndkmoNUJwXjgkbkgOrT5f9nSvuoMEZOkwAN9ElRaE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tyW+D4i26QihNM5MuBM+wnt5AdWGSJaJ4X5ydc9iWTU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "9Syjr8RoxUgPKr+O5rsCu07AvcebA4P8IVKyS1NVLWc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "67tPfDYnK2tmrioI51fOBG0ygajcV0pLo5+Zm/rEW7U=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "y0EiPRxYTuS1eVTIaPQUQBBxwkyxNckbePvKgChwd0M=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "NWd+2veAaeXQgR3vCvzlI4R1WW67D5YsVLdoXfdb8qg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "PY5RQqKQsL2GqBBSPNOEVpojNFRX/NijCghIpxD6CZk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "lcvwTyEjFlssCJtdjRpdN6oY+C7bxZY+WA+QAqzj9zg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "CWE7XRNylvTwO/9Fv56dNqUaQWMmESNS/GNIwgBaEI0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ijwlrUeS8nRYqK1F8kiCYF0mNDolEZS+/lJO1Lg93C8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "8KzV+qYGYuIjoNj8eEpnTuHrMYuhzphl80rS6wrODuU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "wDyTLjSEFF895hSQsHvmoEQVS6KIkZOtq1c9dVogm9I=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SGrtPuMYCjUrfKF0Pq/thdaQzmGBMUvlwN3ORIu9tHU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "KySHON3hIoUk4xWcwTqk6IL0kgjzjxgMBObVIkCGvk4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "hBIdS9j0XJPeT4ot73ngELkpUoSixvRBvdOL9z48jY8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Tx6um0q9HjS5ZvlFhvukpI6ORnyrXMWVW1OoxvgqII0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "zFKlyfX5H81+d4A4J3FKn4T5JfG+OWtR06ddyX4Mxas=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "cGgCDuPV7MeMMYEDpgOupqyNP4BQ4H7rBnd2QygumgM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "IPaUoy98v11EoglTpJ4kBlEawoZ8y7BPwzjLYBpkvHQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Pfo4Am6tOWAyZNn8G9W5HWWGC3ZWmX0igI/RRB870Ro=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "fnTSjd7bC1Udoq6iM7UDnHAC/lsIXSHp/Gy332qw+/I=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "fApBgVRrTDyEumkeWs5p3ag9KB48SbU4Si0dl7Ns9rc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "QxudfBItgoCnUj5NXVnSmWH3HK76YtKkMmzn4lyyUYY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "sSOvwhKa29Wq94bZ5jGIiJQGbG1uBrKSBfOYBz/oZeI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": 
"FdaMgwwJ0NKsqmPZLC5oE+/0D74Dfpvig3LaI5yW5Fs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "sRWBy12IERN43BSZIrnBfC9+zFBUdvjTlkqIH81NGt4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "/4tIRpxKhoOwnXAiFn1Z7Xmric4USOIfKvTYQXk3QTc=", + "subType": "00" + } + } + ] + }, + { + "_id": { + "$numberInt": "1" + }, + "encryptedDecimalNoPrecision": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "RGTjNVEsNJb+DG7DpPOam8rQWD5HZAMpRyiTQaw7tk8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "I93Md7QNPGmEEGYU1+VVCqBPBEvXdqHPtTJtMOn06Yk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "GecBFQ1PemlECWZWCl7f74vmsL6eB6mzQ9n6tK6FYfs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "QpjhZl+O1ORifgtCZuWAdcP6OKL7IZ2cA46v8FJcV28=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "RlQWwhU+uVv0a+9IB5cUkEfvHBvOw3B1Sx6WfPWMqes=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ubb81XTC7U+4tcNzf1oYvOY6gR5hC2Izqx54f4GuJ0E=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6M4Q5NMQ9TqNnjzGOxIkiUIY8TEL0I3XD1QnhefQUqU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "BtInzk9t2FFMCEY6AQ7zN8jwrrZEs2irSv6q0Q4NaIw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6vxXfETu9cuBIpRBo3jUUU04mJIH/aAhLX8K6VI5Xv0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "wXPCdS+q23zi1bkPnaVG2j0PsVtxdeSLJ//h6J1x8RU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "KY3KkfBAsN2l80wbpj41G0gwBR5KmmFnZcagg7D3ENk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tI8NFAxXCX4VOnY5X73K6KI/Yspd3aR94KV39MhJlAw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "nFxH0UC3mATKA6Vboz+QX/hAjj19kF/SH6H5Cne7qC0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "q8hYqIYaIi7nOdG/7qQZYnz8Bsacfi66M1nVku4SH08=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "4saA92R4arp4anvD9xFtze+sNcQqTEhPHyl1h70A8NE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "DbIziOBRRyeQS6RtBR09E37LV+CTKrEjGoRMLSpG6eE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Fv80Plp/7w2gnVqrwawLd6qhJ10G4NCDm3re67cNq4Y=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "T/T2oiQCBBES4YN7EodzPRdabZSFlYIClHBym+bQUZE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ZQgHD3l46Ujqtbnj1VbbeM29C9wJzOhz+yZ/7XdSrxk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ltlFKzWvyZvHxDFOYDd/XXJ6kUiJj0ln2HTCEz2o4Z4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "flW8A7bltC1u8bzx0WJtxosGJdOVsJFfbx33jxnpFGg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SXO+92QbMKwUSG2t27ciunV1c3VvFkUuDmSczpRe008=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "+KioGs1GM+xRBzFE67ePTWj04KMSE5/Y6qUF7nJ5kvU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "L3xNVbh6YH+RzqABN+5Jgb7T234Efpn766DmUvxIxgg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "hPF+60mBYPjh21dEmPlBhKgyc9S2qLtTkypYvnqP2Fc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "EletRsETy2HcjaPIm2c8CkT7ch/P3pJJDC8hasepcSU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "r5bMXUaNKqLPxZ+TG9HYTG4aSDgcpim27rN8rQFkM0w=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "0Q7Erdr8+/S0wUEDDIqlS5XjBVWvhZY65K0uUDb6+Ns=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xEcnhXy35hbXNVBPOOt3TUHbxvKfQ48KjA9b6/rbMqQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": 
"T8bEpiQNgsEudXvyKE9SZlSvbpV/LUaslsdqgSFltyo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "hIoiaF2YjnxDbODfhFEB+JGZ5nf8suD3Shck5bwQ3N0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "qnA6qzejeRJ0rsZaZ0zOvKAaXyxt5lpscKQNYFZNl4k=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "anAKCL2DN/le2VaP0n2ucYSEH/DaaEH/8Sa4OqTZsRA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "JCZlBJaFm618oWYSnT9Jr1MtwFVw4BZjOzO+5yWgR90=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yxyk4n9762WzcDVGnTn4jCqUnSMIVCrLDIjCX1QVj34=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "fDI6fdKvDJwim5/CQwWZEzcrXE3LHgy7FTtffcC7tXE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Vex+gcz5T+WkzsVZQrkqUR2ryyZbnaOGuWpYvjN0zCw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "8TLEXz+Gbbp6llHpZXVjLsdlYY9f6hrKpHVpyfDe0RY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "7fTyt5BrunypS65TfOzFW2E2qdIuT4SLeDeGlbQoJCs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "8fKGrkqN0/KuSjyXgDBmRauDKrSa//JBKRWHEB9xBf4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "s4codmG7uN4ss6P357jL21lazEe90M9GOK5WrOknSV0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "RkSpua8XF+NUdxVDU90EbLUTTyZFX3tt3atBTroFaRk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "LnTCuCDyAHK5B9KXzjtwGmWB+qergQk2OCjnIx9MI2A=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "cBFh0virAX4pVXf/udIGI2951i0+0aZAdJcBVGtYnT4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "G54X6myQXWZ5fw/G31en3QbdgfXzL9+hFTtJpnWMqDI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "EdsiiuezcsFJFnYIyGjCOhnqMj1BOwTB5EFxN+ERUkg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dVH9MXLtk0WTwGQ3xmrhOqfropMUkDW3o6paNPGl3NU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "sB3HqXKWY3pKbuEH8BTbfNIGfbY+7/ZbOc3XC+JRNNI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "WHyDk62Xhqbo4/iie2aLIM4x2uuAjv6102dJSHI58oM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "pNUFuHpeNRDUZ/NrtII2c6sNc9eGR1lIUlIyXKERA+0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "UPa+pdCqnN0bfAptdzldQOSd01gidrDKy8KhWrpSKAI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "l+7dOAlo+HUffMqFYXL6pgUFeTbwOM9CjKQLxEoLtc4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SRnDXV/rN6C8xwMutv9E1luv3DOUio3VkgPr8Cpm7Ew=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "QcH6gl+gX7xZ7OWhUNQMbndJy0Piz49pDo6RsnLkVSA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "t+uL4DnfsI/Zll/KXWW1cOKX3Hu8WIkm3pt9efCVSAQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "myutHDctku/+Uug/nD8gRbYvmx/IovtoAAC2/fz2oHA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6C+cjD0e0nSCP6cPqQYbNG7SlOd6Mfvi8hyfm7Ng+D8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "zg01JSoOj9oBKT0S1ldJucXzY5AKgreS+h2xJreWTOs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "7qQ80/FjodHl1m1py/Oii0/9C/xWbLdhaRXQ+kkCP10=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "YwWMNH07vL6c5Nhg+MRnVByhzUunu8y0VLM9z/XvR5U=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Dle8bU98+fudAbc14SToZFkwvV3tcYVsjDug0NWljpc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "J+eKL1vPJmlzltvhI6Li5Fz/TJmi3Ng+ehRTcs46API=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "zB3XzfFygLwC3WHkj0up+VbEd25KKoce1vOpG/5bwK4=", + 
"subType": "00" + } + }, + { + "$binary": { + "base64": "vnVnmOnL+z2pqwE+A6cVKS0Iwy4F4/2IiElJca9bUQM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "+lG5r/Fpqry3BtFuvY67+RntmHAMDoLVOSGc6ZoXPb0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "L5MXQertqc6uj7ADe8aWKbd1sYHPCE7P1VYVg9Zc3VI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "imKONuZgopt0bhM3GMX2WVPwQYMTobuUUEdhcLfHs4c=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "eOkU1J1uVbiVFWBerbXsSIVcF2nqiicTkFy4x7kFHB8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "gI0uDhXeoH/UatDQKEf4qo8FHzWZDhb/wuWTqbq/ID4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "cOkd5Aa3btYhtojE/smsF/PJnULqQ4NNqTkU6KXTFmo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "AWNJMs1MTe294oFipp8Y6P0CjpkZ4qCZoClQF3XcHq8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6gJtlzXOFhGYrVbTuRMmvMlDTwXdNtR9aGBlHZPwIMw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "LEmwVGA/xsEG7UrcOoYLFu6KCXgijzFznenknuDacm8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "mIRFPTXRrGaPtp/Ydij2jgkRe4uoUvAKxW2d8b9zYL0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "B+Uv2u48WALOO0L311z+eryjYQzKJVMfdHMZPhOAFmY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "INXXp0wDyVCq+NtfIrrC2ciETmyW/dWB/48/u4yLEZ4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "se7DGo8XrlrQDLEcco1tZrQt9kDe+0RTyl2bw/quG4w=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "vr0m2+Zk9lbN6UgWCyn8xJWJOokU3IDYab5U5q1+CgQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "XI+eJ8Gy2JktG1gICgoj1qpsfy1tKmH0kglWbaQH6DA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "A+UCuNnuAUqnQzspA6TVqUPRmtZmpSex5HFw7THRxs0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xaH2Ehfljd19uo0Fvb3iwkdaiWEVQd2YPoitgEPkhSM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "S/iZBJGcc8+qZxyMtab65MMBoSglybwk3x58Nb86gnY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "w14ZE5qqY5YgkS4Zcs9YNbrQbY1XfGOOHNn9bOYnFVQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "0MhGd/jEF1vjkKGp+ZMn9SjLK54jkp9W4Hg+Sp/oxaI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "92QZ73e/NRTYgCm4aifaKth6aAsKnLLccBc0zx/qUTY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "WOjzemCgFJOiGIp81RSVh/tFlzSTj9eFWcBnsiv2Ycs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "DrsP9CmfKPjw5yLL8bnSeAxfNzAwlb+Z8OqCiKgBY7o=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "lMogqg8veBv6mri3/drMe9afJiKMvevkmGcw9BedfLo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "TxqwNcY8Tg2MPpNdkPBwvfpuTttSYRHU26DGECKYQ9o=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "l0u1b4b4vYACWIwfnB7PZac4oDEgjQZCzHruNPTgAIY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "iVSGQ+cCfhbWIrY/v/WBORK92elu9gfRKyGhr6r/k00=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yK1forG50diEXte8ECzjfpHeYsPyuQ/dgxbxn/nzY5k=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "gIfTLCD3VwnOwkC0zPXWTqaITxX6ZplA69PO2a6zolc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "O/Zxlgh3WqpzJ7+Sd8XWMVID4/GXJUUWaSqfgDUi3b0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ZQ6yv368zwahUqSUYH/StL0Qgz/TwS1CzlMjVDvCciI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "m2rPEYkjwyiKdonMrKlcF7hya4lFOAUwEePJ3SgrNx8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": 
"Mq0yl5iVKlq71bT/dT/fXOWf2n90bTnXFnOdGDN0JOc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6qDGMXipPLC2O6EAAMjO2F9xx4rdqZso4IkPpH2304U=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "jvQHRQQa2RIszE2LX2Hv2LbRhYawJ6qmtRt8HZzFQXg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ovJXQrkZlpeHRciKyE/WWNm5O389gRgzx1W+Dw596X4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "a4kgRNvYctGYqyQv9qScL/WkljTYVylJ9pE9KDULlxU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "qV4Q48vPiCJMTjljotzYKI/zfExWpkKOSHGcAjGyDig=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "jtI7zbBF+QW/aYYTkn90zzyHLXLgmy7l1bzgMb2oqic=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "q0KmJl9txPdn962UNvnfe6UFhdk9YaFZuTm33F+csso=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ULNdEqeZJgtmNOhN/Y9INzsE9AnxWYwOMn+pIbRXIFs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "R4oz9+wkdjpKe5tE1jpG7IURAnfvS5fLP4LrD5cZfTE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "qG5Z7VhwSu/HT/YFTgDzyAAzJKq51xPw2HeEV5btYC4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "OM/1DmIIZ5Qyhtq8TGkHTBEMVKjAnKRZMRXYtTG8ctc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "2R5vZbljLXnDFA99YfGuRB7pAdPJVKsT25zLNMC0fUk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "OMbavF2EmdAz1fHkLV3ctFEUDfriKhoT2gidwHZ9z1o=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "MWT4Zrw3/vVvTYMa1Is5Pjr3wEwnBfnEAPPUAHKQhNU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tBkRPfG9yxfKocQx5pAJX0oEHKPL0Tgtr+0UYe09InE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "lqxpnDR/H0YgH7RcfKoNoaaRhe1SIazIeMbQ1fu9y3Q=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "utT1UdR22PWOTrOkZauztX613lAplV4eh/ejTRb7ZSk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "S+Y2yFyKi/a6FXhih4yGo29X8I8OT6/zwEoX6NMKT4o=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "QSjVppg29x6oS5yBg8OFjrFt0tuTpWCuKxfIy0k8YnE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "y3r6/Xsfvsl3HksXlVYkJgHUqpQGfICxg3x9f8Zw1qM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "BSltHzEwDjFN4du9rDHAPvl22atlcTioEtt+gC5L1tk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "0arGXjSN0006UnXbrWsGqhvBair569DeFDUME3Df3rA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "s/DumaMad08S+PBUUcrS+v42K0z8HgcdiQtrFAEu2Qs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "EzJ8Y8N0OQBTlnvrK82PdevDNZZO4E6CNgYVu8Cj6Ks=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "VA4vr8jBPI5QdiPrULzzZjBMIUbG3V7Slg5zm0bFcKc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "YAOvEB2ZLtq9LQiFViBHWaxxWVVonC2rNYj9tN9s3L0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "hgaHMo9aAGS+nBwvqnTjZO+YkiQPY1c1XcIYeaYKHyI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "YvaoLt3ZpH0atB0tNzwMjpoxRYJXl0DqSjisMJiGVBE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "EMmW6CptFsiLoPOi5/uAJQ2FmeLg6mCpuVLLrRWk7Mc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "1jQsNMarSnarlYmXEuoFokeBMg/090qUD9wqo1Zn8Gs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "hupXNKhRpJxpyDAAP1TgJ5JMZh9lhbMk6s7D7dMS3C8=", + "subType": "00" + } + } + ] + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, 
+ "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 0, + "encryptedDecimalNoPrecision": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDecimalNoPrecision", + "bsonType": "decimal", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encryptedDecimalNoPrecision": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDecimalNoPrecision", + "bsonType": "decimal", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "default", + "filter": { + "encryptedDecimalNoPrecision": { + "$gt": { + "$binary": { + "base64": 
"DR1jAAADcGF5bG9hZACxYgAABGcAnWIAAAMwAH0AAAAFZAAgAAAAAJu2KgiI8vM+kz9qD3ZQzFQY5qbgYqCqHG5R4jAlnlwXBXMAIAAAAAAAUXxFXsz764T79sGCdhxvNd5b6E/9p61FonsHyEIhogVsACAAAAAAt19RL3Oo5ni5L8kcvgOJYLgVYyXJExwP8pkuzLG7f/kAAzEAfQAAAAVkACAAAAAAPQPvL0ARjujSv2Rkm8r7spVsgeC1K3FWcskGGZ3OdDIFcwAgAAAAACgNn660GmefR8jLqzgR1u5O+Uocx9GyEHiBqVGko5FZBWwAIAAAAADflr+fsnZngm6KRWYgHa9JzK+bXogWl9evBU9sQUHPHQADMgB9AAAABWQAIAAAAAD2Zi6kcxmaD2mY3VWrP+wYJMPg6cSBIYPapxaFQxYFdQVzACAAAAAAM/cV36BLBY3xFBXsXJY8M9EHHOc/qrmdc2CJmj3M89gFbAAgAAAAAOpydOrKxx6m2gquSDV2Vv3w10GocmNCFeOo/fRhRH9JAAMzAH0AAAAFZAAgAAAAAOaNqI9srQ/mI9gwbk+VkizGBBH/PPWOVusgnfPk3tY1BXMAIAAAAAAc96O/pwKCmHCagT6T/QV/wz4vqO+R22GsZ1dse2Vg6QVsACAAAAAAgzIak+Q3UFLTHXPmJ+MuEklFtR3eLtvM+jdKkmGCV/YAAzQAfQAAAAVkACAAAAAA0XlQgy/Yu97EQOjronl9b3dcR1DFn3deuVhtTLbJZHkFcwAgAAAAACoMnpVl6EFJak8A+t5N4RFnQhkQEBnNAx8wDqmq5U/dBWwAIAAAAACR26FJif673qpwF1J1FEkQGJ1Ywcr/ZW6JQ7meGqzt1QADNQB9AAAABWQAIAAAAAAOtpNexRxfv0yRFvZO9DhlkpU4mDuAb8ykdLnE5Vf1VAVzACAAAAAAeblFKm/30orP16uQpZslvsoS8s0xfNPIBlw3VkHeekYFbAAgAAAAAPEoHj87sYE+nBut52/LPvleWQBzB/uaJFnosxp4NRO2AAM2AH0AAAAFZAAgAAAAAIr8xAFm1zPmrvW4Vy5Ct0W8FxMmyPmFzdWVzesBhAJFBXMAIAAAAABYeeXjJEzTHwxab6pUiCRiZjxgtN59a1y8Szy3hfkg+gVsACAAAAAAJuoY4rF8mbI+nKb+5XbZShJ8191o/e8ZCRHE0O4Ey8MAAzcAfQAAAAVkACAAAAAAl+ibLk0/+EwoqeC8S8cGgAtjtpQWGEZDsybMPnrrkwEFcwAgAAAAAHPPBudWgQ+HUorLDpJMqhS9VBF2VF5aLcxgrM1s+yU7BWwAIAAAAAAcCcBR2Vyv5pAFbaOU97yovuOi1+ATDnLLcAUqHecXcAADOAB9AAAABWQAIAAAAACR9erwLTb+tcWFZgJ2MEfM0PKI9uuwIjDTHADRFgD+SQVzACAAAAAAcOop8TXsGUVQoKhzUllMYWxL93xCOkwtIpV8Q6hiSYYFbAAgAAAAAKXKmh4V8veYwob1H03Q3p3PN8SRAaQwDT34KlNVUjiDAAM5AH0AAAAFZAAgAAAAALv0vCPgh7QpmM8Ug6ad5ioZJCh7pLMdT8FYyQioBQ6KBXMAIAAAAADsCPyIG8t6ApQkRk1fX/sfc1kpuWCWP8gAEpnYoBSHrQVsACAAAAAAJe/r67N6d8uTiogvfoR9rEXbIDjyLb9EVdqkayFFGaYAAzEwAH0AAAAFZAAgAAAAAIW4AxJgYoM0pcNTwk1RSbyjZGIqgKL1hcTJmNrnZmoPBXMAIAAAAAAZpfx3EFO0vY0f1eHnE0PazgqeNDTaj+pPJMUNW8lFrAVsACAAAAAAP+Um2vwW6Bj6vuz9DKz6+6aWkoKoEmFNoiz/xXm7lOsAAzExAH0AAAAFZAAgAAAAAKliO6L9zgeuufjj174hvmQGNRbmYYs9yAirL7OxwEW3BXMAIAAAAAAqU7vs3DWUQ95Eq8OejwWnD0GuXd+ASi/uD6S0l8MM1QVsACAAAAAAb9legYzsfctBPpHyl7YWpPmLr5QiNZFND/50N1vv2MUAAzEyAH0AAAAFZAAgAAAAAOGQcCBkk+j/Kzjt/Cs6g3BZPJG81wIHBS8JewHGpgk+BXMAIAAAAABjrxZXWCkdzrExwCgyHaafuPSQ4V4x2k9kUCAqUaYKDQVsACAAAAAADBU6KefT0v8zSmseaMNmQxKjJar72y7MojLFhkEHqrUAAzEzAH0AAAAFZAAgAAAAAPmCNEt4t97waOSd5hNi2fNCdWEkmcFJ37LI9k4Az4/5BXMAIAAAAABX7DuDPNg+duvELf3NbLWkPMFw2HGLgWGHyVWcPvSNCAVsACAAAAAAS7El1FtZ5STh8Q1FguvieyYX9b2DF1DFVsb9hzxXYRsAAzE0AH0AAAAFZAAgAAAAAD4vtVUYRNB+FD9yoQ2FVJH3nMeJeKbi6eZfth638YqbBXMAIAAAAAANCuUB4OdmuD6LaDK2f3vaqfgYYvg40wDXOBbcFjTqLwVsACAAAAAA9hqC2VoJBjwR7hcQ45xO8ZVojwC83jiRacCaDj6Px2gAAzE1AH0AAAAFZAAgAAAAAJPIRzjmTjbdIvshG6UslbEOd797ZSIdjGAhGWxVQvK1BXMAIAAAAABgmJ0Jh8WLs9IYs/a7DBjDWd8J3thW/AGJK7zDnMeYOAVsACAAAAAAi9zAsyAuou2oiCUHGc6QefLUkACa9IgeBhGu9W/r0X8AAzE2AH0AAAAFZAAgAAAAAABQyKQPoW8wGPIqnsTv69+DzIdRkohRhOhDmyVHkw9WBXMAIAAAAAAqWA2X4tB/h3O1Xlawtz6ndI6WaTwgU1QYflL35opu5gVsACAAAAAAWI/Gj5aZMwDIxztqmVL0g5LBcI8EdKEc2UA28pnekQoAAzE3AH0AAAAFZAAgAAAAACB7NOyGQ1Id3MYnxtBXqyZ5Ul/lHH6p1b10U63DfT6bBXMAIAAAAADpOryIcndxztkHSfLN3Kzq29sD8djS0PspDSqERMqokQVsACAAAAAADatsMW4ezgnyi1PiP7xk+gA4AFIN/fb5uJqfVkjg4UoAAzE4AH0AAAAFZAAgAAAAAKVfXLfs8XA14CRTB56oZwV+bFJN5BHraTXbqEXZDmTkBXMAIAAAAAASRWTsfGOpqdffiOodoqIgBzG/yzFyjR5CfUsIUIWGpgVsACAAAAAAkgCHbCwyX640/0Ni8+MoYxeHUiC+FSU4Mn9jTLYtgZgAAzE5AH0AAAAFZAAgAAAAAH/aZr4EuS0/noQR9rcF8vwoaxnxrwgOsSJ0ys8PkHhGBXMAIAAAAACd7ObGQW7qfddcvyxRTkPuvq/PHu7+6I5dxwS1Lzy5XAVsACAAAAAA3q0eKdV7KeU3pc+CtfypKR7BPxwaf30yu0j9FXeOOboAAzIwAH0AAAAFZAAgAAAAAKvlcpFFNq0oA+urq3w6d80PK1HHHw0H0yVWvU9aHijXBXMAIAAAAADWnAHQ5Fhlcjawki7kWz
dqjM2f6IdGJblojrYElWjsZgVsACAAAAAAO0wvY66l24gx8nRxyVGC0QcTztIi81Kx3ndRhuZr6W4AAzIxAH0AAAAFZAAgAAAAAH/2aMezEOddrq+dNOkDrdqf13h2ttOnexZsJxG1G6PNBXMAIAAAAABNtgnibjC4VKy5poYjvdsBBnVvDTF/4mmEAxsXVgZVKgVsACAAAAAAqvadzJFLqQbs8WxgZ2D2X+XnaPSDMLCVVgWxx5jnLcYAAzIyAH0AAAAFZAAgAAAAAF2wZoDL6/V59QqO8vdRZWDpXpkV4h4KOCSn5e7x7nmzBXMAIAAAAADLZBu7LCYjbThaVUqMK14H/elrVOYIKJQCx4C9Yjw37gVsACAAAAAAEh6Vs81jLU204aGpL90fmYTm5i5R8/RT1uIbg6VU3HwAAzIzAH0AAAAFZAAgAAAAAH27yYaLn9zh2CpvaoomUPercSfJRUmBY6XFqmhcXi9QBXMAIAAAAAAUwumVlIYIs9JhDhSj0R0+59psCMsFk94E62VxkPt42QVsACAAAAAAT5x2hCCd2bpmpnyWaxas8nSxTc8e4C9DfKaqr0ABEysAAzI0AH0AAAAFZAAgAAAAALMg2kNAO4AFFs/mW3In04yFeN4AP6Vo0klyUoT06RquBXMAIAAAAAAgGWJbeIdwlpqXCyVIYSs0dt54Rfc8JF4b8uYc+YUj0AVsACAAAAAAWHeWxIkyvXTOWvfZzqtPXjfGaWWKjGSIQENTU3zBCrsAAzI1AH0AAAAFZAAgAAAAALas/i1T2DFCEmrrLEi7O2ngJZyFHialOoedVXS+OjenBXMAIAAAAAA1kK0QxY4REcGxHeMkgumyF7iwlsRFtw9MlbSSoQY7uAVsACAAAAAAUNlpMJZs1p4HfsD4Q4WZ4TBEi6Oc2fX34rzyynqWCdwAAzI2AH0AAAAFZAAgAAAAAP1TejmWg1CEuNSMt6NUgeQ5lT+oBoeyF7d2l5xQrbXWBXMAIAAAAABPX0kj6obggdJShmqtVfueKHplH4ZrXusiwrRDHMOKeQVsACAAAAAAIYOsNwC3DA7fLcOzqdr0bOFdHCfmK8tLwPoaE9uKOosAAzI3AH0AAAAFZAAgAAAAAMrKn+QPa/NxYezNhlOX9nyEkN1kE/gW7EuZkVqYl0b8BXMAIAAAAABUoZMSPUywRGfX2EEencJEKH5x/P9ySUVrhStAwgR/LgVsACAAAAAAMgZFH6lQIIDrgHnFeslv3ld20ynwQjQJt3cAp4GgrFkAAzI4AH0AAAAFZAAgAAAAAMmD1+a+oVbiUZd1HuZqdgtdVsVKwuWAn3/M1B6QGBM3BXMAIAAAAACLyytOYuZ9WEsIrrtJbXUx4QgipbaAbmlJvSZVkGi0CAVsACAAAAAA4v1lSp5H9BB+HYJ4bH43tC8aeuPZMf78Ng1JOhJh190AAzI5AH0AAAAFZAAgAAAAAOVKV7IuFwmYP1qVv8h0NvJmfPICu8yQhzjG7oJdTLDoBXMAIAAAAABL70XLfQLKRsw1deJ2MUvxSWKxpF/Ez73jqtbLvqbuogVsACAAAAAAvfgzIorXxE91dDt4nQxYfntTsx0M8Gzdsao5naQqcRUAAzMwAH0AAAAFZAAgAAAAAKS/1RSAQma+xV9rz04IcdzmavtrBDjOKPM+Z2NEyYfPBXMAIAAAAAAOJDWGORDgfRv8+w5nunh41wXb2hCA0MRzwnLnQtIqPgVsACAAAAAAf42C1+T7xdHEFF83+c2mF5S8PuuL22ogXXELnRAZ4boAAzMxAH0AAAAFZAAgAAAAAFeq8o82uNY1X8cH6OhdTzHNBUnCChsEDs5tm0kPBz3qBXMAIAAAAABaxMBbsaeEj/EDtr8nZfrhhhirBRPJwVamDo5WwbgvTQVsACAAAAAAMbH453A+BYAaDOTo5kdhV1VdND1avNwvshEG/4MIJjQAAzMyAH0AAAAFZAAgAAAAAI8IKIfDrohHh2cjspJHCovqroSr5N3QyVtNzFvT5+FzBXMAIAAAAABXHXteKG0DoOMmECKp6ro1MZNQvXGzqTDdZ0DUc8QfFAVsACAAAAAA/w5s++XYmO+9TWTbtGc3n3ndV4T9JUribIbF4jmDLSMAAzMzAH0AAAAFZAAgAAAAAJkHvm15kIu1OtAiaByj5ieWqzxiu/epK6c/9+KYIrB0BXMAIAAAAACzg5TcyANk0nes/wCJudd1BwlkWWF6zw3nGclq5v3SJQVsACAAAAAAvruXHTT3irPJLyWpI1j/Xwf2FeIE/IV+6Z49pqRzISoAAzM0AH0AAAAFZAAgAAAAAAYSOvEWWuSg1Aym7EssNLR+xsY7e9BcwsX4JKlnSHJcBXMAIAAAAABT48eY3PXVDOjw7JpNjOe1j2JyI3LjDnQoqZ8Je5B2KgVsACAAAAAAU2815RR57TQ9uDg0XjWjBkAKvf8yssxDMzrM4+FqP6AAAzM1AH0AAAAFZAAgAAAAAGQxC9L1e9DfO5XZvX1yvc3hTLtQEdKO9FPMkyg0Y9ZABXMAIAAAAADtmcMNJwdWLxQEArMGZQyzpnu+Z5yMmPAkvgq4eAKwNQVsACAAAAAAJ88zt4Y/Hoqh+zrf6KCOiUwHbOzCxSfp6k/qsZaYGEgAAzM2AH0AAAAFZAAgAAAAADLHK2LNCNRO0pv8n4fAsxwtUqCNnVK8rRgNiQfXpHSdBXMAIAAAAACf16EBIHRKD3SzjRW+LMOl+47QXA3CJhMzlcqyFRW22AVsACAAAAAAMGz4fAOa0EoVv90fUffwLjBrQhHATf+NdlgCR65vujAAAzM3AH0AAAAFZAAgAAAAAHiZJiXKNF8bbukQGsdYkEi95I+FSBHy1I5/hK2uEZruBXMAIAAAAADE+lZBa8HDUJPN+bF6xI9x4N7GF9pj3vBR7y0BcfFhBAVsACAAAAAAGIEN6sfqq30nyxW4dxDgXr/jz5HmvA9T1jx/pKCn4zgAAzM4AH0AAAAFZAAgAAAAAI1oa2OIw5TvhT14tYCGmhanUoYcCZtNbrVbeoMldHNZBXMAIAAAAAAx2nS0Ipblf2XOgBiUOuJFBupBhe7nb6QPLZlA4aMPCgVsACAAAAAA9xu828hugIgo0E3de9dZD+gTpVUGlwtDba+tw/WcbUoAAzM5AH0AAAAFZAAgAAAAABgTWS3Yap7Q59hii/uPPimHWXsr+DUmsqfwt/X73qsOBXMAIAAAAACKK05liW5KrmEAvtpCB1WUltruzUylDDpjea//UlWoOAVsACAAAAAAcgN4P/wakJ5aJK5c1bvJBqpVGND221dli2YicPFfuAYAAzQwAH0AAAAFZAAgAAAAABOAnBPXDp6i9TISQXvcNKwGDLepZTu3cKrB4vKnSCjBBXMAIAAAAADjjzZO7UowAAvpwyG8BNOVqLCccMFk3aDK4unUeft5ywVsACAAAAAA4zkCd4k9gvfXoD1C7vwTjNcdVJwEARh8h/cxZ4PNMfgAAzQxAH0AAAAFZAAgAAAAAHN8hyvT1lYrAsdiV
5GBdd5jhtrAYE/KnSjw2Ka9hjz9BXMAIAAAAAD794JK7EeXBs+D7yOVK7nWF8SbZ/7U8gZ7nnT9JFNwTAVsACAAAAAAg8Wt1HO3NhByq2ggux2a4Lo6Gryr24rEFIqh2acrwWMAAzQyAH0AAAAFZAAgAAAAAO93bPrq8bsnp1AtNd9ETnXIz0lH/2HYN/vuw9wA3fyFBXMAIAAAAABHlls5fbaF2oAGqptC481XQ4eYxInTC29aElfmVZgDUgVsACAAAAAANoQXEWpXJpgrSNK/cKi/m7oYhuSRlp1IZBF0bqTEATcAAzQzAH0AAAAFZAAgAAAAAL1YsAZm1SA0ztU6ySIrQgCCA74V6rr0/4iIygCcaJL6BXMAIAAAAADTXWTHWovGmUR1Zg9l/Aqq9H5mOCJQQrb/Dfae7e3wKAVsACAAAAAA5dunyJK6/SVfDD0t9QlNBcFqoZnf9legRjHaLSKAoQMAAzQ0AH0AAAAFZAAgAAAAAEoFAeHk0RZ9kD+cJRD3j7PcE5gzWKnyBrF1I/MDNp5mBXMAIAAAAACgHtc2hMBRSZjKw8RAdDHK+Pi1HeyjiBuAslGVNcW5tAVsACAAAAAAXzBLfq+GxRtX4Wa9fazA49DBLG6AjZm2XODStJKH8D0AAzQ1AH0AAAAFZAAgAAAAAAW+7DmSN/LX+/0uBVJDHIc2dhxAGz4+ehyyz8fAnNGoBXMAIAAAAAA6Ilw42EvvfLJ3Eq8Afd+FjPoPcQutZO6ltmCLEr8kxQVsACAAAAAAbbZalyo07BbFjPFlYmbmv0z023eT9eLkHqeVUnfUAUAAAzQ2AH0AAAAFZAAgAAAAANBdV7M7kuYO3EMoQItAbXv4t2cIhfaT9V6+s4cg9djlBXMAIAAAAABvz4MIvZWxxrcJCL5qxLfFhXiUYB1OLHdKEjco94SgDgVsACAAAAAAK2GVGvyPIKolF/ECcmfmkVcf1/IZNcaTv96N92yGrkEAAzQ3AH0AAAAFZAAgAAAAAMoAoiAn1kc79j5oPZtlMWHMhhgwNhLUnvqkqIFvcH1NBXMAIAAAAADcJTW7WiCyW0Z9YDUYwppXhLj4Ac1povpJvcAq+i48MQVsACAAAAAAIGxGDzoeB3PTmudl4+j6piQB++e33EEzuzAiXcqGxvUAAzQ4AH0AAAAFZAAgAAAAACI3j5QP7dWHpcT6WO/OhsWwRJNASBYqIBDNzW8IorEyBXMAIAAAAABxUpBSjXwCKDdGP9hYU+RvyR+96kChfvyyRC4jZmztqAVsACAAAAAAvBCHguWswb4X0xdcAryCvZgQuthXzt7597bJ5VxAMdgAAzQ5AH0AAAAFZAAgAAAAAKsbycEuQSeNrF8Qnxqw3x3og8JmQabwGqnDbqzFRVrrBXMAIAAAAACno/3ef2JZJS93SVVzmOZSN+jjJHT8s0XYq2M46d2sLAVsACAAAAAAAt5zLJG+/j4K8rnkFtAn8IvdUVNefe6utJ3rdzgwudIAAzUwAH0AAAAFZAAgAAAAAPXIcoO8TiULqlxzb74NFg+I8kWX5uXIDUPnh2DobIoMBXMAIAAAAADR6/drkdTpnr9g1XNvKDwtBRBdKn7c2c4ZNUVK5CThdQVsACAAAAAAJqOA1c6KVog3F4Hb/GfDb3jCxXDRTqpXWSbMH4ePIJsAAzUxAH0AAAAFZAAgAAAAAEa03ZOJmfHT6/nVadvIw71jVxEuIloyvxXraYEW7u7pBXMAIAAAAADzRlBJK75FLiKjz3djqcgjCLo/e3yntI3MnPS48OORhgVsACAAAAAAnQhx4Rnyj081XrLRLD5NLpWmRWCsd0M9Hl7Jl19R0h8AAzUyAH0AAAAFZAAgAAAAAKx8NLSZUU04pSSGmHa5fh2oLHsEN5mmNMNHL95/tuC9BXMAIAAAAAA59hcXVaN3MNdHoo11OcH1aPRzHCwpVjO9mGfMz4xh3QVsACAAAAAAYIPdjV2XbPj7dBeHPwnwhVU7zMuJ+xtMUW5mIOYtmdAAAzUzAH0AAAAFZAAgAAAAAHNKAUxUqBFNS9Ea9NgCZoXMWgwhP4x0/OvoaPRWMquXBXMAIAAAAABUZ551mnP4ZjX+PXU9ttomzuOpo427MVynpkyq+nsYCQVsACAAAAAALnVK5p2tTTeZEh1zYt4iqKIQT9Z0si//Hy1L85oF+5IAAzU0AH0AAAAFZAAgAAAAALfGXDlyDVcGaqtyHkLT0qpuRhJQLgCxtznazhFtuyn/BXMAIAAAAABipxlXDq14C62pXhwAeen5+syA+/C6bN4rtZYcO4zKwAVsACAAAAAAXUf0pzUq0NhLYagWDap4uEiwq5rLpcx29rWbt1NYMsMAAzU1AH0AAAAFZAAgAAAAANoEr8sheJjg4UCfBkuUzarU9NFoy1xwbXjs5ifVDeA9BXMAIAAAAABPoyTf6M+xeZVGES4aNzVlq7LgjqZXJ/QunjYVusGUEAVsACAAAAAA1hA2gMeZZPUNytk9K+lB1RCqWRudRr7GtadJlExJf8oAAzU2AH0AAAAFZAAgAAAAAKvDiK+xjlBe1uQ3SZTNQl2lClIIvpP/5CHwY6Kb3WlgBXMAIAAAAAANnxImq5MFbWaRBHdJp+yD09bVlcFtiFDYsy1eDZj+iQVsACAAAAAAWtsyO+FxMPSIezwsV1TJD8ZrXAdRnQM6DJ+f+1V3qEkAAzU3AH0AAAAFZAAgAAAAAF49IlFH9RmSUSvUQpEPUedEksrQUcjsOv44nMkwXhjzBXMAIAAAAADJtWGbk0bZzmk20obz+mNsp86UCu/nLLlbg7ppxYn7PgVsACAAAAAA3k0Tj/XgPQtcYijH8cIlQoe/VXf15q1nrZNmg7yWYEgAAzU4AH0AAAAFZAAgAAAAAOuSJyuvz50lp3BzXlFKnq62QkN2quNU1Gq1IDsnFoJCBXMAIAAAAAAqavH1d93XV3IzshWlMnzznucadBF0ND092/2ApI1AcAVsACAAAAAAzUrK4kpoKCmcpdZlZNI13fddjdoAseVe67jaX1LobIIAAzU5AH0AAAAFZAAgAAAAALtgC4Whb4ZdkCiI30zY6fwlsxSa7lEaOAU3SfUXr02XBXMAIAAAAACgdZ6U1ZVgUaZZwbIaCdlANpCw6TZV0bwg3DS1NC/mnAVsACAAAAAAzI49hdpp0PbO7S2KexISxC16sE73EUAEyuqUFAC/J48AAzYwAH0AAAAFZAAgAAAAAF6PfplcGp6vek1ThwenMHVkbZgrc/dHgdsgx1VdPqZ5BXMAIAAAAACha3qhWkqmuwJSEXPozDO8y1ZdRLyzt9Crt2vjGnT7AAVsACAAAAAA7nvcU59+LwxGupSF21jAeAE0x7JE94tjRkJfgM1yKU8AAzYxAH0AAAAFZAAgAAAAAKoLEhLvLjKc7lhOJfx+VrGJCx9tXlOSa9bxQzGR6rfbBXMAIAAAAAAIDK5wNnjRMBzET7x/KAMExL/zi1IumJM92XTgXfoPoAVsACAAAAAAFkUYWFwNr815dEdFqp+TiIoz
Dcq5IBNVkyMoDjharDQAAzYyAH0AAAAFZAAgAAAAADoQv6lutRmh5scQFvIW6K5JBquLxszuygM1tzBiGknIBXMAIAAAAADAD+JjW7FoBQ76/rsECmmcL76bmyfXpUU/awqIsZdO+wVsACAAAAAAPFHdLw3jssmEXsgtvl/RBNaUCRA1kgSwsofG364VOvQAAzYzAH0AAAAFZAAgAAAAAJNHUGAgn56KekghO19d11nai3lAh0JAlWfeP+6w4lJBBXMAIAAAAAD9XGJlvz59msJvA6St9fKW9CG4JoHV61rlWWnkdBRLzwVsACAAAAAAxwP/X/InJJHmrjznvahIMgj6pQR30B62UtHCthSjrP0AAzY0AH0AAAAFZAAgAAAAAHgYoMGjEE6fAlAhICv0+doHcVX8CmMVxyq7+jlyGrvmBXMAIAAAAAC/5MQZgTHuIr/O5Z3mXPvqrom5JTQ8IeSpQGhO9sB+8gVsACAAAAAAuPSXVmJUAUpTQg/A9Bu1hYczZF58KEhVofakygbsvJQAAzY1AH0AAAAFZAAgAAAAANpIljbxHOM7pydY877gpRQvYY2TGK7igqgGsavqGPBABXMAIAAAAAAqHyEu9gpurPOulApPnr0x9wrygY/7mXe9rAC+tPK80wVsACAAAAAA7gkPzNsS3gCxdFBWbSW9tkBjoR5ib+saDvpGSB3A3ogAAzY2AH0AAAAFZAAgAAAAAGR+gEaZTeGNgG9BuM1bX2R9ed4FCxBA9F9QvdQDAjZwBXMAIAAAAABSkrYFQ6pf8MZ1flgmeIRkxaSh/Eep4Btdx4QYnGGnwAVsACAAAAAApRovMiV00hm/pEcT4XBsyPNw0eo8RLAX/fuabjdU+uwAAzY3AH0AAAAFZAAgAAAAAFNprhQ3ZwIcYbuzLolAT5n/vc14P9kUUQComDu6eFyKBXMAIAAAAAAcx9z9pk32YbPV/sfPZl9ALIEVsqoLXgqWLVK/tP+heAVsACAAAAAA/qxvuvJbAHwwhfrPVpmCFzNvg2cU/NXaWgqgYUZpgXwAAzY4AH0AAAAFZAAgAAAAADgyPqQdqQrgfmJjRFAILTHzXbdw5kpKyfeoEcy6YYG/BXMAIAAAAAAE+3XsBQ8VAxAkN81au+f3FDeCD/s7KoZD+fnM1MJSSAVsACAAAAAAhRnjrXecwV0yeCWKJ5J/x12Xx4qVJahsCEVHB/1U2rcAAzY5AH0AAAAFZAAgAAAAAI0CT7JNngTCTUSei1Arw7eHWCD0jumv2rb7imjWIlWABXMAIAAAAABSP8t6ya0SyCphXMwnru6ZUDXWElN0NfBvEOhDvW9bJQVsACAAAAAAGWeGmBNDRaMtvm7Rv+8TJ2sJ4WNXKcp3tqpv5Se9Ut4AAzcwAH0AAAAFZAAgAAAAAD/FIrGYFDjyYmVb7oTMVwweWP7A6F9LnyIuNO4MjBnXBXMAIAAAAACIZgJCQRZu7NhuNMyOqCn1tf+DfU1qm10TPCfj5JYV3wVsACAAAAAA5hmY4ptuNxULGf87SUFXQWGAONsL9U29duh8xqsHtxoAAzcxAH0AAAAFZAAgAAAAAHIkVuNDkSS1cHIThKc/O0r2/ubaABTOi8Q1r/dvBAsEBXMAIAAAAADdHYqchEiJLM340c3Q4vJABmmth3+MKzwLYlsG6GS7sQVsACAAAAAADa+KP/pdTiG22l+ZWd30P1iHjnBF4zSNRdFm0oEK82kAAzcyAH0AAAAFZAAgAAAAAJmoDILNhC6kn3masElfnjIjP1VjsjRavGk1gSUIjh1NBXMAIAAAAAD97Ilvp3XF8T6MmVVcxMPcdL80RgQ09UoC6PnoOvZ1IQVsACAAAAAA2RK3Xng6v8kpvfVW9tkVXjpE+BSnx9/+Fw85Evs+kUEAAzczAH0AAAAFZAAgAAAAAI5bm3YO0Xgf0VT+qjVTTfvckecM3Cwqj7DTKZXf8/NXBXMAIAAAAAD/m+h8fBhWaHm6Ykuz0WX1xL4Eme3ErLObyEVJf8NCywVsACAAAAAAfb1VZZCqs2ivYbRzX4p5CtaCkKW+g20Pr57FWXzEZi8AAzc0AH0AAAAFZAAgAAAAANqo4+p6qdtCzcB4BX1wQ6llU7eFBnuu4MtZwp4B6mDlBXMAIAAAAAAGiz+VaukMZ+6IH4jtn4KWWdKK4/W+O+gRioQDrfzpMgVsACAAAAAAG4YYkTp80EKo59mlHExDodRQFR7njhR5dmISwUJ6ukAAAzc1AH0AAAAFZAAgAAAAAPrFXmHP2Y4YAm7b/aqsdn/DPoDkv7B8egWkfe23XsM1BXMAIAAAAAAGhwpKAr7skeqHm3oseSbO7qKNhmYsuUrECBxJ5k+D2AVsACAAAAAAAqPQi9luYAu3GrFCEsVjd9z2zIDcp6SPTR2w6KQEr+IAAzc2AH0AAAAFZAAgAAAAABzjYxwAjXxXc0Uxv18rH8I3my0Aguow0kTwKyxbrm+cBXMAIAAAAADVbqJVr6IdokuhXkEtXF0C2gINLiAjMVN20lE20Vmp2QVsACAAAAAAD7K1Fx4gFaaizkIUrf+EGXQeG7QX1jadhGc6Ji471H8AAzc3AH0AAAAFZAAgAAAAAFMm2feF2fFCm/UC6AfIyepX/xJDSmnnolQIBnHcPmb5BXMAIAAAAABLI11kFrQoaNVZFmq/38aRNImPOjdJh0Lo6irI8M/AaAVsACAAAAAAOWul0oVqJ9CejD2RqphhTC98DJeRQy5EwbNerU2+4l8AAzc4AH0AAAAFZAAgAAAAAJvXB3KyNiNtQko4SSzo/9b2qmM2zU9CQTTDfLSBWMgRBXMAIAAAAAAvjuVP7KsLRDeqVqRziTKpBrjVyqKiIbO9Gw8Wl2wFTAVsACAAAAAADlE+oc1ins+paNcaOZJhBlKlObDJ4VQORWjFYocM4LgAAzc5AH0AAAAFZAAgAAAAAPGdcxDiid8z8XYnfdDivNMYVPgBKdGOUw6UStU+48CdBXMAIAAAAAARj6g1Ap0eEfuCZ4X2TsEw+Djrhto3fA5nLwPaY0vCTgVsACAAAAAAoHqiwGOUkBu8SX5U1yHho+UIFdSN2MdQN5s6bQ0EsJYAAzgwAH0AAAAFZAAgAAAAAP5rGPrYGt3aKob5f/ldP0qrW7bmWvqnKY4QwdDWz400BXMAIAAAAADTQkW2ymaaf/bhteOOGmSrIR97bAnJx+yN3yMj1bTeewVsACAAAAAADyQnHGH2gF4w4L8axUsSTf6Ubk7L5/eoFOJk12MtZAoAAzgxAH0AAAAFZAAgAAAAAAlz6wJze5UkIxKpJOZFGCOf3v2KByWyI6NB6JM9wNcBBXMAIAAAAABUC7P/neUIHHoZtq0jFVBHY75tSFYr1Y5S16YN5XxC1QVsACAAAAAAgvxRbXDisNnLY3pfsjDdnFLtkvYUC4lhA68eBXc7KAwAAzgyAH0AAAAFZAAgAAAAAFJ8AtHcjia/9Y5pLEc3qVgH5xKiXw12G9Kn2A1EY8McBXMAIAAAAAAxe7Bdw7eUSBk/oAawa7u
icTEDgXLymRNhBy1LAxhDvwVsACAAAAAAxKPaIBKVx3jTA+R/el7P7AZ7efrmTGjJs3Hj/YdMddwAAzgzAH0AAAAFZAAgAAAAAO8uwQUaKFb6vqR3Sv3Wn4QAonC2exOC9lGG1juqP5DtBXMAIAAAAABZf1KyJgQg8/Rf5c02DgDK2aQu0rNCOvaL60ohDHyY+gVsACAAAAAAqyEjfKC8lYoIfoXYHUqHZPoaA6EK5BAZy5dxXZmay4kAAzg0AH0AAAAFZAAgAAAAAE8YtqyRsGCeiR6hhiyisR/hccmK4nZqIMzO4lUBmEFzBXMAIAAAAAC1UYOSKqAeG1UJiKjWFVskRhuFKpj9Ezy+lICZvFlN5AVsACAAAAAA6Ct9nNMKyRazn1OKnRKagm746CGu+jyhbL1qJnZxGi0AAzg1AH0AAAAFZAAgAAAAAPhCrMausDx1QUIEqp9rUdRKyM6a9AAx7jQ3ILIu8wNIBXMAIAAAAACmH8lotGCiF2q9VQxhsS+7LAZv79VUAsOUALaGxE/EpAVsACAAAAAAnc1xCKfdvbUEc8F7XZqlNn1C+hZTtC0I9I3LL06iaNkAAzg2AH0AAAAFZAAgAAAAAOBi/GAYFcstMSJPgp3VkMiuuUUCrZytvqYaU8dwm8v2BXMAIAAAAACEZSZVyD3pKzGlbdwlYmWQhHHTV5SnNLknl2Gw8IaUTQVsACAAAAAAfsLZsEDcWSuNsIo/TD1ReyQW75HPMgmuKZuWFOLKRLoAAzg3AH0AAAAFZAAgAAAAAIQuup+YGfH3mflzWopN8J1X8o8a0d9CSGIvrA5HOzraBXMAIAAAAADYvNLURXsC2ITMqK14LABQBI+hZZ5wNf24JMcKLW+84AVsACAAAAAACzfjbTBH7IwDU91OqLAz94RFkoqBOkzKAqQb55gT4/MAAzg4AH0AAAAFZAAgAAAAAKsh0ADyOnVocFrOrf6MpTrNvAj8iaiE923DPryu124gBXMAIAAAAADg24a8NVE1GyScc6tmnTbmu5ulzO+896fE92lN08MeswVsACAAAAAAaPxcOIxnU7But88/yadOuDJDMcCywwrRitaxMODT4msAAzg5AH0AAAAFZAAgAAAAAKkVC2Y6HtRmv72tDnPUSjJBvse7SxLqnr09/Uuj9sVVBXMAIAAAAABYNFUkH7ylPMN+Bc3HWX1e0flGYNbtJNCY9SltJCW/UAVsACAAAAAAZYK/f9H4OeihmpiFMH7Wm7uLvs2s92zNA8wyrNZTsuMAAzkwAH0AAAAFZAAgAAAAADDggcwcb/Yn1Kk39sOHsv7BO/MfP3m/AJzjGH506Wf9BXMAIAAAAAAYZIsdjICS0+BDyRUPnrSAZfPrwtuMaEDEn0/ijLNQmAVsACAAAAAAGPnYVvo2ulO9z4LGd/69NAklfIcZqZvFX2KK0s+FcTUAAzkxAH0AAAAFZAAgAAAAAEWY7dEUOJBgjOoWVht1wLehsWAzB3rSOBtLgTuM2HC8BXMAIAAAAAAAoswiHRROurjwUW8u8D5EUT+67yvrgpB/j6PzBDAfVwVsACAAAAAA6NhRTYFL/Sz4tao7vpPjLNgAJ0FX6P/IyMW65qT6YsMAAzkyAH0AAAAFZAAgAAAAAPZaapeAUUFPA7JTCMOWHJa9lnPFh0/gXfAPjA1ezm4ZBXMAIAAAAACmJvLY2nivw7/b3DOKH/X7bBXjJwoowqb1GtEFO3OYgAVsACAAAAAA/JcUoyKacCB1NfmH8vYqC1f7rd13KShrQqV2r9QBP44AAzkzAH0AAAAFZAAgAAAAAK00u6jadxCZAiA+fTsPVDsnW5p5LCr4+kZZZOTDuZlfBXMAIAAAAAAote4zTEYMDgaaQbAdN8Dzv93ljPLdGjJzvnRn3KXgtQVsACAAAAAAxXd9Mh6R3mnJy8m7UfqMKi6oD5DlZpkaOz6bEjMOdiwAAzk0AH0AAAAFZAAgAAAAAFbgabdyymiEVYYwtJSWa7lfl/oYuj/SukzJeDOR6wPVBXMAIAAAAADAFGFjS1vPbN6mQEhkDYTD6V2V23Ys9gUEUMGNvMPkaAVsACAAAAAAL/D5Sze/ZoEanZLK0IeEkhgVkxEjMWVCfmJaD3a8uNIAAzk1AH0AAAAFZAAgAAAAABNMR6UBv2E627CqLtQ/eDYx7OEwQ7JrR4mSHFa1N8tLBXMAIAAAAAAxH4gucI4UmNVB7625C6hFSVCuIpJO3lusJlPuL8H5EQVsACAAAAAAVLHNg0OUVqZ7WGOP53BkTap9FOw9dr1P4J8HxqFqU04AAzk2AH0AAAAFZAAgAAAAAG8cd6WBneNunlqrQ2EmNf35W7OGObGq9WL4ePX+LUDmBXMAIAAAAAAjJ2+sX87NSis9hBsgb1QprVRnO7Bf+GObCGoUqyPE4wVsACAAAAAAs9c9SM49/pWmyUQKslpt3RTMBNSRppfNO0JBvUqHPg0AAzk3AH0AAAAFZAAgAAAAAFWOUGkUpy8yf6gB3dio/aOfRKh7XuhvsUj48iESFJrGBXMAIAAAAAAY7sCDMcrUXvNuL6dO0m11WyijzXZvPIcOKob6IpC4PQVsACAAAAAAJOP+EHz6awDb1qK2bZQ3kTV7wsj5Daj/IGAWh4g7omAAAzk4AH0AAAAFZAAgAAAAAGUrIdKxOihwNmo6B+aG+Ag1qa0+iqdksHOjQj+Oy9bZBXMAIAAAAABwa5dbI2KmzBDNBTQBEkjZv4sPaeRkRNejcjdVymRFKQVsACAAAAAA4ml/nm0gJNTcJ4vuD+T2Qfq2fQZlibJp/j6MOGDrbHMAAzk5AH0AAAAFZAAgAAAAAOx89xV/hRk64/CkM9N2EMK6aldII0c8smdcsZ46NbP8BXMAIAAAAADBF6tfQ+7q9kTuLyuyrSnDgmrdmrXkdhl980i1KHuGHgVsACAAAAAACUqiFqHZdGbwAA+hN0YUE5zFg+H+dabIB4dj5/75W/YAAzEwMAB9AAAABWQAIAAAAADJDdC9aEFl4Y8J/awHbnXGHjfP+VXQilPHJg7ewaJI7AVzACAAAAAAE+tqRl6EcBMXvbr4GDiNIYObTsYpa1n6BJk9EjIJVicFbAAgAAAAAJVc+HYYqa0m1Hq6OiRX8c0iRnJYOt6AJAJoG0sG3GMSAAMxMDEAfQAAAAVkACAAAAAA3F9rjEKhpoHuTULVGgfUsGGwJs3bISrXkFP1v6KoQLgFcwAgAAAAAIBf0tXw96Z/Ds0XSIHX/zk3MzUR/7WZR/J6FpxRWChtBWwAIAAAAABWrjGlvKYuTS2s8L9rYy8Hf0juFGJfwQmxVIjkTmFIGQADMTAyAH0AAAAFZAAgAAAAAOYIYoWkX7dGuyKfi3XssUlc7u/gWzqrR9KMkikKVdmSBXMAIAAAAABVF2OYjRTGi9Tw8XCAwZWLpX35Yl271TlNWp6N/nROhAVsACAAAAAA0nWwYzXQ1+EkDvnGq+SMlq20z+j32Su+i/A95SggPb4AAzEwMwB9AAAABWQAIAAAAACMtPm12YtdEA
vqu6Eji1yuRXnu1RJP6h0l7pH3lSH4MwVzACAAAAAAENyCFfyUAh1veQBGx+cxiB7Sasrj41jzCGflZkB5cRMFbAAgAAAAAKdI2LMqISr/T5vuJPg6ZRBm5fVi2aQCc4ra3A4+AjbDAAMxMDQAfQAAAAVkACAAAAAAvlI4lDcs6GB1cnm/Tzo014CXWqidCdyE5t2lknWQd4QFcwAgAAAAAD60SpNc4O2KT7J0llKdSpcX1/Xxs97N715a1HsTFkmBBWwAIAAAAABuuRkJWAH1CynggBt1/5sPh9PoGiqTlS24D/OE2uHXLQADMTA1AH0AAAAFZAAgAAAAAKl8zcHJRDjSjJeV/WvMxulW1zrTFtaeBy/aKKhadc6UBXMAIAAAAADBdWQl5SBIvtZZLIHszePwkO14W1mQ0izUk2Ov21cPNAVsACAAAAAAHErCYycpqiIcCZHdmPL1hi+ovLQk4TAvENpfLdTRamQAAzEwNgB9AAAABWQAIAAAAABb6LXDWqCp1beQgQjj8I3sRTtFhlrmiBi+h/+ikmrvugVzACAAAAAA9stpgTecT7uTyaGNs3K9Bp0A7R0QaIAOfscyMXHBPX8FbAAgAAAAAHUt+McyXrJ1H8SwnHNVO181Ki8vDAM1f7XI26mg95ZDAAMxMDcAfQAAAAVkACAAAAAA97NTT+81PhDhgptNtp4epzA0tP4iNb9j1AWkiiiKGM8FcwAgAAAAAKPbHg7ise16vxmdPCzksA/2Mn/qST0L9Xe8vnQugVkcBWwAIAAAAABB0EMXfvju4JU/mUH/OvxWbPEl9NJkcEp4iCbkXI41fAADMTA4AH0AAAAFZAAgAAAAAMqpayM2XotEFmm0gwQd9rIzApy0X+7HfOhNk6VU7F5lBXMAIAAAAACJR9+q5T9qFHXFNgGbZnPubG8rkO6cwWhzITQTmd6VgwVsACAAAAAAOZLQ6o7e4mVfDzbpQioa4d3RoTvqwgnbmc5Qh2wsZuoAAzEwOQB9AAAABWQAIAAAAADQnslvt6Hm2kJPmqsTVYQHE/wWeZ4bE1XSkt7TKy0r1gVzACAAAAAA8URTA4ZMrhHPvlp53TH6FDCzS+0+61qHm5XK6UiOrKEFbAAgAAAAAHQbgTCdZcbdA0avaTmZXUKnIS7Nwf1tNrcXDCw+PdBRAAMxMTAAfQAAAAVkACAAAAAAhujlgFPFczsdCGXtQ/002Ck8YWQHHzvWvUHrkbjv4rwFcwAgAAAAALbV0lLGcSGfE7mDM3n/fgEvi+ifjl7WZ5b3aqjDNvx9BWwAIAAAAACbceTZy8E3QA1pHmPN5kTlOx3EO8kJM5PUjTVftw1VpgADMTExAH0AAAAFZAAgAAAAABm/6pF96j26Jm7z5KkY1y33zcAEXLx2n0DwC03bs/ixBXMAIAAAAAD01OMvTZI/mqMgxIhA5nLs068mW+GKl3OW3ilf2D8+LgVsACAAAAAAaLvJDrqBESTNZSdcXsd+8GXPl8ZkUsGpeYuyYVv/kygAAzExMgB9AAAABWQAIAAAAACfw9/te4GkHZAapC9sDMHHHZgmlTrccyJDPFciOMSOcwVzACAAAAAAIIC1ZpHObvmMwUfqDRPl4C1aeuHwujM1G/yJbvybMNAFbAAgAAAAAAs9x1SnVpMfNv5Bm1aXGwHmbbI9keWa9HRD35XuCBK5AAMxMTMAfQAAAAVkACAAAAAAkxHJRbnShpPOylLoDdNShfILeA1hChKFQY9qQyZ5VmsFcwAgAAAAAKidrY+rC3hTY+YWu2a7fuMH2RD/XaiTIBW1hrxNCQOJBWwAIAAAAACW0kkqMIzIFMn7g+R0MI8l15fr3k/w/mHtY5n6SYTEwAADMTE0AH0AAAAFZAAgAAAAAByuYl8dBvfaZ0LO/81JW4hYypeNmvLMaxsIdvqMPrWoBXMAIAAAAABNddwobOUJzm9HOUD8BMZJqkNCUCqstHZkC76FIdNg9AVsACAAAAAAQQOkIQtkyNavqCnhQbNg3HfqrJdsAGaoxSJePJl1qXsAAzExNQB9AAAABWQAIAAAAABxMy7X5hf7AXGDz3Y/POu1ZpkMlNcSvSP92NOO/Gs7wAVzACAAAAAAHJshWo2T5wU2zvqCyJzcJQKQaHFHpCpMc9oWBXkpUPoFbAAgAAAAAGeiJKzlUXAvL0gOlW+Hz1mSa2HsV4RGmyLmCHlzbAkoAAMxMTYAfQAAAAVkACAAAAAAlqbslixl7Zw3bRlibZbe/WmKw23k8uKeIzPKYEtbIy0FcwAgAAAAAHEKwpUxkxOfef5HYvulXPmdbzTivwdwrSYIHDeNRcpcBWwAIAAAAADuPckac21Hrg/h0kt5ShJwVEZ9rx6SOHd2+HDjqxEWTQADMTE3AH0AAAAFZAAgAAAAAMXrXx0saZ+5gORmwM2FLuZG6iuO2YS+1IGPoAtDKoKBBXMAIAAAAADIQsxCr8CfFKaBcx8kIeSywnGh7JHjKRJ9vJd9x79y7wVsACAAAAAAcvBV+SykDYhmRFyVYwFYB9oBKBSHr55Jdz2cXeowsUQAAzExOAB9AAAABWQAIAAAAAAm83FA9yDUpwkbKTihe7m53u+DivS9BU2b4vQMtCVQ2AVzACAAAAAAz3m1UB/AbZPa4QSKFDnUgHaT78+6iGOFAtouiBorEgEFbAAgAAAAAIgbpyYtJj5513Z5XYqviH/HXG/5+mqR52iBbfqMmDtZAAMxMTkAfQAAAAVkACAAAAAAJRzYK0PUwr9RPG2/7yID0WgcTJPB2Xjccp5LAPDYunkFcwAgAAAAAIIh24h3DrltAzNFhF+MEmPrZtzr1PhCofhChZqfCW+jBWwAIAAAAAAzRNXtL5o9VXMk5D5ylI0odPDJDSZZry1wfN+TedH70gADMTIwAH0AAAAFZAAgAAAAAHSaHWs/dnmI9sc7nB50VB2Bzs0kHapMHCQdyVEYY30TBXMAIAAAAACkV22lhEjWv/9/DubfHBAcwJggKI5mIbSK5L2nyqloqQVsACAAAAAAS19m7DccQxgryOsBJ3GsCs37yfQqNi1G+S6fCXpEhn4AAzEyMQB9AAAABWQAIAAAAAAC/I4TQRtCl12YZmdGz17X4GqSQgfwCPgRBwdHmdwu+QVzACAAAAAAx8f3z2ut/RAZhleari4vCEE+tNIn4ikjoUwzitfQ588FbAAgAAAAAJci0w1ZB8W2spJQ+kMpod6HSCtSR2jrabOH+B0fj3A4AAMxMjIAfQAAAAVkACAAAAAADGB5yU2XT0fse/MPWgvBvZikVxrl5pf3S5K1hceKWooFcwAgAAAAAIxTmlLHMjNaVDEfJbXvRez0SEPWFREBJCT6qTHsrljoBWwAIAAAAAAlswzAl81+0DteibwHD+CG5mZJrfHXa9NnEFRtXybzzwADMTIzAH0AAAAFZAAgAAAAABmO7QD9vxWMmFjIHz13lyOeV6vHT6mYCsWxF7hb/yOjBXMAIAAAAACT9lmgkiqzuWG24afuzYiCeK9gmJqacmxAruIukd0xEAVsA
CAAAAAAZa0/FI/GkZR7CtX18Xg9Tn9zfxkD0UoaSt+pIO5t1t4AAzEyNAB9AAAABWQAIAAAAAAfPUoy7QyZKhIIURso+mkP9qr1izbjETqF5s22GwjCjAVzACAAAAAAvLMsIDQ/go4VUxeh50UHmsvMvfx51cwyONnRD2odvC0FbAAgAAAAAKMb+1CodEalAFnDrEL1Ndt8ztamZ+9134m9Kp3GQgd+AAMxMjUAfQAAAAVkACAAAAAAE3ZqUar0Bq2zWbARE0bAv98jBlK9UJ73/xcwdMWWlSkFcwAgAAAAAK4M+MmC+9sFiFsumMyJZQKxWmmJiuG9H7IzKw083xxkBWwAIAAAAAAqkAONzhvMhkyL1D/6h7QQxEkdhC3p2WjXH+VGq5qCqQADMTI2AH0AAAAFZAAgAAAAAMo8FJiOq63cAmyk2O7eI7GcbQh/1j4RrMTqly3rexftBXMAIAAAAADjVmpd0WiRGTw/gAqEgGolt2EI7Csv14vKdmYoMD0aAgVsACAAAAAA07XQBzBUQMNw7F2/YxJjZNuPVpHTTgbLd1oGk77+bygAAzEyNwB9AAAABWQAIAAAAACu5IGaIx7A3Jvly/kzlCsSA4s3iJwuIl8jEdRH0k93NwVzACAAAAAA9NRUyxYE+t0Xyosyt6vIfMFW/vBoYg6sR+jBNs4JAxIFbAAgAAAAAAzyZ91dx+0oMlOVAjRGiMrPySikY/U9eMEB4WJb3uWtAAMxMjgAfQAAAAVkACAAAAAALkRy0GJInXYLA+cgjs6Myb0a+Gu9hgXhHvhLNoGWfckFcwAgAAAAANbALyt9zCSvwnLaWCd2/y2eoB7qkWTvv1Ldu8r40JPuBWwAIAAAAAD4Fl5bV5sz4isIE9bX+lmAp+aAKaZgVYVZeVfrItkCZAADMTI5AH0AAAAFZAAgAAAAAGoUK/DSWhT8LZhszSUqDbTrp8cSA7rdqmADKL+MILtTBXMAIAAAAABHnEE9bVa6lvhfhEMkkV2kzSSxH/sMW/FIJuw3CzWs6wVsACAAAAAAanavcBdqZxgRGKvEK95wTmeL1K1CeDSXZsXUAs81uOgAAzEzMAB9AAAABWQAIAAAAAC922ZDQE3h2fQKibGMZ9hV0WNlmrPYYSdtaSyYxsWYqgVzACAAAAAAagMovciKK6WVjIc2cCj8nK5O/gVOFFVeVAJpRp89tmQFbAAgAAAAAKcTFfPQzaFiAtSFhqbN02sCE1BKWJSrRfGN5L6oZwzkAAMxMzEAfQAAAAVkACAAAAAAtK+JqX3K/z2txjAU15DgX4y90DS2YLfIJFolCOkJJJwFcwAgAAAAAMnR5V7gfX7MNqqUdL5AkWlkhyFXaBRVNej+Rcn8lrQkBWwAIAAAAAA2cDNRXZuiC241TGRvdFyctJnrNcdbZOP9zHio81tkngADMTMyAH0AAAAFZAAgAAAAAAeGrIMK/bac6kPczxbvRYqKMkcpeI2FjdMpD91FDWIvBXMAIAAAAAAix62z1LeS8yvSXCl5gHSIomjyx76fF3S1lp9k900hygVsACAAAAAAiYwzf2m71aWFD5ajcXyW2JX2EzQOkBroTGMg29nLPYIAAzEzMwB9AAAABWQAIAAAAACphf298InM0Us4HT8o1W1MGw0D/02vd7Jh+U0h7qaFaQVzACAAAAAAFXtk7YpqsOJxsqGWSIL+YcBE96G3Zz9D31gPqDW94y8FbAAgAAAAAAOrS1KVA94rjB1jZ1pPocpCeBG+B14RzWoHqVDpp7JbAAMxMzQAfQAAAAVkACAAAAAATLDS2cuDVM3yDMuWNgk2iGKBTzPpfJMbvxVOSY39ZfcFcwAgAAAAAPT5wRi2cLHIUflXzm6EQB/m7xdThP80ir1VV/JBBqvxBWwAIAAAAAB9lEtZS0aXCFbCtSbhnis27S5IPcfWGygHW8AHn3QqzwADMTM1AH0AAAAFZAAgAAAAAJNjExiZVX7jfFGfYpQu16qxLN0YPqVU/5CQ/Y67YSinBXMAIAAAAABMpm2+6KrkRUlXzQoMPHrQmIO6dkQz66tYdfTeA3dKqQVsACAAAAAAFXobHiMLvNZuEPr8jtewCX2J93EZG3JNeyVg92fue6YAAzEzNgB9AAAABWQAIAAAAABlFkYtLCx901X6QVVMkSn6Z7k30UF4xHaA0OZJJ9bdyQVzACAAAAAATez+F9GHcGzTp7jjv4feboUNb8JCkIp4EqcPFisnq7MFbAAgAAAAACE7JvOpBgMoZ7kRd4QbxIhxukPTUxXpzhjnBHiR7XoRAAMxMzcAfQAAAAVkACAAAAAA8NJKN0IxZnruhswGQkiruv8Ih0EMwDcSZx/Xasup9dkFcwAgAAAAAKaJZRxzA+Igeydvuk6cSwUHXcrmT4PjhuPu//FslpdnBWwAIAAAAAD53Rok1Vq/PMAnXmarqoHJ0PEyYUBmVESa9hIpCv/G9QADMTM4AH0AAAAFZAAgAAAAABHxHdEClz7hbSSgE58+dWLlSMJnoPz+jFxp4bB1GmLQBXMAIAAAAAD3nSvT6aGD+A110J/NwEfp0nPutlmuB5B+wA3CC3noGAVsACAAAAAA3Apjd+TapONB7k5wBVwTWgn8t+Sq2oyyU5/+as109RcAAzEzOQB9AAAABWQAIAAAAAC/o8qW/ifk3KuJ01VFkyNLgQafxB5/bGs2G5VyyVafOwVzACAAAAAA1bMqAFGDHSl6BYNLbxApvkAv2K1/oafywiX0MDz1dGUFbAAgAAAAAHJXLlId3edFoniLD/9K2A5973MeP2Ro31flDyqm3l5QAAMxNDAAfQAAAAVkACAAAAAAY2V8I1bz3a1AxTtmED6UhdhA09huFkuuEX8R+d/WDPUFcwAgAAAAAPTVoNRiI76tcRKqd+JBBVyy4+YcKST42p0QX2BtmQ2VBWwAIAAAAACcxt9hg14WqPNiDv1MkqVljM2e2KJEv53lA17LhV6ZigADMTQxAH0AAAAFZAAgAAAAAO2kSsW0WGN9AOtK4xK2SHrGhWiaAbMEKT4iZkRpaDN/BXMAIAAAAABKGzQcPM8LT2dwOggxoWjv/1imYWabbG/G4kBw8OWaxAVsACAAAAAAC9hLK1dScQTAqg+YAG3ObdPzg2Xet57HmOFpGmyUR9UAAzE0MgB9AAAABWQAIAAAAAAiCwzNEEaH/mDam68IdDftnhthyUFdb+ZCNSBQ91WlHQVzACAAAAAA7tHyHcxCzmbJeFYZyPm4mEgkTGKOvwY4MX82OvH0Jn8FbAAgAAAAAAb5IAbZ1hXCNegQ+S+C9i/Z8y6sS8KeU04V6hXa2ml6AAMxNDMAfQAAAAVkACAAAAAAGuCHVNJSuoVkpPOnS5s89GuA+BLi2IPBUr2Bg1sWEPIFcwAgAAAAAEl1gncS5/xO7bQ/KQSstRV3rOT2SW6nV92ZANeG2SR6BWwAIAAAAAA9LOcKmhek8F2wAh8yvT/vjp2gaouuO+Hmv10lwAeWPAADMTQ0AH0AAAAFZAAgAAAAAMfxz7gEaoCdPvXr
ubDhCZUS0ARLZc1svgbXgMDlVBPgBXMAIAAAAAB6a5dDA3fuT5Vz2KvAcbUEFX/+B7Nw2p1QqbPoQ5TTuAVsACAAAAAAcf/y75UOuI62A6vWH7bYr/5Jz+nirZVYK/81trN6XOQAAzE0NQB9AAAABWQAIAAAAACnYsqF/VzmjIImC9+dqrHO1TM6lJ6fRwM0mM6Wf6paOwVzACAAAAAA5tgZzch8uDCR1ky3SllVaKVpxAlbrhvlNDTazZZRZOAFbAAgAAAAALeGiLJS4z2zhgVpxzyPdRYyACP9QzQBOob34YrIZumCAAMxNDYAfQAAAAVkACAAAAAAEC0sIVmadtW4YMuRXH7RpAhXclsd+3bmqGXCMeaT014FcwAgAAAAABPpXh0uzpsJJB+IRUNajmMB9WGwswfpw5T9xk3Xj6ANBWwAIAAAAAAmf+NYh9TZ/QRu3w/GQz66n7DtfbJijN3G7KzeL8lstAADMTQ3AH0AAAAFZAAgAAAAABaIB3n49Xm9cOafSrQsE0WCcYp8rMIO/qVwIlMF5YLRBXMAIAAAAAC9EyWJV3xOu9bzgdJ/yX+ko7qLf1u3AxNMataW2C9EzQVsACAAAAAAvVbDkLxXx2DcMLifIQ3K0IIJcLcAG9DUrNfI6aoUjNcAAzE0OAB9AAAABWQAIAAAAAA5rZItA/cocRnngYqcJ3nBXQ+l688aKz3EQyLbYYunPAVzACAAAAAAwKyA+L7TgxztPClLrIMk2JXR+w7c04N3ZOqPgjvrIvsFbAAgAAAAACzvZ33h6aWEe8hmo+1f6OXJ72FY5hvWaUuha64ZV3KFAAMxNDkAfQAAAAVkACAAAAAA3htn7oHJ0YYpIrs+Mzyh85Ys67HwAdv5LQl1mCdoMWkFcwAgAAAAAEHjCtNNLenHuSIYux6ezAHsXDaj2DlTF67ToDhDDe6HBWwAIAAAAAD+P4H0sk9jOd+7vOANt2/1Ectb+4ZRGPE8GkHWNXW3MgADMTUwAH0AAAAFZAAgAAAAAEnt18Km/nqggfIJWxzTr9r3hnXNaueG6XO9A5G11LnGBXMAIAAAAAD7QxzGMN/ard5TfFLecE6uusMmXG2+RBsBR+/NCQHUwAVsACAAAAAAQEZ1ZZ8GC8rdbg7s87OM5Gr9qkTXS9+P5DuAZxj5Gl4AAzE1MQB9AAAABWQAIAAAAAAVAKK/GoY8AACu/hyMpO4hdLq6JnEyWNzkyci9sbaD/wVzACAAAAAA2HmeqpMlvvBpV2zQTYIRmsc4MFlfHRwLof0ycJgMg/MFbAAgAAAAACdltCeWi5E/q1Li1eXLChpM2D9QQSGLBZ82NklQSc0oAAMxNTIAfQAAAAVkACAAAAAAhHyq1GQC/GiMwpYjcsfkNxolJ10ARKjIjfkW1Wipzi0FcwAgAAAAAD/uaGWxTDq87F8XZ6CrFI+RNa8yMqfSZdqK00Kj833BBWwAIAAAAAD6aEdOO0CsQGagioOCvANPCEHSpJ8BSixlPBq5ERhB7AADMTUzAH0AAAAFZAAgAAAAABAJJxHoZD+MQBWqm9UM9Dd3z5ZohIZGWRaRVRsMptKQBXMAIAAAAADrE/ca+gqj/SH4oao4wE4qn2ovoTydzcMbDbrfnUs3zAVsACAAAAAAeNCIQN6hVnGJinytQRFGlQ2ocoprXNqpia+BSxzl+uwAAzE1NAB9AAAABWQAIAAAAAAv01wz7VG9mTepjXQi6Zma+7b/OVBaKVkWNbgDLr1mFgVzACAAAAAA0I5sxz8r6wkCp5Tgvr+iL4p6MxSOq5d3e1kZG+0b7NkFbAAgAAAAAIA32v6oGkAOS96HexGouNTex+tLahtx9QF2dgGClk6WAAMxNTUAfQAAAAVkACAAAAAAWXecRwxSon68xaa9THXnRDw5ZfzARKnvvjTjtbae6T0FcwAgAAAAAPh0UfUMEo7eILCMv2tiJQe1bF9qtXq7GJtC6H5Va4fIBWwAIAAAAADqFr1ThRrTXNgIOrJWScO9mk86Ufi95IDu5gi4vP+HWQADMTU2AH0AAAAFZAAgAAAAAEY5WL8/LpX36iAB1wlQrMO/xHVjoO9BePVzbUlBYo+bBXMAIAAAAABoKcpadDXUARedDvTmzUzWPe1jTuvD0z9oIcZmKuiSXwVsACAAAAAAJuJbwuaMrAFoI+jU/IYr+k4RzAqITrOjAd3HWCpJHqEAAzE1NwB9AAAABWQAIAAAAADnJnWqsfx0xqNnqfFGCxIplVu8mXjaHTViJT9+y2RuTgVzACAAAAAAWAaSCwIXDwdYxWf2NZTly/iKVfG/KDjHUcA1BokN5sMFbAAgAAAAAJVxavipE0H4/JQvhagdytXBZ8qGooeXpkbPQ1RfYMVHAAMxNTgAfQAAAAVkACAAAAAAsPG7LaIpJvcwqcbtfFUpIjj+vpNj70Zjaw3eV9T+QYsFcwAgAAAAAJQ71zi0NlCyY8ZQs3IasJ4gB1PmWx57HpnlCf3+hmhqBWwAIAAAAACD58TO6d+71GaOoS+r73rAxliAO9GMs4Uc8JbOTmC0OwADMTU5AH0AAAAFZAAgAAAAAAGiSqKaQDakMi1W87rFAhkogfRAevnwQ41onWNUJKtuBXMAIAAAAAASgiDpXfGh7E47KkOD8MAcX8+BnDShlnU5JAGdnPdqOAVsACAAAAAAI+2TTQIgbFq4Yr3lkzGwhG/tqChP7hRAx2W0fNaH6jcAAzE2MAB9AAAABWQAIAAAAAB7L4EnhjKA5xJD3ORhH2wOA1BvpnQ+7IjRYi+jjVEaJAVzACAAAAAAuhBIm0nL3FJnVJId+7CKDASEo+l2E89Z9/5aWSITK4AFbAAgAAAAALtSICOzQDfV9d+gZuYxpEj6cCeHnKTT+2G3ceP2H65kAAMxNjEAfQAAAAVkACAAAAAAaROn1NaDZFOGEWw724dsXBAm6bgmL5i0cki6QZQNrOoFcwAgAAAAANVT8R6UvhrAlyqYlxtmnvkR4uYK/hlvyQmBu/LP6/3ZBWwAIAAAAAD+aHNMP/X+jcRHyUtrCNkk1KfMtoD3GTmShS8pWGLt+AADMTYyAH0AAAAFZAAgAAAAADqSR5e0/Th59LrauDA7OnGD1Xr3H3NokfVxzDWOFaN7BXMAIAAAAACt30faNwTWRbvmykDpiDYUOCwA6QDbBBYBFWS7rdOB4AVsACAAAAAAF7SvnjjRk5v2flFOKaBAEDvjXaL1cpjsQLtK2fv9zdQAAzE2MwB9AAAABWQAIAAAAADmtb1ZgpZjSeodPG/hIVlsnS8hoRRwRbrTVx89VwL62AVzACAAAAAAi38e1g6sEyVfSDkzZbaZXGxKI/zKNbMasOl2LYoWrq8FbAAgAAAAAALACk0KcCDN/Kv8WuazY8ORtUGkOZ5Dsm0ys1oOppp/AAMxNjQAfQAAAAVkACAAAAAAf/f7AWVgBxoKjr7YsEQ4w/fqSvuQWV2HMiA3rQ7ur0sFcwAgAAAAADkkeJozP6FFhUdRIN74H4UhIHue+eVbOs1NvbdWYFQrBWwAIAA
AAAB55FlHAkmTzAYj/TWrGkRJw2EhrVWUnZXDoMYjyfB/ZwADMTY1AH0AAAAFZAAgAAAAAI2WEOymtuFpdKi4ctanPLnlQud+yMKKb8p/nfKmIy56BXMAIAAAAADVKrJmhjr1rfF3p+T+tl7UFd1B7+BfJRk0e7a4im7ozgVsACAAAAAA5E7Ti3PnFiBQoCcb/DN7V1uM3Xd6VKiexPKntssFL7kAAzE2NgB9AAAABWQAIAAAAAAuHU9Qd79hjyvKOujGanSGDIQlxzsql8JytTZhEnPw+AVzACAAAAAAjF2gV/4+sOHVgDd/oR5wDi9zL7NGpGD+NsEpGXy/a4QFbAAgAAAAAJzMoyojYV6Ed/LpVN5zge93Odv3U7JgP7wxeRaJZGTdAAMxNjcAfQAAAAVkACAAAAAA7dQDkt3iyWYCT94d7yqUtPPwp4qkC0ddu+HFdHgVKEkFcwAgAAAAANuYvtvZBTEq4Rm9+5eb7VuFopowkrAuv86PGP8Q8/QvBWwAIAAAAACeqXoAOQOE4j0zRMlkVd8plaW0RX1npsFvB38Xmzv7sAADMTY4AH0AAAAFZAAgAAAAAAwnZSDhL4tNGYxlHPhKYB8s28dY5ScSwiKZm3UhT8U3BXMAIAAAAABDoY6dhivufTURQExyC9Gx3ocpl09bgbbQLChj3qVGbgVsACAAAAAAF+1nS7O0v85s3CCy+9HkdeoEfm2C6ZiNbPMMnSfsMHUAAzE2OQB9AAAABWQAIAAAAAC2VuRdaC4ZJmLdNOvD6R2tnvkyARteqXouJmI46V306QVzACAAAAAAMn1Z6B35wFTX9mEYAPM+IiJ5hauEwfD0CyIvBrxHg7IFbAAgAAAAAOG6DvDZkT9B/xZWmjao2AevN7MMbs3Oh9YJeSd/hZ+hAAMxNzAAfQAAAAVkACAAAAAAVerb7qVNy457rNOHOgDSKyWl5ojun7iWrv1uHPXrIZQFcwAgAAAAAIDcYS9j5z+gx0xdJj09L7876r/vjvKTi/d3bXDE3PhyBWwAIAAAAADuhVLqb1Bkrx8aNymS+bx2cL8GvLFNH4SAi690DUgnWQADMTcxAH0AAAAFZAAgAAAAAH/E44yLxKCJjuSmU9A8SEhbmkDOx1PqqtYcZtgOzJdrBXMAIAAAAABgLh9v2HjBbogrRoQ82LS6KjZQnzjxyJH4PH+F3jupSAVsACAAAAAAIlO46ehXp4TqpDV0t6op++KO+uWBFh8iFORZjmx2IjkAAzE3MgB9AAAABWQAIAAAAAAlNUdDL+f/SSQ5074mrq0JNh7CTXwTbbhsQyDwWeDVMwVzACAAAAAANIH2IlSNG0kUw4qz0budjcWn8mNR9cJlYUqPYdonucAFbAAgAAAAAJMrOUOyiu5Y3sV76zwEFct8L7+i8WGlQI2+8z2W2kzaAAMxNzMAfQAAAAVkACAAAAAASZ+CvUDtlk/R4HAQ3a+PHrKeY/8ifAfh0oXYFqliu80FcwAgAAAAAJelpzPgM65OZFt/mvGGpwibclQ49wH+1gbUGzd9OindBWwAIAAAAAD9qeDchteEpVXWcycmD9kl9449C1dOw0r60TBm5jK+cQADMTc0AH0AAAAFZAAgAAAAAN9fkoUVbvFV2vMNMAkak4gYfEnzwKI3eDM3pnDK5q3lBXMAIAAAAACnDkgVNVNUlbQ9RhR6Aot2nVy+U4km6+GHPkLr631jEAVsACAAAAAANzg/BnkvkmvOr8nS4omF+q9EG/4oisB+ul4YHi938hwAAzE3NQB9AAAABWQAIAAAAAASyK3b1nmNCMptVEGOjwoxYLLS9fYWm/Zxilqea0jpEQVzACAAAAAADDHsGrbqlKGEpxlvfyqOJKQJjwJrzsrB7k3HG0AUJbkFbAAgAAAAAKwx3S4XfDZh4+LuI9jf7XgUh5qiefNv87JD4qvVRfPSAAMxNzYAfQAAAAVkACAAAAAAlSP9iK31GlcG9MKGbLmq+VXMslURr+As736rrVNXcsUFcwAgAAAAAAvbj0zfq9zzi8XReheKFbCB+h9IsOLgXPPpI5vrEJNZBWwAIAAAAABXvoZhaQE7ogWjeBjceVkp03N20cKYP3TA8vuNsgpfAgADMTc3AH0AAAAFZAAgAAAAAOJNORH8Bev97gVU7y6bznOxJ+E6Qoykur1QP76hG1/7BXMAIAAAAAC+C1PtOOrSZgzBAGhr+dPe/kR0JUw9GTwLVNr61xC1aAVsACAAAAAAeA/L8MQIXkamaObtMPLpoDoi5FypA5WAPtMeMrgi0eQAAzE3OAB9AAAABWQAIAAAAAAKcHzLUomavInN6upPkyWhAqYQACP/vdVCIYpiy6U6HgVzACAAAAAATsR4KItY6R2+U7Gg6sJdaEcf58gjd1OulyWovIqfxKcFbAAgAAAAAFbm10ko67ahboAejQdAV0U2uA5OhZYdb8XUFJ8OL46LAAMxNzkAfQAAAAVkACAAAAAAqTOLiMpCdR59tLZzzIPqJvbCNvz2XQL9ust0qYaehtcFcwAgAAAAAArefox/3k5xGOeiw2m6NUdzuGxmPwcu5IFcj+jMwHgHBWwAIAAAAADLZGFJ7MQd5JXMgMXjqZO5LDLxcFClcXPlnRMWRn+1oAADMTgwAH0AAAAFZAAgAAAAAIPSqSeVzSRgNVNmrPYHmUMgykCY27NbdDUNhE5kx/SgBXMAIAAAAAAhX90nNfxyXmZe/+btZ7q6xMX4PFyj0paM1ccJ/5IUUQVsACAAAAAA419oHmD2W0SYoOMwhrhrp8jf68fg9hTkaRdCuVd3CN0AAzE4MQB9AAAABWQAIAAAAACLn5DxiqAosHGXIAY96FwFKjeqrzXWf3VJIQMwx1fl4gVzACAAAAAAindvU27nveutopdvuHmzdENBbeGFtI3Qcsr07jxmvm8FbAAgAAAAAPvl9pBStQvP4OGkN5v0MghUY6djm9n7XdKKfrW0l1sMAAMxODIAfQAAAAVkACAAAAAA7i2S6rHRSPBwZEn59yxaS7HiYBOmObIkeyCcFU42kf8FcwAgAAAAAGb3RSEyBmgarkTvyLWtOLJcPwCKbCRkESG4RZjVmY4iBWwAIAAAAADB2/wo5CSHR4ANtifY6ZRXNTO5+O8qP82DfAiAeanpZwADMTgzAH0AAAAFZAAgAAAAAFz+M+H/Z94mdPW5oP51B4HWptp1rxcMWAjnlHvWJDWrBXMAIAAAAACBFEOQyL7ZHu4Cq33QvXkmKuH5ibG/Md3RaED9CtG5HwVsACAAAAAAfggtJTprQ/yZzj7y5z9KvXsdeXMWP0yUXMMJqpOwI88AAzE4NAB9AAAABWQAIAAAAAAE7c2x3Z3aM1XGfLNk/XQ9jCazNRbGhVm7H8c2NjS5ywVzACAAAAAARJ9h8fdcwA19velF3L/Wcvi2rCzewlKZ2nA0p8bT9uwFbAAgAAAAAJtWe6b4wK2Hae2dZm/OEpYQnvoZjz4Sz5IgJC2wInecAAMxODUAfQAAAAVkACAAAAAAVoRt9B9dNVvIMGN+ea
5TzRzQC+lqSZ8dd/170zU5o9cFcwAgAAAAAEwM95XZin5mv2yhCI8+ugtKuvRVmNgzzIQN0yi1+9aIBWwAIAAAAAAMGBq72n00rox3uqhxSB98mkenTGCdbbUF1gXrgottzgADMTg2AH0AAAAFZAAgAAAAAKRDkjyWv/etlYT4GyoXrmBED2FgZHnhc+l9Wsl06cH2BXMAIAAAAABohlpm3K850Vndf3NmNE0hHqDlNbSR8/IvMidQ3LnIZAVsACAAAAAAW42nGHa6q2MCAaaPVwaIDfr8QLyQwjKq23onZJYsqVsAAzE4NwB9AAAABWQAIAAAAAC3DFh5oklLCNLY90bgWm68dFXz65JpAZSp1K99MBTPAQVzACAAAAAAQgZecmxEUZVHoptEQClDwAf8smI3WynQ/i+JBP0g+kQFbAAgAAAAAEUSQGVnAPISD6voD0DiBUqyWKgt2rta0tjmoe+LNt6IAAMxODgAfQAAAAVkACAAAAAAQ5WKvWSB503qeNlOI2Tpjd5blheNr6OBO8pfJfPNstcFcwAgAAAAAKwHgQLSDJ5NwLBQbY5OnblQIsVDpGV7q3RCbFLD1U4/BWwAIAAAAACQ5nED99LnpbqXZuUOUjnO2HTphEAFBjLD4OZeDEYybgADMTg5AH0AAAAFZAAgAAAAAGfhFY3RGRm5ZgWRQef1tXxHBq5Y6fXaLAR4yJhrTBplBXMAIAAAAACKEF0ApLoB6lP2UqTFsTQYNc9OdDrs/vziPGzttGVLKQVsACAAAAAArOO6FyfNRyBi0sPT5iye7M8d16MTLcwRfodZq4uCYKEAAzE5MAB9AAAABWQAIAAAAAAIM73gPcgzgotYHLeMa2zAU4mFsr7CbILUZWfnuKSwagVzACAAAAAAJCSu98uV8xv88f2BIOWzt6p+6EjQStMBdkGPUkgN79cFbAAgAAAAAMGqPGMPxXbmYbVfSa/japvUljht1zZT33TY7ZjAiuPfAAMxOTEAfQAAAAVkACAAAAAAkWmHCUsiMy1pwZTHxVPBzPTrWFBUDqHNrVqcyyt7nO8FcwAgAAAAAMv2CebFRG/br7USELR98sIdgE9OQCRBGV5JZCO+uPMgBWwAIAAAAABt7qSmn3gxJu7aswsbUiwvO+G6lXj/Xhx+J/zQyZxzLAADMTkyAH0AAAAFZAAgAAAAAGInUYv0lP/rK7McM8taEHXRefk8Q2AunrvWqdfSV7UaBXMAIAAAAACE+WPxJ3gan7iRTbIxXXx+bKVcaf8kP4JD8DcwU0aL7wVsACAAAAAAUC4eTprX4DUZn2X+UXYU6QjtiXk+u57yoOPBbPQUmDkAAzE5MwB9AAAABWQAIAAAAACmHlg2ud3cplXlTsNTpvNnY6Qm1Fce0m899COamoDjaQVzACAAAAAArtJQeJIlepBWRU2aYar7+YGYVQ7dfDc1oxgTmA8r9q0FbAAgAAAAAOk45vg5VqZHAFCO3i0Z52SZi5RADf8NXwf68T5yad/DAAMxOTQAfQAAAAVkACAAAAAApzcWSAbZWV/Rq+ylRNqqlJqNVR4fhXrz4633/MQOQgcFcwAgAAAAAN/jz/bsEleiuCl+li83EWlG6UMHA8CyaOMRKCkXkSCPBWwAIAAAAAC3Sd+Qg+uFDKpGZHbrQgokXHQ1az1aFl4YK343OB6hcQAAEmNtAAAAAAAAAAAAABBwYXlsb2FkSWQAAAAAABBmaXJzdE9wZXJhdG9yAAEAAAASc3AAAQAAAAAAAAAQdGYAAQAAABNtbgD/////Y46NN8CHrb4J7f/fE214AP////9jjo03wIetvgnt/18A", + "subType": "06" + } + } + } + }, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDecimalNoPrecision", + "bsonType": "decimal", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + } + } + } + ] + } + } + } + }, + "commandName": "find" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Decimal-Update.json b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Decimal-Update.json new file mode 100644 index 0000000000..8ade3593e6 --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Decimal-Update.json @@ -0,0 +1,1975 @@ +{ + "description": "fle2v2-Rangev2-Decimal-Update", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.0.0", + "topologies": [ + "replicaset" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + 
"database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted" + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll_unencrypted", + "database": "db_unencrypted", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDecimalNoPrecision", + "bsonType": "decimal", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + } + } + } + ] + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "FLE2 Range Decimal. Update.", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDecimalNoPrecision": { + "$numberDecimal": "1" + } + } + }, + "object": "coll" + }, + { + "name": "updateOne", + "arguments": { + "filter": { + "encryptedDecimalNoPrecision": { + "$gt": { + "$numberDecimal": "0" + } + } + }, + "update": { + "$set": { + "encryptedDecimalNoPrecision": { + "$numberDecimal": "2" + } + } + } + }, + "object": "coll", + "expectResult": { + "matchedCount": 1, + "modifiedCount": 1, + "upsertedCount": 0 + } + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": { + "$numberInt": "0" + }, + "encryptedDecimalNoPrecision": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "rbf3AeBEv4wWFAKknqDxRW5cLNkFvbIs6iJjc6LShQY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "n+XAuFnP8Dov9TnhGFxNx0K/MnVM9WbJ7RouEu0ndO0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yRXojuVdn5GQtD97qYlaCL6cOLmZ7Cvcb3wFjkLUIdM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "DuIkdRPITRs55I4SZmgomAHCIsDQmXRhW8+MOznkzSk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SsBk+Et1lTbU+QRPx+xyJ/jMkmfG+QCvQEpip2YYrzA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "crCIzOd8KhHvvUlX7M1v9bhvU4pLdTc+X2SuqoKU5Ek=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "YOWdCw4UrqnxkAaVjqmC4sKQDMVMHEpFGnlxpxdaU6E=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "M3SShp81Ff8tQ632qKbv9MUcN6wjDaBReI0VXNu6Xh4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "gzHlSPxpM0hT75kQvWFzGlOxKvDoiKQZOr19V6l2zXI=", + "subType": "00" + } + 
}, + { + "$binary": { + "base64": "s3JnppOGYw9SL2Q1kMAZs948v2F5PrpXjGei/HioDWs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "cG6+3Gk/zEH68P/uuuwiAUVCuyJwa1LeV+t29FlPPAo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dupdvR3AyJtM+g9NDKiaLVOtGca387JQp8w+V03m7Ig=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "JqEQc5svj2jTvZ6LLA5ivE+kTb/0aRemSEmxk4G7Zrg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "szcXXXKnob+p3SoM4yED2R920LeJ7cVsclPMFTe4CeI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "o1QoGVXmuBdHwHm7aCtGMlMVKrjFdYvJXpoq6uhIAZ0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Jfm5wPlqqLCJRGQIqRq2NGmpn7s0Vrih2H3YAOoI2YU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "zMHLb8ARbsYo8Ld05bqnGFf1Usha6EGb8QKwdSAyps0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yQdtq9lh5pugL7/i0Bj/PuZUUBUIzf+7wj1rl5y736w=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "wGWVZdO7qIuyDg/BqDgqjgoQ02h5YYgwXQB1oCin2NE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "by9HMLj6NTEpgztZ5HSN6GxImkXPcaFINYDzgZY33X8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tWo0vbasi7bXmn/MsOx13VC1IsWtpx/nYp0uj4iMzdA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tQQpndUYd5O87lOtrGjH3wl9VsOK0ray7RMasL90sBM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "cQjXEDCMsOpKLLf+vlTgIHA+cbSJdzqhbSX9Wvh95aA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "7yMpU48IxK9SzP2cx3VnTownGEwFmeFofuuFT97SuuY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "kSOx1kz0CmBgzKQHZlo65ZUY1DIv9A99JRm+Us2y6Ew=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ubQpdPBe6/xvtr+AcXdfYLSvYCR4ot0tivehkCsupb4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xal+iCJ6FTefRQToyoNksc9NCZShyn04NDGi4IYrcoM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "d7jU4iOK50xHxlkSifcxlZFCM46TSgQzoYivxG3HNLY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tJvl2nsBLBVzL3pp6sKWCL4UXeh3q/roYBJjSb74ve0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "OIUCaKRvIx9t1w6Hxlz1IcQTdPNCfdRNwnnTm10W+X0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "A9tvzsiElotOUVIB4CqfQp9mAwqvTM35YkmAR170aHA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "lI8gpK7hpb7c9x4RQugsxMnQay5LZJmwslZdvMx/dcE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dNCzh40U0XvdKnSDi3HRQOWQftEsDVqc4uUvsVFGoq8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "IP+iwEBWBwVVZIdpaMu8k5+soFCz+TZkYn3drKZ9grE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "pnqyh6e0y5svHkJDShlN9CHV0WvMBE4QbtJpQw5ZCXc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "elEl42tbVDoRTLjAhZUFEtXiut4b3PVhg/1ZLZSQdtE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "vHuu2FxwclMHqyE6JBYbTYgbEkB0dqb/JuaxsvfwsmY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xTf7NCe3Gf8QpE78HR5OknlLTKfs9J+RN9UZpH6fnso=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "XiWSasRnJAulGR6+LCVD3mwRObXylqYWR9jvpywq12c=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "MZMxEQ5ikx0PG1YFIExv0UnTZogsvgeOEZTpzvBDn4w=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yZMyMZBDrWbAhvnic7vvIYhmO9m5H2iuv0c8KNZrBzY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xxM14hTPY5j0vvcK2C7YAEjzdsfUTFHozHC0hEo1bxI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": 
"+01rqR1xVwkpGXcstbk1ItJqFVjH6Q8MGxEN3Cm9Y1A=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xOpLV0Z2VTRJ3iWtnWZcsyjXubTIkYWo31cO+HV1o1k=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "BWUOLqgLBqc5NwxVlSV5H3KFQPXbCp7mdo+jF+8cJqY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "fuQb1S6xZDGlrEbK+kI23aL53PP1PVNwqICnZNt9Yzg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SfscnoibFttahLdPVC4Ee+47ewGFKpDSU7M6HX19bKE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "rpSW2awybNVeKtat91VFxqbINoTfNhPfQAu+d73Xtf8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "9M/CP9ccOIIj2LLFmE0GFDO0Ban2wsNalEXfM6+h+1s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "WrEMG49l1ye4MhXs5ZS9tz8P6h+hDvthIg/2wW9ne1Q=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ImNhbfeyfH8qIEeA5ic0s3dAQBdzzTBS+CPsNih9vZ0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dWP33YDSn04UKJN2ogh2Rui0iW/0q2y18OCDRVcfyoo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "lYv0isAtfGh6H9tdp3cp2eHU7q2J+uk7QrgcxtK3w7Y=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "VGMoamB/+7zTOYcY/pqJc96xlv2PdW4hwsIAEIslTDQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yNeBWMF7BnD9wVwz2PgJsvWr77QiVvvWUvJF0+fqBug=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SfpvObJ+tJBXSvqeN7vlOfmhYign635lciYAJIjUtY8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dsen4NqjzVGjpjufiTMs3+gqeD09EbnuogPgxrJECwg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "pxCWVM3sn19NsFEpgHbgLa+PmYlhN3mMiP0Wk8kJhYw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "q11KNvJszjYIB9n9HcC+N4uz11a3eRj1L3BH9scKMDQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "A1PmkgcEToWh1JiVWE6mI5jUu7poxWWuCUt/cgRUUDc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "qJo3Hu4PJeanL7XEaWXO/n3YsodhZyd+MJOOmB9Kpd8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "BkBKLO8URFscfRY9Bav/1+L9mLohDgNr/MkZtGiraIs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "rZq5WA3Hx3xthOyHAJXK//f8pE2qbz7YKu3TIMp9GFY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "X07a/Lm80p5xd4RFs1dNmw+90tmPDPdGiAKVZkxd4zY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6YrBn2ofIw1b5ooakrLOwF41BWrps8OO0H9WH4/rtlE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "0l86Ag5OszXpa78SlOUV3K9nff5iC1p0mRXtLg9M1s4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Hn6yuxFHodeyu7ISlhYrbSf9pTiH4TDEvbYLWjTwFO0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "zdf4y2etKBuIpkEU1zMwoCkCsdisfXZCh8QPamm+drY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "rOQ9oMdiK5xxGH+jPzOvwVqdGGnF3+HkJXxn81s6hp4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "61aKKsE3+BJHHWYvs3xSIBvlRmKswmaOo5rygQJguUg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "KuDb/GIzqDM8wv7m7m8AECiWJbae5EKKtJRugZx7kR0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Q+t8t2TmNUiCIorVr9F3AlVnX+Mpt2ZYvN+s8UGict8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tJRZIpKxUgHyL83kW8cvfjkxN3z6WoNnUg+SQw+LK+k=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "pnUsYjip8SvW0+m9mR5WWTkpK+p6uwJ6yBUAlBnFKMk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "PArHlz+yPRYDycAP/PgnI/AkP8Wgmfg++Vf4UG1Bf0E=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "wnIh53Q3jeK8jEBe1n8kJLa89/H0BxO26ZU8SRIAs9Q=", + 
"subType": "00" + } + }, + { + "$binary": { + "base64": "4F8U59gzBLGhq58PEWQk2nch+R0Va7eTUoxMneReUIA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ihKagIW3uT1dm22ROr/g5QaCpxZVj2+Fs/YSdM2Noco=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "EJtUOOwjkrPUi9mavYAi+Gom9Y2DuFll7aDwo4mq0M0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dIkr8dbaVRQFskAVT6B286BbcBBt1pZPEOcTZqk4ZcI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "aYVAcZYkH/Tieoa1XOjE/zCy5AJcVTHjS0NG2QB7muA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "sBidL6y8TenseetpioIAAtn0lK/7C8MoW4JXpVYi3z8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "0Dd2klU/t4R86c2WJcJDAd57k/N7OjvYSO5Vf8KH8sw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "I3jZ92WEVmZmgaIkLbuWhBxl7EM6bEjiEttgBJunArA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "aGHoQMlgJoGvArjfIbc3nnkoc8SWBxcrN7hSmjMRzos=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "bpiWPnF/KVBQr5F6MEwc5ZZayzIRvQOLDAm4ntwOi8g=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tI7QVKbE6avWgDD9h4QKyFlnTxFCwd2iLySKakxNR/I=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "XGsge0CnoaXgE3rcpKm8AEeku5QVfokS3kcI+JKV1lk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "JQxlryW2Q5WOwfrjAnaZxDvC83Dg6sjRVP5zegf2WiM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "YFuHKJOfoqp1iGVxoFjx7bLYgVdsN4GuUFxEgO9HJ5s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Z6vUdiCR18ylKomf08uxcQHeRtmyav7/Ecvzz4av3k4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SPGo1Ib5AiP/tSllL7Z5PAypvnKdwJLzt8imfIMSEJQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "m94Nh6PFFQFLIib9Cu5LAKavhXnagSHG6F5EF8lD96I=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "pfEkQI98mB+gm1+JbmVurPAODMFPJ4E8DnqfVyUWbSo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "DNj3OVRLbr43s0vd+rgWghOL3FqeO/60npdojC8Ry/M=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "kAYIQrjHVu49W8FTxyxJeiLVRWWjC9fPcBn+Hx1F+Ss=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "aCSO7UVOpoQvu/iridarxkxV1SVxU1i9HVSYXUAeXk4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Gh6hTP/yj1IKlXQ+Q69KTfMlGZjEcXoRLGbQHNFo/1s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "/gDgIFQ4tAlJk3GN48IS5Qa5IPmErwGk8CHxAbp6gs0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "PICyimwPjxpusyKxNssOOwUotAUbygpyEtORsVGXT8g=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "4lu+cBHyAUvuxC6JUNyHLzHsCogGSWFFnUCkDwfQdgI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "pSndkmoNUJwXjgkbkgOrT5f9nSvuoMEZOkwAN9ElRaE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tyW+D4i26QihNM5MuBM+wnt5AdWGSJaJ4X5ydc9iWTU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "9Syjr8RoxUgPKr+O5rsCu07AvcebA4P8IVKyS1NVLWc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "67tPfDYnK2tmrioI51fOBG0ygajcV0pLo5+Zm/rEW7U=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "y0EiPRxYTuS1eVTIaPQUQBBxwkyxNckbePvKgChwd0M=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "NWd+2veAaeXQgR3vCvzlI4R1WW67D5YsVLdoXfdb8qg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "PY5RQqKQsL2GqBBSPNOEVpojNFRX/NijCghIpxD6CZk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "lcvwTyEjFlssCJtdjRpdN6oY+C7bxZY+WA+QAqzj9zg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": 
"CWE7XRNylvTwO/9Fv56dNqUaQWMmESNS/GNIwgBaEI0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ijwlrUeS8nRYqK1F8kiCYF0mNDolEZS+/lJO1Lg93C8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "8KzV+qYGYuIjoNj8eEpnTuHrMYuhzphl80rS6wrODuU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "wDyTLjSEFF895hSQsHvmoEQVS6KIkZOtq1c9dVogm9I=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SGrtPuMYCjUrfKF0Pq/thdaQzmGBMUvlwN3ORIu9tHU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "KySHON3hIoUk4xWcwTqk6IL0kgjzjxgMBObVIkCGvk4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "hBIdS9j0XJPeT4ot73ngELkpUoSixvRBvdOL9z48jY8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Tx6um0q9HjS5ZvlFhvukpI6ORnyrXMWVW1OoxvgqII0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "zFKlyfX5H81+d4A4J3FKn4T5JfG+OWtR06ddyX4Mxas=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "cGgCDuPV7MeMMYEDpgOupqyNP4BQ4H7rBnd2QygumgM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "IPaUoy98v11EoglTpJ4kBlEawoZ8y7BPwzjLYBpkvHQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Pfo4Am6tOWAyZNn8G9W5HWWGC3ZWmX0igI/RRB870Ro=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "fnTSjd7bC1Udoq6iM7UDnHAC/lsIXSHp/Gy332qw+/I=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "fApBgVRrTDyEumkeWs5p3ag9KB48SbU4Si0dl7Ns9rc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "QxudfBItgoCnUj5NXVnSmWH3HK76YtKkMmzn4lyyUYY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "sSOvwhKa29Wq94bZ5jGIiJQGbG1uBrKSBfOYBz/oZeI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "FdaMgwwJ0NKsqmPZLC5oE+/0D74Dfpvig3LaI5yW5Fs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "sRWBy12IERN43BSZIrnBfC9+zFBUdvjTlkqIH81NGt4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "/4tIRpxKhoOwnXAiFn1Z7Xmric4USOIfKvTYQXk3QTc=", + "subType": "00" + } + } + ] + }, + { + "_id": { + "$numberInt": "1" + }, + "encryptedDecimalNoPrecision": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "Mr/laWHUijZT5VT3x2a7crb7wgd/UXOGz8jr8BVqBpM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "wXVD/HSbBljko0jJcaxJ1nrzs2+pchLQqYR3vywS8SU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "VDCpBYsJIxTfcI6Zgf7FTmKMxUffQv+Ys8zt5dlK76I=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "zYDslUwOUVNwTYkETfjceH/PU3bac9X3UuQyYJ19qK0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "rAOmHSz18Jx107xpbv9fYcPOmh/KPAqge0PAtuhIRnc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "BFOB1OGVUen7VsOuS0g8Ti7oDsTt2Yj/k/7ta8YAdGM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "2fckE5SPs0GU+akDkUEM6mm0EtcV3WDE/sQsnTtodlk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "mi9+aNjuwIvaMpSHENvKzKRAmX9cYguo2mXLvOoftHQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "K6TWn4VcWWkz/gkUkLmbtwkG7SNeABICmLDnoYJFlLU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Z+2/cEtGU0Fq7QJFNGA/0y4aWAsw0ncG6X0LYRqwS3c=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "rrSIf+lgcNZFbbUkS9BmE045jRWBpcBJXHzfMVEFuzE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "KlHL3Kyje1/LMIfgbCqw1SolxffJvvgsYBV5y77wxuA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "hzJ1YBoETmYeCh352dBmG8d8Wse/bUcqojTWpWQlgsc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": 
"lSdcllDXx8MA+s0GULjDA1lQkcV0L8/aHtZ6dM2pZ2c=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "HGr7JLTTA7ksAnlmjSIwwdBVvgr3fv46/FTdiCPYpos=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "mMr25v1VwOEVZ8xaNUTHJCcsYqV+kwK6RzGYilxPtJ4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "129hJbziPJzNo0IoTU3bECdge0FtaPW8dm4dyNVNwYU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "doiLJ96qoo+v7NqIAZLq6BI5axV8Id8gT5vyJ1ZZ0PM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "cW/Lcul3xYmfyvI/0x/+ybN78aQmBK1XIGs1EEU09N8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "1aVIwzu9N5EJV9yEES+/g6hOTH7cA2NTcLIc59cu0wU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "kw5tyl7Ew0r1wFyrN1mB9FiVW2hK2BxxxUuJDNWjyjQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ADAY2YBrm6RJBDY/eLLcfNxmSJku+mefz74gH66oyco=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "8gkqB1LojzPrstpFG7RHYmWxXpIlPDTqWnNsXH7XDRU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "TESfVQMDQjfTZmHmUeYUE2XrokJ6CcrsKx/GmypGjOw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "qFM+HFVQ539S0Ouynd1fBHoemFxtU9PRxE5+Dq7Ljy4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "jPiFgUZteSmOg4wf3bsEKCZzcnxmMoILsgp/GaZD+dM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "YaWUgJhYgPNN7TkFK16H8SsQS226JguaVhOIQxZwQNQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "x90/Qk3AgyaFsvWf2KUCu5XF3j76WFSjt/GrnG01060=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ZGWybWL/xlEdMYRFCZDUoz10sywTf7U/7wufsb78lH0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "8l4ganN66jIcdxfHAdYLaym/mdzUUQ8TViw3MDRySPc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "c8p5XEGTqxqvRGVlR+nkxw9uUdoqDqTB0jlYQ361qMA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "1ZGFLlpQBcU3zIUg8MmgWwFKVz/SaA7eSYFrfe3Hb70=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "34529174M77rHr3Ftn9r8jU4a5ztYtyVhMn1wryZSkU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "YkQ4pxFWzc49MS0vZM6S8mNo4wAwo21rePBeF3C+9mI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "MhOf4mYY00KKVhptOcXf0bXB7WfuuM801MRJg4vXPgc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "7pbbD8ihNIYIBJ3tAUPGzHpFPpIeCTAk5L88qCB0/9w=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "C9Q5PoNJTQo6pmNzXEEXUEqH22//UUWY1gqILcIywec=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "AqGVk1QjDNDLYWGRBX/nv9QdGR2SEgXZEhF0EWBAiSE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "/sGI3VCbJUKATULJmhTayPOeVW+5MjWSvVCqS77sRbU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yOtbL0ih7gsuoxVtRrACMz+4N5uo7jIR7zzmtih2Beo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "uA6dkb2Iyg9Su8UNDvZzkPx33kPZtWr/CCuEY+XgzUM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "1DoSFPdHIplqZk+DyWAmEPckWwXw/GdB25NLmzeEZhk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "OfDVS0T3ZuIXI/LNbTp6C9UbPIWLKiMy6Wx+9tqNl+g=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "3PZjHXbmG6GtPz+iapKtQ3yY4PoFFgjIy+fV2xQv1YU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "kaoLN0BoBWsmqE7kKkJQejATmLShd8qffcAmlhsxsGY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "vpiw9KgQdegGmp7IJnSGX2miujRLU0xzs0ITTqbPW7c=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "NuXFf7xGUefYjIUTuMxNUTCfVHrF8oL0AT7dPv5Plk4=", + 
"subType": "00" + } + }, + { + "$binary": { + "base64": "8Tz53LxtfEBJ9eR+d2690kwNsqPV6XyKo2PlqZCbUrc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "e6zsOmHSyV8tyQtSX6BSwui6wK9v1xG3giY/IILJQ2w=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "2fedFMCxa2DzmIpfbDKGXhQg0PPwbUv6vIWdwwlvhms=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yEJKMFnWXTC8tJUfzCInzQRByNEPjHxpw4L4m8No91Q=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "YbFuWwOiFuQyOzIJXDbOkCWC2DyrG+248TBuVCa1pXU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "w7IkwGdrguwDrar5+w0Z3va5wXyZ4VXJkDMISyRjPGo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "YmJUoILTRJPhyIyWyXJTsQ6KSZHHbEpwPVup6Ldm/Ko=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "FvMjcwVZJmfh6FP/yBg2wgskK+KHD8YVUY6WtrE8xbg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "h4HCtD4HyYz0nci49IVAa10Z4NJD/FHnRMV4sRX6qro=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "nC7BpXCmym+a0Is2kReM9cYN2M1Eh5rVo8fjms14Oiw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "1qtVWaeVo649ZZZtN8gXbwLgMWGLhz8beODbvru0I7Q=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Ej+mC0QFyMNIiSjR939S+iGBm7dm+1xObu5IcF/OpbU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "UQ8LbUG3cMegbr9yKfKanAPQE1EfPkFciVDrNqZ5GHY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "4iI3mXIDjnX+ralk1HhJY43mZx2uTJM7hsv9MQzTX7E=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "0WQCcs3rvsasgohERHHCaBM4Iy6yomS4qJ5To3/yYiw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "qDCTVPoue1/DOAGNAlUstdA9Sid8MgEY4e5EzHcVHRk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "9F9Mus0UnlzHb8E8ImxgXtz6SU98YXD0JqswOKw/Bzs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "pctHpHKVBBcsahQ6TNh6/1V1ZrqOtKSAPtATV6BJqh0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "vfR3C/4cPkVdxtNaqtF/v635ONbhTf5WbwJM6s4EXNE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ejP43xUBIex6szDcqExAFpx1IE/Ksi5ywJ84GKDFRrs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "jbP4AWYd3S2f3ejmMG7dS5IbrFol48UUoT+ve3JLN6U=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "CiDifI7958sUjNqJUBQULeyF7x0Up3loPWvYKw9uAuw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "e2dQFsiHqd2BFHNhlSxocjd+cPs4wkcUW/CnCz4KNuM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "PJFckVmzBipqaEqsuP2mkjhJE4qhw36NhfQ9DcOHyEU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "S3MeuJhET/B8VcfZYDR9fvX0nscDj416jdDekhmK11s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "CGVHZRXpuNtQviDB2Kj03Q8uvs4w3RwTgV847R7GwPw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yUGgmgyLrxbEpDVy89XN3c2cmFpZXWWmuJ/35zVZ+Jw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "inb6Q97mL1a9onfNTT8v9wsoi/fz7KXKq3p8j90AU9c=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "CCyYx/4npq9xGO1lsCo8ZJhFO9/tN7DB+/DTE778rYg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "LNnYw4fwbiAZu0kBdAHPEm/OFnreS+oArdB5O/l/I98=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "P006SxmUS/RjiQJVYPdMFnNo3827GIEmSzagggkg05Q=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "oyvwY+WsnYV6UHuPki1o0ILJ2jN4uyXf9yaUNtZJyBA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "36Lk3RHWh1wmtCWC/Yj6jNIo17U5y6SofAgQjzjVxD8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": 
"vOOo8FqeHnuO9mqOYjIb4vgwIwVyXZ5Y+bY5d9tGFUM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "bJiDJjwQRNxqxlGjRm5lLziFhcfTDCnQ/qU1V85qcRg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "2Qgrm1n0wUELAQnpkEiIHB856yv76q8jLbpiucetcm0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "5ciPOYxTK0WDwwYyfs7yiVymwtYQXDELLxmM4JLl4/o=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "31dC2WUSIOKQc4jwT6PikfeYTwi80mTlh7P31T5KNQU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "YluTV2Mu53EGCKLcWfHZb0BM/IPW2xJdG3vYlDMEsM4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dh/8lGo2Ek6KukSwutH6Q35iy8TgV0FN0SJqe0ZVHN8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "EVw6HpIs3BKen2qY2gz4y5dw1JpXilfh07msZfQqJpc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "FYolLla9L8EZMROEdWetozroU40Dnmwwx2jIMrr7c1A=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "8M6k4QIutSIj6CM41vvkQtuFsaGrjoR9SZJVSLbfGKQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "9LM0VoddDNHway442MqY+Z7vohB2UHau/cddshhzf40=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "66i8Ytco4Yq/FMl6pIRZazz3CZlu8fO2OI6Pne0pvHU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "2a/HgX+MjZxjXtSvHgF1yEpHMJBkl8Caee8XrJtn0WM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "frhBM662c4ZVG7mWP8K/HhRjd01lydW/cPcHnDjifqc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6k1T7Q1t668PBqv6fwpVnT1HWh7Am5LtbKvwPJKcpGU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "UlJ5Edfusp8S/Pyhw6KTglIejmbr1HO0zUeHn/qFETA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "jsxsB+1ECB3assUdoC333do9tYH+LglHmVSJHy4N8Hg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "2nzIQxGYF7j3bGsIesECEOqhObKs/9ywknPHeJ3yges=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xJYKtuWrX90JrJVoYtnwP7Ce59XQGFYoalxpNfBXEH0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "NLI5lriBTleGCELcHBtNnmnvwSRkHHaLOX4cKboMgTw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "hUOQV0RmE5aJdJww1AR9rirJG4zOYPo+6cCkgn/BGvQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "h4G2Of76AgxcUziBwCyH+ayMOpdBWzg4yFrTfehSC2c=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "VuamM75RzGfQpj2/Y1jSVuQLrhy6OAwlZxjuQLB/9Ss=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "kn9+hLq7hvw02xr9vrplOCDXKBTuFhfbX7d5v/l85Pg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "fAiGqKyLZpGngBYFbtYUYt8LUrJ49vYafiboifTDjxs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "BxRILymgfVJCczqjUIWXcfrfSgrrYkxTM5VTg0HkZLY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "CrFY/PzfPU2zsFkGLu/dI6mEeizZzCR+uYgjZBAHro0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "AEbrIuwvXLTtYgMjOqnGQ8y8axUn5Ukrn7UZRSyfQVw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ouWeVH3PEFg+dKWlXc6BmqirJOaVWjJbMzZbCsce4dA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "+hd6xFB+EG+kVP7WH4uMd1CLaWMnt5xJRaY/Guuga9Q=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "zmpGalfAOL3gmcUMJYcLYIRT/2VDO/1Dw4KdYZoNcng=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "2PbHAoM/46J2UIZ/vyksKzmVVfxA7YUyIxWeL/N/vBk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "7fD9x+zk5MVFesb59Klqiwwmve7P5ON/5COURXj5smE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tlrNQ4jaq051iaWonuv1sSrYhKkL1LtNZuHsvATha3s=", + 
"subType": "00" + } + }, + { + "$binary": { + "base64": "fBodm28iClNpvlRyVq0dOdXQ08S7/N3aDwid+PdWvRo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "O+/nnRqT3Zv7yMMGug8GhKHaWy6u7BfRGtZoj0sdN1c=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "5AZZ/RTMY4Photnm/cpXZr/HnFRi3eljacMsipkJLHA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "oFVyo/kgoMxBIk2VE52ySSimeyU+Gr0EfCwapXnTpKA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Z8v59DfcnviA0mzvnUk+URVO0UuqAWvtarEgJva/n1c=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "P64GOntZ+zBJEHkigoh9FSxSO+rJTqR20z5aiGQ9an4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xMbSuDPfWuO/Dm7wuVl06GnzG9uzTlJJX9vFy7boGlY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "kXPB19mRClxdH2UsHwlttS6lLU2uHvzuZgZz7kC45jU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "NDVjVYXAw4k0w4tFzvs7QDq39aaU3HQor4I2XMKKnCk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "uKw/+ErVfpTO1dGUfd3T/eWfZW3nUxXCdBGdjvHtZ88=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "av0uxEzWkizYWm0QUM/MN1hLibnxPvCWJKwjOV4yVQY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ERwUC47dvgOBzIsEESMIioLYbFOxOe8PtJTnmDkKuHM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "2gseKlG5Le12fS/vj4eaED4lturF16kAgJ1TpW3HxEE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "7Cvg0Y3j/5i2F1TeXxlMmU7xwif5dCmwkZAOrVC5K2Y=", + "subType": "00" + } + } + ] + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 0, + "encryptedDecimalNoPrecision": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDecimalNoPrecision", + "bsonType": "decimal", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encryptedDecimalNoPrecision": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDecimalNoPrecision", + "bsonType": "decimal", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + 
"sparsity": { + "$numberLong": "1" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "update": "default", + "ordered": true, + "updates": [ + { + "q": { + "encryptedDecimalNoPrecision": { + "$gt": { + "$binary": { + "base64": "DR1jAAADcGF5bG9hZACxYgAABGcAnWIAAAMwAH0AAAAFZAAgAAAAAJu2KgiI8vM+kz9qD3ZQzFQY5qbgYqCqHG5R4jAlnlwXBXMAIAAAAAAAUXxFXsz764T79sGCdhxvNd5b6E/9p61FonsHyEIhogVsACAAAAAAt19RL3Oo5ni5L8kcvgOJYLgVYyXJExwP8pkuzLG7f/kAAzEAfQAAAAVkACAAAAAAPQPvL0ARjujSv2Rkm8r7spVsgeC1K3FWcskGGZ3OdDIFcwAgAAAAACgNn660GmefR8jLqzgR1u5O+Uocx9GyEHiBqVGko5FZBWwAIAAAAADflr+fsnZngm6KRWYgHa9JzK+bXogWl9evBU9sQUHPHQADMgB9AAAABWQAIAAAAAD2Zi6kcxmaD2mY3VWrP+wYJMPg6cSBIYPapxaFQxYFdQVzACAAAAAAM/cV36BLBY3xFBXsXJY8M9EHHOc/qrmdc2CJmj3M89gFbAAgAAAAAOpydOrKxx6m2gquSDV2Vv3w10GocmNCFeOo/fRhRH9JAAMzAH0AAAAFZAAgAAAAAOaNqI9srQ/mI9gwbk+VkizGBBH/PPWOVusgnfPk3tY1BXMAIAAAAAAc96O/pwKCmHCagT6T/QV/wz4vqO+R22GsZ1dse2Vg6QVsACAAAAAAgzIak+Q3UFLTHXPmJ+MuEklFtR3eLtvM+jdKkmGCV/YAAzQAfQAAAAVkACAAAAAA0XlQgy/Yu97EQOjronl9b3dcR1DFn3deuVhtTLbJZHkFcwAgAAAAACoMnpVl6EFJak8A+t5N4RFnQhkQEBnNAx8wDqmq5U/dBWwAIAAAAACR26FJif673qpwF1J1FEkQGJ1Ywcr/ZW6JQ7meGqzt1QADNQB9AAAABWQAIAAAAAAOtpNexRxfv0yRFvZO9DhlkpU4mDuAb8ykdLnE5Vf1VAVzACAAAAAAeblFKm/30orP16uQpZslvsoS8s0xfNPIBlw3VkHeekYFbAAgAAAAAPEoHj87sYE+nBut52/LPvleWQBzB/uaJFnosxp4NRO2AAM2AH0AAAAFZAAgAAAAAIr8xAFm1zPmrvW4Vy5Ct0W8FxMmyPmFzdWVzesBhAJFBXMAIAAAAABYeeXjJEzTHwxab6pUiCRiZjxgtN59a1y8Szy3hfkg+gVsACAAAAAAJuoY4rF8mbI+nKb+5XbZShJ8191o/e8ZCRHE0O4Ey8MAAzcAfQAAAAVkACAAAAAAl+ibLk0/+EwoqeC8S8cGgAtjtpQWGEZDsybMPnrrkwEFcwAgAAAAAHPPBudWgQ+HUorLDpJMqhS9VBF2VF5aLcxgrM1s+yU7BWwAIAAAAAAcCcBR2Vyv5pAFbaOU97yovuOi1+ATDnLLcAUqHecXcAADOAB9AAAABWQAIAAAAACR9erwLTb+tcWFZgJ2MEfM0PKI9uuwIjDTHADRFgD+SQVzACAAAAAAcOop8TXsGUVQoKhzUllMYWxL93xCOkwtIpV8Q6hiSYYFbAAgAAAAAKXKmh4V8veYwob1H03Q3p3PN8SRAaQwDT34KlNVUjiDAAM5AH0AAAAFZAAgAAAAALv0vCPgh7QpmM8Ug6ad5ioZJCh7pLMdT8FYyQioBQ6KBXMAIAAAAADsCPyIG8t6ApQkRk1fX/sfc1kpuWCWP8gAEpnYoBSHrQVsACAAAAAAJe/r67N6d8uTiogvfoR9rEXbIDjyLb9EVdqkayFFGaYAAzEwAH0AAAAFZAAgAAAAAIW4AxJgYoM0pcNTwk1RSbyjZGIqgKL1hcTJmNrnZmoPBXMAIAAAAAAZpfx3EFO0vY0f1eHnE0PazgqeNDTaj+pPJMUNW8lFrAVsACAAAAAAP+Um2vwW6Bj6vuz9DKz6+6aWkoKoEmFNoiz/xXm7lOsAAzExAH0AAAAFZAAgAAAAAKliO6L9zgeuufjj174hvmQGNRbmYYs9yAirL7OxwEW3BXMAIAAAAAAqU7vs3DWUQ95Eq8OejwWnD0GuXd+ASi/uD6S0l8MM1QVsACAAAAAAb9legYzsfctBPpHyl7YWpPmLr5QiNZFND/50N1vv2MUAAzEyAH0AAAAFZAAgAAAAAOGQcCBkk+j/Kzjt/Cs6g3BZPJG81wIHBS8JewHGpgk+BXMAIAAAAABjrxZXWCkdzrExwCgyHaafuPSQ4V4x2k9kUCAqUaYKDQVsACAAAAAADBU6KefT0v8zSmseaMNmQxKjJar72y7MojLFhkEHqrUAAzEzAH0AAAAFZAAgAAAAAPmCNEt4t97waOSd5hNi2fNCdWEkmcFJ37LI9k4Az4/5BXMAIAAAAABX7DuDPNg+duvELf3NbLWkPMFw2HGLgWGHyVWcPvSNCAVsACAAAAAAS7El1FtZ5STh8Q1FguvieyYX9b2DF1DFVsb9hzxXYRsAAzE0AH0AAAAFZAAgAAAAAD4vtVUYRNB+FD9yoQ2FVJH3nMeJeKbi6eZfth638YqbBXMAIAAAAAANCuUB4OdmuD6LaDK2f3vaqfgYYvg40wDXOBbcFjTqLwVsACAAAAAA9hqC2VoJBjwR7hcQ45xO8ZVojwC83jiRacCaDj6Px2gAAzE1AH0AAAAFZAAgAAAAAJPIRzjmTjbdIvshG6UslbEOd797ZSIdjGAhGWxVQvK1BXMAIAAAAABgmJ0Jh8WLs9IYs/a7DBjDWd8J3thW/AGJK7zDnMeYOAVsACAAAAAAi9zAsyAuou2oiCUHGc6QefLUkACa9IgeBhGu9W/r0X8AAzE2AH0AAAAFZAAgAAAAAABQyKQPoW8wGPIqnsTv69+DzIdRkohRhOhDmyVHkw9WBXMAIAAAAAAqWA2X4tB/h3O1Xlawtz6ndI6WaTwgU1QYflL35opu5gVsACAAAAAAWI/Gj5aZMwDIxztqmVL0g5LBcI8EdKEc2UA28pnekQoAAzE3AH0AAAAFZAAgAAAAACB7NOyGQ1Id3MYnxtBXqyZ5Ul/lHH6p1b10U63DfT6bBXMAIAAAAADpOryIcndxztkHSfLN3Kzq29sD8djS0PspDSqERMqokQVsACAAAAAADatsMW4ezgnyi1PiP7xk+gA4AFIN/fb5uJqfVkjg4UoAAzE4AH0AAAAFZAAgAAAAAKVfXLfs8XA14CRTB56oZwV+bFJN5BHraTXbqEXZDmTkBXMAIAAAAAASRWTsfGOpqdffiOodoqIgBzG/yzFyjR5CfUsIUIWGpgVsACAAAAAAkgCHbCwyX640/0Ni8+
MoYxeHUiC+FSU4Mn9jTLYtgZgAAzE5AH0AAAAFZAAgAAAAAH/aZr4EuS0/noQR9rcF8vwoaxnxrwgOsSJ0ys8PkHhGBXMAIAAAAACd7ObGQW7qfddcvyxRTkPuvq/PHu7+6I5dxwS1Lzy5XAVsACAAAAAA3q0eKdV7KeU3pc+CtfypKR7BPxwaf30yu0j9FXeOOboAAzIwAH0AAAAFZAAgAAAAAKvlcpFFNq0oA+urq3w6d80PK1HHHw0H0yVWvU9aHijXBXMAIAAAAADWnAHQ5Fhlcjawki7kWzdqjM2f6IdGJblojrYElWjsZgVsACAAAAAAO0wvY66l24gx8nRxyVGC0QcTztIi81Kx3ndRhuZr6W4AAzIxAH0AAAAFZAAgAAAAAH/2aMezEOddrq+dNOkDrdqf13h2ttOnexZsJxG1G6PNBXMAIAAAAABNtgnibjC4VKy5poYjvdsBBnVvDTF/4mmEAxsXVgZVKgVsACAAAAAAqvadzJFLqQbs8WxgZ2D2X+XnaPSDMLCVVgWxx5jnLcYAAzIyAH0AAAAFZAAgAAAAAF2wZoDL6/V59QqO8vdRZWDpXpkV4h4KOCSn5e7x7nmzBXMAIAAAAADLZBu7LCYjbThaVUqMK14H/elrVOYIKJQCx4C9Yjw37gVsACAAAAAAEh6Vs81jLU204aGpL90fmYTm5i5R8/RT1uIbg6VU3HwAAzIzAH0AAAAFZAAgAAAAAH27yYaLn9zh2CpvaoomUPercSfJRUmBY6XFqmhcXi9QBXMAIAAAAAAUwumVlIYIs9JhDhSj0R0+59psCMsFk94E62VxkPt42QVsACAAAAAAT5x2hCCd2bpmpnyWaxas8nSxTc8e4C9DfKaqr0ABEysAAzI0AH0AAAAFZAAgAAAAALMg2kNAO4AFFs/mW3In04yFeN4AP6Vo0klyUoT06RquBXMAIAAAAAAgGWJbeIdwlpqXCyVIYSs0dt54Rfc8JF4b8uYc+YUj0AVsACAAAAAAWHeWxIkyvXTOWvfZzqtPXjfGaWWKjGSIQENTU3zBCrsAAzI1AH0AAAAFZAAgAAAAALas/i1T2DFCEmrrLEi7O2ngJZyFHialOoedVXS+OjenBXMAIAAAAAA1kK0QxY4REcGxHeMkgumyF7iwlsRFtw9MlbSSoQY7uAVsACAAAAAAUNlpMJZs1p4HfsD4Q4WZ4TBEi6Oc2fX34rzyynqWCdwAAzI2AH0AAAAFZAAgAAAAAP1TejmWg1CEuNSMt6NUgeQ5lT+oBoeyF7d2l5xQrbXWBXMAIAAAAABPX0kj6obggdJShmqtVfueKHplH4ZrXusiwrRDHMOKeQVsACAAAAAAIYOsNwC3DA7fLcOzqdr0bOFdHCfmK8tLwPoaE9uKOosAAzI3AH0AAAAFZAAgAAAAAMrKn+QPa/NxYezNhlOX9nyEkN1kE/gW7EuZkVqYl0b8BXMAIAAAAABUoZMSPUywRGfX2EEencJEKH5x/P9ySUVrhStAwgR/LgVsACAAAAAAMgZFH6lQIIDrgHnFeslv3ld20ynwQjQJt3cAp4GgrFkAAzI4AH0AAAAFZAAgAAAAAMmD1+a+oVbiUZd1HuZqdgtdVsVKwuWAn3/M1B6QGBM3BXMAIAAAAACLyytOYuZ9WEsIrrtJbXUx4QgipbaAbmlJvSZVkGi0CAVsACAAAAAA4v1lSp5H9BB+HYJ4bH43tC8aeuPZMf78Ng1JOhJh190AAzI5AH0AAAAFZAAgAAAAAOVKV7IuFwmYP1qVv8h0NvJmfPICu8yQhzjG7oJdTLDoBXMAIAAAAABL70XLfQLKRsw1deJ2MUvxSWKxpF/Ez73jqtbLvqbuogVsACAAAAAAvfgzIorXxE91dDt4nQxYfntTsx0M8Gzdsao5naQqcRUAAzMwAH0AAAAFZAAgAAAAAKS/1RSAQma+xV9rz04IcdzmavtrBDjOKPM+Z2NEyYfPBXMAIAAAAAAOJDWGORDgfRv8+w5nunh41wXb2hCA0MRzwnLnQtIqPgVsACAAAAAAf42C1+T7xdHEFF83+c2mF5S8PuuL22ogXXELnRAZ4boAAzMxAH0AAAAFZAAgAAAAAFeq8o82uNY1X8cH6OhdTzHNBUnCChsEDs5tm0kPBz3qBXMAIAAAAABaxMBbsaeEj/EDtr8nZfrhhhirBRPJwVamDo5WwbgvTQVsACAAAAAAMbH453A+BYAaDOTo5kdhV1VdND1avNwvshEG/4MIJjQAAzMyAH0AAAAFZAAgAAAAAI8IKIfDrohHh2cjspJHCovqroSr5N3QyVtNzFvT5+FzBXMAIAAAAABXHXteKG0DoOMmECKp6ro1MZNQvXGzqTDdZ0DUc8QfFAVsACAAAAAA/w5s++XYmO+9TWTbtGc3n3ndV4T9JUribIbF4jmDLSMAAzMzAH0AAAAFZAAgAAAAAJkHvm15kIu1OtAiaByj5ieWqzxiu/epK6c/9+KYIrB0BXMAIAAAAACzg5TcyANk0nes/wCJudd1BwlkWWF6zw3nGclq5v3SJQVsACAAAAAAvruXHTT3irPJLyWpI1j/Xwf2FeIE/IV+6Z49pqRzISoAAzM0AH0AAAAFZAAgAAAAAAYSOvEWWuSg1Aym7EssNLR+xsY7e9BcwsX4JKlnSHJcBXMAIAAAAABT48eY3PXVDOjw7JpNjOe1j2JyI3LjDnQoqZ8Je5B2KgVsACAAAAAAU2815RR57TQ9uDg0XjWjBkAKvf8yssxDMzrM4+FqP6AAAzM1AH0AAAAFZAAgAAAAAGQxC9L1e9DfO5XZvX1yvc3hTLtQEdKO9FPMkyg0Y9ZABXMAIAAAAADtmcMNJwdWLxQEArMGZQyzpnu+Z5yMmPAkvgq4eAKwNQVsACAAAAAAJ88zt4Y/Hoqh+zrf6KCOiUwHbOzCxSfp6k/qsZaYGEgAAzM2AH0AAAAFZAAgAAAAADLHK2LNCNRO0pv8n4fAsxwtUqCNnVK8rRgNiQfXpHSdBXMAIAAAAACf16EBIHRKD3SzjRW+LMOl+47QXA3CJhMzlcqyFRW22AVsACAAAAAAMGz4fAOa0EoVv90fUffwLjBrQhHATf+NdlgCR65vujAAAzM3AH0AAAAFZAAgAAAAAHiZJiXKNF8bbukQGsdYkEi95I+FSBHy1I5/hK2uEZruBXMAIAAAAADE+lZBa8HDUJPN+bF6xI9x4N7GF9pj3vBR7y0BcfFhBAVsACAAAAAAGIEN6sfqq30nyxW4dxDgXr/jz5HmvA9T1jx/pKCn4zgAAzM4AH0AAAAFZAAgAAAAAI1oa2OIw5TvhT14tYCGmhanUoYcCZtNbrVbeoMldHNZBXMAIAAAAAAx2nS0Ipblf2XOgBiUOuJFBupBhe7nb6QPLZlA4aMPCgVsACAAAAAA9xu828hugIgo0E3de9dZD+gTpVUGlwtDba+tw/WcbUoAAzM5AH0AAAAFZAAgAAAAABgTWS3Yap7Q59hii/uPPimHWXsr+DUmsqfwt/X73qsOBXMAIAAAAACKK05liW5KrmEAv
tpCB1WUltruzUylDDpjea//UlWoOAVsACAAAAAAcgN4P/wakJ5aJK5c1bvJBqpVGND221dli2YicPFfuAYAAzQwAH0AAAAFZAAgAAAAABOAnBPXDp6i9TISQXvcNKwGDLepZTu3cKrB4vKnSCjBBXMAIAAAAADjjzZO7UowAAvpwyG8BNOVqLCccMFk3aDK4unUeft5ywVsACAAAAAA4zkCd4k9gvfXoD1C7vwTjNcdVJwEARh8h/cxZ4PNMfgAAzQxAH0AAAAFZAAgAAAAAHN8hyvT1lYrAsdiV5GBdd5jhtrAYE/KnSjw2Ka9hjz9BXMAIAAAAAD794JK7EeXBs+D7yOVK7nWF8SbZ/7U8gZ7nnT9JFNwTAVsACAAAAAAg8Wt1HO3NhByq2ggux2a4Lo6Gryr24rEFIqh2acrwWMAAzQyAH0AAAAFZAAgAAAAAO93bPrq8bsnp1AtNd9ETnXIz0lH/2HYN/vuw9wA3fyFBXMAIAAAAABHlls5fbaF2oAGqptC481XQ4eYxInTC29aElfmVZgDUgVsACAAAAAANoQXEWpXJpgrSNK/cKi/m7oYhuSRlp1IZBF0bqTEATcAAzQzAH0AAAAFZAAgAAAAAL1YsAZm1SA0ztU6ySIrQgCCA74V6rr0/4iIygCcaJL6BXMAIAAAAADTXWTHWovGmUR1Zg9l/Aqq9H5mOCJQQrb/Dfae7e3wKAVsACAAAAAA5dunyJK6/SVfDD0t9QlNBcFqoZnf9legRjHaLSKAoQMAAzQ0AH0AAAAFZAAgAAAAAEoFAeHk0RZ9kD+cJRD3j7PcE5gzWKnyBrF1I/MDNp5mBXMAIAAAAACgHtc2hMBRSZjKw8RAdDHK+Pi1HeyjiBuAslGVNcW5tAVsACAAAAAAXzBLfq+GxRtX4Wa9fazA49DBLG6AjZm2XODStJKH8D0AAzQ1AH0AAAAFZAAgAAAAAAW+7DmSN/LX+/0uBVJDHIc2dhxAGz4+ehyyz8fAnNGoBXMAIAAAAAA6Ilw42EvvfLJ3Eq8Afd+FjPoPcQutZO6ltmCLEr8kxQVsACAAAAAAbbZalyo07BbFjPFlYmbmv0z023eT9eLkHqeVUnfUAUAAAzQ2AH0AAAAFZAAgAAAAANBdV7M7kuYO3EMoQItAbXv4t2cIhfaT9V6+s4cg9djlBXMAIAAAAABvz4MIvZWxxrcJCL5qxLfFhXiUYB1OLHdKEjco94SgDgVsACAAAAAAK2GVGvyPIKolF/ECcmfmkVcf1/IZNcaTv96N92yGrkEAAzQ3AH0AAAAFZAAgAAAAAMoAoiAn1kc79j5oPZtlMWHMhhgwNhLUnvqkqIFvcH1NBXMAIAAAAADcJTW7WiCyW0Z9YDUYwppXhLj4Ac1povpJvcAq+i48MQVsACAAAAAAIGxGDzoeB3PTmudl4+j6piQB++e33EEzuzAiXcqGxvUAAzQ4AH0AAAAFZAAgAAAAACI3j5QP7dWHpcT6WO/OhsWwRJNASBYqIBDNzW8IorEyBXMAIAAAAABxUpBSjXwCKDdGP9hYU+RvyR+96kChfvyyRC4jZmztqAVsACAAAAAAvBCHguWswb4X0xdcAryCvZgQuthXzt7597bJ5VxAMdgAAzQ5AH0AAAAFZAAgAAAAAKsbycEuQSeNrF8Qnxqw3x3og8JmQabwGqnDbqzFRVrrBXMAIAAAAACno/3ef2JZJS93SVVzmOZSN+jjJHT8s0XYq2M46d2sLAVsACAAAAAAAt5zLJG+/j4K8rnkFtAn8IvdUVNefe6utJ3rdzgwudIAAzUwAH0AAAAFZAAgAAAAAPXIcoO8TiULqlxzb74NFg+I8kWX5uXIDUPnh2DobIoMBXMAIAAAAADR6/drkdTpnr9g1XNvKDwtBRBdKn7c2c4ZNUVK5CThdQVsACAAAAAAJqOA1c6KVog3F4Hb/GfDb3jCxXDRTqpXWSbMH4ePIJsAAzUxAH0AAAAFZAAgAAAAAEa03ZOJmfHT6/nVadvIw71jVxEuIloyvxXraYEW7u7pBXMAIAAAAADzRlBJK75FLiKjz3djqcgjCLo/e3yntI3MnPS48OORhgVsACAAAAAAnQhx4Rnyj081XrLRLD5NLpWmRWCsd0M9Hl7Jl19R0h8AAzUyAH0AAAAFZAAgAAAAAKx8NLSZUU04pSSGmHa5fh2oLHsEN5mmNMNHL95/tuC9BXMAIAAAAAA59hcXVaN3MNdHoo11OcH1aPRzHCwpVjO9mGfMz4xh3QVsACAAAAAAYIPdjV2XbPj7dBeHPwnwhVU7zMuJ+xtMUW5mIOYtmdAAAzUzAH0AAAAFZAAgAAAAAHNKAUxUqBFNS9Ea9NgCZoXMWgwhP4x0/OvoaPRWMquXBXMAIAAAAABUZ551mnP4ZjX+PXU9ttomzuOpo427MVynpkyq+nsYCQVsACAAAAAALnVK5p2tTTeZEh1zYt4iqKIQT9Z0si//Hy1L85oF+5IAAzU0AH0AAAAFZAAgAAAAALfGXDlyDVcGaqtyHkLT0qpuRhJQLgCxtznazhFtuyn/BXMAIAAAAABipxlXDq14C62pXhwAeen5+syA+/C6bN4rtZYcO4zKwAVsACAAAAAAXUf0pzUq0NhLYagWDap4uEiwq5rLpcx29rWbt1NYMsMAAzU1AH0AAAAFZAAgAAAAANoEr8sheJjg4UCfBkuUzarU9NFoy1xwbXjs5ifVDeA9BXMAIAAAAABPoyTf6M+xeZVGES4aNzVlq7LgjqZXJ/QunjYVusGUEAVsACAAAAAA1hA2gMeZZPUNytk9K+lB1RCqWRudRr7GtadJlExJf8oAAzU2AH0AAAAFZAAgAAAAAKvDiK+xjlBe1uQ3SZTNQl2lClIIvpP/5CHwY6Kb3WlgBXMAIAAAAAANnxImq5MFbWaRBHdJp+yD09bVlcFtiFDYsy1eDZj+iQVsACAAAAAAWtsyO+FxMPSIezwsV1TJD8ZrXAdRnQM6DJ+f+1V3qEkAAzU3AH0AAAAFZAAgAAAAAF49IlFH9RmSUSvUQpEPUedEksrQUcjsOv44nMkwXhjzBXMAIAAAAADJtWGbk0bZzmk20obz+mNsp86UCu/nLLlbg7ppxYn7PgVsACAAAAAA3k0Tj/XgPQtcYijH8cIlQoe/VXf15q1nrZNmg7yWYEgAAzU4AH0AAAAFZAAgAAAAAOuSJyuvz50lp3BzXlFKnq62QkN2quNU1Gq1IDsnFoJCBXMAIAAAAAAqavH1d93XV3IzshWlMnzznucadBF0ND092/2ApI1AcAVsACAAAAAAzUrK4kpoKCmcpdZlZNI13fddjdoAseVe67jaX1LobIIAAzU5AH0AAAAFZAAgAAAAALtgC4Whb4ZdkCiI30zY6fwlsxSa7lEaOAU3SfUXr02XBXMAIAAAAACgdZ6U1ZVgUaZZwbIaCdlANpCw6TZV0bwg3DS1NC/mnAVsACAAAAAAzI49hdpp0PbO7S2KexISxC16sE73EUAEyuqUFAC/J48AAzYwAH0AAAAFZAAgAAAAAF6PfplcGp6v
ek1ThwenMHVkbZgrc/dHgdsgx1VdPqZ5BXMAIAAAAACha3qhWkqmuwJSEXPozDO8y1ZdRLyzt9Crt2vjGnT7AAVsACAAAAAA7nvcU59+LwxGupSF21jAeAE0x7JE94tjRkJfgM1yKU8AAzYxAH0AAAAFZAAgAAAAAKoLEhLvLjKc7lhOJfx+VrGJCx9tXlOSa9bxQzGR6rfbBXMAIAAAAAAIDK5wNnjRMBzET7x/KAMExL/zi1IumJM92XTgXfoPoAVsACAAAAAAFkUYWFwNr815dEdFqp+TiIozDcq5IBNVkyMoDjharDQAAzYyAH0AAAAFZAAgAAAAADoQv6lutRmh5scQFvIW6K5JBquLxszuygM1tzBiGknIBXMAIAAAAADAD+JjW7FoBQ76/rsECmmcL76bmyfXpUU/awqIsZdO+wVsACAAAAAAPFHdLw3jssmEXsgtvl/RBNaUCRA1kgSwsofG364VOvQAAzYzAH0AAAAFZAAgAAAAAJNHUGAgn56KekghO19d11nai3lAh0JAlWfeP+6w4lJBBXMAIAAAAAD9XGJlvz59msJvA6St9fKW9CG4JoHV61rlWWnkdBRLzwVsACAAAAAAxwP/X/InJJHmrjznvahIMgj6pQR30B62UtHCthSjrP0AAzY0AH0AAAAFZAAgAAAAAHgYoMGjEE6fAlAhICv0+doHcVX8CmMVxyq7+jlyGrvmBXMAIAAAAAC/5MQZgTHuIr/O5Z3mXPvqrom5JTQ8IeSpQGhO9sB+8gVsACAAAAAAuPSXVmJUAUpTQg/A9Bu1hYczZF58KEhVofakygbsvJQAAzY1AH0AAAAFZAAgAAAAANpIljbxHOM7pydY877gpRQvYY2TGK7igqgGsavqGPBABXMAIAAAAAAqHyEu9gpurPOulApPnr0x9wrygY/7mXe9rAC+tPK80wVsACAAAAAA7gkPzNsS3gCxdFBWbSW9tkBjoR5ib+saDvpGSB3A3ogAAzY2AH0AAAAFZAAgAAAAAGR+gEaZTeGNgG9BuM1bX2R9ed4FCxBA9F9QvdQDAjZwBXMAIAAAAABSkrYFQ6pf8MZ1flgmeIRkxaSh/Eep4Btdx4QYnGGnwAVsACAAAAAApRovMiV00hm/pEcT4XBsyPNw0eo8RLAX/fuabjdU+uwAAzY3AH0AAAAFZAAgAAAAAFNprhQ3ZwIcYbuzLolAT5n/vc14P9kUUQComDu6eFyKBXMAIAAAAAAcx9z9pk32YbPV/sfPZl9ALIEVsqoLXgqWLVK/tP+heAVsACAAAAAA/qxvuvJbAHwwhfrPVpmCFzNvg2cU/NXaWgqgYUZpgXwAAzY4AH0AAAAFZAAgAAAAADgyPqQdqQrgfmJjRFAILTHzXbdw5kpKyfeoEcy6YYG/BXMAIAAAAAAE+3XsBQ8VAxAkN81au+f3FDeCD/s7KoZD+fnM1MJSSAVsACAAAAAAhRnjrXecwV0yeCWKJ5J/x12Xx4qVJahsCEVHB/1U2rcAAzY5AH0AAAAFZAAgAAAAAI0CT7JNngTCTUSei1Arw7eHWCD0jumv2rb7imjWIlWABXMAIAAAAABSP8t6ya0SyCphXMwnru6ZUDXWElN0NfBvEOhDvW9bJQVsACAAAAAAGWeGmBNDRaMtvm7Rv+8TJ2sJ4WNXKcp3tqpv5Se9Ut4AAzcwAH0AAAAFZAAgAAAAAD/FIrGYFDjyYmVb7oTMVwweWP7A6F9LnyIuNO4MjBnXBXMAIAAAAACIZgJCQRZu7NhuNMyOqCn1tf+DfU1qm10TPCfj5JYV3wVsACAAAAAA5hmY4ptuNxULGf87SUFXQWGAONsL9U29duh8xqsHtxoAAzcxAH0AAAAFZAAgAAAAAHIkVuNDkSS1cHIThKc/O0r2/ubaABTOi8Q1r/dvBAsEBXMAIAAAAADdHYqchEiJLM340c3Q4vJABmmth3+MKzwLYlsG6GS7sQVsACAAAAAADa+KP/pdTiG22l+ZWd30P1iHjnBF4zSNRdFm0oEK82kAAzcyAH0AAAAFZAAgAAAAAJmoDILNhC6kn3masElfnjIjP1VjsjRavGk1gSUIjh1NBXMAIAAAAAD97Ilvp3XF8T6MmVVcxMPcdL80RgQ09UoC6PnoOvZ1IQVsACAAAAAA2RK3Xng6v8kpvfVW9tkVXjpE+BSnx9/+Fw85Evs+kUEAAzczAH0AAAAFZAAgAAAAAI5bm3YO0Xgf0VT+qjVTTfvckecM3Cwqj7DTKZXf8/NXBXMAIAAAAAD/m+h8fBhWaHm6Ykuz0WX1xL4Eme3ErLObyEVJf8NCywVsACAAAAAAfb1VZZCqs2ivYbRzX4p5CtaCkKW+g20Pr57FWXzEZi8AAzc0AH0AAAAFZAAgAAAAANqo4+p6qdtCzcB4BX1wQ6llU7eFBnuu4MtZwp4B6mDlBXMAIAAAAAAGiz+VaukMZ+6IH4jtn4KWWdKK4/W+O+gRioQDrfzpMgVsACAAAAAAG4YYkTp80EKo59mlHExDodRQFR7njhR5dmISwUJ6ukAAAzc1AH0AAAAFZAAgAAAAAPrFXmHP2Y4YAm7b/aqsdn/DPoDkv7B8egWkfe23XsM1BXMAIAAAAAAGhwpKAr7skeqHm3oseSbO7qKNhmYsuUrECBxJ5k+D2AVsACAAAAAAAqPQi9luYAu3GrFCEsVjd9z2zIDcp6SPTR2w6KQEr+IAAzc2AH0AAAAFZAAgAAAAABzjYxwAjXxXc0Uxv18rH8I3my0Aguow0kTwKyxbrm+cBXMAIAAAAADVbqJVr6IdokuhXkEtXF0C2gINLiAjMVN20lE20Vmp2QVsACAAAAAAD7K1Fx4gFaaizkIUrf+EGXQeG7QX1jadhGc6Ji471H8AAzc3AH0AAAAFZAAgAAAAAFMm2feF2fFCm/UC6AfIyepX/xJDSmnnolQIBnHcPmb5BXMAIAAAAABLI11kFrQoaNVZFmq/38aRNImPOjdJh0Lo6irI8M/AaAVsACAAAAAAOWul0oVqJ9CejD2RqphhTC98DJeRQy5EwbNerU2+4l8AAzc4AH0AAAAFZAAgAAAAAJvXB3KyNiNtQko4SSzo/9b2qmM2zU9CQTTDfLSBWMgRBXMAIAAAAAAvjuVP7KsLRDeqVqRziTKpBrjVyqKiIbO9Gw8Wl2wFTAVsACAAAAAADlE+oc1ins+paNcaOZJhBlKlObDJ4VQORWjFYocM4LgAAzc5AH0AAAAFZAAgAAAAAPGdcxDiid8z8XYnfdDivNMYVPgBKdGOUw6UStU+48CdBXMAIAAAAAARj6g1Ap0eEfuCZ4X2TsEw+Djrhto3fA5nLwPaY0vCTgVsACAAAAAAoHqiwGOUkBu8SX5U1yHho+UIFdSN2MdQN5s6bQ0EsJYAAzgwAH0AAAAFZAAgAAAAAP5rGPrYGt3aKob5f/ldP0qrW7bmWvqnKY4QwdDWz400BXMAIAAAAADTQkW2ymaaf/bhteOOGmSrIR97bAnJx+yN3yMj1bTeewVsACAAAAAADyQnHGH2gF4w4L8axUs
STf6Ubk7L5/eoFOJk12MtZAoAAzgxAH0AAAAFZAAgAAAAAAlz6wJze5UkIxKpJOZFGCOf3v2KByWyI6NB6JM9wNcBBXMAIAAAAABUC7P/neUIHHoZtq0jFVBHY75tSFYr1Y5S16YN5XxC1QVsACAAAAAAgvxRbXDisNnLY3pfsjDdnFLtkvYUC4lhA68eBXc7KAwAAzgyAH0AAAAFZAAgAAAAAFJ8AtHcjia/9Y5pLEc3qVgH5xKiXw12G9Kn2A1EY8McBXMAIAAAAAAxe7Bdw7eUSBk/oAawa7uicTEDgXLymRNhBy1LAxhDvwVsACAAAAAAxKPaIBKVx3jTA+R/el7P7AZ7efrmTGjJs3Hj/YdMddwAAzgzAH0AAAAFZAAgAAAAAO8uwQUaKFb6vqR3Sv3Wn4QAonC2exOC9lGG1juqP5DtBXMAIAAAAABZf1KyJgQg8/Rf5c02DgDK2aQu0rNCOvaL60ohDHyY+gVsACAAAAAAqyEjfKC8lYoIfoXYHUqHZPoaA6EK5BAZy5dxXZmay4kAAzg0AH0AAAAFZAAgAAAAAE8YtqyRsGCeiR6hhiyisR/hccmK4nZqIMzO4lUBmEFzBXMAIAAAAAC1UYOSKqAeG1UJiKjWFVskRhuFKpj9Ezy+lICZvFlN5AVsACAAAAAA6Ct9nNMKyRazn1OKnRKagm746CGu+jyhbL1qJnZxGi0AAzg1AH0AAAAFZAAgAAAAAPhCrMausDx1QUIEqp9rUdRKyM6a9AAx7jQ3ILIu8wNIBXMAIAAAAACmH8lotGCiF2q9VQxhsS+7LAZv79VUAsOUALaGxE/EpAVsACAAAAAAnc1xCKfdvbUEc8F7XZqlNn1C+hZTtC0I9I3LL06iaNkAAzg2AH0AAAAFZAAgAAAAAOBi/GAYFcstMSJPgp3VkMiuuUUCrZytvqYaU8dwm8v2BXMAIAAAAACEZSZVyD3pKzGlbdwlYmWQhHHTV5SnNLknl2Gw8IaUTQVsACAAAAAAfsLZsEDcWSuNsIo/TD1ReyQW75HPMgmuKZuWFOLKRLoAAzg3AH0AAAAFZAAgAAAAAIQuup+YGfH3mflzWopN8J1X8o8a0d9CSGIvrA5HOzraBXMAIAAAAADYvNLURXsC2ITMqK14LABQBI+hZZ5wNf24JMcKLW+84AVsACAAAAAACzfjbTBH7IwDU91OqLAz94RFkoqBOkzKAqQb55gT4/MAAzg4AH0AAAAFZAAgAAAAAKsh0ADyOnVocFrOrf6MpTrNvAj8iaiE923DPryu124gBXMAIAAAAADg24a8NVE1GyScc6tmnTbmu5ulzO+896fE92lN08MeswVsACAAAAAAaPxcOIxnU7But88/yadOuDJDMcCywwrRitaxMODT4msAAzg5AH0AAAAFZAAgAAAAAKkVC2Y6HtRmv72tDnPUSjJBvse7SxLqnr09/Uuj9sVVBXMAIAAAAABYNFUkH7ylPMN+Bc3HWX1e0flGYNbtJNCY9SltJCW/UAVsACAAAAAAZYK/f9H4OeihmpiFMH7Wm7uLvs2s92zNA8wyrNZTsuMAAzkwAH0AAAAFZAAgAAAAADDggcwcb/Yn1Kk39sOHsv7BO/MfP3m/AJzjGH506Wf9BXMAIAAAAAAYZIsdjICS0+BDyRUPnrSAZfPrwtuMaEDEn0/ijLNQmAVsACAAAAAAGPnYVvo2ulO9z4LGd/69NAklfIcZqZvFX2KK0s+FcTUAAzkxAH0AAAAFZAAgAAAAAEWY7dEUOJBgjOoWVht1wLehsWAzB3rSOBtLgTuM2HC8BXMAIAAAAAAAoswiHRROurjwUW8u8D5EUT+67yvrgpB/j6PzBDAfVwVsACAAAAAA6NhRTYFL/Sz4tao7vpPjLNgAJ0FX6P/IyMW65qT6YsMAAzkyAH0AAAAFZAAgAAAAAPZaapeAUUFPA7JTCMOWHJa9lnPFh0/gXfAPjA1ezm4ZBXMAIAAAAACmJvLY2nivw7/b3DOKH/X7bBXjJwoowqb1GtEFO3OYgAVsACAAAAAA/JcUoyKacCB1NfmH8vYqC1f7rd13KShrQqV2r9QBP44AAzkzAH0AAAAFZAAgAAAAAK00u6jadxCZAiA+fTsPVDsnW5p5LCr4+kZZZOTDuZlfBXMAIAAAAAAote4zTEYMDgaaQbAdN8Dzv93ljPLdGjJzvnRn3KXgtQVsACAAAAAAxXd9Mh6R3mnJy8m7UfqMKi6oD5DlZpkaOz6bEjMOdiwAAzk0AH0AAAAFZAAgAAAAAFbgabdyymiEVYYwtJSWa7lfl/oYuj/SukzJeDOR6wPVBXMAIAAAAADAFGFjS1vPbN6mQEhkDYTD6V2V23Ys9gUEUMGNvMPkaAVsACAAAAAAL/D5Sze/ZoEanZLK0IeEkhgVkxEjMWVCfmJaD3a8uNIAAzk1AH0AAAAFZAAgAAAAABNMR6UBv2E627CqLtQ/eDYx7OEwQ7JrR4mSHFa1N8tLBXMAIAAAAAAxH4gucI4UmNVB7625C6hFSVCuIpJO3lusJlPuL8H5EQVsACAAAAAAVLHNg0OUVqZ7WGOP53BkTap9FOw9dr1P4J8HxqFqU04AAzk2AH0AAAAFZAAgAAAAAG8cd6WBneNunlqrQ2EmNf35W7OGObGq9WL4ePX+LUDmBXMAIAAAAAAjJ2+sX87NSis9hBsgb1QprVRnO7Bf+GObCGoUqyPE4wVsACAAAAAAs9c9SM49/pWmyUQKslpt3RTMBNSRppfNO0JBvUqHPg0AAzk3AH0AAAAFZAAgAAAAAFWOUGkUpy8yf6gB3dio/aOfRKh7XuhvsUj48iESFJrGBXMAIAAAAAAY7sCDMcrUXvNuL6dO0m11WyijzXZvPIcOKob6IpC4PQVsACAAAAAAJOP+EHz6awDb1qK2bZQ3kTV7wsj5Daj/IGAWh4g7omAAAzk4AH0AAAAFZAAgAAAAAGUrIdKxOihwNmo6B+aG+Ag1qa0+iqdksHOjQj+Oy9bZBXMAIAAAAABwa5dbI2KmzBDNBTQBEkjZv4sPaeRkRNejcjdVymRFKQVsACAAAAAA4ml/nm0gJNTcJ4vuD+T2Qfq2fQZlibJp/j6MOGDrbHMAAzk5AH0AAAAFZAAgAAAAAOx89xV/hRk64/CkM9N2EMK6aldII0c8smdcsZ46NbP8BXMAIAAAAADBF6tfQ+7q9kTuLyuyrSnDgmrdmrXkdhl980i1KHuGHgVsACAAAAAACUqiFqHZdGbwAA+hN0YUE5zFg+H+dabIB4dj5/75W/YAAzEwMAB9AAAABWQAIAAAAADJDdC9aEFl4Y8J/awHbnXGHjfP+VXQilPHJg7ewaJI7AVzACAAAAAAE+tqRl6EcBMXvbr4GDiNIYObTsYpa1n6BJk9EjIJVicFbAAgAAAAAJVc+HYYqa0m1Hq6OiRX8c0iRnJYOt6AJAJoG0sG3GMSAAMxMDEAfQAAAAVkACAAAAAA3F9rjEKhpoHuTULVGgfUsGGwJs3bISrXkFP1v6KoQLgFcwAgAAAAAIBf0tXw96Z/Ds
0XSIHX/zk3MzUR/7WZR/J6FpxRWChtBWwAIAAAAABWrjGlvKYuTS2s8L9rYy8Hf0juFGJfwQmxVIjkTmFIGQADMTAyAH0AAAAFZAAgAAAAAOYIYoWkX7dGuyKfi3XssUlc7u/gWzqrR9KMkikKVdmSBXMAIAAAAABVF2OYjRTGi9Tw8XCAwZWLpX35Yl271TlNWp6N/nROhAVsACAAAAAA0nWwYzXQ1+EkDvnGq+SMlq20z+j32Su+i/A95SggPb4AAzEwMwB9AAAABWQAIAAAAACMtPm12YtdEAvqu6Eji1yuRXnu1RJP6h0l7pH3lSH4MwVzACAAAAAAENyCFfyUAh1veQBGx+cxiB7Sasrj41jzCGflZkB5cRMFbAAgAAAAAKdI2LMqISr/T5vuJPg6ZRBm5fVi2aQCc4ra3A4+AjbDAAMxMDQAfQAAAAVkACAAAAAAvlI4lDcs6GB1cnm/Tzo014CXWqidCdyE5t2lknWQd4QFcwAgAAAAAD60SpNc4O2KT7J0llKdSpcX1/Xxs97N715a1HsTFkmBBWwAIAAAAABuuRkJWAH1CynggBt1/5sPh9PoGiqTlS24D/OE2uHXLQADMTA1AH0AAAAFZAAgAAAAAKl8zcHJRDjSjJeV/WvMxulW1zrTFtaeBy/aKKhadc6UBXMAIAAAAADBdWQl5SBIvtZZLIHszePwkO14W1mQ0izUk2Ov21cPNAVsACAAAAAAHErCYycpqiIcCZHdmPL1hi+ovLQk4TAvENpfLdTRamQAAzEwNgB9AAAABWQAIAAAAABb6LXDWqCp1beQgQjj8I3sRTtFhlrmiBi+h/+ikmrvugVzACAAAAAA9stpgTecT7uTyaGNs3K9Bp0A7R0QaIAOfscyMXHBPX8FbAAgAAAAAHUt+McyXrJ1H8SwnHNVO181Ki8vDAM1f7XI26mg95ZDAAMxMDcAfQAAAAVkACAAAAAA97NTT+81PhDhgptNtp4epzA0tP4iNb9j1AWkiiiKGM8FcwAgAAAAAKPbHg7ise16vxmdPCzksA/2Mn/qST0L9Xe8vnQugVkcBWwAIAAAAABB0EMXfvju4JU/mUH/OvxWbPEl9NJkcEp4iCbkXI41fAADMTA4AH0AAAAFZAAgAAAAAMqpayM2XotEFmm0gwQd9rIzApy0X+7HfOhNk6VU7F5lBXMAIAAAAACJR9+q5T9qFHXFNgGbZnPubG8rkO6cwWhzITQTmd6VgwVsACAAAAAAOZLQ6o7e4mVfDzbpQioa4d3RoTvqwgnbmc5Qh2wsZuoAAzEwOQB9AAAABWQAIAAAAADQnslvt6Hm2kJPmqsTVYQHE/wWeZ4bE1XSkt7TKy0r1gVzACAAAAAA8URTA4ZMrhHPvlp53TH6FDCzS+0+61qHm5XK6UiOrKEFbAAgAAAAAHQbgTCdZcbdA0avaTmZXUKnIS7Nwf1tNrcXDCw+PdBRAAMxMTAAfQAAAAVkACAAAAAAhujlgFPFczsdCGXtQ/002Ck8YWQHHzvWvUHrkbjv4rwFcwAgAAAAALbV0lLGcSGfE7mDM3n/fgEvi+ifjl7WZ5b3aqjDNvx9BWwAIAAAAACbceTZy8E3QA1pHmPN5kTlOx3EO8kJM5PUjTVftw1VpgADMTExAH0AAAAFZAAgAAAAABm/6pF96j26Jm7z5KkY1y33zcAEXLx2n0DwC03bs/ixBXMAIAAAAAD01OMvTZI/mqMgxIhA5nLs068mW+GKl3OW3ilf2D8+LgVsACAAAAAAaLvJDrqBESTNZSdcXsd+8GXPl8ZkUsGpeYuyYVv/kygAAzExMgB9AAAABWQAIAAAAACfw9/te4GkHZAapC9sDMHHHZgmlTrccyJDPFciOMSOcwVzACAAAAAAIIC1ZpHObvmMwUfqDRPl4C1aeuHwujM1G/yJbvybMNAFbAAgAAAAAAs9x1SnVpMfNv5Bm1aXGwHmbbI9keWa9HRD35XuCBK5AAMxMTMAfQAAAAVkACAAAAAAkxHJRbnShpPOylLoDdNShfILeA1hChKFQY9qQyZ5VmsFcwAgAAAAAKidrY+rC3hTY+YWu2a7fuMH2RD/XaiTIBW1hrxNCQOJBWwAIAAAAACW0kkqMIzIFMn7g+R0MI8l15fr3k/w/mHtY5n6SYTEwAADMTE0AH0AAAAFZAAgAAAAAByuYl8dBvfaZ0LO/81JW4hYypeNmvLMaxsIdvqMPrWoBXMAIAAAAABNddwobOUJzm9HOUD8BMZJqkNCUCqstHZkC76FIdNg9AVsACAAAAAAQQOkIQtkyNavqCnhQbNg3HfqrJdsAGaoxSJePJl1qXsAAzExNQB9AAAABWQAIAAAAABxMy7X5hf7AXGDz3Y/POu1ZpkMlNcSvSP92NOO/Gs7wAVzACAAAAAAHJshWo2T5wU2zvqCyJzcJQKQaHFHpCpMc9oWBXkpUPoFbAAgAAAAAGeiJKzlUXAvL0gOlW+Hz1mSa2HsV4RGmyLmCHlzbAkoAAMxMTYAfQAAAAVkACAAAAAAlqbslixl7Zw3bRlibZbe/WmKw23k8uKeIzPKYEtbIy0FcwAgAAAAAHEKwpUxkxOfef5HYvulXPmdbzTivwdwrSYIHDeNRcpcBWwAIAAAAADuPckac21Hrg/h0kt5ShJwVEZ9rx6SOHd2+HDjqxEWTQADMTE3AH0AAAAFZAAgAAAAAMXrXx0saZ+5gORmwM2FLuZG6iuO2YS+1IGPoAtDKoKBBXMAIAAAAADIQsxCr8CfFKaBcx8kIeSywnGh7JHjKRJ9vJd9x79y7wVsACAAAAAAcvBV+SykDYhmRFyVYwFYB9oBKBSHr55Jdz2cXeowsUQAAzExOAB9AAAABWQAIAAAAAAm83FA9yDUpwkbKTihe7m53u+DivS9BU2b4vQMtCVQ2AVzACAAAAAAz3m1UB/AbZPa4QSKFDnUgHaT78+6iGOFAtouiBorEgEFbAAgAAAAAIgbpyYtJj5513Z5XYqviH/HXG/5+mqR52iBbfqMmDtZAAMxMTkAfQAAAAVkACAAAAAAJRzYK0PUwr9RPG2/7yID0WgcTJPB2Xjccp5LAPDYunkFcwAgAAAAAIIh24h3DrltAzNFhF+MEmPrZtzr1PhCofhChZqfCW+jBWwAIAAAAAAzRNXtL5o9VXMk5D5ylI0odPDJDSZZry1wfN+TedH70gADMTIwAH0AAAAFZAAgAAAAAHSaHWs/dnmI9sc7nB50VB2Bzs0kHapMHCQdyVEYY30TBXMAIAAAAACkV22lhEjWv/9/DubfHBAcwJggKI5mIbSK5L2nyqloqQVsACAAAAAAS19m7DccQxgryOsBJ3GsCs37yfQqNi1G+S6fCXpEhn4AAzEyMQB9AAAABWQAIAAAAAAC/I4TQRtCl12YZmdGz17X4GqSQgfwCPgRBwdHmdwu+QVzACAAAAAAx8f3z2ut/RAZhleari4vCEE+tNIn4ikjoUwzitfQ588FbAAgAAAAAJci0w1ZB8W2spJQ+kMpod6HSCtSR2jrabOH+B0fj3A4AAMxM
jIAfQAAAAVkACAAAAAADGB5yU2XT0fse/MPWgvBvZikVxrl5pf3S5K1hceKWooFcwAgAAAAAIxTmlLHMjNaVDEfJbXvRez0SEPWFREBJCT6qTHsrljoBWwAIAAAAAAlswzAl81+0DteibwHD+CG5mZJrfHXa9NnEFRtXybzzwADMTIzAH0AAAAFZAAgAAAAABmO7QD9vxWMmFjIHz13lyOeV6vHT6mYCsWxF7hb/yOjBXMAIAAAAACT9lmgkiqzuWG24afuzYiCeK9gmJqacmxAruIukd0xEAVsACAAAAAAZa0/FI/GkZR7CtX18Xg9Tn9zfxkD0UoaSt+pIO5t1t4AAzEyNAB9AAAABWQAIAAAAAAfPUoy7QyZKhIIURso+mkP9qr1izbjETqF5s22GwjCjAVzACAAAAAAvLMsIDQ/go4VUxeh50UHmsvMvfx51cwyONnRD2odvC0FbAAgAAAAAKMb+1CodEalAFnDrEL1Ndt8ztamZ+9134m9Kp3GQgd+AAMxMjUAfQAAAAVkACAAAAAAE3ZqUar0Bq2zWbARE0bAv98jBlK9UJ73/xcwdMWWlSkFcwAgAAAAAK4M+MmC+9sFiFsumMyJZQKxWmmJiuG9H7IzKw083xxkBWwAIAAAAAAqkAONzhvMhkyL1D/6h7QQxEkdhC3p2WjXH+VGq5qCqQADMTI2AH0AAAAFZAAgAAAAAMo8FJiOq63cAmyk2O7eI7GcbQh/1j4RrMTqly3rexftBXMAIAAAAADjVmpd0WiRGTw/gAqEgGolt2EI7Csv14vKdmYoMD0aAgVsACAAAAAA07XQBzBUQMNw7F2/YxJjZNuPVpHTTgbLd1oGk77+bygAAzEyNwB9AAAABWQAIAAAAACu5IGaIx7A3Jvly/kzlCsSA4s3iJwuIl8jEdRH0k93NwVzACAAAAAA9NRUyxYE+t0Xyosyt6vIfMFW/vBoYg6sR+jBNs4JAxIFbAAgAAAAAAzyZ91dx+0oMlOVAjRGiMrPySikY/U9eMEB4WJb3uWtAAMxMjgAfQAAAAVkACAAAAAALkRy0GJInXYLA+cgjs6Myb0a+Gu9hgXhHvhLNoGWfckFcwAgAAAAANbALyt9zCSvwnLaWCd2/y2eoB7qkWTvv1Ldu8r40JPuBWwAIAAAAAD4Fl5bV5sz4isIE9bX+lmAp+aAKaZgVYVZeVfrItkCZAADMTI5AH0AAAAFZAAgAAAAAGoUK/DSWhT8LZhszSUqDbTrp8cSA7rdqmADKL+MILtTBXMAIAAAAABHnEE9bVa6lvhfhEMkkV2kzSSxH/sMW/FIJuw3CzWs6wVsACAAAAAAanavcBdqZxgRGKvEK95wTmeL1K1CeDSXZsXUAs81uOgAAzEzMAB9AAAABWQAIAAAAAC922ZDQE3h2fQKibGMZ9hV0WNlmrPYYSdtaSyYxsWYqgVzACAAAAAAagMovciKK6WVjIc2cCj8nK5O/gVOFFVeVAJpRp89tmQFbAAgAAAAAKcTFfPQzaFiAtSFhqbN02sCE1BKWJSrRfGN5L6oZwzkAAMxMzEAfQAAAAVkACAAAAAAtK+JqX3K/z2txjAU15DgX4y90DS2YLfIJFolCOkJJJwFcwAgAAAAAMnR5V7gfX7MNqqUdL5AkWlkhyFXaBRVNej+Rcn8lrQkBWwAIAAAAAA2cDNRXZuiC241TGRvdFyctJnrNcdbZOP9zHio81tkngADMTMyAH0AAAAFZAAgAAAAAAeGrIMK/bac6kPczxbvRYqKMkcpeI2FjdMpD91FDWIvBXMAIAAAAAAix62z1LeS8yvSXCl5gHSIomjyx76fF3S1lp9k900hygVsACAAAAAAiYwzf2m71aWFD5ajcXyW2JX2EzQOkBroTGMg29nLPYIAAzEzMwB9AAAABWQAIAAAAACphf298InM0Us4HT8o1W1MGw0D/02vd7Jh+U0h7qaFaQVzACAAAAAAFXtk7YpqsOJxsqGWSIL+YcBE96G3Zz9D31gPqDW94y8FbAAgAAAAAAOrS1KVA94rjB1jZ1pPocpCeBG+B14RzWoHqVDpp7JbAAMxMzQAfQAAAAVkACAAAAAATLDS2cuDVM3yDMuWNgk2iGKBTzPpfJMbvxVOSY39ZfcFcwAgAAAAAPT5wRi2cLHIUflXzm6EQB/m7xdThP80ir1VV/JBBqvxBWwAIAAAAAB9lEtZS0aXCFbCtSbhnis27S5IPcfWGygHW8AHn3QqzwADMTM1AH0AAAAFZAAgAAAAAJNjExiZVX7jfFGfYpQu16qxLN0YPqVU/5CQ/Y67YSinBXMAIAAAAABMpm2+6KrkRUlXzQoMPHrQmIO6dkQz66tYdfTeA3dKqQVsACAAAAAAFXobHiMLvNZuEPr8jtewCX2J93EZG3JNeyVg92fue6YAAzEzNgB9AAAABWQAIAAAAABlFkYtLCx901X6QVVMkSn6Z7k30UF4xHaA0OZJJ9bdyQVzACAAAAAATez+F9GHcGzTp7jjv4feboUNb8JCkIp4EqcPFisnq7MFbAAgAAAAACE7JvOpBgMoZ7kRd4QbxIhxukPTUxXpzhjnBHiR7XoRAAMxMzcAfQAAAAVkACAAAAAA8NJKN0IxZnruhswGQkiruv8Ih0EMwDcSZx/Xasup9dkFcwAgAAAAAKaJZRxzA+Igeydvuk6cSwUHXcrmT4PjhuPu//FslpdnBWwAIAAAAAD53Rok1Vq/PMAnXmarqoHJ0PEyYUBmVESa9hIpCv/G9QADMTM4AH0AAAAFZAAgAAAAABHxHdEClz7hbSSgE58+dWLlSMJnoPz+jFxp4bB1GmLQBXMAIAAAAAD3nSvT6aGD+A110J/NwEfp0nPutlmuB5B+wA3CC3noGAVsACAAAAAA3Apjd+TapONB7k5wBVwTWgn8t+Sq2oyyU5/+as109RcAAzEzOQB9AAAABWQAIAAAAAC/o8qW/ifk3KuJ01VFkyNLgQafxB5/bGs2G5VyyVafOwVzACAAAAAA1bMqAFGDHSl6BYNLbxApvkAv2K1/oafywiX0MDz1dGUFbAAgAAAAAHJXLlId3edFoniLD/9K2A5973MeP2Ro31flDyqm3l5QAAMxNDAAfQAAAAVkACAAAAAAY2V8I1bz3a1AxTtmED6UhdhA09huFkuuEX8R+d/WDPUFcwAgAAAAAPTVoNRiI76tcRKqd+JBBVyy4+YcKST42p0QX2BtmQ2VBWwAIAAAAACcxt9hg14WqPNiDv1MkqVljM2e2KJEv53lA17LhV6ZigADMTQxAH0AAAAFZAAgAAAAAO2kSsW0WGN9AOtK4xK2SHrGhWiaAbMEKT4iZkRpaDN/BXMAIAAAAABKGzQcPM8LT2dwOggxoWjv/1imYWabbG/G4kBw8OWaxAVsACAAAAAAC9hLK1dScQTAqg+YAG3ObdPzg2Xet57HmOFpGmyUR9UAAzE0MgB9AAAABWQAIAAAAAAiCwzNEEaH/mDam68IdDftnhthyUFdb+ZCNSBQ91WlHQVzACAAAAAA7tHyHcxCzmbJeFYZ
yPm4mEgkTGKOvwY4MX82OvH0Jn8FbAAgAAAAAAb5IAbZ1hXCNegQ+S+C9i/Z8y6sS8KeU04V6hXa2ml6AAMxNDMAfQAAAAVkACAAAAAAGuCHVNJSuoVkpPOnS5s89GuA+BLi2IPBUr2Bg1sWEPIFcwAgAAAAAEl1gncS5/xO7bQ/KQSstRV3rOT2SW6nV92ZANeG2SR6BWwAIAAAAAA9LOcKmhek8F2wAh8yvT/vjp2gaouuO+Hmv10lwAeWPAADMTQ0AH0AAAAFZAAgAAAAAMfxz7gEaoCdPvXrubDhCZUS0ARLZc1svgbXgMDlVBPgBXMAIAAAAAB6a5dDA3fuT5Vz2KvAcbUEFX/+B7Nw2p1QqbPoQ5TTuAVsACAAAAAAcf/y75UOuI62A6vWH7bYr/5Jz+nirZVYK/81trN6XOQAAzE0NQB9AAAABWQAIAAAAACnYsqF/VzmjIImC9+dqrHO1TM6lJ6fRwM0mM6Wf6paOwVzACAAAAAA5tgZzch8uDCR1ky3SllVaKVpxAlbrhvlNDTazZZRZOAFbAAgAAAAALeGiLJS4z2zhgVpxzyPdRYyACP9QzQBOob34YrIZumCAAMxNDYAfQAAAAVkACAAAAAAEC0sIVmadtW4YMuRXH7RpAhXclsd+3bmqGXCMeaT014FcwAgAAAAABPpXh0uzpsJJB+IRUNajmMB9WGwswfpw5T9xk3Xj6ANBWwAIAAAAAAmf+NYh9TZ/QRu3w/GQz66n7DtfbJijN3G7KzeL8lstAADMTQ3AH0AAAAFZAAgAAAAABaIB3n49Xm9cOafSrQsE0WCcYp8rMIO/qVwIlMF5YLRBXMAIAAAAAC9EyWJV3xOu9bzgdJ/yX+ko7qLf1u3AxNMataW2C9EzQVsACAAAAAAvVbDkLxXx2DcMLifIQ3K0IIJcLcAG9DUrNfI6aoUjNcAAzE0OAB9AAAABWQAIAAAAAA5rZItA/cocRnngYqcJ3nBXQ+l688aKz3EQyLbYYunPAVzACAAAAAAwKyA+L7TgxztPClLrIMk2JXR+w7c04N3ZOqPgjvrIvsFbAAgAAAAACzvZ33h6aWEe8hmo+1f6OXJ72FY5hvWaUuha64ZV3KFAAMxNDkAfQAAAAVkACAAAAAA3htn7oHJ0YYpIrs+Mzyh85Ys67HwAdv5LQl1mCdoMWkFcwAgAAAAAEHjCtNNLenHuSIYux6ezAHsXDaj2DlTF67ToDhDDe6HBWwAIAAAAAD+P4H0sk9jOd+7vOANt2/1Ectb+4ZRGPE8GkHWNXW3MgADMTUwAH0AAAAFZAAgAAAAAEnt18Km/nqggfIJWxzTr9r3hnXNaueG6XO9A5G11LnGBXMAIAAAAAD7QxzGMN/ard5TfFLecE6uusMmXG2+RBsBR+/NCQHUwAVsACAAAAAAQEZ1ZZ8GC8rdbg7s87OM5Gr9qkTXS9+P5DuAZxj5Gl4AAzE1MQB9AAAABWQAIAAAAAAVAKK/GoY8AACu/hyMpO4hdLq6JnEyWNzkyci9sbaD/wVzACAAAAAA2HmeqpMlvvBpV2zQTYIRmsc4MFlfHRwLof0ycJgMg/MFbAAgAAAAACdltCeWi5E/q1Li1eXLChpM2D9QQSGLBZ82NklQSc0oAAMxNTIAfQAAAAVkACAAAAAAhHyq1GQC/GiMwpYjcsfkNxolJ10ARKjIjfkW1Wipzi0FcwAgAAAAAD/uaGWxTDq87F8XZ6CrFI+RNa8yMqfSZdqK00Kj833BBWwAIAAAAAD6aEdOO0CsQGagioOCvANPCEHSpJ8BSixlPBq5ERhB7AADMTUzAH0AAAAFZAAgAAAAABAJJxHoZD+MQBWqm9UM9Dd3z5ZohIZGWRaRVRsMptKQBXMAIAAAAADrE/ca+gqj/SH4oao4wE4qn2ovoTydzcMbDbrfnUs3zAVsACAAAAAAeNCIQN6hVnGJinytQRFGlQ2ocoprXNqpia+BSxzl+uwAAzE1NAB9AAAABWQAIAAAAAAv01wz7VG9mTepjXQi6Zma+7b/OVBaKVkWNbgDLr1mFgVzACAAAAAA0I5sxz8r6wkCp5Tgvr+iL4p6MxSOq5d3e1kZG+0b7NkFbAAgAAAAAIA32v6oGkAOS96HexGouNTex+tLahtx9QF2dgGClk6WAAMxNTUAfQAAAAVkACAAAAAAWXecRwxSon68xaa9THXnRDw5ZfzARKnvvjTjtbae6T0FcwAgAAAAAPh0UfUMEo7eILCMv2tiJQe1bF9qtXq7GJtC6H5Va4fIBWwAIAAAAADqFr1ThRrTXNgIOrJWScO9mk86Ufi95IDu5gi4vP+HWQADMTU2AH0AAAAFZAAgAAAAAEY5WL8/LpX36iAB1wlQrMO/xHVjoO9BePVzbUlBYo+bBXMAIAAAAABoKcpadDXUARedDvTmzUzWPe1jTuvD0z9oIcZmKuiSXwVsACAAAAAAJuJbwuaMrAFoI+jU/IYr+k4RzAqITrOjAd3HWCpJHqEAAzE1NwB9AAAABWQAIAAAAADnJnWqsfx0xqNnqfFGCxIplVu8mXjaHTViJT9+y2RuTgVzACAAAAAAWAaSCwIXDwdYxWf2NZTly/iKVfG/KDjHUcA1BokN5sMFbAAgAAAAAJVxavipE0H4/JQvhagdytXBZ8qGooeXpkbPQ1RfYMVHAAMxNTgAfQAAAAVkACAAAAAAsPG7LaIpJvcwqcbtfFUpIjj+vpNj70Zjaw3eV9T+QYsFcwAgAAAAAJQ71zi0NlCyY8ZQs3IasJ4gB1PmWx57HpnlCf3+hmhqBWwAIAAAAACD58TO6d+71GaOoS+r73rAxliAO9GMs4Uc8JbOTmC0OwADMTU5AH0AAAAFZAAgAAAAAAGiSqKaQDakMi1W87rFAhkogfRAevnwQ41onWNUJKtuBXMAIAAAAAASgiDpXfGh7E47KkOD8MAcX8+BnDShlnU5JAGdnPdqOAVsACAAAAAAI+2TTQIgbFq4Yr3lkzGwhG/tqChP7hRAx2W0fNaH6jcAAzE2MAB9AAAABWQAIAAAAAB7L4EnhjKA5xJD3ORhH2wOA1BvpnQ+7IjRYi+jjVEaJAVzACAAAAAAuhBIm0nL3FJnVJId+7CKDASEo+l2E89Z9/5aWSITK4AFbAAgAAAAALtSICOzQDfV9d+gZuYxpEj6cCeHnKTT+2G3ceP2H65kAAMxNjEAfQAAAAVkACAAAAAAaROn1NaDZFOGEWw724dsXBAm6bgmL5i0cki6QZQNrOoFcwAgAAAAANVT8R6UvhrAlyqYlxtmnvkR4uYK/hlvyQmBu/LP6/3ZBWwAIAAAAAD+aHNMP/X+jcRHyUtrCNkk1KfMtoD3GTmShS8pWGLt+AADMTYyAH0AAAAFZAAgAAAAADqSR5e0/Th59LrauDA7OnGD1Xr3H3NokfVxzDWOFaN7BXMAIAAAAACt30faNwTWRbvmykDpiDYUOCwA6QDbBBYBFWS7rdOB4AVsACAAAAAAF7SvnjjRk5v2flFOKaBAEDvjXaL1cpjsQLtK2fv9zdQAAzE2MwB
9AAAABWQAIAAAAADmtb1ZgpZjSeodPG/hIVlsnS8hoRRwRbrTVx89VwL62AVzACAAAAAAi38e1g6sEyVfSDkzZbaZXGxKI/zKNbMasOl2LYoWrq8FbAAgAAAAAALACk0KcCDN/Kv8WuazY8ORtUGkOZ5Dsm0ys1oOppp/AAMxNjQAfQAAAAVkACAAAAAAf/f7AWVgBxoKjr7YsEQ4w/fqSvuQWV2HMiA3rQ7ur0sFcwAgAAAAADkkeJozP6FFhUdRIN74H4UhIHue+eVbOs1NvbdWYFQrBWwAIAAAAAB55FlHAkmTzAYj/TWrGkRJw2EhrVWUnZXDoMYjyfB/ZwADMTY1AH0AAAAFZAAgAAAAAI2WEOymtuFpdKi4ctanPLnlQud+yMKKb8p/nfKmIy56BXMAIAAAAADVKrJmhjr1rfF3p+T+tl7UFd1B7+BfJRk0e7a4im7ozgVsACAAAAAA5E7Ti3PnFiBQoCcb/DN7V1uM3Xd6VKiexPKntssFL7kAAzE2NgB9AAAABWQAIAAAAAAuHU9Qd79hjyvKOujGanSGDIQlxzsql8JytTZhEnPw+AVzACAAAAAAjF2gV/4+sOHVgDd/oR5wDi9zL7NGpGD+NsEpGXy/a4QFbAAgAAAAAJzMoyojYV6Ed/LpVN5zge93Odv3U7JgP7wxeRaJZGTdAAMxNjcAfQAAAAVkACAAAAAA7dQDkt3iyWYCT94d7yqUtPPwp4qkC0ddu+HFdHgVKEkFcwAgAAAAANuYvtvZBTEq4Rm9+5eb7VuFopowkrAuv86PGP8Q8/QvBWwAIAAAAACeqXoAOQOE4j0zRMlkVd8plaW0RX1npsFvB38Xmzv7sAADMTY4AH0AAAAFZAAgAAAAAAwnZSDhL4tNGYxlHPhKYB8s28dY5ScSwiKZm3UhT8U3BXMAIAAAAABDoY6dhivufTURQExyC9Gx3ocpl09bgbbQLChj3qVGbgVsACAAAAAAF+1nS7O0v85s3CCy+9HkdeoEfm2C6ZiNbPMMnSfsMHUAAzE2OQB9AAAABWQAIAAAAAC2VuRdaC4ZJmLdNOvD6R2tnvkyARteqXouJmI46V306QVzACAAAAAAMn1Z6B35wFTX9mEYAPM+IiJ5hauEwfD0CyIvBrxHg7IFbAAgAAAAAOG6DvDZkT9B/xZWmjao2AevN7MMbs3Oh9YJeSd/hZ+hAAMxNzAAfQAAAAVkACAAAAAAVerb7qVNy457rNOHOgDSKyWl5ojun7iWrv1uHPXrIZQFcwAgAAAAAIDcYS9j5z+gx0xdJj09L7876r/vjvKTi/d3bXDE3PhyBWwAIAAAAADuhVLqb1Bkrx8aNymS+bx2cL8GvLFNH4SAi690DUgnWQADMTcxAH0AAAAFZAAgAAAAAH/E44yLxKCJjuSmU9A8SEhbmkDOx1PqqtYcZtgOzJdrBXMAIAAAAABgLh9v2HjBbogrRoQ82LS6KjZQnzjxyJH4PH+F3jupSAVsACAAAAAAIlO46ehXp4TqpDV0t6op++KO+uWBFh8iFORZjmx2IjkAAzE3MgB9AAAABWQAIAAAAAAlNUdDL+f/SSQ5074mrq0JNh7CTXwTbbhsQyDwWeDVMwVzACAAAAAANIH2IlSNG0kUw4qz0budjcWn8mNR9cJlYUqPYdonucAFbAAgAAAAAJMrOUOyiu5Y3sV76zwEFct8L7+i8WGlQI2+8z2W2kzaAAMxNzMAfQAAAAVkACAAAAAASZ+CvUDtlk/R4HAQ3a+PHrKeY/8ifAfh0oXYFqliu80FcwAgAAAAAJelpzPgM65OZFt/mvGGpwibclQ49wH+1gbUGzd9OindBWwAIAAAAAD9qeDchteEpVXWcycmD9kl9449C1dOw0r60TBm5jK+cQADMTc0AH0AAAAFZAAgAAAAAN9fkoUVbvFV2vMNMAkak4gYfEnzwKI3eDM3pnDK5q3lBXMAIAAAAACnDkgVNVNUlbQ9RhR6Aot2nVy+U4km6+GHPkLr631jEAVsACAAAAAANzg/BnkvkmvOr8nS4omF+q9EG/4oisB+ul4YHi938hwAAzE3NQB9AAAABWQAIAAAAAASyK3b1nmNCMptVEGOjwoxYLLS9fYWm/Zxilqea0jpEQVzACAAAAAADDHsGrbqlKGEpxlvfyqOJKQJjwJrzsrB7k3HG0AUJbkFbAAgAAAAAKwx3S4XfDZh4+LuI9jf7XgUh5qiefNv87JD4qvVRfPSAAMxNzYAfQAAAAVkACAAAAAAlSP9iK31GlcG9MKGbLmq+VXMslURr+As736rrVNXcsUFcwAgAAAAAAvbj0zfq9zzi8XReheKFbCB+h9IsOLgXPPpI5vrEJNZBWwAIAAAAABXvoZhaQE7ogWjeBjceVkp03N20cKYP3TA8vuNsgpfAgADMTc3AH0AAAAFZAAgAAAAAOJNORH8Bev97gVU7y6bznOxJ+E6Qoykur1QP76hG1/7BXMAIAAAAAC+C1PtOOrSZgzBAGhr+dPe/kR0JUw9GTwLVNr61xC1aAVsACAAAAAAeA/L8MQIXkamaObtMPLpoDoi5FypA5WAPtMeMrgi0eQAAzE3OAB9AAAABWQAIAAAAAAKcHzLUomavInN6upPkyWhAqYQACP/vdVCIYpiy6U6HgVzACAAAAAATsR4KItY6R2+U7Gg6sJdaEcf58gjd1OulyWovIqfxKcFbAAgAAAAAFbm10ko67ahboAejQdAV0U2uA5OhZYdb8XUFJ8OL46LAAMxNzkAfQAAAAVkACAAAAAAqTOLiMpCdR59tLZzzIPqJvbCNvz2XQL9ust0qYaehtcFcwAgAAAAAArefox/3k5xGOeiw2m6NUdzuGxmPwcu5IFcj+jMwHgHBWwAIAAAAADLZGFJ7MQd5JXMgMXjqZO5LDLxcFClcXPlnRMWRn+1oAADMTgwAH0AAAAFZAAgAAAAAIPSqSeVzSRgNVNmrPYHmUMgykCY27NbdDUNhE5kx/SgBXMAIAAAAAAhX90nNfxyXmZe/+btZ7q6xMX4PFyj0paM1ccJ/5IUUQVsACAAAAAA419oHmD2W0SYoOMwhrhrp8jf68fg9hTkaRdCuVd3CN0AAzE4MQB9AAAABWQAIAAAAACLn5DxiqAosHGXIAY96FwFKjeqrzXWf3VJIQMwx1fl4gVzACAAAAAAindvU27nveutopdvuHmzdENBbeGFtI3Qcsr07jxmvm8FbAAgAAAAAPvl9pBStQvP4OGkN5v0MghUY6djm9n7XdKKfrW0l1sMAAMxODIAfQAAAAVkACAAAAAA7i2S6rHRSPBwZEn59yxaS7HiYBOmObIkeyCcFU42kf8FcwAgAAAAAGb3RSEyBmgarkTvyLWtOLJcPwCKbCRkESG4RZjVmY4iBWwAIAAAAADB2/wo5CSHR4ANtifY6ZRXNTO5+O8qP82DfAiAeanpZwADMTgzAH0AAAAFZAAgAAAAAFz+M+H/Z94mdPW5oP51B4HWptp1rxcMWAjnlHvWJDWrBXMAIAAAAACBFEOQyL7ZHu4Cq33QvX
kmKuH5ibG/Md3RaED9CtG5HwVsACAAAAAAfggtJTprQ/yZzj7y5z9KvXsdeXMWP0yUXMMJqpOwI88AAzE4NAB9AAAABWQAIAAAAAAE7c2x3Z3aM1XGfLNk/XQ9jCazNRbGhVm7H8c2NjS5ywVzACAAAAAARJ9h8fdcwA19velF3L/Wcvi2rCzewlKZ2nA0p8bT9uwFbAAgAAAAAJtWe6b4wK2Hae2dZm/OEpYQnvoZjz4Sz5IgJC2wInecAAMxODUAfQAAAAVkACAAAAAAVoRt9B9dNVvIMGN+ea5TzRzQC+lqSZ8dd/170zU5o9cFcwAgAAAAAEwM95XZin5mv2yhCI8+ugtKuvRVmNgzzIQN0yi1+9aIBWwAIAAAAAAMGBq72n00rox3uqhxSB98mkenTGCdbbUF1gXrgottzgADMTg2AH0AAAAFZAAgAAAAAKRDkjyWv/etlYT4GyoXrmBED2FgZHnhc+l9Wsl06cH2BXMAIAAAAABohlpm3K850Vndf3NmNE0hHqDlNbSR8/IvMidQ3LnIZAVsACAAAAAAW42nGHa6q2MCAaaPVwaIDfr8QLyQwjKq23onZJYsqVsAAzE4NwB9AAAABWQAIAAAAAC3DFh5oklLCNLY90bgWm68dFXz65JpAZSp1K99MBTPAQVzACAAAAAAQgZecmxEUZVHoptEQClDwAf8smI3WynQ/i+JBP0g+kQFbAAgAAAAAEUSQGVnAPISD6voD0DiBUqyWKgt2rta0tjmoe+LNt6IAAMxODgAfQAAAAVkACAAAAAAQ5WKvWSB503qeNlOI2Tpjd5blheNr6OBO8pfJfPNstcFcwAgAAAAAKwHgQLSDJ5NwLBQbY5OnblQIsVDpGV7q3RCbFLD1U4/BWwAIAAAAACQ5nED99LnpbqXZuUOUjnO2HTphEAFBjLD4OZeDEYybgADMTg5AH0AAAAFZAAgAAAAAGfhFY3RGRm5ZgWRQef1tXxHBq5Y6fXaLAR4yJhrTBplBXMAIAAAAACKEF0ApLoB6lP2UqTFsTQYNc9OdDrs/vziPGzttGVLKQVsACAAAAAArOO6FyfNRyBi0sPT5iye7M8d16MTLcwRfodZq4uCYKEAAzE5MAB9AAAABWQAIAAAAAAIM73gPcgzgotYHLeMa2zAU4mFsr7CbILUZWfnuKSwagVzACAAAAAAJCSu98uV8xv88f2BIOWzt6p+6EjQStMBdkGPUkgN79cFbAAgAAAAAMGqPGMPxXbmYbVfSa/japvUljht1zZT33TY7ZjAiuPfAAMxOTEAfQAAAAVkACAAAAAAkWmHCUsiMy1pwZTHxVPBzPTrWFBUDqHNrVqcyyt7nO8FcwAgAAAAAMv2CebFRG/br7USELR98sIdgE9OQCRBGV5JZCO+uPMgBWwAIAAAAABt7qSmn3gxJu7aswsbUiwvO+G6lXj/Xhx+J/zQyZxzLAADMTkyAH0AAAAFZAAgAAAAAGInUYv0lP/rK7McM8taEHXRefk8Q2AunrvWqdfSV7UaBXMAIAAAAACE+WPxJ3gan7iRTbIxXXx+bKVcaf8kP4JD8DcwU0aL7wVsACAAAAAAUC4eTprX4DUZn2X+UXYU6QjtiXk+u57yoOPBbPQUmDkAAzE5MwB9AAAABWQAIAAAAACmHlg2ud3cplXlTsNTpvNnY6Qm1Fce0m899COamoDjaQVzACAAAAAArtJQeJIlepBWRU2aYar7+YGYVQ7dfDc1oxgTmA8r9q0FbAAgAAAAAOk45vg5VqZHAFCO3i0Z52SZi5RADf8NXwf68T5yad/DAAMxOTQAfQAAAAVkACAAAAAApzcWSAbZWV/Rq+ylRNqqlJqNVR4fhXrz4633/MQOQgcFcwAgAAAAAN/jz/bsEleiuCl+li83EWlG6UMHA8CyaOMRKCkXkSCPBWwAIAAAAAC3Sd+Qg+uFDKpGZHbrQgokXHQ1az1aFl4YK343OB6hcQAAEmNtAAAAAAAAAAAAABBwYXlsb2FkSWQAAAAAABBmaXJzdE9wZXJhdG9yAAEAAAASc3AAAQAAAAAAAAAQdGYAAQAAABNtbgD/////Y46NN8CHrb4J7f/fE214AP////9jjo03wIetvgnt/18A", + "subType": "06" + } + } + } + }, + "u": { + "$set": { + "encryptedDecimalNoPrecision": { + "$$type": "binData" + } + } + }, + "multi": false, + "upsert": false + } + ], + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDecimalNoPrecision", + "bsonType": "decimal", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + } + } + } + ] + } + } + }, + "$db": "default" + }, + "commandName": "update" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-Rangev2-DecimalPrecision-Aggregate.json b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-DecimalPrecision-Aggregate.json new file mode 100644 index 0000000000..41ba49112b --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-DecimalPrecision-Aggregate.json @@ -0,0 +1,647 @@ +{ + "description": "fle2v2-Rangev2-DecimalPrecision-Aggregate", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": 
"1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted" + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll_unencrypted", + "database": "db_unencrypted", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDecimalPrecision", + "bsonType": "decimal", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberDecimal": "0.0" + }, + "max": { + "$numberDecimal": "200.0" + }, + "precision": { + "$numberInt": "2" + } + } + } + ] + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "FLE2 Range DecimalPrecision. 
Aggregate.", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDecimalPrecision": { + "$numberDecimal": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDecimalPrecision": { + "$numberDecimal": "1" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDecimalPrecision": { + "$gt": { + "$numberDecimal": "0" + } + } + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedDecimalPrecision": { + "$numberDecimal": "1" + } + } + ] + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": { + "$numberInt": "0" + }, + "encryptedDecimalPrecision": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "Dri0CXmL78L2DOgk9w0DwxHOMGMzih7m6l59vgy+WWo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "x7GR49EN0t3WXQDihkrbonK7qNIBYC87tpL/XEUyIYc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "JfYUqWF+OoGjiYkRI4L5iPlF+T1Eleul7Fki22jp4Qc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "q1RyGfIgsaQHoZFRw+DD28V26rN5hweApPLwExncvT8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "L2PFeKGvLS6C+DLudR6fGlBq3ERPvjWvRyNRIA2HVb0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "CWxaNqL3iP1yCixDkcmf9bmW3E5VeN8TJkg1jJe528s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "+vC6araOEo+fpW7PSIP40/EnzBCj1d2N10Jr3rrXJJM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6SV63Mf51Z6A6p2X3rCnJKCu6ku3Oeb45mBYbz+IoAo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "RjBYT2h3ZAoHxhf8DU6/dFbDkEBZp0IxREcsRTu2MXs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "b7d8mRzD1kI1tdc7uNL+YAUonJ6pODLsRLkArfEKSkM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Xg8C1/A0KJaXOw4i+26Rv03/CydaaunOzXh0CIT+gn8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "UoKUDw2wJYToUCcFaIs03YQSTksYR0MIOTJllwODqKc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "c/5cwAT0C5jber2xlJnWD3a5tVDy0nRtr5HG02hoFOY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "wSUrRXavAGaajNeqC5mEUH1K67oYl5Wy9RNIzKjwLAM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6vrp4wWDtHEgHWR99I70WVDzevg1Fk/Pw5U8gUDa0OU=", + "subType": "00" + } + } + ] + }, + { + "_id": { + "$numberInt": "1" + }, + "encryptedDecimalPrecision": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "mVZb+Ra0EYjQ4Zrh9X//E2T8MRj7NMqm5GUJXhRrBEI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "FA74j21GUEJb1DJBOpR9nVnjaDZnd8yAQNuaW9Qi26g=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "kJv//KVkbrobIBf+QeWC5jxn20mx/P0R1N6aCSMgKM8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "zB+Whi9IUUGxfLEe+lGuIzLX4LFbIhaIAm5lRk65QTc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ybO1QU3CgvhO8JgRXH+HxKszWcpl5aGDYYVa75fHa1g=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "X3Y3eSAbbMg//JgiHHiFpYOpV61t8kkDexI+CQyitH4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SlNHXyqVFGDPrX/2ppwog6l4pwj3PKda2TkZbqgfSfA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "McjV8xwTF3xI7863DYOBdyvIv6UpzThl6v9vBRk05bI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "MgwakFvPyBlwqFTbhWUF79URJQWFoJTGotlEVSPPUsQ=", + 
"subType": "00" + } + }, + { + "$binary": { + "base64": "DyBERpMSD5lEM5Nhpcn4WGgxgn/mkUVJp+PYSLX5jsE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "I43iazc0xj1WVbYB/V+uTL/tughN1bBlxh1iypBnNsA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "wjOBa/ATMuOywFmuPgC0GF/oeLqu0Z7eK5udzkTPbis=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "gRQVwiR+m+0Vg8ZDXqrQQcVnTyobwCXNaA4BCJVXtMc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "WUZ6huwx0ZbLb0R00uiC9FOJzsUocUN8qE5+YRenkvQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "7s79aKEuPgQcS/YPOOVcYNZvHIo7FFsWtFCrnDKXefA=", + "subType": "00" + } + } + ] + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 0, + "encryptedDecimalPrecision": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDecimalPrecision", + "bsonType": "decimal", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberDecimal": "0.0" + }, + "max": { + "$numberDecimal": "200.0" + }, + "precision": { + "$numberInt": "2" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encryptedDecimalPrecision": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDecimalPrecision", + "bsonType": "decimal", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberDecimal": "0.0" + }, + "max": { + "$numberDecimal": "200.0" + }, + "precision": { + "$numberInt": "2" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "aggregate": "default", + "pipeline": [ + { + "$match": { + "encryptedDecimalPrecision": { + "$gt": { + "$binary": { + "base64": 
"DRYKAAADcGF5bG9hZACiCQAABGcAjgkAAAMwAH0AAAAFZAAgAAAAAHdJ2Vnb4MMzqVYVssjSdDy8XU4GVzMTfGifGETgQ2mYBXMAIAAAAAD7cFfKJGIXo6PjyeX2ria02CckW7dWFDoY/3FyBdm1NQVsACAAAAAAhEPSNv4M023A3hzNFuy83+hIKuZ2mKRY954N++aEOBUAAzEAfQAAAAVkACAAAAAAlmvfDrZoydUet4eCVMq7z6a58Ea+1HLJOWxN5lNcrWEFcwAgAAAAAEBo5AWZyC41b9ayjWNQSL4iYEAIwR/JG+ssN8bdoK9RBWwAIAAAAACEndE0SLxFSElOrNnqeX0EPmgDio3udZjVREy4JLS3sQADMgB9AAAABWQAIAAAAABbiLaoxAA6rinMJw1hC8ZUiq6UU1AQaPFn/py/Y06WuQVzACAAAAAAhtDasFkvYE7SCNu1je/hxdE9TJtAvvH3NtdEbKzNbCUFbAAgAAAAAIGepU1RSCF8sWODHEpKglsoqw3VBBH4a/URGxgGzbq2AAMzAH0AAAAFZAAgAAAAALORWwSr+tYNxcil2KIGSbNhTHvcPbdj+rLVQNx21S/KBXMAIAAAAAD6diZBkPEJ1cQy06LAxdbNK8Nlxbb44fH4Wk3Y3260nQVsACAAAAAA1eYAZBFHlDiaDAljWi8blGQ2nvvZa5AO5doeo0SFZsgAAzQAfQAAAAVkACAAAAAAG5XMK96PjClNlUvg82j4pMY1YxsznZfj4uNweD394FoFcwAgAAAAAKHgQLdGJHkrfFg9nB93Ac+3VgBw6aU44MTkKIQ91dZoBWwAIAAAAAAPxXmi+SDJ+40A0KdwfRczexlZQrHjIA+D3oUB0EY9tAADNQB9AAAABWQAIAAAAAA6M++b9I0YFemmWBAWAE3glu2Ah3Ta1FBxAQEIWS0toAVzACAAAAAANXYTqPf1Y6X3Ns6YQIX0C3FKCyWUo+Kk+fNcQvc0WSoFbAAgAAAAAA+uJUw1ICYgyeygSRe206VTWVtUnhdci3iHbyP5YtEVAAM2AH0AAAAFZAAgAAAAAKl8bV1riH/uyJ+X0HHd3+18k2cJl2dQFXCdoagutFcaBXMAIAAAAABm8F2Ew9f0VOABdcF+lP0Bi+zWvEUPniWgrxPq/Sx3uwVsACAAAAAAJfFErjZ6BPhsw5LjJLqNtKDLJ4zV0eIZppQpd9b0wZoAAzcAfQAAAAVkACAAAAAAsYZD8JEP6kYsPncFnNZwJxhu4YtUTKPNcjHtv67H+rYFcwAgAAAAAI4LqZcRkvbs/2F62Flu0pixNcor4WmBD0DHGaf039wLBWwAIAAAAAD4wUR3xd9lKltcqqo8LYvdMQWzCRobkV/ppKB/yn5dUgADOAB9AAAABWQAIAAAAAC0vdAi+dmoIXvZ5LqUqvyKV9/tHqSI2SWiSJO5pTnA2wVzACAAAAAAS2qvf9fvfVUH5WtsVxjxmskpGjYTQV34LwvQQw1y9wIFbAAgAAAAAE0+FKuK7HxbypvCeEJzMTcjOWE0ScYOlTBMUNlIv55hAAM5AH0AAAAFZAAgAAAAAH31lb/srBcrOXkzddCwAnclsR5/3QijEVgECs2JjOWBBXMAIAAAAABg7+prDT73YcCvLE5QbuIrqGcjLc5pQD2Miq0d29yrxgVsACAAAAAAetRiPwDSFWBzpWSWkOKWM6fKStRJ8SyObnpc79ux8p0AAzEwAH0AAAAFZAAgAAAAAOK8brUuc2onBNDRtfYMR736dHj4dQqXod8JG7tAMTsDBXMAIAAAAAAW6SrGAL6Bx0s7ZlsYULFfOAiYIGhEWu6md3r+Rk40awVsACAAAAAAIHYXP8RLcCboUmHN3+OlnEw1DxaLSnbTB9PdF228fFAAAzExAH0AAAAFZAAgAAAAAFdthRhe2Q8CvxGIhjTJZv0Lk97GkHciTPxZ/mckLoNaBXMAIAAAAAAqOxsAr23LOVB0DIHbPf9UDJJRFXY2YoKbjhRqw5psbQVsACAAAAAA0G2GD8ZQjDBntjLpW4rqwKRS6HiUjL03g1N6chANozcAAzEyAH0AAAAFZAAgAAAAAMWymwwbvIeMqmnKWWifUqoCxOsdpnonM2qdLPyjqJO/BXMAIAAAAAB6IDmmpUhBD2zpRj8/y/kmOSXcjuIU14sNh6GKSsg2uwVsACAAAAAAWMFPNOk3EMSQDS9JGPSMIQP0oNGVugxXKKUrIPPlhHgAAzEzAH0AAAAFZAAgAAAAAPcLmtq+V1e+MRlZ7NHq1+mrRVBQje5zj685ZvdsfKvSBXMAIAAAAABdHz/3w2k5km97QN9m7oLFYJaVJneNlMboIlz5yUASQAVsACAAAAAAWbp8JVJnx8fEVAJFa7WMfMa7wXeP5M3C8MX20J/i9n0AAzE0AH0AAAAFZAAgAAAAAJaRYmo8zqI2BEUzdSwp4tVRpPmVWsfydkYN3UHh6TMuBXMAIAAAAAAeD6mDnQeLlbC9i0sVgE8+RH6y+e94OJQ0tJ0PvblVSgVsACAAAAAAWp4jvretbDEsqEMzP/WLTnwOiJwCtfrCiB6m8k+yEMoAAzE1AH0AAAAFZAAgAAAAAAZZ538coNPwyRjhEwr5P8Xw32oWOJF+R+nfCGgy2qO3BXMAIAAAAACOPLnJlKwGNPDBReRKnHfteq0wFb3ezhrc7BVXs8RUHwVsACAAAAAA+lGesNk3+SyB/60rSvdQ2aN2vfJPR7llJVhufGTNhHkAAzE2AH0AAAAFZAAgAAAAAFH9l9GGA1I52atJV5jNUf1lx8jBjoEoVoME97v5GFJiBXMAIAAAAAC1qH3Kd78Dr9NGbw7y9D/XYBwv5h1LLO8la5OU7g8UkQVsACAAAAAArZ6atJCYrVfHB8dSNPOFf6nnDADBMJcIEj8ljPvxHp8AAzE3AH0AAAAFZAAgAAAAADtbVEI2tdkrowEMdkacD2w0Y3T3Ofi7PH6HmA6sP0c/BXMAIAAAAADuBSROnZHA+NgUPH8d0LnWFiDsM2bY8bzjC1+elSsIygVsACAAAAAAR0G2m+uANoWknkr/NerFcG+fECVxNIs0cqbY1t/U/0MAAzE4AH0AAAAFZAAgAAAAAAh3WpeMVlikPFYj9hLj+fmIqVt6omCSF75W3TPExyWpBXMAIAAAAAAsQkRmwqeVj2gGE03orb6PtrIzDt6dDU3hgSQi8E2wKgVsACAAAAAA3GHaRE2RAcaBRd8VzmYzWeBD2Gmy91eTK1k8YdWObZcAABJjbQAAAAAAAAAAAAAQcGF5bG9hZElkAAAAAAAQZmlyc3RPcGVyYXRvcgABAAAAEnNwAAEAAAAAAAAAEHBuAAIAAAAQdGYAAQAAABNtbgAAAAAAAAAAAAAAAAAAAD4wE214ANAHAAAAAAAAAAAAAAAAPjAA", + "subType": "06" + } + } + } + } + } + ], + "cursor": {}, + "encryptionInformation": { + "type": 1, + 
"schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDecimalPrecision", + "bsonType": "decimal", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberDecimal": "0.0" + }, + "max": { + "$numberDecimal": "200.0" + }, + "precision": { + "$numberInt": "2" + } + } + } + ] + } + } + } + }, + "commandName": "aggregate" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-Rangev2-DecimalPrecision-Correctness.json b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-DecimalPrecision-Correctness.json new file mode 100644 index 0000000000..bc4e1f4508 --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-DecimalPrecision-Correctness.json @@ -0,0 +1,1418 @@ +{ + "description": "fle2v2-Rangev2-DecimalPrecision-Correctness", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDecimalPrecision", + "bsonType": "decimal", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberDecimal": "0.0" + }, + "max": { + "$numberDecimal": "200.0" + }, + "precision": { + "$numberInt": "2" + } + } + } + ] + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "Find with $gt", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDecimalPrecision": { + "$numberDecimal": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + 
"document": { + "_id": 1, + "encryptedDecimalPrecision": { + "$numberDecimal": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDecimalPrecision": { + "$gt": { + "$numberDecimal": "0.0" + } + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedDecimalPrecision": { + "$numberDecimal": "1.0" + } + } + ] + } + ] + }, + { + "description": "Find with $gte", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDecimalPrecision": { + "$numberDecimal": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDecimalPrecision": { + "$numberDecimal": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDecimalPrecision": { + "$gte": { + "$numberDecimal": "0.0" + } + } + }, + "sort": { + "_id": 1 + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDecimalPrecision": { + "$numberDecimal": "0.0" + } + }, + { + "_id": 1, + "encryptedDecimalPrecision": { + "$numberDecimal": "1.0" + } + } + ] + } + ] + }, + { + "description": "Find with $gt with no results", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDecimalPrecision": { + "$numberDecimal": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDecimalPrecision": { + "$numberDecimal": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDecimalPrecision": { + "$gt": { + "$numberDecimal": "1.0" + } + } + } + }, + "object": "coll", + "expectResult": [] + } + ] + }, + { + "description": "Find with $lt", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDecimalPrecision": { + "$numberDecimal": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDecimalPrecision": { + "$numberDecimal": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDecimalPrecision": { + "$lt": { + "$numberDecimal": "1.0" + } + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDecimalPrecision": { + "$numberDecimal": "0.0" + } + } + ] + } + ] + }, + { + "description": "Find with $lte", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDecimalPrecision": { + "$numberDecimal": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDecimalPrecision": { + "$numberDecimal": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDecimalPrecision": { + "$lte": { + "$numberDecimal": "1.0" + } + } + }, + "sort": { + "_id": 1 + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDecimalPrecision": { + "$numberDecimal": "0.0" + } + }, + { + "_id": 1, + "encryptedDecimalPrecision": { + "$numberDecimal": "1.0" + } + } + ] + } + ] + }, + { + "description": "Find with $lt below min", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDecimalPrecision": { + "$numberDecimal": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, 
+ "encryptedDecimalPrecision": { + "$numberDecimal": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDecimalPrecision": { + "$lt": { + "$numberDecimal": "0.0" + } + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "must be greater than the range minimum" + } + } + ] + }, + { + "description": "Find with $gt above max", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDecimalPrecision": { + "$numberDecimal": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDecimalPrecision": { + "$numberDecimal": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDecimalPrecision": { + "$gt": { + "$numberDecimal": "200.0" + } + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "must be less than the range max" + } + } + ] + }, + { + "description": "Find with $gt and $lt", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDecimalPrecision": { + "$numberDecimal": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDecimalPrecision": { + "$numberDecimal": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDecimalPrecision": { + "$gt": { + "$numberDecimal": "0.0" + }, + "$lt": { + "$numberDecimal": "2.0" + } + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedDecimalPrecision": { + "$numberDecimal": "1.0" + } + } + ] + } + ] + }, + { + "description": "Find with equality", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDecimalPrecision": { + "$numberDecimal": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDecimalPrecision": { + "$numberDecimal": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDecimalPrecision": { + "$numberDecimal": "0.0" + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDecimalPrecision": { + "$numberDecimal": "0.0" + } + } + ] + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDecimalPrecision": { + "$numberDecimal": "1.0" + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedDecimalPrecision": { + "$numberDecimal": "1.0" + } + } + ] + } + ] + }, + { + "description": "Find with full range", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDecimalPrecision": { + "$numberDecimal": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDecimalPrecision": { + "$numberDecimal": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDecimalPrecision": { + "$gte": { + "$numberDecimal": "0.0" + }, + "$lte": { + "$numberDecimal": "200.0" + } + } + }, + "sort": { + "_id": 1 + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDecimalPrecision": { + "$numberDecimal": "0.0" + } + }, + { + "_id": 1, + "encryptedDecimalPrecision": { + "$numberDecimal": "1.0" + } + } + ] + } + ] + }, + { + "description": "Find with $in", + "operations": [ + { + "name": 
"insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDecimalPrecision": { + "$numberDecimal": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDecimalPrecision": { + "$numberDecimal": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDecimalPrecision": { + "$in": [ + { + "$numberDecimal": "0.0" + } + ] + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDecimalPrecision": { + "$numberDecimal": "0.0" + } + } + ] + } + ] + }, + { + "description": "Insert out of range", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDecimalPrecision": { + "$numberDecimal": "-1" + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "value must be greater than or equal to the minimum value" + } + } + ] + }, + { + "description": "Insert min and max", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDecimalPrecision": { + "$numberDecimal": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 200, + "encryptedDecimalPrecision": { + "$numberDecimal": "200.0" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": {}, + "sort": { + "_id": 1 + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDecimalPrecision": { + "$numberDecimal": "0.0" + } + }, + { + "_id": 200, + "encryptedDecimalPrecision": { + "$numberDecimal": "200.0" + } + } + ] + } + ] + }, + { + "description": "Aggregate with $gte", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDecimalPrecision": { + "$numberDecimal": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDecimalPrecision": { + "$numberDecimal": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDecimalPrecision": { + "$gte": { + "$numberDecimal": "0.0" + } + } + } + }, + { + "$sort": { + "_id": 1 + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDecimalPrecision": { + "$numberDecimal": "0.0" + } + }, + { + "_id": 1, + "encryptedDecimalPrecision": { + "$numberDecimal": "1.0" + } + } + ] + } + ] + }, + { + "description": "Aggregate with $gt with no results", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDecimalPrecision": { + "$numberDecimal": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDecimalPrecision": { + "$numberDecimal": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDecimalPrecision": { + "$gt": { + "$numberDecimal": "1.0" + } + } + } + } + ] + }, + "object": "coll", + "expectResult": [] + } + ] + }, + { + "description": "Aggregate with $lt", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDecimalPrecision": { + "$numberDecimal": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDecimalPrecision": { + "$numberDecimal": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": 
"aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDecimalPrecision": { + "$lt": { + "$numberDecimal": "1.0" + } + } + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDecimalPrecision": { + "$numberDecimal": "0.0" + } + } + ] + } + ] + }, + { + "description": "Aggregate with $lte", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDecimalPrecision": { + "$numberDecimal": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDecimalPrecision": { + "$numberDecimal": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDecimalPrecision": { + "$lte": { + "$numberDecimal": "1.0" + } + } + } + }, + { + "$sort": { + "_id": 1 + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDecimalPrecision": { + "$numberDecimal": "0.0" + } + }, + { + "_id": 1, + "encryptedDecimalPrecision": { + "$numberDecimal": "1.0" + } + } + ] + } + ] + }, + { + "description": "Aggregate with $lt below min", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDecimalPrecision": { + "$numberDecimal": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDecimalPrecision": { + "$numberDecimal": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDecimalPrecision": { + "$lt": { + "$numberDecimal": "0.0" + } + } + } + } + ] + }, + "object": "coll", + "expectError": { + "errorContains": "must be greater than the range minimum" + } + } + ] + }, + { + "description": "Aggregate with $gt above max", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDecimalPrecision": { + "$numberDecimal": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDecimalPrecision": { + "$numberDecimal": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDecimalPrecision": { + "$gt": { + "$numberDecimal": "200.0" + } + } + } + } + ] + }, + "object": "coll", + "expectError": { + "errorContains": "must be less than the range max" + } + } + ] + }, + { + "description": "Aggregate with $gt and $lt", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDecimalPrecision": { + "$numberDecimal": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDecimalPrecision": { + "$numberDecimal": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDecimalPrecision": { + "$gt": { + "$numberDecimal": "0.0" + }, + "$lt": { + "$numberDecimal": "2.0" + } + } + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedDecimalPrecision": { + "$numberDecimal": "1.0" + } + } + ] + } + ] + }, + { + "description": "Aggregate with equality", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDecimalPrecision": { + "$numberDecimal": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": 
{ + "document": { + "_id": 1, + "encryptedDecimalPrecision": { + "$numberDecimal": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDecimalPrecision": { + "$numberDecimal": "0.0" + } + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDecimalPrecision": { + "$numberDecimal": "0.0" + } + } + ] + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDecimalPrecision": { + "$numberDecimal": "1.0" + } + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedDecimalPrecision": { + "$numberDecimal": "1.0" + } + } + ] + } + ] + }, + { + "description": "Aggregate with full range", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDecimalPrecision": { + "$numberDecimal": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDecimalPrecision": { + "$numberDecimal": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDecimalPrecision": { + "$gte": { + "$numberDecimal": "0.0" + }, + "$lte": { + "$numberDecimal": "200.0" + } + } + } + }, + { + "$sort": { + "_id": 1 + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDecimalPrecision": { + "$numberDecimal": "0.0" + } + }, + { + "_id": 1, + "encryptedDecimalPrecision": { + "$numberDecimal": "1.0" + } + } + ] + } + ] + }, + { + "description": "Aggregate with $in", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDecimalPrecision": { + "$numberDecimal": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDecimalPrecision": { + "$numberDecimal": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDecimalPrecision": { + "$in": [ + { + "$numberDecimal": "0.0" + } + ] + } + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDecimalPrecision": { + "$numberDecimal": "0.0" + } + } + ] + } + ] + }, + { + "description": "Wrong type: Insert Int", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDecimalPrecision": { + "$numberInt": "0" + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "Cannot encrypt element" + } + } + ] + }, + { + "description": "Wrong type: Find Int", + "operations": [ + { + "name": "find", + "arguments": { + "filter": { + "encryptedDecimalPrecision": { + "$gte": { + "$numberInt": "0" + } + } + }, + "sort": { + "_id": 1 + } + }, + "object": "coll", + "expectError": { + "errorContains": "field type is not supported" + } + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-Rangev2-DecimalPrecision-Delete.json b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-DecimalPrecision-Delete.json new file mode 100644 index 0000000000..1912f68ee5 --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-DecimalPrecision-Delete.json @@ -0,0 +1,539 @@ +{ + "description": "fle2v2-Rangev2-DecimalPrecision-Delete", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + 
"minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted" + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll_unencrypted", + "database": "db_unencrypted", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDecimalPrecision", + "bsonType": "decimal", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberDecimal": "0.0" + }, + "max": { + "$numberDecimal": "200.0" + }, + "precision": { + "$numberInt": "2" + } + } + } + ] + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "FLE2 Range DecimalPrecision. 
Delete.", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDecimalPrecision": { + "$numberDecimal": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDecimalPrecision": { + "$numberDecimal": "1" + } + } + }, + "object": "coll" + }, + { + "name": "deleteOne", + "arguments": { + "filter": { + "encryptedDecimalPrecision": { + "$gt": { + "$numberDecimal": "0" + } + } + } + }, + "object": "coll", + "expectResult": { + "deletedCount": 1 + } + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": { + "$numberInt": "0" + }, + "encryptedDecimalPrecision": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "Dri0CXmL78L2DOgk9w0DwxHOMGMzih7m6l59vgy+WWo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "x7GR49EN0t3WXQDihkrbonK7qNIBYC87tpL/XEUyIYc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "JfYUqWF+OoGjiYkRI4L5iPlF+T1Eleul7Fki22jp4Qc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "q1RyGfIgsaQHoZFRw+DD28V26rN5hweApPLwExncvT8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "L2PFeKGvLS6C+DLudR6fGlBq3ERPvjWvRyNRIA2HVb0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "CWxaNqL3iP1yCixDkcmf9bmW3E5VeN8TJkg1jJe528s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "+vC6araOEo+fpW7PSIP40/EnzBCj1d2N10Jr3rrXJJM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6SV63Mf51Z6A6p2X3rCnJKCu6ku3Oeb45mBYbz+IoAo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "RjBYT2h3ZAoHxhf8DU6/dFbDkEBZp0IxREcsRTu2MXs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "b7d8mRzD1kI1tdc7uNL+YAUonJ6pODLsRLkArfEKSkM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Xg8C1/A0KJaXOw4i+26Rv03/CydaaunOzXh0CIT+gn8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "UoKUDw2wJYToUCcFaIs03YQSTksYR0MIOTJllwODqKc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "c/5cwAT0C5jber2xlJnWD3a5tVDy0nRtr5HG02hoFOY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "wSUrRXavAGaajNeqC5mEUH1K67oYl5Wy9RNIzKjwLAM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6vrp4wWDtHEgHWR99I70WVDzevg1Fk/Pw5U8gUDa0OU=", + "subType": "00" + } + } + ] + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 0, + "encryptedDecimalPrecision": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDecimalPrecision", + "bsonType": "decimal", + "queries": { + 
"queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberDecimal": "0.0" + }, + "max": { + "$numberDecimal": "200.0" + }, + "precision": { + "$numberInt": "2" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encryptedDecimalPrecision": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDecimalPrecision", + "bsonType": "decimal", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberDecimal": "0.0" + }, + "max": { + "$numberDecimal": "200.0" + }, + "precision": { + "$numberInt": "2" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "delete": "default", + "deletes": [ + { + "q": { + "encryptedDecimalPrecision": { + "$gt": { + "$binary": { + "base64": "DRYKAAADcGF5bG9hZACiCQAABGcAjgkAAAMwAH0AAAAFZAAgAAAAAHdJ2Vnb4MMzqVYVssjSdDy8XU4GVzMTfGifGETgQ2mYBXMAIAAAAAD7cFfKJGIXo6PjyeX2ria02CckW7dWFDoY/3FyBdm1NQVsACAAAAAAhEPSNv4M023A3hzNFuy83+hIKuZ2mKRY954N++aEOBUAAzEAfQAAAAVkACAAAAAAlmvfDrZoydUet4eCVMq7z6a58Ea+1HLJOWxN5lNcrWEFcwAgAAAAAEBo5AWZyC41b9ayjWNQSL4iYEAIwR/JG+ssN8bdoK9RBWwAIAAAAACEndE0SLxFSElOrNnqeX0EPmgDio3udZjVREy4JLS3sQADMgB9AAAABWQAIAAAAABbiLaoxAA6rinMJw1hC8ZUiq6UU1AQaPFn/py/Y06WuQVzACAAAAAAhtDasFkvYE7SCNu1je/hxdE9TJtAvvH3NtdEbKzNbCUFbAAgAAAAAIGepU1RSCF8sWODHEpKglsoqw3VBBH4a/URGxgGzbq2AAMzAH0AAAAFZAAgAAAAALORWwSr+tYNxcil2KIGSbNhTHvcPbdj+rLVQNx21S/KBXMAIAAAAAD6diZBkPEJ1cQy06LAxdbNK8Nlxbb44fH4Wk3Y3260nQVsACAAAAAA1eYAZBFHlDiaDAljWi8blGQ2nvvZa5AO5doeo0SFZsgAAzQAfQAAAAVkACAAAAAAG5XMK96PjClNlUvg82j4pMY1YxsznZfj4uNweD394FoFcwAgAAAAAKHgQLdGJHkrfFg9nB93Ac+3VgBw6aU44MTkKIQ91dZoBWwAIAAAAAAPxXmi+SDJ+40A0KdwfRczexlZQrHjIA+D3oUB0EY9tAADNQB9AAAABWQAIAAAAAA6M++b9I0YFemmWBAWAE3glu2Ah3Ta1FBxAQEIWS0toAVzACAAAAAANXYTqPf1Y6X3Ns6YQIX0C3FKCyWUo+Kk+fNcQvc0WSoFbAAgAAAAAA+uJUw1ICYgyeygSRe206VTWVtUnhdci3iHbyP5YtEVAAM2AH0AAAAFZAAgAAAAAKl8bV1riH/uyJ+X0HHd3+18k2cJl2dQFXCdoagutFcaBXMAIAAAAABm8F2Ew9f0VOABdcF+lP0Bi+zWvEUPniWgrxPq/Sx3uwVsACAAAAAAJfFErjZ6BPhsw5LjJLqNtKDLJ4zV0eIZppQpd9b0wZoAAzcAfQAAAAVkACAAAAAAsYZD8JEP6kYsPncFnNZwJxhu4YtUTKPNcjHtv67H+rYFcwAgAAAAAI4LqZcRkvbs/2F62Flu0pixNcor4WmBD0DHGaf039wLBWwAIAAAAAD4wUR3xd9lKltcqqo8LYvdMQWzCRobkV/ppKB/yn5dUgADOAB9AAAABWQAIAAAAAC0vdAi+dmoIXvZ5LqUqvyKV9/tHqSI2SWiSJO5pTnA2wVzACAAAAAAS2qvf9fvfVUH5WtsVxjxmskpGjYTQV34LwvQQw1y9wIFbAAgAAAAAE0+FKuK7HxbypvCeEJzMTcjOWE0ScYOlTBMUNlIv55hAAM5AH0AAAAFZAAgAAAAAH31lb/srBcrOXkzddCwAnclsR5/3QijEVgECs2JjOWBBXMAIAAAAABg7+prDT73YcCvLE5QbuIrqGcjLc5pQD2Miq0d29yrxgVsACAAAAAAetRiPwDSFWBzpWSWkOKWM6fKStRJ8SyObnpc79ux8p0AAzEwAH0AAAAFZAAgAAAAAOK8brUuc2onBNDRtfYMR736dHj4dQqXod8JG7tAMTsDBXMAIAAAAAAW6SrGAL6Bx0s7ZlsYULFfOAiYIGhEWu6md3r+Rk40awVsACAAAAAAIHYXP8RLcCboUmHN3+OlnEw1DxaLSnbTB9PdF228fFAAAzExAH0AAAAFZAAgAAAAAFdthRhe2Q8CvxGIhjTJZv0Lk97GkHciTPxZ/mckLoNaBXMAIAAAAAAqOxsAr23LOVB0DIHbPf9UDJJRFXY2YoKbjhRqw5psbQVsACAAAAAA0G2GD8ZQjDBntjLpW4rqwKRS6HiUjL03g1N6chANozcAAzEyAH0AAAAFZAAgAAAAAMWymwwbvIeMqmnKWWifUqoCxOsdpnonM2qdLPyjqJO/BXMAIA
AAAAB6IDmmpUhBD2zpRj8/y/kmOSXcjuIU14sNh6GKSsg2uwVsACAAAAAAWMFPNOk3EMSQDS9JGPSMIQP0oNGVugxXKKUrIPPlhHgAAzEzAH0AAAAFZAAgAAAAAPcLmtq+V1e+MRlZ7NHq1+mrRVBQje5zj685ZvdsfKvSBXMAIAAAAABdHz/3w2k5km97QN9m7oLFYJaVJneNlMboIlz5yUASQAVsACAAAAAAWbp8JVJnx8fEVAJFa7WMfMa7wXeP5M3C8MX20J/i9n0AAzE0AH0AAAAFZAAgAAAAAJaRYmo8zqI2BEUzdSwp4tVRpPmVWsfydkYN3UHh6TMuBXMAIAAAAAAeD6mDnQeLlbC9i0sVgE8+RH6y+e94OJQ0tJ0PvblVSgVsACAAAAAAWp4jvretbDEsqEMzP/WLTnwOiJwCtfrCiB6m8k+yEMoAAzE1AH0AAAAFZAAgAAAAAAZZ538coNPwyRjhEwr5P8Xw32oWOJF+R+nfCGgy2qO3BXMAIAAAAACOPLnJlKwGNPDBReRKnHfteq0wFb3ezhrc7BVXs8RUHwVsACAAAAAA+lGesNk3+SyB/60rSvdQ2aN2vfJPR7llJVhufGTNhHkAAzE2AH0AAAAFZAAgAAAAAFH9l9GGA1I52atJV5jNUf1lx8jBjoEoVoME97v5GFJiBXMAIAAAAAC1qH3Kd78Dr9NGbw7y9D/XYBwv5h1LLO8la5OU7g8UkQVsACAAAAAArZ6atJCYrVfHB8dSNPOFf6nnDADBMJcIEj8ljPvxHp8AAzE3AH0AAAAFZAAgAAAAADtbVEI2tdkrowEMdkacD2w0Y3T3Ofi7PH6HmA6sP0c/BXMAIAAAAADuBSROnZHA+NgUPH8d0LnWFiDsM2bY8bzjC1+elSsIygVsACAAAAAAR0G2m+uANoWknkr/NerFcG+fECVxNIs0cqbY1t/U/0MAAzE4AH0AAAAFZAAgAAAAAAh3WpeMVlikPFYj9hLj+fmIqVt6omCSF75W3TPExyWpBXMAIAAAAAAsQkRmwqeVj2gGE03orb6PtrIzDt6dDU3hgSQi8E2wKgVsACAAAAAA3GHaRE2RAcaBRd8VzmYzWeBD2Gmy91eTK1k8YdWObZcAABJjbQAAAAAAAAAAAAAQcGF5bG9hZElkAAAAAAAQZmlyc3RPcGVyYXRvcgABAAAAEnNwAAEAAAAAAAAAEHBuAAIAAAAQdGYAAQAAABNtbgAAAAAAAAAAAAAAAAAAAD4wE214ANAHAAAAAAAAAAAAAAAAPjAA", + "subType": "06" + } + } + } + }, + "limit": 1 + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDecimalPrecision", + "bsonType": "decimal", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberDecimal": "0.0" + }, + "max": { + "$numberDecimal": "200.0" + }, + "precision": { + "$numberInt": "2" + } + } + } + ] + } + } + } + }, + "commandName": "delete" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-Rangev2-DecimalPrecision-FindOneAndUpdate.json b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-DecimalPrecision-FindOneAndUpdate.json new file mode 100644 index 0000000000..9cf4488622 --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-DecimalPrecision-FindOneAndUpdate.json @@ -0,0 +1,651 @@ +{ + "description": "fle2v2-Rangev2-DecimalPrecision-FindOneAndUpdate", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted" + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": 
"coll_unencrypted", + "database": "db_unencrypted", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDecimalPrecision", + "bsonType": "decimal", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberDecimal": "0.0" + }, + "max": { + "$numberDecimal": "200.0" + }, + "precision": { + "$numberInt": "2" + } + } + } + ] + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "FLE2 Range DecimalPrecision. FindOneAndUpdate.", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDecimalPrecision": { + "$numberDecimal": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDecimalPrecision": { + "$numberDecimal": "1" + } + } + }, + "object": "coll" + }, + { + "name": "findOneAndUpdate", + "arguments": { + "filter": { + "encryptedDecimalPrecision": { + "$gt": { + "$numberDecimal": "0" + } + } + }, + "update": { + "$set": { + "encryptedDecimalPrecision": { + "$numberDecimal": "2" + } + } + }, + "returnDocument": "Before" + }, + "object": "coll", + "expectResult": { + "_id": 1, + "encryptedDecimalPrecision": { + "$numberDecimal": "1" + } + } + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": { + "$numberInt": "0" + }, + "encryptedDecimalPrecision": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "Dri0CXmL78L2DOgk9w0DwxHOMGMzih7m6l59vgy+WWo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "x7GR49EN0t3WXQDihkrbonK7qNIBYC87tpL/XEUyIYc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "JfYUqWF+OoGjiYkRI4L5iPlF+T1Eleul7Fki22jp4Qc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "q1RyGfIgsaQHoZFRw+DD28V26rN5hweApPLwExncvT8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "L2PFeKGvLS6C+DLudR6fGlBq3ERPvjWvRyNRIA2HVb0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "CWxaNqL3iP1yCixDkcmf9bmW3E5VeN8TJkg1jJe528s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "+vC6araOEo+fpW7PSIP40/EnzBCj1d2N10Jr3rrXJJM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6SV63Mf51Z6A6p2X3rCnJKCu6ku3Oeb45mBYbz+IoAo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "RjBYT2h3ZAoHxhf8DU6/dFbDkEBZp0IxREcsRTu2MXs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": 
"b7d8mRzD1kI1tdc7uNL+YAUonJ6pODLsRLkArfEKSkM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Xg8C1/A0KJaXOw4i+26Rv03/CydaaunOzXh0CIT+gn8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "UoKUDw2wJYToUCcFaIs03YQSTksYR0MIOTJllwODqKc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "c/5cwAT0C5jber2xlJnWD3a5tVDy0nRtr5HG02hoFOY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "wSUrRXavAGaajNeqC5mEUH1K67oYl5Wy9RNIzKjwLAM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6vrp4wWDtHEgHWR99I70WVDzevg1Fk/Pw5U8gUDa0OU=", + "subType": "00" + } + } + ] + }, + { + "_id": { + "$numberInt": "1" + }, + "encryptedDecimalPrecision": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "V6knyt7Zq2CG3++l75UtBx2m32iGAPjHiAe439Bf02w=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "F08nMDWDZc+DbWM7XCEJNNCEYyinRmrvGP7EWhmp4is=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "cXH4688amcDc8kZOJq4UP8cE3R58Zl7e+Qo/1jyspps=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "uURBxvTp3FBCVkd+LPqyuY7d6rMW6SGIJQEPY/wtkZI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "jG3hax1L3RBp9t38vUt53FsBxgr/+Si/vVISpAylYpE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "kwtIW8MhH9Ky5xNjBx8gFA/SHh2YVphie7g5FGBzals=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "FHflwFuEMu4xX0ZApHi+pdlBH+oevAtXckCUb5Wv0xU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "0OKSXELxPP85SBVwDGf3LtMEQCJ8TTkFUl/+6jlkdb0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "25j9sQXZCihCmHKvTHgaBsAVZFcGPn7JjHdrCGlwyyw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "uEw0lpQtBppR3vqV9j9+NQRSBF1BzZukb8c9IhyWvxc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "zVhZ7Q59O087ji49oMJvBIgeir2oqvUpnh4p53GcTow=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dowrzKs+qJhRMZyKDbhjXbuX43FbmUKOaw9I8YlOZDw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ep5B6cska6THLIF7Mn3tn3RvV9EiwLSt0eZM/CLRUDc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "URNp/YmmDh5wIZUfAzzgPyJeMNiVx9PMsz52DZRujGY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "wlM4IAQhhKQEzoVqS8b1Ddd50GB95OFb9LnzOwyjCP4=", + "subType": "00" + } + } + ] + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 0, + "encryptedDecimalPrecision": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDecimalPrecision", + "bsonType": "decimal", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + 
"trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberDecimal": "0.0" + }, + "max": { + "$numberDecimal": "200.0" + }, + "precision": { + "$numberInt": "2" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encryptedDecimalPrecision": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDecimalPrecision", + "bsonType": "decimal", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberDecimal": "0.0" + }, + "max": { + "$numberDecimal": "200.0" + }, + "precision": { + "$numberInt": "2" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "findAndModify": "default", + "query": { + "encryptedDecimalPrecision": { + "$gt": { + "$binary": { + "base64": "DRYKAAADcGF5bG9hZACiCQAABGcAjgkAAAMwAH0AAAAFZAAgAAAAAHdJ2Vnb4MMzqVYVssjSdDy8XU4GVzMTfGifGETgQ2mYBXMAIAAAAAD7cFfKJGIXo6PjyeX2ria02CckW7dWFDoY/3FyBdm1NQVsACAAAAAAhEPSNv4M023A3hzNFuy83+hIKuZ2mKRY954N++aEOBUAAzEAfQAAAAVkACAAAAAAlmvfDrZoydUet4eCVMq7z6a58Ea+1HLJOWxN5lNcrWEFcwAgAAAAAEBo5AWZyC41b9ayjWNQSL4iYEAIwR/JG+ssN8bdoK9RBWwAIAAAAACEndE0SLxFSElOrNnqeX0EPmgDio3udZjVREy4JLS3sQADMgB9AAAABWQAIAAAAABbiLaoxAA6rinMJw1hC8ZUiq6UU1AQaPFn/py/Y06WuQVzACAAAAAAhtDasFkvYE7SCNu1je/hxdE9TJtAvvH3NtdEbKzNbCUFbAAgAAAAAIGepU1RSCF8sWODHEpKglsoqw3VBBH4a/URGxgGzbq2AAMzAH0AAAAFZAAgAAAAALORWwSr+tYNxcil2KIGSbNhTHvcPbdj+rLVQNx21S/KBXMAIAAAAAD6diZBkPEJ1cQy06LAxdbNK8Nlxbb44fH4Wk3Y3260nQVsACAAAAAA1eYAZBFHlDiaDAljWi8blGQ2nvvZa5AO5doeo0SFZsgAAzQAfQAAAAVkACAAAAAAG5XMK96PjClNlUvg82j4pMY1YxsznZfj4uNweD394FoFcwAgAAAAAKHgQLdGJHkrfFg9nB93Ac+3VgBw6aU44MTkKIQ91dZoBWwAIAAAAAAPxXmi+SDJ+40A0KdwfRczexlZQrHjIA+D3oUB0EY9tAADNQB9AAAABWQAIAAAAAA6M++b9I0YFemmWBAWAE3glu2Ah3Ta1FBxAQEIWS0toAVzACAAAAAANXYTqPf1Y6X3Ns6YQIX0C3FKCyWUo+Kk+fNcQvc0WSoFbAAgAAAAAA+uJUw1ICYgyeygSRe206VTWVtUnhdci3iHbyP5YtEVAAM2AH0AAAAFZAAgAAAAAKl8bV1riH/uyJ+X0HHd3+18k2cJl2dQFXCdoagutFcaBXMAIAAAAABm8F2Ew9f0VOABdcF+lP0Bi+zWvEUPniWgrxPq/Sx3uwVsACAAAAAAJfFErjZ6BPhsw5LjJLqNtKDLJ4zV0eIZppQpd9b0wZoAAzcAfQAAAAVkACAAAAAAsYZD8JEP6kYsPncFnNZwJxhu4YtUTKPNcjHtv67H+rYFcwAgAAAAAI4LqZcRkvbs/2F62Flu0pixNcor4WmBD0DHGaf039wLBWwAIAAAAAD4wUR3xd9lKltcqqo8LYvdMQWzCRobkV/ppKB/yn5dUgADOAB9AAAABWQAIAAAAAC0vdAi+dmoIXvZ5LqUqvyKV9/tHqSI2SWiSJO5pTnA2wVzACAAAAAAS2qvf9fvfVUH5WtsVxjxmskpGjYTQV34LwvQQw1y9wIFbAAgAAAAAE0+FKuK7HxbypvCeEJzMTcjOWE0ScYOlTBMUNlIv55hAAM5AH0AAAAFZAAgAAAAAH31lb/srBcrOXkzddCwAnclsR5/3QijEVgECs2JjOWBBXMAIAAAAABg7+prDT73YcCvLE5QbuIrqGcjLc5pQD2Miq0d29yrxgVsACAAAAAAetRiPwDSFWBzpWSWkOKWM6fKStRJ8SyObnpc79ux8p0AAzEwAH0AAAAFZAAgAAAAAOK8brUuc2onBNDRtfYMR736dHj4dQqXod8JG7tAMTsDBXMAIAAAAAAW6SrGAL6Bx0s7ZlsYULFfOAiYIGhEWu6md3r+Rk40awVsACAAAAAAIHYXP8RLcCboUmHN3+OlnEw1DxaLSnbTB9PdF228fFAAAzExAH0AAAAFZAAgAAAAAFdthRhe2Q8CvxGIhjTJZv0Lk97GkHciTPxZ/mckLoNaBXMAIAAAAAAqOxsAr23LOVB0DIHbPf9UDJJRFXY2YoKbjhRqw5psbQVsACAAAAAA0G2GD8ZQjDBntjLpW4rqwKRS6HiUjL03g1N6chANozcAAzEyAH0AAAAFZAAgAAAAAMWymwwbvIeMqmnKWWifUqoCxOsdpnonM2qdLPyjqJO/BXMAIAAAAAB6IDmmpUhBD2zpRj8/y/kmOSXcjuIU14sNh6GKSsg2uwVsACAAAAAAWMFPNOk3EMSQDS9JGP
SMIQP0oNGVugxXKKUrIPPlhHgAAzEzAH0AAAAFZAAgAAAAAPcLmtq+V1e+MRlZ7NHq1+mrRVBQje5zj685ZvdsfKvSBXMAIAAAAABdHz/3w2k5km97QN9m7oLFYJaVJneNlMboIlz5yUASQAVsACAAAAAAWbp8JVJnx8fEVAJFa7WMfMa7wXeP5M3C8MX20J/i9n0AAzE0AH0AAAAFZAAgAAAAAJaRYmo8zqI2BEUzdSwp4tVRpPmVWsfydkYN3UHh6TMuBXMAIAAAAAAeD6mDnQeLlbC9i0sVgE8+RH6y+e94OJQ0tJ0PvblVSgVsACAAAAAAWp4jvretbDEsqEMzP/WLTnwOiJwCtfrCiB6m8k+yEMoAAzE1AH0AAAAFZAAgAAAAAAZZ538coNPwyRjhEwr5P8Xw32oWOJF+R+nfCGgy2qO3BXMAIAAAAACOPLnJlKwGNPDBReRKnHfteq0wFb3ezhrc7BVXs8RUHwVsACAAAAAA+lGesNk3+SyB/60rSvdQ2aN2vfJPR7llJVhufGTNhHkAAzE2AH0AAAAFZAAgAAAAAFH9l9GGA1I52atJV5jNUf1lx8jBjoEoVoME97v5GFJiBXMAIAAAAAC1qH3Kd78Dr9NGbw7y9D/XYBwv5h1LLO8la5OU7g8UkQVsACAAAAAArZ6atJCYrVfHB8dSNPOFf6nnDADBMJcIEj8ljPvxHp8AAzE3AH0AAAAFZAAgAAAAADtbVEI2tdkrowEMdkacD2w0Y3T3Ofi7PH6HmA6sP0c/BXMAIAAAAADuBSROnZHA+NgUPH8d0LnWFiDsM2bY8bzjC1+elSsIygVsACAAAAAAR0G2m+uANoWknkr/NerFcG+fECVxNIs0cqbY1t/U/0MAAzE4AH0AAAAFZAAgAAAAAAh3WpeMVlikPFYj9hLj+fmIqVt6omCSF75W3TPExyWpBXMAIAAAAAAsQkRmwqeVj2gGE03orb6PtrIzDt6dDU3hgSQi8E2wKgVsACAAAAAA3GHaRE2RAcaBRd8VzmYzWeBD2Gmy91eTK1k8YdWObZcAABJjbQAAAAAAAAAAAAAQcGF5bG9hZElkAAAAAAAQZmlyc3RPcGVyYXRvcgABAAAAEnNwAAEAAAAAAAAAEHBuAAIAAAAQdGYAAQAAABNtbgAAAAAAAAAAAAAAAAAAAD4wE214ANAHAAAAAAAAAAAAAAAAPjAA", + "subType": "06" + } + } + } + }, + "update": { + "$set": { + "encryptedDecimalPrecision": { + "$$type": "binData" + } + } + }, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDecimalPrecision", + "bsonType": "decimal", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberDecimal": "0.0" + }, + "max": { + "$numberDecimal": "200.0" + }, + "precision": { + "$numberInt": "2" + } + } + } + ] + } + } + } + }, + "commandName": "findAndModify" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-Rangev2-DecimalPrecision-InsertFind.json b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-DecimalPrecision-InsertFind.json new file mode 100644 index 0000000000..a9c3a8a46a --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-DecimalPrecision-InsertFind.json @@ -0,0 +1,634 @@ +{ + "description": "fle2v2-Rangev2-DecimalPrecision-InsertFind", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted" + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll_unencrypted", + "database": "db_unencrypted", + 
"collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDecimalPrecision", + "bsonType": "decimal", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberDecimal": "0.0" + }, + "max": { + "$numberDecimal": "200.0" + }, + "precision": { + "$numberInt": "2" + } + } + } + ] + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "FLE2 Range DecimalPrecision. Insert and Find.", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDecimalPrecision": { + "$numberDecimal": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDecimalPrecision": { + "$numberDecimal": "1" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDecimalPrecision": { + "$gt": { + "$numberDecimal": "0" + } + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedDecimalPrecision": { + "$numberDecimal": "1" + } + } + ] + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": 0, + "encryptedDecimalPrecision": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "Dri0CXmL78L2DOgk9w0DwxHOMGMzih7m6l59vgy+WWo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "x7GR49EN0t3WXQDihkrbonK7qNIBYC87tpL/XEUyIYc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "JfYUqWF+OoGjiYkRI4L5iPlF+T1Eleul7Fki22jp4Qc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "q1RyGfIgsaQHoZFRw+DD28V26rN5hweApPLwExncvT8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "L2PFeKGvLS6C+DLudR6fGlBq3ERPvjWvRyNRIA2HVb0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "CWxaNqL3iP1yCixDkcmf9bmW3E5VeN8TJkg1jJe528s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "+vC6araOEo+fpW7PSIP40/EnzBCj1d2N10Jr3rrXJJM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6SV63Mf51Z6A6p2X3rCnJKCu6ku3Oeb45mBYbz+IoAo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "RjBYT2h3ZAoHxhf8DU6/dFbDkEBZp0IxREcsRTu2MXs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "b7d8mRzD1kI1tdc7uNL+YAUonJ6pODLsRLkArfEKSkM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Xg8C1/A0KJaXOw4i+26Rv03/CydaaunOzXh0CIT+gn8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": 
"UoKUDw2wJYToUCcFaIs03YQSTksYR0MIOTJllwODqKc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "c/5cwAT0C5jber2xlJnWD3a5tVDy0nRtr5HG02hoFOY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "wSUrRXavAGaajNeqC5mEUH1K67oYl5Wy9RNIzKjwLAM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6vrp4wWDtHEgHWR99I70WVDzevg1Fk/Pw5U8gUDa0OU=", + "subType": "00" + } + } + ] + }, + { + "_id": 1, + "encryptedDecimalPrecision": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "mVZb+Ra0EYjQ4Zrh9X//E2T8MRj7NMqm5GUJXhRrBEI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "FA74j21GUEJb1DJBOpR9nVnjaDZnd8yAQNuaW9Qi26g=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "kJv//KVkbrobIBf+QeWC5jxn20mx/P0R1N6aCSMgKM8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "zB+Whi9IUUGxfLEe+lGuIzLX4LFbIhaIAm5lRk65QTc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ybO1QU3CgvhO8JgRXH+HxKszWcpl5aGDYYVa75fHa1g=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "X3Y3eSAbbMg//JgiHHiFpYOpV61t8kkDexI+CQyitH4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SlNHXyqVFGDPrX/2ppwog6l4pwj3PKda2TkZbqgfSfA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "McjV8xwTF3xI7863DYOBdyvIv6UpzThl6v9vBRk05bI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "MgwakFvPyBlwqFTbhWUF79URJQWFoJTGotlEVSPPUsQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "DyBERpMSD5lEM5Nhpcn4WGgxgn/mkUVJp+PYSLX5jsE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "I43iazc0xj1WVbYB/V+uTL/tughN1bBlxh1iypBnNsA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "wjOBa/ATMuOywFmuPgC0GF/oeLqu0Z7eK5udzkTPbis=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "gRQVwiR+m+0Vg8ZDXqrQQcVnTyobwCXNaA4BCJVXtMc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "WUZ6huwx0ZbLb0R00uiC9FOJzsUocUN8qE5+YRenkvQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "7s79aKEuPgQcS/YPOOVcYNZvHIo7FFsWtFCrnDKXefA=", + "subType": "00" + } + } + ] + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 0, + "encryptedDecimalPrecision": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDecimalPrecision", + "bsonType": "decimal", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberDecimal": "0.0" + }, + "max": { + "$numberDecimal": "200.0" + }, + "precision": { + "$numberInt": "2" + } + } + } + ] + } + } + } + 
}, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encryptedDecimalPrecision": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDecimalPrecision", + "bsonType": "decimal", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberDecimal": "0.0" + }, + "max": { + "$numberDecimal": "200.0" + }, + "precision": { + "$numberInt": "2" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "default", + "filter": { + "encryptedDecimalPrecision": { + "$gt": { + "$binary": { + "base64": "DRYKAAADcGF5bG9hZACiCQAABGcAjgkAAAMwAH0AAAAFZAAgAAAAAHdJ2Vnb4MMzqVYVssjSdDy8XU4GVzMTfGifGETgQ2mYBXMAIAAAAAD7cFfKJGIXo6PjyeX2ria02CckW7dWFDoY/3FyBdm1NQVsACAAAAAAhEPSNv4M023A3hzNFuy83+hIKuZ2mKRY954N++aEOBUAAzEAfQAAAAVkACAAAAAAlmvfDrZoydUet4eCVMq7z6a58Ea+1HLJOWxN5lNcrWEFcwAgAAAAAEBo5AWZyC41b9ayjWNQSL4iYEAIwR/JG+ssN8bdoK9RBWwAIAAAAACEndE0SLxFSElOrNnqeX0EPmgDio3udZjVREy4JLS3sQADMgB9AAAABWQAIAAAAABbiLaoxAA6rinMJw1hC8ZUiq6UU1AQaPFn/py/Y06WuQVzACAAAAAAhtDasFkvYE7SCNu1je/hxdE9TJtAvvH3NtdEbKzNbCUFbAAgAAAAAIGepU1RSCF8sWODHEpKglsoqw3VBBH4a/URGxgGzbq2AAMzAH0AAAAFZAAgAAAAALORWwSr+tYNxcil2KIGSbNhTHvcPbdj+rLVQNx21S/KBXMAIAAAAAD6diZBkPEJ1cQy06LAxdbNK8Nlxbb44fH4Wk3Y3260nQVsACAAAAAA1eYAZBFHlDiaDAljWi8blGQ2nvvZa5AO5doeo0SFZsgAAzQAfQAAAAVkACAAAAAAG5XMK96PjClNlUvg82j4pMY1YxsznZfj4uNweD394FoFcwAgAAAAAKHgQLdGJHkrfFg9nB93Ac+3VgBw6aU44MTkKIQ91dZoBWwAIAAAAAAPxXmi+SDJ+40A0KdwfRczexlZQrHjIA+D3oUB0EY9tAADNQB9AAAABWQAIAAAAAA6M++b9I0YFemmWBAWAE3glu2Ah3Ta1FBxAQEIWS0toAVzACAAAAAANXYTqPf1Y6X3Ns6YQIX0C3FKCyWUo+Kk+fNcQvc0WSoFbAAgAAAAAA+uJUw1ICYgyeygSRe206VTWVtUnhdci3iHbyP5YtEVAAM2AH0AAAAFZAAgAAAAAKl8bV1riH/uyJ+X0HHd3+18k2cJl2dQFXCdoagutFcaBXMAIAAAAABm8F2Ew9f0VOABdcF+lP0Bi+zWvEUPniWgrxPq/Sx3uwVsACAAAAAAJfFErjZ6BPhsw5LjJLqNtKDLJ4zV0eIZppQpd9b0wZoAAzcAfQAAAAVkACAAAAAAsYZD8JEP6kYsPncFnNZwJxhu4YtUTKPNcjHtv67H+rYFcwAgAAAAAI4LqZcRkvbs/2F62Flu0pixNcor4WmBD0DHGaf039wLBWwAIAAAAAD4wUR3xd9lKltcqqo8LYvdMQWzCRobkV/ppKB/yn5dUgADOAB9AAAABWQAIAAAAAC0vdAi+dmoIXvZ5LqUqvyKV9/tHqSI2SWiSJO5pTnA2wVzACAAAAAAS2qvf9fvfVUH5WtsVxjxmskpGjYTQV34LwvQQw1y9wIFbAAgAAAAAE0+FKuK7HxbypvCeEJzMTcjOWE0ScYOlTBMUNlIv55hAAM5AH0AAAAFZAAgAAAAAH31lb/srBcrOXkzddCwAnclsR5/3QijEVgECs2JjOWBBXMAIAAAAABg7+prDT73YcCvLE5QbuIrqGcjLc5pQD2Miq0d29yrxgVsACAAAAAAetRiPwDSFWBzpWSWkOKWM6fKStRJ8SyObnpc79ux8p0AAzEwAH0AAAAFZAAgAAAAAOK8brUuc2onBNDRtfYMR736dHj4dQqXod8JG7tAMTsDBXMAIAAAAAAW6SrGAL6Bx0s7ZlsYULFfOAiYIGhEWu6md3r+Rk40awVsACAAAAAAIHYXP8RLcCboUmHN3+OlnEw1DxaLSnbTB9PdF228fFAAAzExAH0AAAAFZAAgAAAAAFdthRhe2Q8CvxGIhjTJZv0Lk97GkHciTPxZ/mckLoNaBXMAIAAAAAAqOxsAr23LOVB0DIHbPf9UDJJRFXY2YoKbjhRqw5psbQVsACAAAAAA0G2GD8ZQjDBntjLpW4rqwKRS6HiUjL03g1N6chANozcAAzEyAH0AAAAFZAAgAAAAAMWymwwbvIeMqmnKWWifUqoCxOsdpnonM2qdLPyjqJO/BXMAIAAAAAB6IDmmpUhBD2zpRj8/y/kmOSXcjuIU14sNh6GKSsg2uwVsACAAAAAAWMFPNOk3EMSQDS9JGPSMIQP0oNGVugxXKKUrIPPlhHgAAzEzAH0AAAAFZAAgAAAAAPcLmtq+V1e+MRlZ7NHq1+mrRVBQje5zj685ZvdsfKvSBXMAIAAAAABdHz/3w2k5km97QN9m7oLFYJaVJneNlMboIlz5yUASQAVsACAAAAAAWbp8JVJnx8fEVAJFa7WMfMa7wXeP5M3C8MX20J/i9n0AAzE0AH0AAAAFZAAgAAAAAJaRYmo8zqI2BEUzdSwp4tVRpP
mVWsfydkYN3UHh6TMuBXMAIAAAAAAeD6mDnQeLlbC9i0sVgE8+RH6y+e94OJQ0tJ0PvblVSgVsACAAAAAAWp4jvretbDEsqEMzP/WLTnwOiJwCtfrCiB6m8k+yEMoAAzE1AH0AAAAFZAAgAAAAAAZZ538coNPwyRjhEwr5P8Xw32oWOJF+R+nfCGgy2qO3BXMAIAAAAACOPLnJlKwGNPDBReRKnHfteq0wFb3ezhrc7BVXs8RUHwVsACAAAAAA+lGesNk3+SyB/60rSvdQ2aN2vfJPR7llJVhufGTNhHkAAzE2AH0AAAAFZAAgAAAAAFH9l9GGA1I52atJV5jNUf1lx8jBjoEoVoME97v5GFJiBXMAIAAAAAC1qH3Kd78Dr9NGbw7y9D/XYBwv5h1LLO8la5OU7g8UkQVsACAAAAAArZ6atJCYrVfHB8dSNPOFf6nnDADBMJcIEj8ljPvxHp8AAzE3AH0AAAAFZAAgAAAAADtbVEI2tdkrowEMdkacD2w0Y3T3Ofi7PH6HmA6sP0c/BXMAIAAAAADuBSROnZHA+NgUPH8d0LnWFiDsM2bY8bzjC1+elSsIygVsACAAAAAAR0G2m+uANoWknkr/NerFcG+fECVxNIs0cqbY1t/U/0MAAzE4AH0AAAAFZAAgAAAAAAh3WpeMVlikPFYj9hLj+fmIqVt6omCSF75W3TPExyWpBXMAIAAAAAAsQkRmwqeVj2gGE03orb6PtrIzDt6dDU3hgSQi8E2wKgVsACAAAAAA3GHaRE2RAcaBRd8VzmYzWeBD2Gmy91eTK1k8YdWObZcAABJjbQAAAAAAAAAAAAAQcGF5bG9hZElkAAAAAAAQZmlyc3RPcGVyYXRvcgABAAAAEnNwAAEAAAAAAAAAEHBuAAIAAAAQdGYAAQAAABNtbgAAAAAAAAAAAAAAAAAAAD4wE214ANAHAAAAAAAAAAAAAAAAPjAA", + "subType": "06" + } + } + } + }, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDecimalPrecision", + "bsonType": "decimal", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberDecimal": "0.0" + }, + "max": { + "$numberDecimal": "200.0" + }, + "precision": { + "$numberInt": "2" + } + } + } + ] + } + } + } + }, + "commandName": "find" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-Rangev2-DecimalPrecision-Update.json b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-DecimalPrecision-Update.json new file mode 100644 index 0000000000..7f8ea38ae0 --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-DecimalPrecision-Update.json @@ -0,0 +1,653 @@ +{ + "description": "fle2v2-Rangev2-DecimalPrecision-Update", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted" + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll_unencrypted", + "database": "db_unencrypted", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDecimalPrecision", + "bsonType": "decimal", + 
"queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberDecimal": "0.0" + }, + "max": { + "$numberDecimal": "200.0" + }, + "precision": { + "$numberInt": "2" + } + } + } + ] + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "FLE2 Range DecimalPrecision. Update.", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDecimalPrecision": { + "$numberDecimal": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDecimalPrecision": { + "$numberDecimal": "1" + } + } + }, + "object": "coll" + }, + { + "name": "updateOne", + "arguments": { + "filter": { + "encryptedDecimalPrecision": { + "$gt": { + "$numberDecimal": "0" + } + } + }, + "update": { + "$set": { + "encryptedDecimalPrecision": { + "$numberDecimal": "2" + } + } + } + }, + "object": "coll", + "expectResult": { + "matchedCount": 1, + "modifiedCount": 1, + "upsertedCount": 0 + } + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": 0, + "encryptedDecimalPrecision": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "Dri0CXmL78L2DOgk9w0DwxHOMGMzih7m6l59vgy+WWo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "x7GR49EN0t3WXQDihkrbonK7qNIBYC87tpL/XEUyIYc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "JfYUqWF+OoGjiYkRI4L5iPlF+T1Eleul7Fki22jp4Qc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "q1RyGfIgsaQHoZFRw+DD28V26rN5hweApPLwExncvT8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "L2PFeKGvLS6C+DLudR6fGlBq3ERPvjWvRyNRIA2HVb0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "CWxaNqL3iP1yCixDkcmf9bmW3E5VeN8TJkg1jJe528s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "+vC6araOEo+fpW7PSIP40/EnzBCj1d2N10Jr3rrXJJM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6SV63Mf51Z6A6p2X3rCnJKCu6ku3Oeb45mBYbz+IoAo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "RjBYT2h3ZAoHxhf8DU6/dFbDkEBZp0IxREcsRTu2MXs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "b7d8mRzD1kI1tdc7uNL+YAUonJ6pODLsRLkArfEKSkM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Xg8C1/A0KJaXOw4i+26Rv03/CydaaunOzXh0CIT+gn8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "UoKUDw2wJYToUCcFaIs03YQSTksYR0MIOTJllwODqKc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "c/5cwAT0C5jber2xlJnWD3a5tVDy0nRtr5HG02hoFOY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "wSUrRXavAGaajNeqC5mEUH1K67oYl5Wy9RNIzKjwLAM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": 
"6vrp4wWDtHEgHWR99I70WVDzevg1Fk/Pw5U8gUDa0OU=", + "subType": "00" + } + } + ] + }, + { + "_id": 1, + "encryptedDecimalPrecision": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "V6knyt7Zq2CG3++l75UtBx2m32iGAPjHiAe439Bf02w=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "F08nMDWDZc+DbWM7XCEJNNCEYyinRmrvGP7EWhmp4is=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "cXH4688amcDc8kZOJq4UP8cE3R58Zl7e+Qo/1jyspps=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "uURBxvTp3FBCVkd+LPqyuY7d6rMW6SGIJQEPY/wtkZI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "jG3hax1L3RBp9t38vUt53FsBxgr/+Si/vVISpAylYpE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "kwtIW8MhH9Ky5xNjBx8gFA/SHh2YVphie7g5FGBzals=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "FHflwFuEMu4xX0ZApHi+pdlBH+oevAtXckCUb5Wv0xU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "0OKSXELxPP85SBVwDGf3LtMEQCJ8TTkFUl/+6jlkdb0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "25j9sQXZCihCmHKvTHgaBsAVZFcGPn7JjHdrCGlwyyw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "uEw0lpQtBppR3vqV9j9+NQRSBF1BzZukb8c9IhyWvxc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "zVhZ7Q59O087ji49oMJvBIgeir2oqvUpnh4p53GcTow=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dowrzKs+qJhRMZyKDbhjXbuX43FbmUKOaw9I8YlOZDw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ep5B6cska6THLIF7Mn3tn3RvV9EiwLSt0eZM/CLRUDc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "URNp/YmmDh5wIZUfAzzgPyJeMNiVx9PMsz52DZRujGY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "wlM4IAQhhKQEzoVqS8b1Ddd50GB95OFb9LnzOwyjCP4=", + "subType": "00" + } + } + ] + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 0, + "encryptedDecimalPrecision": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDecimalPrecision", + "bsonType": "decimal", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberDecimal": "0.0" + }, + "max": { + "$numberDecimal": "200.0" + }, + "precision": { + "$numberInt": "2" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encryptedDecimalPrecision": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + 
"escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDecimalPrecision", + "bsonType": "decimal", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberDecimal": "0.0" + }, + "max": { + "$numberDecimal": "200.0" + }, + "precision": { + "$numberInt": "2" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "update": "default", + "ordered": true, + "updates": [ + { + "q": { + "encryptedDecimalPrecision": { + "$gt": { + "$binary": { + "base64": "DRYKAAADcGF5bG9hZACiCQAABGcAjgkAAAMwAH0AAAAFZAAgAAAAAHdJ2Vnb4MMzqVYVssjSdDy8XU4GVzMTfGifGETgQ2mYBXMAIAAAAAD7cFfKJGIXo6PjyeX2ria02CckW7dWFDoY/3FyBdm1NQVsACAAAAAAhEPSNv4M023A3hzNFuy83+hIKuZ2mKRY954N++aEOBUAAzEAfQAAAAVkACAAAAAAlmvfDrZoydUet4eCVMq7z6a58Ea+1HLJOWxN5lNcrWEFcwAgAAAAAEBo5AWZyC41b9ayjWNQSL4iYEAIwR/JG+ssN8bdoK9RBWwAIAAAAACEndE0SLxFSElOrNnqeX0EPmgDio3udZjVREy4JLS3sQADMgB9AAAABWQAIAAAAABbiLaoxAA6rinMJw1hC8ZUiq6UU1AQaPFn/py/Y06WuQVzACAAAAAAhtDasFkvYE7SCNu1je/hxdE9TJtAvvH3NtdEbKzNbCUFbAAgAAAAAIGepU1RSCF8sWODHEpKglsoqw3VBBH4a/URGxgGzbq2AAMzAH0AAAAFZAAgAAAAALORWwSr+tYNxcil2KIGSbNhTHvcPbdj+rLVQNx21S/KBXMAIAAAAAD6diZBkPEJ1cQy06LAxdbNK8Nlxbb44fH4Wk3Y3260nQVsACAAAAAA1eYAZBFHlDiaDAljWi8blGQ2nvvZa5AO5doeo0SFZsgAAzQAfQAAAAVkACAAAAAAG5XMK96PjClNlUvg82j4pMY1YxsznZfj4uNweD394FoFcwAgAAAAAKHgQLdGJHkrfFg9nB93Ac+3VgBw6aU44MTkKIQ91dZoBWwAIAAAAAAPxXmi+SDJ+40A0KdwfRczexlZQrHjIA+D3oUB0EY9tAADNQB9AAAABWQAIAAAAAA6M++b9I0YFemmWBAWAE3glu2Ah3Ta1FBxAQEIWS0toAVzACAAAAAANXYTqPf1Y6X3Ns6YQIX0C3FKCyWUo+Kk+fNcQvc0WSoFbAAgAAAAAA+uJUw1ICYgyeygSRe206VTWVtUnhdci3iHbyP5YtEVAAM2AH0AAAAFZAAgAAAAAKl8bV1riH/uyJ+X0HHd3+18k2cJl2dQFXCdoagutFcaBXMAIAAAAABm8F2Ew9f0VOABdcF+lP0Bi+zWvEUPniWgrxPq/Sx3uwVsACAAAAAAJfFErjZ6BPhsw5LjJLqNtKDLJ4zV0eIZppQpd9b0wZoAAzcAfQAAAAVkACAAAAAAsYZD8JEP6kYsPncFnNZwJxhu4YtUTKPNcjHtv67H+rYFcwAgAAAAAI4LqZcRkvbs/2F62Flu0pixNcor4WmBD0DHGaf039wLBWwAIAAAAAD4wUR3xd9lKltcqqo8LYvdMQWzCRobkV/ppKB/yn5dUgADOAB9AAAABWQAIAAAAAC0vdAi+dmoIXvZ5LqUqvyKV9/tHqSI2SWiSJO5pTnA2wVzACAAAAAAS2qvf9fvfVUH5WtsVxjxmskpGjYTQV34LwvQQw1y9wIFbAAgAAAAAE0+FKuK7HxbypvCeEJzMTcjOWE0ScYOlTBMUNlIv55hAAM5AH0AAAAFZAAgAAAAAH31lb/srBcrOXkzddCwAnclsR5/3QijEVgECs2JjOWBBXMAIAAAAABg7+prDT73YcCvLE5QbuIrqGcjLc5pQD2Miq0d29yrxgVsACAAAAAAetRiPwDSFWBzpWSWkOKWM6fKStRJ8SyObnpc79ux8p0AAzEwAH0AAAAFZAAgAAAAAOK8brUuc2onBNDRtfYMR736dHj4dQqXod8JG7tAMTsDBXMAIAAAAAAW6SrGAL6Bx0s7ZlsYULFfOAiYIGhEWu6md3r+Rk40awVsACAAAAAAIHYXP8RLcCboUmHN3+OlnEw1DxaLSnbTB9PdF228fFAAAzExAH0AAAAFZAAgAAAAAFdthRhe2Q8CvxGIhjTJZv0Lk97GkHciTPxZ/mckLoNaBXMAIAAAAAAqOxsAr23LOVB0DIHbPf9UDJJRFXY2YoKbjhRqw5psbQVsACAAAAAA0G2GD8ZQjDBntjLpW4rqwKRS6HiUjL03g1N6chANozcAAzEyAH0AAAAFZAAgAAAAAMWymwwbvIeMqmnKWWifUqoCxOsdpnonM2qdLPyjqJO/BXMAIAAAAAB6IDmmpUhBD2zpRj8/y/kmOSXcjuIU14sNh6GKSsg2uwVsACAAAAAAWMFPNOk3EMSQDS9JGPSMIQP0oNGVugxXKKUrIPPlhHgAAzEzAH0AAAAFZAAgAAAAAPcLmtq+V1e+MRlZ7NHq1+mrRVBQje5zj685ZvdsfKvSBXMAIAAAAABdHz/3w2k5km97QN9m7oLFYJaVJneNlMboIlz5yUASQAVsACAAAAAAWbp8JVJnx8fEVAJFa7WMfMa7wXeP5M3C8MX20J/i9n0AAzE0AH0AAAAFZAAgAAAAAJaRYmo8zqI2BEUzdSwp4tVRpPmVWsfydkYN3UHh6TMuBXMAIAAAAAAeD6mDnQeLlbC9i0sVgE8+RH6y+e94OJQ0tJ0PvblVSgVsACAAAAAAWp4jvretbDEsqEMzP/WLTnwOiJwCtfrCiB6m8k+yEMoAAzE1AH0AAAAFZAAgAAAAAAZZ538coNPwyRjhEwr5P8Xw32oWOJF+R+nfCGgy2qO3BXMAIAAAAACOPLnJlKwGNPDBReRKnHfteq0wFb3ezhrc7BVXs8RUHwVsACAAAAAA+lGesNk3+SyB/60rSvd
Q2aN2vfJPR7llJVhufGTNhHkAAzE2AH0AAAAFZAAgAAAAAFH9l9GGA1I52atJV5jNUf1lx8jBjoEoVoME97v5GFJiBXMAIAAAAAC1qH3Kd78Dr9NGbw7y9D/XYBwv5h1LLO8la5OU7g8UkQVsACAAAAAArZ6atJCYrVfHB8dSNPOFf6nnDADBMJcIEj8ljPvxHp8AAzE3AH0AAAAFZAAgAAAAADtbVEI2tdkrowEMdkacD2w0Y3T3Ofi7PH6HmA6sP0c/BXMAIAAAAADuBSROnZHA+NgUPH8d0LnWFiDsM2bY8bzjC1+elSsIygVsACAAAAAAR0G2m+uANoWknkr/NerFcG+fECVxNIs0cqbY1t/U/0MAAzE4AH0AAAAFZAAgAAAAAAh3WpeMVlikPFYj9hLj+fmIqVt6omCSF75W3TPExyWpBXMAIAAAAAAsQkRmwqeVj2gGE03orb6PtrIzDt6dDU3hgSQi8E2wKgVsACAAAAAA3GHaRE2RAcaBRd8VzmYzWeBD2Gmy91eTK1k8YdWObZcAABJjbQAAAAAAAAAAAAAQcGF5bG9hZElkAAAAAAAQZmlyc3RPcGVyYXRvcgABAAAAEnNwAAEAAAAAAAAAEHBuAAIAAAAQdGYAAQAAABNtbgAAAAAAAAAAAAAAAAAAAD4wE214ANAHAAAAAAAAAAAAAAAAPjAA", + "subType": "06" + } + } + } + }, + "u": { + "$set": { + "encryptedDecimalPrecision": { + "$$type": "binData" + } + } + }, + "multi": false, + "upsert": false + } + ], + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDecimalPrecision", + "bsonType": "decimal", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberDecimal": "0.0" + }, + "max": { + "$numberDecimal": "200.0" + }, + "precision": { + "$numberInt": "2" + } + } + } + ] + } + } + }, + "$db": "default" + }, + "commandName": "update" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Defaults.json b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Defaults.json new file mode 100644 index 0000000000..cdbd169676 --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Defaults.json @@ -0,0 +1,444 @@ +{ + "description": "fle2v2-Rangev2-Defaults", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted" + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll_unencrypted", + "database": "db_unencrypted", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedInt", + "bsonType": "int", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "min": { + "$numberInt": "0" + }, + "max": { + "$numberInt": "200" + } + } + } + ] + } + } + }, + { + 
"databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "FLE2 Range applies defaults for trimFactor and sparsity", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedInt": { + "$numberInt": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedInt": { + "$numberInt": "1" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedInt": { + "$gt": { + "$numberInt": "0" + } + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedInt": { + "$numberInt": "1" + } + } + ] + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": 0, + "encryptedInt": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "RjBYT2h3ZAoHxhf8DU6/dFbDkEBZp0IxREcsRTu2MXs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "+vC6araOEo+fpW7PSIP40/EnzBCj1d2N10Jr3rrXJJM=", + "subType": "00" + } + } + ] + }, + { + "_id": { + "$numberInt": "1" + }, + "encryptedInt": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "25j9sQXZCihCmHKvTHgaBsAVZFcGPn7JjHdrCGlwyyw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SlNHXyqVFGDPrX/2ppwog6l4pwj3PKda2TkZbqgfSfA=", + "subType": "00" + } + } + ] + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 0, + "encryptedInt": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedInt", + "bsonType": "int", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "min": { + "$numberInt": "0" + }, + "max": { + "$numberInt": "200" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encryptedInt": { + 
"$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedInt", + "bsonType": "int", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "min": { + "$numberInt": "0" + }, + "max": { + "$numberInt": "200" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "default", + "filter": { + "encryptedInt": { + "$gt": { + "$binary": { + "base64": "DRgbAAADcGF5bG9hZADEGgAABGcAsBoAAAMwAH0AAAAFZAAgAAAAALGGQ/CRD+pGLD53BZzWcCcYbuGLVEyjzXIx7b+ux/q2BXMAIAAAAACOC6mXEZL27P9hethZbtKYsTXKK+FpgQ9Axxmn9N/cCwVsACAAAAAA+MFEd8XfZSpbXKqqPC2L3TEFswkaG5Ff6aSgf8p+XVIAAzEAfQAAAAVkACAAAAAA30oqY6NKy1KWDWf6Z36DtA2QsL9JRALvHX6smxz8cb4FcwAgAAAAADIhM0hCHwFGH+k7kPGuZlO+v5TjV6RRwA5FqUKM60o0BWwAIAAAAABTMPNUweBKrILSCxc5gcgjn9pTkkKX7KqWXgNMk4q7XgADMgB9AAAABWQAIAAAAACnCDvYEbgR9fWeQ8SatKNX43p0XIXTyFfzc7/395V2swVzACAAAAAAp8pkn2wJrZRBLlD18oE1ZRRiujmtFtuHYTZDzdGNE4kFbAAgAAAAAE2eptD2Jp126h5cd7S6k8IjRB6QJhuuWzPU/SEynDXTAAMzAH0AAAAFZAAgAAAAAH31lb/srBcrOXkzddCwAnclsR5/3QijEVgECs2JjOWBBXMAIAAAAABg7+prDT73YcCvLE5QbuIrqGcjLc5pQD2Miq0d29yrxgVsACAAAAAAetRiPwDSFWBzpWSWkOKWM6fKStRJ8SyObnpc79ux8p0AAzQAfQAAAAVkACAAAAAA8Ci9z02yMVsDNyHvLStLAHR25LO22UO5P/gbUG/IStQFcwAgAAAAAOdfFhaFVq1JPr3dIeLm1EYKWgceZ7hZ5FJT5u/lL/I+BWwAIAAAAADqUyU1hSFDLCmqsz2dhPhefzCShUV/Z2x+4P9xcGw8rwADNQB9AAAABWQAIAAAAAD3g2atCWYVOXW0YbCbvIturqNIAsy210bkL9KmqVMlAAVzACAAAAAAVGEb7L0QCjV/PBTAvUyhlddo467ToKjlMdwI9hsjuE4FbAAgAAAAAJe0bDhUH1sZldnDGWn0xMa1CQuN6cgv/i/6XqnpPS39AAM2AH0AAAAFZAAgAAAAANQOKUE9FOmCoMva2IYg45LZXJX0cMpUR1OvIwFmjLDYBXMAIAAAAAB6dyIKkQ86l/8j8zeWcDYeVGRYKd0USz6To3LbOBAKsAVsACAAAAAAELK0ExI0g4/WxNs+mf+Ua+mie3MuMO3daPGukA23VUYAAzcAfQAAAAVkACAAAAAARQp+fGA08v1bhcnYbfsP0ubXl9yg18QmYMfh2sd8EdEFcwAgAAAAABhe79wEznE298tt02xyRF7bk7a2NH9kwVg1TPY5/lT1BWwAIAAAAAADiGV5f/RRPkwpSrZMGHNBSarmwyqV+SYXI73QW/PmnwADOAB9AAAABWQAIAAAAABnW3CpmSFTglPNKYHJHhJHC/vd5BMWQpztIXQBL0sCngVzACAAAAAAC21qRBu2Px7VUz1lW95Dfn/0tw2yq9AVBtka34HijLgFbAAgAAAAAP8S1s5OA5cJT6ILpA94LanuLsSl9BsRCWHBtufFTMVrAAM5AH0AAAAFZAAgAAAAAJRIWu6DI2LR+2Pi09OaBZEmS2FInyBnGs9wf9Jf2wiIBXMAIAAAAABoDqKzj11qyOfXl4dcfkmGHqZxXyAsnGlgA9wsJRWWUQVsACAAAAAAIsDousyo/D8e4BCwUqvFhrKtOnpcGCSqpN94oFtWaC0AAzEwAH0AAAAFZAAgAAAAAE0h7vfdciFBeqIk1N14ZXw/jzFT0bLfXcNyiPRsg4W4BXMAIAAAAAB0Kbvm3VLBphtd8/OpgNuJtJaJJLhHBCKZJJeK+GcthAVsACAAAAAAKfjHp8xww1JDjzyjTnfamOvjFDc1Z3Hp/v/ZuQnFOOEAAzExAH0AAAAFZAAgAAAAACL9+rQRyywIXa5Pr7g2SnB0s0EjIct7PQtzjEkA69acBXMAIAAAAADz54imCCbu/qQkYP9wW2f5pHoBS+EyCe+xuDwC0UTiYgVsACAAAAAAKv602j4c3Bpn2t10qGl68eAD/fQsIH5lKMj8ANwrf7oAAzEyAH0AAAAFZAAgAAAAAKTK0NLhQ/+Y/HMxjRwBlXpXJAhAmCoWf1fReTegPnVpBXMAIAAAAAD7AlW+P4FfQS4r8d7EEvPVEP1diSbrVDBqg8ZvNl1XRAVsACAAAAAATTSEkff+/JMBjNwUciY2RQ6M66uMQMAtwU+UidDv1y4AAzEzAH0AAAAFZAAgAAAAAGMbgPxi2Wu1AlqoDKTgyBnCZlnCjHm2naxRcizkIbYJBXMAIAAAAADMvSM3VZzVyRFCfUvcLXAXQFRIxlhm0t0dUsnaRZG4hgVsACAAAAAAI7uGriMAQc4A/a70Yi1Y7IAC7o/mfNYf7/FvwELYf80AAzE0AH0AAAAFZAAgAAAAAPnZ1bdmrcX0fsSxliuSqvDbRqwIiVg0tYp0PViRX0nOBXMAIAAAAAAqBdZGg9O74mnwyQF+lILtyzHdLOErDjPSf9sM8EqCugVsACAAAAAAwhuDsz+fCtqY8mW8QvEVQERjDChwrYTw4y7dinlCCOMAAzE1AH0AAAAFZAAgAAAAAJ40Dmb5BUT1AlWjfXB43nIbJgDn9rBg9FAeYR80WK0vBXMAIAAAAAAMPqLMDdNmnKzA3Hq49/NkJfs+/cjnyjSAbmiOFUE5FgVsACAAAAAAxbi7ql49Y4pduqWlLJqpwimRzrEnC7w5fWaMBiinHL8AAzE2AH0AAAAFZAAgAAAAAGelnhqWM2gUVy4P5QE/2Zfd7s
9BugPqB/tcnSsFg5X0BXMAIAAAAAAWUhif3G+NMvZ3YPLB5OMuIhfPEu6U8KR9gTvJFz5uIwVsACAAAAAADEs8/aVSj2sJjxjv1K7o/aH8vZzt1bga73YiIKUx5DYAAzE3AH0AAAAFZAAgAAAAAD1xX2wCyf1aK1MoXnBAPfWLeBxsJI2i06tWbuiYKgElBXMAIAAAAACW1NW4RibvY0JRUzPvCmKnVbEy8AIS70fmsY08WgJOEgVsACAAAAAAQq9eIVoLcd4WxXUC3vub+EnxmcI2uP/yUWr3cz0jv9EAAzE4AH0AAAAFZAAgAAAAAHwU1LYeJmTch640sTu3VRRRdQg4YZ7S9IRfVXWHEWU8BXMAIAAAAACozWKD2YlqbQiBVVwJKptfAVM+R2FPJPtXkxVFAhHNXQVsACAAAAAAn7LS0QzTv9sOJzxH0ZqxsLYBYoArEo/PIXkU/zTnpM0AAzE5AH0AAAAFZAAgAAAAAHKaToAsILpmJyCE02I1iwmF/FibqaOb4b5nteuwOayfBXMAIAAAAABPxYjSK5DKgsdUZrZ+hM6ikejPCUK6Rqa0leoN7KOM0QVsACAAAAAAH9rPq5vvOIe9nTAcM1W1dVhQZ+gSkBohgoWLPcZnQXcAAzIwAH0AAAAFZAAgAAAAANTGiHqJVq28n7mMZsJD6gHxVQp1A6z8wgZVW+xV/lhmBXMAIAAAAABCR4BfdNVy7WE+IyQ312vYuIW0aGcXxr2II/MbNz8ZdAVsACAAAAAAng0GYpYJTypRLQUd5tIXWaAjZX5na04T/BypmwwrXPoAAzIxAH0AAAAFZAAgAAAAABooumzjEqp9Hvvd+sn1L82NI2iUGRl0nXQNJTHM7oyVBXMAIAAAAADgjz5L2ursK4C+pXXsJ6XHABhyallj9s/vSUgxXvjiiwVsACAAAAAAPjlAM0tbO6EUmLAeIZt57YMkMsuQfuC3T3d9vtnxgjwAAzIyAH0AAAAFZAAgAAAAAMA4jmE8U2uGkYUeKoYSlb22tfrRq2VlhV1Jq1kn4hV9BXMAIAAAAADG4fLeJUcINPSb1pMfAASJkuYsgS/59Eq/51mET/Y7RQVsACAAAAAAmwwcWOnzvpxm4pROXOL+BlxjEG/7v7hIautb2ubFT44AAzIzAH0AAAAFZAAgAAAAAK8/E3VHzHM6Kjp39GjFy+ci1IiUG5oxh0W6elV+oiX2BXMAIAAAAAA4/F4Q94xxb2TvZcMcji/DVTFrZlH8BL/HzD86RRmqNAVsACAAAAAAif3HPf6B1dTX/W+Vlp6ohadEQk/GAmHYzXfJia2zHeIAAzI0AH0AAAAFZAAgAAAAAGUX9ttLN1cCrOjlzsl/E6jEzQottNDw8Zo94nbO1133BXMAIAAAAAA7uVthFvXH+pbBrgQmnkPcpiHFEVCAi0WA7sAt9tlt3gVsACAAAAAAznaMStSbtGXU1Pb5z9KDTvEd79s6gmWYCKOKdzeijpEAAzI1AH0AAAAFZAAgAAAAAKnT/qg8N85Q9EQvpH7FBqUooxHFgrIjqLlIDheva2QSBXMAIAAAAABGAKkFMKoSIrvClWF7filoYM6fI9xSqOJVNS3dv4lxYwVsACAAAAAAgITE31hQA4ZOxpUFYSYv0mzWbd/6RKgbUXiUY96fBQEAAzI2AH0AAAAFZAAgAAAAAHRDRDT2hJrJ8X9zB9ELT28q8ZsfkYr92chaZYakiLlqBXMAIAAAAAAT0Le67ObldDta/Qb17dYfdslPsJTfGj3bWAgC0JIingVsACAAAAAAMGDrqys8iJ3fCT2Cj+zXIuXtsf4OAXWJl5HoPUMlbNoAAzI3AH0AAAAFZAAgAAAAAOOJcUjYOE0KqcYS1yZ363zglQXfr3XSD+R5fWLSivDoBXMAIAAAAABjeLe+tg37lNa+DdVxtlCtY77tV9PqfJ5X4XEKrfwu0AVsACAAAAAAlbpHiQAPLLTvSF+u58RBCLnYQKB5wciIQmANV9bkzsoAAzI4AH0AAAAFZAAgAAAAAMwWOOaWDDYUusdA1nyoaEB3C4/9GRpFNGags95Ddp4LBXMAIAAAAACLrsQXGWK15fW4mPEUXJ/90by13aG+727qWJep8QJ/WgVsACAAAAAAuThwsAsKUB56QAXC0MjJsZ9736atbiHPlK2tE0urf9QAAzI5AH0AAAAFZAAgAAAAABPRXBK0z8UANcvMDWntBjN9yF7iGMPLbhbaKrvHwcplBXMAIAAAAACZlqWsYPIb+ydmH03BxD3TqSGsSNoI7EVCy0VgW0TpYgVsACAAAAAAD2uaBv8oc7l4EeC5PWx5sfeyGZoas0JdFJ33M3jjgjMAAzMwAH0AAAAFZAAgAAAAAOn9/6pbzjIxFEApugaVOvVKXq23sDCJELv5UtLPDZI3BXMAIAAAAACHIwSDTlof0vFoigF4drbeM/8rdlj/4U386zQsNLtPGwVsACAAAAAAsYt/rXnpL55J9rlWSFRA4seaU6ggix7RgxbrJPu6gO4AAzMxAH0AAAAFZAAgAAAAAIMCESykv5b5d6mYjU5DlnO709lOFCaNoJBLtzBIqmg4BXMAIAAAAADs1Bfuaun4Es3nQ4kr29BzheLRDcFv+9a0gOGkSEcrDgVsACAAAAAA5kW6i/jOBSdoGAsZEZxVNRvt6miv86bP8JfUT+1KJg8AAzMyAH0AAAAFZAAgAAAAAFSPmr27XgKhUkbEvvC6Br5K1w7280NZrrhdzfYF+YGjBXMAIAAAAADv2h+Xq6kM7MHYTLMACRwbe2MzGHu4sdB67FGzDR6H4QVsACAAAAAAKII0MMC7o6GKVfGo2qBW/p35NupBp7MI6Gp0zXYwJOcAAzMzAH0AAAAFZAAgAAAAAPSV9qprvlNZK6OSQZNxKhJmBMs6QCKFESB/oeIvAS0iBXMAIAAAAAA835Jh22/pvZgKoYH6KjE+RRpYkaM1G35TWq6uplk/rgVsACAAAAAA162IdSb079yVlS7GkuSdHU3dOw03a+NS55ZPVBxbD08AAzM0AH0AAAAFZAAgAAAAAGsadEBJFax/UltPXB86G/YPxo6h353ZT+rC62iGy7qqBXMAIAAAAADs9TP3h91f6bTuG8QCQMA3atAVGs8k0ZjVzX3pM8HNAgVsACAAAAAA2ed4R4wYD6DT0P+N6o3gDJPE0DjljbRAv5vme3jb42sAAzM1AH0AAAAFZAAgAAAAAAxgeclNl09H7HvzD1oLwb2YpFca5eaX90uStYXHilqKBXMAIAAAAACMU5pSxzIzWlQxHyW170Xs9EhD1hURASQk+qkx7K5Y6AVsACAAAAAAJbMMwJfNftA7Xom8Bw/ghuZmSa3x12vTZxBUbV8m888AAzM2AH0AAAAFZAAgAAAAAKJY+8+7psFzJb5T+Mg9UWb6gA9Y8NN9j/ML2jZkNDNPBXMAIAAAAAA2R/nCtSYfCim89BzdUPS+DTQGwYDk+2ihFPEBS8h+ygVsACAAAAAAaEQra7xyvA3JS0BasIpRVrz7ZXsp6RpH7
OpfJBFzFG8AAzM3AH0AAAAFZAAgAAAAAI4qr+sJiRaqwZRhnenAzD7tTKq+jP1aaLyAln3w1HQuBXMAIAAAAADNYpqV73NpwN+Ta0ms1SRiu+6WNOOdGT+syghL+JAFhQVsACAAAAAAN07Fo9SK+fXp5Odk1J806pyVWc2WHXCtb1gJQknTgqsAAzM4AH0AAAAFZAAgAAAAAISgN1Hid7IWvDESN/3tywFZiBsZPYapOUx9/QjDDxLfBXMAIAAAAAA7lxpEz3+CGdv6/WKIAlIwRYURREKgn7+StwNoVekkDwVsACAAAAAAx+Oa2v1e1R7VomfsvcKO8VkY4eTl7LzjNQQL6Cj6GBQAAzM5AH0AAAAFZAAgAAAAAOTLdk1RIUzCsvK7xCXy+LxGhJf87fEL406U9QKta3JRBXMAIAAAAAD8+6UnUn8sN6AgQuuf7uFxW+2ZJNpZLgp3eKVtjbo9ewVsACAAAAAAQN3mZHmaDM0ZbUnk2O/+wCUjiCs4bnshfHjd/4ygLXcAAzQwAH0AAAAFZAAgAAAAAFH9l9GGA1I52atJV5jNUf1lx8jBjoEoVoME97v5GFJiBXMAIAAAAAC1qH3Kd78Dr9NGbw7y9D/XYBwv5h1LLO8la5OU7g8UkQVsACAAAAAArZ6atJCYrVfHB8dSNPOFf6nnDADBMJcIEj8ljPvxHp8AAzQxAH0AAAAFZAAgAAAAAPLX4XT1eMfokMvj73G6loHEotbdivVFM6cpMbU0zIOmBXMAIAAAAABuTqwm6E60kVBN5iClzLnMBozIQRYjMozzRNKVhixkEAVsACAAAAAAjvY9G0Of8EQcZ4GVfSEVz7jrNn7i4qps2r82jJmngKoAAzQyAH0AAAAFZAAgAAAAAGzGJAUZBcVKRb4bCSNaRxtcDH2TqIgHqMElD9RL7SzDBXMAIAAAAABbJfrLwBrqZ2Ylm9QfL7nkW+GJ8vTlaeMUDT5620ebaAVsACAAAAAASiaS1IlBls5Tan57XqqbR1cuvyOcoSibJJQGREzm4c0AAzQzAH0AAAAFZAAgAAAAAC028abAppwE/ApZHU5RbzZZ8OPD5eJ8/6+NgiSFf4d+BXMAIAAAAAD3THvDUYWULR+AVLuRRPPAMVMeZ2ldWpBYSODboszWbQVsACAAAAAAATOaeYj+kx3MTDeNUcKGbUxLZDeMjC8JrWnlHmWTamQAAzQ0AH0AAAAFZAAgAAAAAHWr8wQYIKLiKeb3wd8kZQuXD/GUHDqXj12K/EQWV11CBXMAIAAAAADo3aFHDuyfls9tcWCxlFqJn4zDXd3WT9CIFYFjJnTYswVsACAAAAAAeMbIatR7DgefzuvF4WyNVDjJxP8KPA6U/rmMQIBvpM0AAzQ1AH0AAAAFZAAgAAAAAMdRi6AAjF1Z9ucMqYl2Ud1PLUGOlOPJFgSrPTjs27u8BXMAIAAAAAAqOdI7+P8srvqCTFadwMM3iggaVOGcf1BB0EjBYeV6RAVsACAAAAAAU+V2GrqgxJYs9mxuak/8JMFICXwQ2vksrBdOvSwWFpoAAzQ2AH0AAAAFZAAgAAAAADKKe++fqh4sn0a8Bb+w3QMFnOqSE5hDI3zGQTcmJGcOBXMAIAAAAAC8ebHa++JmxVISv6LzjuMgEZqzKSZlJyujnSV9syRD9AVsACAAAAAAQcVNSjyetScLu78IrAYaAigerY4kWtnbctmIyb19Wa4AAzQ3AH0AAAAFZAAgAAAAAMKoHwhZcocaQy7asIuRG8+P1qPENgFAwzc3X1gZWYnJBXMAIAAAAAB+R01s+WdJjLa5p7STuEylradWr+2JDxsWx9bKDgXNDQVsACAAAAAADeXTBHsm+FH2pQVoqOBPPIJiTJLqrzGisNnQ3S3xYJAAAzQ4AH0AAAAFZAAgAAAAAF41XuyBvREKcxjDl+wbnillseykpAjCKHmwIu+RNvM7BXMAIAAAAAC2Wzq+2mfO7howoOZxquqvOuH1D2WdlzA1nK+LUp0FMgVsACAAAAAARha+D6DVeDxSjNyXXO5DMY+W70EGyfc7gxR4TjzcYusAAzQ5AH0AAAAFZAAgAAAAAAfONgdhLPEjvsMxTY9K4//7WjREuRmZ6Bpcf3yvdMf3BXMAIAAAAABCy/zjmzucxQkbJ96l5vS5x6SeyHE0Z+Aqp9oZgBcC6QVsACAAAAAAasG/uN4DnWHZLkLhH4cMzXk5F/HL2D+72WH+1jjgH8UAAzUwAH0AAAAFZAAgAAAAAA5ZsebFm5NrSGs2E17+fUt4qkzsVmy4IJA5nGehtSBVBXMAIAAAAAAOzteKfp+YGPqn1fi8u/lKXP7E2Zgouwgt6KAADHX9AQVsACAAAAAA2+FaAbl8JZogfNCI0FFbmZZPy/KLF1u16FGrPspSbEIAAzUxAH0AAAAFZAAgAAAAAHf6LIjrvy6I31w/8b910U9qU8cBIYiWn9mW55NYZF8VBXMAIAAAAACONPisRtnFG9vV2mTQ3hRR/hGuVRA9dGd9Lt9JqDoM8wVsACAAAAAA+h7V/jIYJcd0ALIvFBlwxkFqWxBVlkqT9wFkmumr4QcAAzUyAH0AAAAFZAAgAAAAAEocREw1L0g+roFUchJI2Yd0M0ME2bnErNUYnpyJP1SqBXMAIAAAAAAcE2/JK/8MoSeOchIuAkKh1X3ImoA7p8ujAZIfvIDo6QVsACAAAAAA+W0+zgLr85/PD7P9a94wk6MgNgrizx/XU9aCxAkp1IwAABJjbQAAAAAAAAAAAAAQcGF5bG9hZElkAAAAAAAQZmlyc3RPcGVyYXRvcgABAAAAEnNwAAIAAAAAAAAAEHRmAAYAAAAQbW4AAAAAABBteADIAAAAAA==", + "subType": "06" + } + } + } + }, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedInt", + "bsonType": "int", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "min": { + "$numberInt": "0" + }, + "max": { + "$numberInt": "200" + } + } + } + ] + } + } + } + }, + "commandName": "find" + } + } + ] + } + ] + } + ] +} diff --git 
a/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Double-Aggregate.json b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Double-Aggregate.json new file mode 100644 index 0000000000..c0211a1a34 --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Double-Aggregate.json @@ -0,0 +1,1195 @@ +{ + "description": "fle2v2-Rangev2-Double-Aggregate", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted" + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll_unencrypted", + "database": "db_unencrypted", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDoubleNoPrecision", + "bsonType": "double", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + } + } + } + ] + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "FLE2 Range Double. 
Aggregate.", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDoubleNoPrecision": { + "$numberDouble": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDoubleNoPrecision": { + "$numberDouble": "1" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDoubleNoPrecision": { + "$gt": { + "$numberDouble": "0" + } + } + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedDoubleNoPrecision": { + "$numberDouble": "1" + } + } + ] + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": 0, + "encryptedDoubleNoPrecision": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "6YrBn2ofIw1b5ooakrLOwF41BWrps8OO0H9WH4/rtlE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "n+XAuFnP8Dov9TnhGFxNx0K/MnVM9WbJ7RouEu0ndO0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yRXojuVdn5GQtD97qYlaCL6cOLmZ7Cvcb3wFjkLUIdM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "DuIkdRPITRs55I4SZmgomAHCIsDQmXRhW8+MOznkzSk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SsBk+Et1lTbU+QRPx+xyJ/jMkmfG+QCvQEpip2YYrzA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "crCIzOd8KhHvvUlX7M1v9bhvU4pLdTc+X2SuqoKU5Ek=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "YOWdCw4UrqnxkAaVjqmC4sKQDMVMHEpFGnlxpxdaU6E=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "M3SShp81Ff8tQ632qKbv9MUcN6wjDaBReI0VXNu6Xh4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "gzHlSPxpM0hT75kQvWFzGlOxKvDoiKQZOr19V6l2zXI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "s3JnppOGYw9SL2Q1kMAZs948v2F5PrpXjGei/HioDWs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "cG6+3Gk/zEH68P/uuuwiAUVCuyJwa1LeV+t29FlPPAo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dupdvR3AyJtM+g9NDKiaLVOtGca387JQp8w+V03m7Ig=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "JqEQc5svj2jTvZ6LLA5ivE+kTb/0aRemSEmxk4G7Zrg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "szcXXXKnob+p3SoM4yED2R920LeJ7cVsclPMFTe4CeI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "o1QoGVXmuBdHwHm7aCtGMlMVKrjFdYvJXpoq6uhIAZ0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Jfm5wPlqqLCJRGQIqRq2NGmpn7s0Vrih2H3YAOoI2YU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "zMHLb8ARbsYo8Ld05bqnGFf1Usha6EGb8QKwdSAyps0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yQdtq9lh5pugL7/i0Bj/PuZUUBUIzf+7wj1rl5y736w=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "wGWVZdO7qIuyDg/BqDgqjgoQ02h5YYgwXQB1oCin2NE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "by9HMLj6NTEpgztZ5HSN6GxImkXPcaFINYDzgZY33X8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tWo0vbasi7bXmn/MsOx13VC1IsWtpx/nYp0uj4iMzdA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tQQpndUYd5O87lOtrGjH3wl9VsOK0ray7RMasL90sBM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "cQjXEDCMsOpKLLf+vlTgIHA+cbSJdzqhbSX9Wvh95aA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "7yMpU48IxK9SzP2cx3VnTownGEwFmeFofuuFT97SuuY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "kSOx1kz0CmBgzKQHZlo65ZUY1DIv9A99JRm+Us2y6Ew=", + "subType": "00" + } + }, + { + "$binary": { + 
"base64": "ubQpdPBe6/xvtr+AcXdfYLSvYCR4ot0tivehkCsupb4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xal+iCJ6FTefRQToyoNksc9NCZShyn04NDGi4IYrcoM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "d7jU4iOK50xHxlkSifcxlZFCM46TSgQzoYivxG3HNLY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tJvl2nsBLBVzL3pp6sKWCL4UXeh3q/roYBJjSb74ve0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "OIUCaKRvIx9t1w6Hxlz1IcQTdPNCfdRNwnnTm10W+X0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "A9tvzsiElotOUVIB4CqfQp9mAwqvTM35YkmAR170aHA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "lI8gpK7hpb7c9x4RQugsxMnQay5LZJmwslZdvMx/dcE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dNCzh40U0XvdKnSDi3HRQOWQftEsDVqc4uUvsVFGoq8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "IP+iwEBWBwVVZIdpaMu8k5+soFCz+TZkYn3drKZ9grE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "pnqyh6e0y5svHkJDShlN9CHV0WvMBE4QbtJpQw5ZCXc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "elEl42tbVDoRTLjAhZUFEtXiut4b3PVhg/1ZLZSQdtE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "vHuu2FxwclMHqyE6JBYbTYgbEkB0dqb/JuaxsvfwsmY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xTf7NCe3Gf8QpE78HR5OknlLTKfs9J+RN9UZpH6fnso=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "XiWSasRnJAulGR6+LCVD3mwRObXylqYWR9jvpywq12c=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "MZMxEQ5ikx0PG1YFIExv0UnTZogsvgeOEZTpzvBDn4w=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yZMyMZBDrWbAhvnic7vvIYhmO9m5H2iuv0c8KNZrBzY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xxM14hTPY5j0vvcK2C7YAEjzdsfUTFHozHC0hEo1bxI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "+01rqR1xVwkpGXcstbk1ItJqFVjH6Q8MGxEN3Cm9Y1A=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xOpLV0Z2VTRJ3iWtnWZcsyjXubTIkYWo31cO+HV1o1k=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "BWUOLqgLBqc5NwxVlSV5H3KFQPXbCp7mdo+jF+8cJqY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "fuQb1S6xZDGlrEbK+kI23aL53PP1PVNwqICnZNt9Yzg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SfscnoibFttahLdPVC4Ee+47ewGFKpDSU7M6HX19bKE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "rpSW2awybNVeKtat91VFxqbINoTfNhPfQAu+d73Xtf8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "9M/CP9ccOIIj2LLFmE0GFDO0Ban2wsNalEXfM6+h+1s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "WrEMG49l1ye4MhXs5ZS9tz8P6h+hDvthIg/2wW9ne1Q=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ImNhbfeyfH8qIEeA5ic0s3dAQBdzzTBS+CPsNih9vZ0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dWP33YDSn04UKJN2ogh2Rui0iW/0q2y18OCDRVcfyoo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "lYv0isAtfGh6H9tdp3cp2eHU7q2J+uk7QrgcxtK3w7Y=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "VGMoamB/+7zTOYcY/pqJc96xlv2PdW4hwsIAEIslTDQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yNeBWMF7BnD9wVwz2PgJsvWr77QiVvvWUvJF0+fqBug=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SfpvObJ+tJBXSvqeN7vlOfmhYign635lciYAJIjUtY8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dsen4NqjzVGjpjufiTMs3+gqeD09EbnuogPgxrJECwg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "pxCWVM3sn19NsFEpgHbgLa+PmYlhN3mMiP0Wk8kJhYw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": 
"q11KNvJszjYIB9n9HcC+N4uz11a3eRj1L3BH9scKMDQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "A1PmkgcEToWh1JiVWE6mI5jUu7poxWWuCUt/cgRUUDc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "qJo3Hu4PJeanL7XEaWXO/n3YsodhZyd+MJOOmB9Kpd8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "BkBKLO8URFscfRY9Bav/1+L9mLohDgNr/MkZtGiraIs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "rZq5WA3Hx3xthOyHAJXK//f8pE2qbz7YKu3TIMp9GFY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "X07a/Lm80p5xd4RFs1dNmw+90tmPDPdGiAKVZkxd4zY=", + "subType": "00" + } + } + ] + }, + { + "_id": 1, + "encryptedDoubleNoPrecision": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "2FIZh/9N+NeJEQwxYIX5ikQT85xJzulBNReXk8PnG/s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "I93Md7QNPGmEEGYU1+VVCqBPBEvXdqHPtTJtMOn06Yk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "GecBFQ1PemlECWZWCl7f74vmsL6eB6mzQ9n6tK6FYfs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "QpjhZl+O1ORifgtCZuWAdcP6OKL7IZ2cA46v8FJcV28=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "FWXI/yZ1M+2fIboeMCDMlp+I2NwPQDtoM/wWselOPYw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "uk26nvN/LdRLaBphiBgIZzT0sSpoO1z0RdDWRm/xrSA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "hiiYSH1KZovAULc7rlmEU74wCjzDR+mm6ZnsgvFQjMw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "hRzvMvWPX0sJme+wck67lwbKDFaWOa+Eyef+JSdc1s4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "PSx5D+zqC9c295dguX4+EobT4IEzfffdfjzC8DWpB5Q=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "QzfXQCVTjPQv2h21v95HYPq8uCsVJ2tPnjv79gAaM9M=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "XcGDO/dlTcEMLqwcm55UmOqK+KpBmbzZO1LIzX7GPaQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Lf+o4E7YB5ynzUPC6KTyW0lj6Cg9oLIu1Sdd1ODHctA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "wAuVn02LAVo5Y+TUocvkoenFYWzpu38k0NmGZOsAjS4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yJGDtveLbbo/0HtCtiTSsvVI/0agg/U1bFaQ0yhK12o=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "KsEy0zgYcmkM+O/fWF9z3aJGIk22XCk+Aw96HB6JU68=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "p+AnMI5ZxdJMSIEJmXXya+FeH5yubmOdViwUO89j0Rc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "/jLix56jzeywBtNuGw55lCXyebQoSIhbful0hOKxKDY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "fvDvSPomtJsl1S3+8/tzFCE8scHIdJY5hB9CdTEsoFo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "oV5hOJzPXxfTuRdKIlF4uYEoMDuqH+G7/3qgndDr0PM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "3ALwcvLj3VOfgD6OqXAO13h1ZkOv46R6+Oy6SUKh53I=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "gxaB9FJj0IM+InhvAjwWaex3UIZ9SAnDiUd5WHSY/l0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "66NPvDygJzKJqddfNuDuNOpvGajjFRtvhkwfUkiYmXw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "1dWcQIocRAcO9XnXYqbhl83jc0RgjQpsrWd8dC27trg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "npos0Uf1DT3ztSCjPVY9EImlRnTHB1KLrvmVSqBQ/8E=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "TEI9qBx/tK1l1H0v1scMG8Srmtwo5VxWHADPBSlWrXk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "3wUN2ypQKoj+5ASkeIK9ycxhahVxyTmGopigoUAlyYs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "o/oksSnUS+nIq6ozWTbB5bJh+NoaPj8deAA23uxiWCk=", + 
"subType": "00" + } + }, + { + "$binary": { + "base64": "KExYPruhA31e8xuSwvfUfDcyY/H2Va6taUd0k4yFgLc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "/x+dNfxdd/lkx8Z8VZVfoYl7LPoaZ/iKEzZXBrAtIJc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "DE4cmjFLPqZlmRomO0qQiruUBtzoCe8ZdNRcfNH92pU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "M6EKNcLPw/iojAChgYUSieaBYWcbsjKtB94SaHOr8vk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "+qP49lDPeyhaduTvXJgtJEqHNEYANVu9Bg3Bxz7Td9w=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ruMrC2VIS+VKbJwCFb3bfkaLTju9nE+yPONV9s0M0Vo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "EbjDlSB5JKnDKff4d8hOmaOwJ7B9Q6NQFisLj+DPC+0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "C/yYOTB94edyqAbiQNu8/H7FoG3yRRjHDkMykz4+Mv0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "CBxqrejG+qQQq2YTd6iP/06kiu2CxxzBFaZK3Ofb1CM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "2ZOQ/fpho+AbDENWBZaln7wRoepIRdhyT648dr8O5cU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "EghIgEPz01+myPgj8oid+PgncvobvC7vjvG3THEEQ0M=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "92CysZYNF8riwAMhdrIPKxfODw9p07cKQy/Snn8XmVY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "VO0LeTBQmsEf7sCHzTnZwUPNTqRZ49R8V5E9XnZ/5N4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "exs8BQMJq7U6ZXYgIizT7XN+X/hOmmn4YEuzev9zgSI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "qHpS4k1I+gPniNp4CA8TY8lLN36vBYmgbKMFpbYMEqg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "+7lWKCKAWFw6gPZdHE6E8KIfI14/fSvtWUmllb5WLi0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "YiH/US0q6679hWblFDDKNqUjCgggoU8sUCssTIF1QbU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "YgwkKElEubNfvXL9hJxzqQUQtHiXN/OCGxNL1MUZZlM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "hZFST4INZTTuhvJlGJeMwlUAK270UCOTCDeBAnN4a7g=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "24I1Zw35AuGnK3CqJhbCwYb0IPuu5sCRrM5iyeITOLc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "vgD12JB4Q1S/kGPSQ1KOgp386KnG1GbM/5+60oRGcGw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "+wNE+OL+CB9d4AUJdVxd56jUJCAXmmk9fapuB2TAc4g=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "uhQh1B2Pe4RkNw/kPEcgaLenuikKoRf1iyfZhpXdodc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "eu8gjAUIp8ybO204AgeOq5v1neI1yljqy5v3I6lo1lM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "7QG6oVbASBAjrnCPxzzUNnuFSFNlKhbuBafkF8pr7Is=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "PUS1xb2oHSDTdYltutoSSxBiJ1NjxH3l2kA4P1CZLEs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "XPMh/JDC/O93gJJCwwgJDb8ssWZvRvezNmKmyn3nIfk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "jWz+KGwMk/GOvFAK2rOxF3OjxeZAWfmUQ1HGJ7icw4A=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "o7XbW68pc6flYigf3LW4WAGUWxpeqxaQLkHUhUR9RZ8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "nqR+g60+5U0okbqJadSqGgnC+j1JcP8rwMcfzOs2ACI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Hz43qVK95tSfbYFtaE/8fE97XMk1RiO8XpWjwZHB80o=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "noZUWlZ8M6KXU5rkifyo8/duw5IL7/fXbJvT7bNmW9k=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "WONVHCuPSanXDRQQ/3tmyJ0Vq+Lu/4hRaMUf0g0kSuw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": 
"UEaj6vQRoIghE8Movd8AGXhtwIOXlP4cBsECIUvE5Y8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "D3n2YcO8+PB4C8brDo7kxKjF9Y844rVkdRMLTgsQkrw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "C+YA0G9KjxZVaWwOMuh/dcnHnHAlYnbFrRl0IEpmsY0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "rUnmbmQanxrbFPYYrwyQ53x66OSt27yAvF+s48ezKDc=", + "subType": "00" + } + } + ] + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 0, + "encryptedDoubleNoPrecision": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDoubleNoPrecision", + "bsonType": "double", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encryptedDoubleNoPrecision": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDoubleNoPrecision", + "bsonType": "double", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "aggregate": "default", + "pipeline": [ + { + "$match": { + "encryptedDoubleNoPrecision": { + "$gt": { + "$binary": { + "base64": 
"DbMkAAADcGF5bG9hZABXJAAABGcAQyQAAAMwAH0AAAAFZAAgAAAAAHgYoMGjEE6fAlAhICv0+doHcVX8CmMVxyq7+jlyGrvmBXMAIAAAAAC/5MQZgTHuIr/O5Z3mXPvqrom5JTQ8IeSpQGhO9sB+8gVsACAAAAAAuPSXVmJUAUpTQg/A9Bu1hYczZF58KEhVofakygbsvJQAAzEAfQAAAAVkACAAAAAA2kiWNvEc4zunJ1jzvuClFC9hjZMYruKCqAaxq+oY8EAFcwAgAAAAACofIS72Cm6s866UCk+evTH3CvKBj/uZd72sAL608rzTBWwAIAAAAADuCQ/M2xLeALF0UFZtJb22QGOhHmJv6xoO+kZIHcDeiAADMgB9AAAABWQAIAAAAABkfoBGmU3hjYBvQbjNW19kfXneBQsQQPRfUL3UAwI2cAVzACAAAAAAUpK2BUOqX/DGdX5YJniEZMWkofxHqeAbXceEGJxhp8AFbAAgAAAAAKUaLzIldNIZv6RHE+FwbMjzcNHqPESwF/37mm43VPrsAAMzAH0AAAAFZAAgAAAAAFNprhQ3ZwIcYbuzLolAT5n/vc14P9kUUQComDu6eFyKBXMAIAAAAAAcx9z9pk32YbPV/sfPZl9ALIEVsqoLXgqWLVK/tP+heAVsACAAAAAA/qxvuvJbAHwwhfrPVpmCFzNvg2cU/NXaWgqgYUZpgXwAAzQAfQAAAAVkACAAAAAAODI+pB2pCuB+YmNEUAgtMfNdt3DmSkrJ96gRzLphgb8FcwAgAAAAAAT7dewFDxUDECQ3zVq75/cUN4IP+zsqhkP5+czUwlJIBWwAIAAAAACFGeOtd5zBXTJ4JYonkn/HXZfHipUlqGwIRUcH/VTatwADNQB9AAAABWQAIAAAAACNAk+yTZ4Ewk1EnotQK8O3h1gg9I7pr9q2+4po1iJVgAVzACAAAAAAUj/LesmtEsgqYVzMJ67umVA11hJTdDXwbxDoQ71vWyUFbAAgAAAAABlnhpgTQ0WjLb5u0b/vEydrCeFjVynKd7aqb+UnvVLeAAM2AH0AAAAFZAAgAAAAAD/FIrGYFDjyYmVb7oTMVwweWP7A6F9LnyIuNO4MjBnXBXMAIAAAAACIZgJCQRZu7NhuNMyOqCn1tf+DfU1qm10TPCfj5JYV3wVsACAAAAAA5hmY4ptuNxULGf87SUFXQWGAONsL9U29duh8xqsHtxoAAzcAfQAAAAVkACAAAAAAciRW40ORJLVwchOEpz87Svb+5toAFM6LxDWv928ECwQFcwAgAAAAAN0dipyESIkszfjRzdDi8kAGaa2Hf4wrPAtiWwboZLuxBWwAIAAAAAANr4o/+l1OIbbaX5lZ3fQ/WIeOcEXjNI1F0WbSgQrzaQADOAB9AAAABWQAIAAAAACZqAyCzYQupJ95mrBJX54yIz9VY7I0WrxpNYElCI4dTQVzACAAAAAA/eyJb6d1xfE+jJlVXMTD3HS/NEYENPVKAuj56Dr2dSEFbAAgAAAAANkSt154Or/JKb31VvbZFV46RPgUp8ff/hcPORL7PpFBAAM5AH0AAAAFZAAgAAAAAI5bm3YO0Xgf0VT+qjVTTfvckecM3Cwqj7DTKZXf8/NXBXMAIAAAAAD/m+h8fBhWaHm6Ykuz0WX1xL4Eme3ErLObyEVJf8NCywVsACAAAAAAfb1VZZCqs2ivYbRzX4p5CtaCkKW+g20Pr57FWXzEZi8AAzEwAH0AAAAFZAAgAAAAANqo4+p6qdtCzcB4BX1wQ6llU7eFBnuu4MtZwp4B6mDlBXMAIAAAAAAGiz+VaukMZ+6IH4jtn4KWWdKK4/W+O+gRioQDrfzpMgVsACAAAAAAG4YYkTp80EKo59mlHExDodRQFR7njhR5dmISwUJ6ukAAAzExAH0AAAAFZAAgAAAAAPrFXmHP2Y4YAm7b/aqsdn/DPoDkv7B8egWkfe23XsM1BXMAIAAAAAAGhwpKAr7skeqHm3oseSbO7qKNhmYsuUrECBxJ5k+D2AVsACAAAAAAAqPQi9luYAu3GrFCEsVjd9z2zIDcp6SPTR2w6KQEr+IAAzEyAH0AAAAFZAAgAAAAABzjYxwAjXxXc0Uxv18rH8I3my0Aguow0kTwKyxbrm+cBXMAIAAAAADVbqJVr6IdokuhXkEtXF0C2gINLiAjMVN20lE20Vmp2QVsACAAAAAAD7K1Fx4gFaaizkIUrf+EGXQeG7QX1jadhGc6Ji471H8AAzEzAH0AAAAFZAAgAAAAAFMm2feF2fFCm/UC6AfIyepX/xJDSmnnolQIBnHcPmb5BXMAIAAAAABLI11kFrQoaNVZFmq/38aRNImPOjdJh0Lo6irI8M/AaAVsACAAAAAAOWul0oVqJ9CejD2RqphhTC98DJeRQy5EwbNerU2+4l8AAzE0AH0AAAAFZAAgAAAAAJvXB3KyNiNtQko4SSzo/9b2qmM2zU9CQTTDfLSBWMgRBXMAIAAAAAAvjuVP7KsLRDeqVqRziTKpBrjVyqKiIbO9Gw8Wl2wFTAVsACAAAAAADlE+oc1ins+paNcaOZJhBlKlObDJ4VQORWjFYocM4LgAAzE1AH0AAAAFZAAgAAAAAPGdcxDiid8z8XYnfdDivNMYVPgBKdGOUw6UStU+48CdBXMAIAAAAAARj6g1Ap0eEfuCZ4X2TsEw+Djrhto3fA5nLwPaY0vCTgVsACAAAAAAoHqiwGOUkBu8SX5U1yHho+UIFdSN2MdQN5s6bQ0EsJYAAzE2AH0AAAAFZAAgAAAAAP5rGPrYGt3aKob5f/ldP0qrW7bmWvqnKY4QwdDWz400BXMAIAAAAADTQkW2ymaaf/bhteOOGmSrIR97bAnJx+yN3yMj1bTeewVsACAAAAAADyQnHGH2gF4w4L8axUsSTf6Ubk7L5/eoFOJk12MtZAoAAzE3AH0AAAAFZAAgAAAAAAlz6wJze5UkIxKpJOZFGCOf3v2KByWyI6NB6JM9wNcBBXMAIAAAAABUC7P/neUIHHoZtq0jFVBHY75tSFYr1Y5S16YN5XxC1QVsACAAAAAAgvxRbXDisNnLY3pfsjDdnFLtkvYUC4lhA68eBXc7KAwAAzE4AH0AAAAFZAAgAAAAAFJ8AtHcjia/9Y5pLEc3qVgH5xKiXw12G9Kn2A1EY8McBXMAIAAAAAAxe7Bdw7eUSBk/oAawa7uicTEDgXLymRNhBy1LAxhDvwVsACAAAAAAxKPaIBKVx3jTA+R/el7P7AZ7efrmTGjJs3Hj/YdMddwAAzE5AH0AAAAFZAAgAAAAAO8uwQUaKFb6vqR3Sv3Wn4QAonC2exOC9lGG1juqP5DtBXMAIAAAAABZf1KyJgQg8/Rf5c02DgDK2aQu0rNCOvaL60ohDHyY+gVsACAAAAAAqyEjfKC8lYoIfoXYHUqHZPoaA6EK5BAZy5dxXZmay4kAAzIwAH0AAAAFZAAgAAAAAE8YtqyRsGCeiR6hhiyisR/hccmK4nZqIMzO4lUBmEFzBXMAIAAAAAC1UYOSKqAeG1UJiKjWFV
skRhuFKpj9Ezy+lICZvFlN5AVsACAAAAAA6Ct9nNMKyRazn1OKnRKagm746CGu+jyhbL1qJnZxGi0AAzIxAH0AAAAFZAAgAAAAAPhCrMausDx1QUIEqp9rUdRKyM6a9AAx7jQ3ILIu8wNIBXMAIAAAAACmH8lotGCiF2q9VQxhsS+7LAZv79VUAsOUALaGxE/EpAVsACAAAAAAnc1xCKfdvbUEc8F7XZqlNn1C+hZTtC0I9I3LL06iaNkAAzIyAH0AAAAFZAAgAAAAAOBi/GAYFcstMSJPgp3VkMiuuUUCrZytvqYaU8dwm8v2BXMAIAAAAACEZSZVyD3pKzGlbdwlYmWQhHHTV5SnNLknl2Gw8IaUTQVsACAAAAAAfsLZsEDcWSuNsIo/TD1ReyQW75HPMgmuKZuWFOLKRLoAAzIzAH0AAAAFZAAgAAAAAIQuup+YGfH3mflzWopN8J1X8o8a0d9CSGIvrA5HOzraBXMAIAAAAADYvNLURXsC2ITMqK14LABQBI+hZZ5wNf24JMcKLW+84AVsACAAAAAACzfjbTBH7IwDU91OqLAz94RFkoqBOkzKAqQb55gT4/MAAzI0AH0AAAAFZAAgAAAAAKsh0ADyOnVocFrOrf6MpTrNvAj8iaiE923DPryu124gBXMAIAAAAADg24a8NVE1GyScc6tmnTbmu5ulzO+896fE92lN08MeswVsACAAAAAAaPxcOIxnU7But88/yadOuDJDMcCywwrRitaxMODT4msAAzI1AH0AAAAFZAAgAAAAAKkVC2Y6HtRmv72tDnPUSjJBvse7SxLqnr09/Uuj9sVVBXMAIAAAAABYNFUkH7ylPMN+Bc3HWX1e0flGYNbtJNCY9SltJCW/UAVsACAAAAAAZYK/f9H4OeihmpiFMH7Wm7uLvs2s92zNA8wyrNZTsuMAAzI2AH0AAAAFZAAgAAAAADDggcwcb/Yn1Kk39sOHsv7BO/MfP3m/AJzjGH506Wf9BXMAIAAAAAAYZIsdjICS0+BDyRUPnrSAZfPrwtuMaEDEn0/ijLNQmAVsACAAAAAAGPnYVvo2ulO9z4LGd/69NAklfIcZqZvFX2KK0s+FcTUAAzI3AH0AAAAFZAAgAAAAAEWY7dEUOJBgjOoWVht1wLehsWAzB3rSOBtLgTuM2HC8BXMAIAAAAAAAoswiHRROurjwUW8u8D5EUT+67yvrgpB/j6PzBDAfVwVsACAAAAAA6NhRTYFL/Sz4tao7vpPjLNgAJ0FX6P/IyMW65qT6YsMAAzI4AH0AAAAFZAAgAAAAAPZaapeAUUFPA7JTCMOWHJa9lnPFh0/gXfAPjA1ezm4ZBXMAIAAAAACmJvLY2nivw7/b3DOKH/X7bBXjJwoowqb1GtEFO3OYgAVsACAAAAAA/JcUoyKacCB1NfmH8vYqC1f7rd13KShrQqV2r9QBP44AAzI5AH0AAAAFZAAgAAAAAK00u6jadxCZAiA+fTsPVDsnW5p5LCr4+kZZZOTDuZlfBXMAIAAAAAAote4zTEYMDgaaQbAdN8Dzv93ljPLdGjJzvnRn3KXgtQVsACAAAAAAxXd9Mh6R3mnJy8m7UfqMKi6oD5DlZpkaOz6bEjMOdiwAAzMwAH0AAAAFZAAgAAAAAFbgabdyymiEVYYwtJSWa7lfl/oYuj/SukzJeDOR6wPVBXMAIAAAAADAFGFjS1vPbN6mQEhkDYTD6V2V23Ys9gUEUMGNvMPkaAVsACAAAAAAL/D5Sze/ZoEanZLK0IeEkhgVkxEjMWVCfmJaD3a8uNIAAzMxAH0AAAAFZAAgAAAAABNMR6UBv2E627CqLtQ/eDYx7OEwQ7JrR4mSHFa1N8tLBXMAIAAAAAAxH4gucI4UmNVB7625C6hFSVCuIpJO3lusJlPuL8H5EQVsACAAAAAAVLHNg0OUVqZ7WGOP53BkTap9FOw9dr1P4J8HxqFqU04AAzMyAH0AAAAFZAAgAAAAAG8cd6WBneNunlqrQ2EmNf35W7OGObGq9WL4ePX+LUDmBXMAIAAAAAAjJ2+sX87NSis9hBsgb1QprVRnO7Bf+GObCGoUqyPE4wVsACAAAAAAs9c9SM49/pWmyUQKslpt3RTMBNSRppfNO0JBvUqHPg0AAzMzAH0AAAAFZAAgAAAAAFWOUGkUpy8yf6gB3dio/aOfRKh7XuhvsUj48iESFJrGBXMAIAAAAAAY7sCDMcrUXvNuL6dO0m11WyijzXZvPIcOKob6IpC4PQVsACAAAAAAJOP+EHz6awDb1qK2bZQ3kTV7wsj5Daj/IGAWh4g7omAAAzM0AH0AAAAFZAAgAAAAAGUrIdKxOihwNmo6B+aG+Ag1qa0+iqdksHOjQj+Oy9bZBXMAIAAAAABwa5dbI2KmzBDNBTQBEkjZv4sPaeRkRNejcjdVymRFKQVsACAAAAAA4ml/nm0gJNTcJ4vuD+T2Qfq2fQZlibJp/j6MOGDrbHMAAzM1AH0AAAAFZAAgAAAAAOx89xV/hRk64/CkM9N2EMK6aldII0c8smdcsZ46NbP8BXMAIAAAAADBF6tfQ+7q9kTuLyuyrSnDgmrdmrXkdhl980i1KHuGHgVsACAAAAAACUqiFqHZdGbwAA+hN0YUE5zFg+H+dabIB4dj5/75W/YAAzM2AH0AAAAFZAAgAAAAAMkN0L1oQWXhjwn9rAdudcYeN8/5VdCKU8cmDt7BokjsBXMAIAAAAAAT62pGXoRwExe9uvgYOI0hg5tOxilrWfoEmT0SMglWJwVsACAAAAAAlVz4dhiprSbUero6JFfxzSJGclg63oAkAmgbSwbcYxIAAzM3AH0AAAAFZAAgAAAAANxfa4xCoaaB7k1C1RoH1LBhsCbN2yEq15BT9b+iqEC4BXMAIAAAAACAX9LV8Pemfw7NF0iB1/85NzM1Ef+1mUfyehacUVgobQVsACAAAAAAVq4xpbymLk0trPC/a2MvB39I7hRiX8EJsVSI5E5hSBkAAzM4AH0AAAAFZAAgAAAAAOYIYoWkX7dGuyKfi3XssUlc7u/gWzqrR9KMkikKVdmSBXMAIAAAAABVF2OYjRTGi9Tw8XCAwZWLpX35Yl271TlNWp6N/nROhAVsACAAAAAA0nWwYzXQ1+EkDvnGq+SMlq20z+j32Su+i/A95SggPb4AAzM5AH0AAAAFZAAgAAAAAIy0+bXZi10QC+q7oSOLXK5Fee7VEk/qHSXukfeVIfgzBXMAIAAAAAAQ3IIV/JQCHW95AEbH5zGIHtJqyuPjWPMIZ+VmQHlxEwVsACAAAAAAp0jYsyohKv9Pm+4k+DplEGbl9WLZpAJzitrcDj4CNsMAAzQwAH0AAAAFZAAgAAAAAL5SOJQ3LOhgdXJ5v086NNeAl1qonQnchObdpZJ1kHeEBXMAIAAAAAA+tEqTXODtik+ydJZSnUqXF9f18bPeze9eWtR7ExZJgQVsACAAAAAAbrkZCVgB9Qsp4IAbdf+bD4fT6Boqk5UtuA/zhNrh1y0AAzQxAH0AAAAFZAAgAAAAAKl8zcHJRDjSjJeV/
WvMxulW1zrTFtaeBy/aKKhadc6UBXMAIAAAAADBdWQl5SBIvtZZLIHszePwkO14W1mQ0izUk2Ov21cPNAVsACAAAAAAHErCYycpqiIcCZHdmPL1hi+ovLQk4TAvENpfLdTRamQAAzQyAH0AAAAFZAAgAAAAAFvotcNaoKnVt5CBCOPwjexFO0WGWuaIGL6H/6KSau+6BXMAIAAAAAD2y2mBN5xPu5PJoY2zcr0GnQDtHRBogA5+xzIxccE9fwVsACAAAAAAdS34xzJesnUfxLCcc1U7XzUqLy8MAzV/tcjbqaD3lkMAAzQzAH0AAAAFZAAgAAAAAPezU0/vNT4Q4YKbTbaeHqcwNLT+IjW/Y9QFpIooihjPBXMAIAAAAACj2x4O4rHter8ZnTws5LAP9jJ/6kk9C/V3vL50LoFZHAVsACAAAAAAQdBDF3747uCVP5lB/zr8VmzxJfTSZHBKeIgm5FyONXwAAzQ0AH0AAAAFZAAgAAAAAMqpayM2XotEFmm0gwQd9rIzApy0X+7HfOhNk6VU7F5lBXMAIAAAAACJR9+q5T9qFHXFNgGbZnPubG8rkO6cwWhzITQTmd6VgwVsACAAAAAAOZLQ6o7e4mVfDzbpQioa4d3RoTvqwgnbmc5Qh2wsZuoAAzQ1AH0AAAAFZAAgAAAAANCeyW+3oebaQk+aqxNVhAcT/BZ5nhsTVdKS3tMrLSvWBXMAIAAAAADxRFMDhkyuEc++WnndMfoUMLNL7T7rWoeblcrpSI6soQVsACAAAAAAdBuBMJ1lxt0DRq9pOZldQqchLs3B/W02txcMLD490FEAAzQ2AH0AAAAFZAAgAAAAAIbo5YBTxXM7HQhl7UP9NNgpPGFkBx871r1B65G47+K8BXMAIAAAAAC21dJSxnEhnxO5gzN5/34BL4von45e1meW92qowzb8fQVsACAAAAAAm3Hk2cvBN0ANaR5jzeZE5TsdxDvJCTOT1I01X7cNVaYAAzQ3AH0AAAAFZAAgAAAAABm/6pF96j26Jm7z5KkY1y33zcAEXLx2n0DwC03bs/ixBXMAIAAAAAD01OMvTZI/mqMgxIhA5nLs068mW+GKl3OW3ilf2D8+LgVsACAAAAAAaLvJDrqBESTNZSdcXsd+8GXPl8ZkUsGpeYuyYVv/kygAAzQ4AH0AAAAFZAAgAAAAAJ/D3+17gaQdkBqkL2wMwccdmCaVOtxzIkM8VyI4xI5zBXMAIAAAAAAggLVmkc5u+YzBR+oNE+XgLVp64fC6MzUb/Ilu/Jsw0AVsACAAAAAACz3HVKdWkx82/kGbVpcbAeZtsj2R5Zr0dEPfle4IErkAAzQ5AH0AAAAFZAAgAAAAAJMRyUW50oaTzspS6A3TUoXyC3gNYQoShUGPakMmeVZrBXMAIAAAAACona2Pqwt4U2PmFrtmu37jB9kQ/12okyAVtYa8TQkDiQVsACAAAAAAltJJKjCMyBTJ+4PkdDCPJdeX695P8P5h7WOZ+kmExMAAAzUwAH0AAAAFZAAgAAAAAByuYl8dBvfaZ0LO/81JW4hYypeNmvLMaxsIdvqMPrWoBXMAIAAAAABNddwobOUJzm9HOUD8BMZJqkNCUCqstHZkC76FIdNg9AVsACAAAAAAQQOkIQtkyNavqCnhQbNg3HfqrJdsAGaoxSJePJl1qXsAAzUxAH0AAAAFZAAgAAAAAHEzLtfmF/sBcYPPdj8867VmmQyU1xK9I/3Y0478azvABXMAIAAAAAAcmyFajZPnBTbO+oLInNwlApBocUekKkxz2hYFeSlQ+gVsACAAAAAAZ6IkrOVRcC8vSA6Vb4fPWZJrYexXhEabIuYIeXNsCSgAAzUyAH0AAAAFZAAgAAAAAJam7JYsZe2cN20ZYm2W3v1pisNt5PLiniMzymBLWyMtBXMAIAAAAABxCsKVMZMTn3n+R2L7pVz5nW804r8HcK0mCBw3jUXKXAVsACAAAAAA7j3JGnNtR64P4dJLeUoScFRGfa8ekjh3dvhw46sRFk0AAzUzAH0AAAAFZAAgAAAAAMXrXx0saZ+5gORmwM2FLuZG6iuO2YS+1IGPoAtDKoKBBXMAIAAAAADIQsxCr8CfFKaBcx8kIeSywnGh7JHjKRJ9vJd9x79y7wVsACAAAAAAcvBV+SykDYhmRFyVYwFYB9oBKBSHr55Jdz2cXeowsUQAAzU0AH0AAAAFZAAgAAAAACbzcUD3INSnCRspOKF7ubne74OK9L0FTZvi9Ay0JVDYBXMAIAAAAADPebVQH8Btk9rhBIoUOdSAdpPvz7qIY4UC2i6IGisSAQVsACAAAAAAiBunJi0mPnnXdnldiq+If8dcb/n6apHnaIFt+oyYO1kAAzU1AH0AAAAFZAAgAAAAACUc2CtD1MK/UTxtv+8iA9FoHEyTwdl43HKeSwDw2Lp5BXMAIAAAAACCIduIdw65bQMzRYRfjBJj62bc69T4QqH4QoWanwlvowVsACAAAAAAM0TV7S+aPVVzJOQ+cpSNKHTwyQ0mWa8tcHzfk3nR+9IAAzU2AH0AAAAFZAAgAAAAAHSaHWs/dnmI9sc7nB50VB2Bzs0kHapMHCQdyVEYY30TBXMAIAAAAACkV22lhEjWv/9/DubfHBAcwJggKI5mIbSK5L2nyqloqQVsACAAAAAAS19m7DccQxgryOsBJ3GsCs37yfQqNi1G+S6fCXpEhn4AAzU3AH0AAAAFZAAgAAAAAAL8jhNBG0KXXZhmZ0bPXtfgapJCB/AI+BEHB0eZ3C75BXMAIAAAAADHx/fPa639EBmGV5quLi8IQT600ifiKSOhTDOK19DnzwVsACAAAAAAlyLTDVkHxbayklD6Qymh3odIK1JHaOtps4f4HR+PcDgAAzU4AH0AAAAFZAAgAAAAAAxgeclNl09H7HvzD1oLwb2YpFca5eaX90uStYXHilqKBXMAIAAAAACMU5pSxzIzWlQxHyW170Xs9EhD1hURASQk+qkx7K5Y6AVsACAAAAAAJbMMwJfNftA7Xom8Bw/ghuZmSa3x12vTZxBUbV8m888AAzU5AH0AAAAFZAAgAAAAABmO7QD9vxWMmFjIHz13lyOeV6vHT6mYCsWxF7hb/yOjBXMAIAAAAACT9lmgkiqzuWG24afuzYiCeK9gmJqacmxAruIukd0xEAVsACAAAAAAZa0/FI/GkZR7CtX18Xg9Tn9zfxkD0UoaSt+pIO5t1t4AAzYwAH0AAAAFZAAgAAAAAB89SjLtDJkqEghRGyj6aQ/2qvWLNuMROoXmzbYbCMKMBXMAIAAAAAC8sywgND+CjhVTF6HnRQeay8y9/HnVzDI42dEPah28LQVsACAAAAAAoxv7UKh0RqUAWcOsQvU123zO1qZn73Xfib0qncZCB34AAzYxAH0AAAAFZAAgAAAAABN2alGq9Aats1mwERNGwL/fIwZSvVCe9/8XMHTFlpUpBXMAIAAAAACuDPjJgvvbBYhbLpjMiWUCsVppiYrhvR+yMysNPN8cZAVsACAAAAAAKpADjc4bzIZMi9Q/+oe0EMRJ
HYQt6dlo1x/lRquagqkAAzYyAH0AAAAFZAAgAAAAAL8YB6VAqGBiWD4CBv16IBscg5J7VQCTZu87n6pj+86KBXMAIAAAAAAmxm8e68geeyAdUjSMWBHzUjneVB0pG9TBXIoE6467hAVsACAAAAAAV76JZAlYpgC/Zl8awx2ArCg1uuyy2XVTSkp0wUMi/7UAAzYzAH0AAAAFZAAgAAAAAL4yLkCTV5Dmxa5toBu4JT8ge/cITAaURIOuFuOtFUkeBXMAIAAAAAAXoFNQOMGkAj7qEJP0wQafmFSXgWGeorDVbwyOxWLIsgVsACAAAAAAc4Un6dtIFe+AQ+RSfNWs3q63RTHhmyc+5GKRRdpWRv8AAzY0AH0AAAAFZAAgAAAAAEU8DoUp46YtYjNFS9kNXwdYxQ9IW27vCTb+VcqqfnKNBXMAIAAAAADe7vBOgYReE8X78k5ARuUnv4GmzPZzg6SbConf4L2G3wVsACAAAAAA78YHWVkp6HbZ0zS4UL2z/2pj9vPDcMDt7zTv6NcRsVsAAzY1AH0AAAAFZAAgAAAAAPa4yKTtkUtySuWo1ZQsp2QXtPb5SYqzA5vYDnS1P6c0BXMAIAAAAADKnF58R1sXlHlsHIvCBR3YWW/qk54z9CTDhZydkD1cOQVsACAAAAAAHW3ERalTFWKMzjuXF3nFh0pSrQxM/ojnPbPhc4v5MaQAAzY2AH0AAAAFZAAgAAAAAN5WJnMBmfgpuQPyonmY5X6OdRvuHw4nhsnGRnFAQ95VBXMAIAAAAACwftzu7KVV1rmGKwXtJjs3cJ1gE3apr8+N0SAg1F2cHwVsACAAAAAATDW0reyaCjbJuVLJzbSLx1OBuBoQu+090kgW4RurVacAAzY3AH0AAAAFZAAgAAAAACHvDsaPhoSb6DeGnKQ1QOpGYAgK82qpnqwcmzSeWaJHBXMAIAAAAABRq3C5+dOfnkAHM5Mg5hPB3O4jhwQlBgQWLA7Ph5bhgwVsACAAAAAAqkC8zYASvkVrp0pqmDyFCkPaDmD/ePAJpMuNOCBhni8AAzY4AH0AAAAFZAAgAAAAAOBePJvccPMJmy515KB1AkXF5Pi8NOG4V8psWy0SPRP+BXMAIAAAAAB3dOJG9xIDtEKCRzeNnPS3bFZepMj8UKBobKpSoCPqpgVsACAAAAAAPG3IxQVOdZrr509ggm5FKizWWoZPuVtOgOIGZ3m+pdEAAzY5AH0AAAAFZAAgAAAAABUvRrDQKEXLMdhnzXRdhiL6AGNs2TojPky+YVLXs+JnBXMAIAAAAAD1kYicbEEcPzD4QtuSYQQWDPq8fuUWGddpWayKn3dT9QVsACAAAAAA9+Sf7PbyFcY45hP9oTfjQiOUS3vEIAT8C0vOHymwYSUAAzcwAH0AAAAFZAAgAAAAAOvSnpujeKNen4pqc2HR63C5s5oJ1Vf4CsbKoYQvkwl5BXMAIAAAAACw2+vAMdibzd2YVVNfk81yXkFZP0WLJ82JBxJmXnYE+QVsACAAAAAArQ/E1ACyhK4ZyLqH9mNkCU7WClqRQTGyW9tciSGG/EMAAzcxAH0AAAAFZAAgAAAAAAo0xfGG7tJ3GWhgPVhW5Zn239nTD3PadShCNRc9TwdNBXMAIAAAAADZh243oOhenu0s/P/5KZLBDh9ADqKHtSWcXpO9D2sIjgVsACAAAAAAlgTPaoQKz+saU8rwCT3UiNOdG6hdpjzFx9GBn08ZkBEAABJjbQAAAAAAAAAAAAAQcGF5bG9hZElkAAAAAAAQZmlyc3RPcGVyYXRvcgABAAAAEnNwAAEAAAAAAAAAEHRmAAEAAAABbW4A////////7/8BbXgA////////738A", + "subType": "06" + } + } + } + } + } + ], + "cursor": {}, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDoubleNoPrecision", + "bsonType": "double", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + } + } + } + ] + } + } + } + }, + "commandName": "aggregate" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Double-Correctness.json b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Double-Correctness.json new file mode 100644 index 0000000000..3bffc95191 --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Double-Correctness.json @@ -0,0 +1,1018 @@ +{ + "description": "fle2v2-Rangev2-Double-Correctness", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + 
"client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDoubleNoPrecision", + "bsonType": "double", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + } + } + } + ] + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "Find with $gt", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDoubleNoPrecision": { + "$numberDouble": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDoubleNoPrecision": { + "$numberDouble": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDoubleNoPrecision": { + "$gt": { + "$numberDouble": "0.0" + } + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedDoubleNoPrecision": { + "$numberDouble": "1.0" + } + } + ] + } + ] + }, + { + "description": "Find with $gte", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDoubleNoPrecision": { + "$numberDouble": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDoubleNoPrecision": { + "$numberDouble": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDoubleNoPrecision": { + "$gte": { + "$numberDouble": "0.0" + } + } + }, + "sort": { + "_id": 1 + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDoubleNoPrecision": { + "$numberDouble": "0.0" + } + }, + { + "_id": 1, + "encryptedDoubleNoPrecision": { + "$numberDouble": "1.0" + } + } + ] + } + ] + }, + { + "description": "Find with $gt with no results", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDoubleNoPrecision": { + "$numberDouble": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDoubleNoPrecision": { + "$numberDouble": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDoubleNoPrecision": { + "$gt": { + "$numberDouble": "1.0" + } + } + } + }, + "object": "coll", + "expectResult": [] + } + ] + }, + { + "description": "Find with $lt", + "operations": [ + { + "name": "insertOne", + 
"arguments": { + "document": { + "_id": 0, + "encryptedDoubleNoPrecision": { + "$numberDouble": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDoubleNoPrecision": { + "$numberDouble": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDoubleNoPrecision": { + "$lt": { + "$numberDouble": "1.0" + } + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDoubleNoPrecision": { + "$numberDouble": "0.0" + } + } + ] + } + ] + }, + { + "description": "Find with $lte", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDoubleNoPrecision": { + "$numberDouble": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDoubleNoPrecision": { + "$numberDouble": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDoubleNoPrecision": { + "$lte": { + "$numberDouble": "1.0" + } + } + }, + "sort": { + "_id": 1 + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDoubleNoPrecision": { + "$numberDouble": "0.0" + } + }, + { + "_id": 1, + "encryptedDoubleNoPrecision": { + "$numberDouble": "1.0" + } + } + ] + } + ] + }, + { + "description": "Find with $gt and $lt", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDoubleNoPrecision": { + "$numberDouble": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDoubleNoPrecision": { + "$numberDouble": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDoubleNoPrecision": { + "$gt": { + "$numberDouble": "0.0" + }, + "$lt": { + "$numberDouble": "2.0" + } + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedDoubleNoPrecision": { + "$numberDouble": "1.0" + } + } + ] + } + ] + }, + { + "description": "Find with equality", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDoubleNoPrecision": { + "$numberDouble": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDoubleNoPrecision": { + "$numberDouble": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDoubleNoPrecision": { + "$numberDouble": "0.0" + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDoubleNoPrecision": { + "$numberDouble": "0.0" + } + } + ] + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDoubleNoPrecision": { + "$numberDouble": "1.0" + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedDoubleNoPrecision": { + "$numberDouble": "1.0" + } + } + ] + } + ] + }, + { + "description": "Find with $in", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDoubleNoPrecision": { + "$numberDouble": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDoubleNoPrecision": { + "$numberDouble": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDoubleNoPrecision": { + "$in": [ + { + "$numberDouble": "0.0" + } + ] + } + } + }, + 
"object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDoubleNoPrecision": { + "$numberDouble": "0.0" + } + } + ] + } + ] + }, + { + "description": "Aggregate with $gte", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDoubleNoPrecision": { + "$numberDouble": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDoubleNoPrecision": { + "$numberDouble": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDoubleNoPrecision": { + "$gte": { + "$numberDouble": "0.0" + } + } + } + }, + { + "$sort": { + "_id": 1 + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDoubleNoPrecision": { + "$numberDouble": "0.0" + } + }, + { + "_id": 1, + "encryptedDoubleNoPrecision": { + "$numberDouble": "1.0" + } + } + ] + } + ] + }, + { + "description": "Aggregate with $gt with no results", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDoubleNoPrecision": { + "$numberDouble": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDoubleNoPrecision": { + "$numberDouble": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDoubleNoPrecision": { + "$gt": { + "$numberDouble": "1.0" + } + } + } + } + ] + }, + "object": "coll", + "expectResult": [] + } + ] + }, + { + "description": "Aggregate with $lt", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDoubleNoPrecision": { + "$numberDouble": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDoubleNoPrecision": { + "$numberDouble": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDoubleNoPrecision": { + "$lt": { + "$numberDouble": "1.0" + } + } + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDoubleNoPrecision": { + "$numberDouble": "0.0" + } + } + ] + } + ] + }, + { + "description": "Aggregate with $lte", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDoubleNoPrecision": { + "$numberDouble": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDoubleNoPrecision": { + "$numberDouble": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDoubleNoPrecision": { + "$lte": { + "$numberDouble": "1.0" + } + } + } + }, + { + "$sort": { + "_id": 1 + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDoubleNoPrecision": { + "$numberDouble": "0.0" + } + }, + { + "_id": 1, + "encryptedDoubleNoPrecision": { + "$numberDouble": "1.0" + } + } + ] + } + ] + }, + { + "description": "Aggregate with $gt and $lt", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDoubleNoPrecision": { + "$numberDouble": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDoubleNoPrecision": { + "$numberDouble": "1.0" + } + } + }, + "object": "coll" 
+ }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDoubleNoPrecision": { + "$gt": { + "$numberDouble": "0.0" + }, + "$lt": { + "$numberDouble": "2.0" + } + } + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedDoubleNoPrecision": { + "$numberDouble": "1.0" + } + } + ] + } + ] + }, + { + "description": "Aggregate with equality", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDoubleNoPrecision": { + "$numberDouble": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDoubleNoPrecision": { + "$numberDouble": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDoubleNoPrecision": { + "$numberDouble": "0.0" + } + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDoubleNoPrecision": { + "$numberDouble": "0.0" + } + } + ] + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDoubleNoPrecision": { + "$numberDouble": "1.0" + } + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedDoubleNoPrecision": { + "$numberDouble": "1.0" + } + } + ] + } + ] + }, + { + "description": "Aggregate with $in", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDoubleNoPrecision": { + "$numberDouble": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDoubleNoPrecision": { + "$numberDouble": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDoubleNoPrecision": { + "$in": [ + { + "$numberDouble": "0.0" + } + ] + } + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDoubleNoPrecision": { + "$numberDouble": "0.0" + } + } + ] + } + ] + }, + { + "description": "Wrong type: Insert Int", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDoubleNoPrecision": { + "$numberInt": "0" + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "Cannot encrypt element" + } + } + ] + }, + { + "description": "Wrong type: Find Int", + "operations": [ + { + "name": "find", + "arguments": { + "filter": { + "encryptedDoubleNoPrecision": { + "$gte": { + "$numberInt": "0" + } + } + }, + "sort": { + "_id": 1 + } + }, + "object": "coll", + "expectError": { + "errorContains": "field type is not supported" + } + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Double-Delete.json b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Double-Delete.json new file mode 100644 index 0000000000..ac82c52b14 --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Double-Delete.json @@ -0,0 +1,795 @@ +{ + "description": "fle2v2-Rangev2-Double-Delete", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": 
"Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted" + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll_unencrypted", + "database": "db_unencrypted", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDoubleNoPrecision", + "bsonType": "double", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + } + } + } + ] + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "FLE2 Range Double. 
Delete.", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDoubleNoPrecision": { + "$numberDouble": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDoubleNoPrecision": { + "$numberDouble": "1" + } + } + }, + "object": "coll" + }, + { + "name": "deleteOne", + "arguments": { + "filter": { + "encryptedDoubleNoPrecision": { + "$gt": { + "$numberDouble": "0" + } + } + } + }, + "object": "coll", + "expectResult": { + "deletedCount": 1 + } + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": 0, + "encryptedDoubleNoPrecision": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "6YrBn2ofIw1b5ooakrLOwF41BWrps8OO0H9WH4/rtlE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "n+XAuFnP8Dov9TnhGFxNx0K/MnVM9WbJ7RouEu0ndO0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yRXojuVdn5GQtD97qYlaCL6cOLmZ7Cvcb3wFjkLUIdM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "DuIkdRPITRs55I4SZmgomAHCIsDQmXRhW8+MOznkzSk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SsBk+Et1lTbU+QRPx+xyJ/jMkmfG+QCvQEpip2YYrzA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "crCIzOd8KhHvvUlX7M1v9bhvU4pLdTc+X2SuqoKU5Ek=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "YOWdCw4UrqnxkAaVjqmC4sKQDMVMHEpFGnlxpxdaU6E=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "M3SShp81Ff8tQ632qKbv9MUcN6wjDaBReI0VXNu6Xh4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "gzHlSPxpM0hT75kQvWFzGlOxKvDoiKQZOr19V6l2zXI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "s3JnppOGYw9SL2Q1kMAZs948v2F5PrpXjGei/HioDWs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "cG6+3Gk/zEH68P/uuuwiAUVCuyJwa1LeV+t29FlPPAo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dupdvR3AyJtM+g9NDKiaLVOtGca387JQp8w+V03m7Ig=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "JqEQc5svj2jTvZ6LLA5ivE+kTb/0aRemSEmxk4G7Zrg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "szcXXXKnob+p3SoM4yED2R920LeJ7cVsclPMFTe4CeI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "o1QoGVXmuBdHwHm7aCtGMlMVKrjFdYvJXpoq6uhIAZ0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Jfm5wPlqqLCJRGQIqRq2NGmpn7s0Vrih2H3YAOoI2YU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "zMHLb8ARbsYo8Ld05bqnGFf1Usha6EGb8QKwdSAyps0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yQdtq9lh5pugL7/i0Bj/PuZUUBUIzf+7wj1rl5y736w=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "wGWVZdO7qIuyDg/BqDgqjgoQ02h5YYgwXQB1oCin2NE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "by9HMLj6NTEpgztZ5HSN6GxImkXPcaFINYDzgZY33X8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tWo0vbasi7bXmn/MsOx13VC1IsWtpx/nYp0uj4iMzdA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tQQpndUYd5O87lOtrGjH3wl9VsOK0ray7RMasL90sBM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "cQjXEDCMsOpKLLf+vlTgIHA+cbSJdzqhbSX9Wvh95aA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "7yMpU48IxK9SzP2cx3VnTownGEwFmeFofuuFT97SuuY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "kSOx1kz0CmBgzKQHZlo65ZUY1DIv9A99JRm+Us2y6Ew=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ubQpdPBe6/xvtr+AcXdfYLSvYCR4ot0tivehkCsupb4=", + "subType": "00" + } + }, + { + 
"$binary": { + "base64": "xal+iCJ6FTefRQToyoNksc9NCZShyn04NDGi4IYrcoM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "d7jU4iOK50xHxlkSifcxlZFCM46TSgQzoYivxG3HNLY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tJvl2nsBLBVzL3pp6sKWCL4UXeh3q/roYBJjSb74ve0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "OIUCaKRvIx9t1w6Hxlz1IcQTdPNCfdRNwnnTm10W+X0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "A9tvzsiElotOUVIB4CqfQp9mAwqvTM35YkmAR170aHA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "lI8gpK7hpb7c9x4RQugsxMnQay5LZJmwslZdvMx/dcE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dNCzh40U0XvdKnSDi3HRQOWQftEsDVqc4uUvsVFGoq8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "IP+iwEBWBwVVZIdpaMu8k5+soFCz+TZkYn3drKZ9grE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "pnqyh6e0y5svHkJDShlN9CHV0WvMBE4QbtJpQw5ZCXc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "elEl42tbVDoRTLjAhZUFEtXiut4b3PVhg/1ZLZSQdtE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "vHuu2FxwclMHqyE6JBYbTYgbEkB0dqb/JuaxsvfwsmY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xTf7NCe3Gf8QpE78HR5OknlLTKfs9J+RN9UZpH6fnso=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "XiWSasRnJAulGR6+LCVD3mwRObXylqYWR9jvpywq12c=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "MZMxEQ5ikx0PG1YFIExv0UnTZogsvgeOEZTpzvBDn4w=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yZMyMZBDrWbAhvnic7vvIYhmO9m5H2iuv0c8KNZrBzY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xxM14hTPY5j0vvcK2C7YAEjzdsfUTFHozHC0hEo1bxI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "+01rqR1xVwkpGXcstbk1ItJqFVjH6Q8MGxEN3Cm9Y1A=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xOpLV0Z2VTRJ3iWtnWZcsyjXubTIkYWo31cO+HV1o1k=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "BWUOLqgLBqc5NwxVlSV5H3KFQPXbCp7mdo+jF+8cJqY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "fuQb1S6xZDGlrEbK+kI23aL53PP1PVNwqICnZNt9Yzg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SfscnoibFttahLdPVC4Ee+47ewGFKpDSU7M6HX19bKE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "rpSW2awybNVeKtat91VFxqbINoTfNhPfQAu+d73Xtf8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "9M/CP9ccOIIj2LLFmE0GFDO0Ban2wsNalEXfM6+h+1s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "WrEMG49l1ye4MhXs5ZS9tz8P6h+hDvthIg/2wW9ne1Q=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ImNhbfeyfH8qIEeA5ic0s3dAQBdzzTBS+CPsNih9vZ0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dWP33YDSn04UKJN2ogh2Rui0iW/0q2y18OCDRVcfyoo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "lYv0isAtfGh6H9tdp3cp2eHU7q2J+uk7QrgcxtK3w7Y=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "VGMoamB/+7zTOYcY/pqJc96xlv2PdW4hwsIAEIslTDQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yNeBWMF7BnD9wVwz2PgJsvWr77QiVvvWUvJF0+fqBug=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SfpvObJ+tJBXSvqeN7vlOfmhYign635lciYAJIjUtY8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dsen4NqjzVGjpjufiTMs3+gqeD09EbnuogPgxrJECwg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "pxCWVM3sn19NsFEpgHbgLa+PmYlhN3mMiP0Wk8kJhYw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "q11KNvJszjYIB9n9HcC+N4uz11a3eRj1L3BH9scKMDQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": 
"A1PmkgcEToWh1JiVWE6mI5jUu7poxWWuCUt/cgRUUDc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "qJo3Hu4PJeanL7XEaWXO/n3YsodhZyd+MJOOmB9Kpd8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "BkBKLO8URFscfRY9Bav/1+L9mLohDgNr/MkZtGiraIs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "rZq5WA3Hx3xthOyHAJXK//f8pE2qbz7YKu3TIMp9GFY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "X07a/Lm80p5xd4RFs1dNmw+90tmPDPdGiAKVZkxd4zY=", + "subType": "00" + } + } + ] + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 0, + "encryptedDoubleNoPrecision": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDoubleNoPrecision", + "bsonType": "double", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encryptedDoubleNoPrecision": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDoubleNoPrecision", + "bsonType": "double", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "delete": "default", + "deletes": [ + { + "q": { + "encryptedDoubleNoPrecision": { + "$gt": { + "$binary": { + "base64": 
"DbMkAAADcGF5bG9hZABXJAAABGcAQyQAAAMwAH0AAAAFZAAgAAAAAHgYoMGjEE6fAlAhICv0+doHcVX8CmMVxyq7+jlyGrvmBXMAIAAAAAC/5MQZgTHuIr/O5Z3mXPvqrom5JTQ8IeSpQGhO9sB+8gVsACAAAAAAuPSXVmJUAUpTQg/A9Bu1hYczZF58KEhVofakygbsvJQAAzEAfQAAAAVkACAAAAAA2kiWNvEc4zunJ1jzvuClFC9hjZMYruKCqAaxq+oY8EAFcwAgAAAAACofIS72Cm6s866UCk+evTH3CvKBj/uZd72sAL608rzTBWwAIAAAAADuCQ/M2xLeALF0UFZtJb22QGOhHmJv6xoO+kZIHcDeiAADMgB9AAAABWQAIAAAAABkfoBGmU3hjYBvQbjNW19kfXneBQsQQPRfUL3UAwI2cAVzACAAAAAAUpK2BUOqX/DGdX5YJniEZMWkofxHqeAbXceEGJxhp8AFbAAgAAAAAKUaLzIldNIZv6RHE+FwbMjzcNHqPESwF/37mm43VPrsAAMzAH0AAAAFZAAgAAAAAFNprhQ3ZwIcYbuzLolAT5n/vc14P9kUUQComDu6eFyKBXMAIAAAAAAcx9z9pk32YbPV/sfPZl9ALIEVsqoLXgqWLVK/tP+heAVsACAAAAAA/qxvuvJbAHwwhfrPVpmCFzNvg2cU/NXaWgqgYUZpgXwAAzQAfQAAAAVkACAAAAAAODI+pB2pCuB+YmNEUAgtMfNdt3DmSkrJ96gRzLphgb8FcwAgAAAAAAT7dewFDxUDECQ3zVq75/cUN4IP+zsqhkP5+czUwlJIBWwAIAAAAACFGeOtd5zBXTJ4JYonkn/HXZfHipUlqGwIRUcH/VTatwADNQB9AAAABWQAIAAAAACNAk+yTZ4Ewk1EnotQK8O3h1gg9I7pr9q2+4po1iJVgAVzACAAAAAAUj/LesmtEsgqYVzMJ67umVA11hJTdDXwbxDoQ71vWyUFbAAgAAAAABlnhpgTQ0WjLb5u0b/vEydrCeFjVynKd7aqb+UnvVLeAAM2AH0AAAAFZAAgAAAAAD/FIrGYFDjyYmVb7oTMVwweWP7A6F9LnyIuNO4MjBnXBXMAIAAAAACIZgJCQRZu7NhuNMyOqCn1tf+DfU1qm10TPCfj5JYV3wVsACAAAAAA5hmY4ptuNxULGf87SUFXQWGAONsL9U29duh8xqsHtxoAAzcAfQAAAAVkACAAAAAAciRW40ORJLVwchOEpz87Svb+5toAFM6LxDWv928ECwQFcwAgAAAAAN0dipyESIkszfjRzdDi8kAGaa2Hf4wrPAtiWwboZLuxBWwAIAAAAAANr4o/+l1OIbbaX5lZ3fQ/WIeOcEXjNI1F0WbSgQrzaQADOAB9AAAABWQAIAAAAACZqAyCzYQupJ95mrBJX54yIz9VY7I0WrxpNYElCI4dTQVzACAAAAAA/eyJb6d1xfE+jJlVXMTD3HS/NEYENPVKAuj56Dr2dSEFbAAgAAAAANkSt154Or/JKb31VvbZFV46RPgUp8ff/hcPORL7PpFBAAM5AH0AAAAFZAAgAAAAAI5bm3YO0Xgf0VT+qjVTTfvckecM3Cwqj7DTKZXf8/NXBXMAIAAAAAD/m+h8fBhWaHm6Ykuz0WX1xL4Eme3ErLObyEVJf8NCywVsACAAAAAAfb1VZZCqs2ivYbRzX4p5CtaCkKW+g20Pr57FWXzEZi8AAzEwAH0AAAAFZAAgAAAAANqo4+p6qdtCzcB4BX1wQ6llU7eFBnuu4MtZwp4B6mDlBXMAIAAAAAAGiz+VaukMZ+6IH4jtn4KWWdKK4/W+O+gRioQDrfzpMgVsACAAAAAAG4YYkTp80EKo59mlHExDodRQFR7njhR5dmISwUJ6ukAAAzExAH0AAAAFZAAgAAAAAPrFXmHP2Y4YAm7b/aqsdn/DPoDkv7B8egWkfe23XsM1BXMAIAAAAAAGhwpKAr7skeqHm3oseSbO7qKNhmYsuUrECBxJ5k+D2AVsACAAAAAAAqPQi9luYAu3GrFCEsVjd9z2zIDcp6SPTR2w6KQEr+IAAzEyAH0AAAAFZAAgAAAAABzjYxwAjXxXc0Uxv18rH8I3my0Aguow0kTwKyxbrm+cBXMAIAAAAADVbqJVr6IdokuhXkEtXF0C2gINLiAjMVN20lE20Vmp2QVsACAAAAAAD7K1Fx4gFaaizkIUrf+EGXQeG7QX1jadhGc6Ji471H8AAzEzAH0AAAAFZAAgAAAAAFMm2feF2fFCm/UC6AfIyepX/xJDSmnnolQIBnHcPmb5BXMAIAAAAABLI11kFrQoaNVZFmq/38aRNImPOjdJh0Lo6irI8M/AaAVsACAAAAAAOWul0oVqJ9CejD2RqphhTC98DJeRQy5EwbNerU2+4l8AAzE0AH0AAAAFZAAgAAAAAJvXB3KyNiNtQko4SSzo/9b2qmM2zU9CQTTDfLSBWMgRBXMAIAAAAAAvjuVP7KsLRDeqVqRziTKpBrjVyqKiIbO9Gw8Wl2wFTAVsACAAAAAADlE+oc1ins+paNcaOZJhBlKlObDJ4VQORWjFYocM4LgAAzE1AH0AAAAFZAAgAAAAAPGdcxDiid8z8XYnfdDivNMYVPgBKdGOUw6UStU+48CdBXMAIAAAAAARj6g1Ap0eEfuCZ4X2TsEw+Djrhto3fA5nLwPaY0vCTgVsACAAAAAAoHqiwGOUkBu8SX5U1yHho+UIFdSN2MdQN5s6bQ0EsJYAAzE2AH0AAAAFZAAgAAAAAP5rGPrYGt3aKob5f/ldP0qrW7bmWvqnKY4QwdDWz400BXMAIAAAAADTQkW2ymaaf/bhteOOGmSrIR97bAnJx+yN3yMj1bTeewVsACAAAAAADyQnHGH2gF4w4L8axUsSTf6Ubk7L5/eoFOJk12MtZAoAAzE3AH0AAAAFZAAgAAAAAAlz6wJze5UkIxKpJOZFGCOf3v2KByWyI6NB6JM9wNcBBXMAIAAAAABUC7P/neUIHHoZtq0jFVBHY75tSFYr1Y5S16YN5XxC1QVsACAAAAAAgvxRbXDisNnLY3pfsjDdnFLtkvYUC4lhA68eBXc7KAwAAzE4AH0AAAAFZAAgAAAAAFJ8AtHcjia/9Y5pLEc3qVgH5xKiXw12G9Kn2A1EY8McBXMAIAAAAAAxe7Bdw7eUSBk/oAawa7uicTEDgXLymRNhBy1LAxhDvwVsACAAAAAAxKPaIBKVx3jTA+R/el7P7AZ7efrmTGjJs3Hj/YdMddwAAzE5AH0AAAAFZAAgAAAAAO8uwQUaKFb6vqR3Sv3Wn4QAonC2exOC9lGG1juqP5DtBXMAIAAAAABZf1KyJgQg8/Rf5c02DgDK2aQu0rNCOvaL60ohDHyY+gVsACAAAAAAqyEjfKC8lYoIfoXYHUqHZPoaA6EK5BAZy5dxXZmay4kAAzIwAH0AAAAFZAAgAAAAAE8YtqyRsGCeiR6hhiyisR/hccmK4nZqIMzO4lUBmEFzBXMAIAAAAAC1UYOSKqAeG1UJiKjWFV
skRhuFKpj9Ezy+lICZvFlN5AVsACAAAAAA6Ct9nNMKyRazn1OKnRKagm746CGu+jyhbL1qJnZxGi0AAzIxAH0AAAAFZAAgAAAAAPhCrMausDx1QUIEqp9rUdRKyM6a9AAx7jQ3ILIu8wNIBXMAIAAAAACmH8lotGCiF2q9VQxhsS+7LAZv79VUAsOUALaGxE/EpAVsACAAAAAAnc1xCKfdvbUEc8F7XZqlNn1C+hZTtC0I9I3LL06iaNkAAzIyAH0AAAAFZAAgAAAAAOBi/GAYFcstMSJPgp3VkMiuuUUCrZytvqYaU8dwm8v2BXMAIAAAAACEZSZVyD3pKzGlbdwlYmWQhHHTV5SnNLknl2Gw8IaUTQVsACAAAAAAfsLZsEDcWSuNsIo/TD1ReyQW75HPMgmuKZuWFOLKRLoAAzIzAH0AAAAFZAAgAAAAAIQuup+YGfH3mflzWopN8J1X8o8a0d9CSGIvrA5HOzraBXMAIAAAAADYvNLURXsC2ITMqK14LABQBI+hZZ5wNf24JMcKLW+84AVsACAAAAAACzfjbTBH7IwDU91OqLAz94RFkoqBOkzKAqQb55gT4/MAAzI0AH0AAAAFZAAgAAAAAKsh0ADyOnVocFrOrf6MpTrNvAj8iaiE923DPryu124gBXMAIAAAAADg24a8NVE1GyScc6tmnTbmu5ulzO+896fE92lN08MeswVsACAAAAAAaPxcOIxnU7But88/yadOuDJDMcCywwrRitaxMODT4msAAzI1AH0AAAAFZAAgAAAAAKkVC2Y6HtRmv72tDnPUSjJBvse7SxLqnr09/Uuj9sVVBXMAIAAAAABYNFUkH7ylPMN+Bc3HWX1e0flGYNbtJNCY9SltJCW/UAVsACAAAAAAZYK/f9H4OeihmpiFMH7Wm7uLvs2s92zNA8wyrNZTsuMAAzI2AH0AAAAFZAAgAAAAADDggcwcb/Yn1Kk39sOHsv7BO/MfP3m/AJzjGH506Wf9BXMAIAAAAAAYZIsdjICS0+BDyRUPnrSAZfPrwtuMaEDEn0/ijLNQmAVsACAAAAAAGPnYVvo2ulO9z4LGd/69NAklfIcZqZvFX2KK0s+FcTUAAzI3AH0AAAAFZAAgAAAAAEWY7dEUOJBgjOoWVht1wLehsWAzB3rSOBtLgTuM2HC8BXMAIAAAAAAAoswiHRROurjwUW8u8D5EUT+67yvrgpB/j6PzBDAfVwVsACAAAAAA6NhRTYFL/Sz4tao7vpPjLNgAJ0FX6P/IyMW65qT6YsMAAzI4AH0AAAAFZAAgAAAAAPZaapeAUUFPA7JTCMOWHJa9lnPFh0/gXfAPjA1ezm4ZBXMAIAAAAACmJvLY2nivw7/b3DOKH/X7bBXjJwoowqb1GtEFO3OYgAVsACAAAAAA/JcUoyKacCB1NfmH8vYqC1f7rd13KShrQqV2r9QBP44AAzI5AH0AAAAFZAAgAAAAAK00u6jadxCZAiA+fTsPVDsnW5p5LCr4+kZZZOTDuZlfBXMAIAAAAAAote4zTEYMDgaaQbAdN8Dzv93ljPLdGjJzvnRn3KXgtQVsACAAAAAAxXd9Mh6R3mnJy8m7UfqMKi6oD5DlZpkaOz6bEjMOdiwAAzMwAH0AAAAFZAAgAAAAAFbgabdyymiEVYYwtJSWa7lfl/oYuj/SukzJeDOR6wPVBXMAIAAAAADAFGFjS1vPbN6mQEhkDYTD6V2V23Ys9gUEUMGNvMPkaAVsACAAAAAAL/D5Sze/ZoEanZLK0IeEkhgVkxEjMWVCfmJaD3a8uNIAAzMxAH0AAAAFZAAgAAAAABNMR6UBv2E627CqLtQ/eDYx7OEwQ7JrR4mSHFa1N8tLBXMAIAAAAAAxH4gucI4UmNVB7625C6hFSVCuIpJO3lusJlPuL8H5EQVsACAAAAAAVLHNg0OUVqZ7WGOP53BkTap9FOw9dr1P4J8HxqFqU04AAzMyAH0AAAAFZAAgAAAAAG8cd6WBneNunlqrQ2EmNf35W7OGObGq9WL4ePX+LUDmBXMAIAAAAAAjJ2+sX87NSis9hBsgb1QprVRnO7Bf+GObCGoUqyPE4wVsACAAAAAAs9c9SM49/pWmyUQKslpt3RTMBNSRppfNO0JBvUqHPg0AAzMzAH0AAAAFZAAgAAAAAFWOUGkUpy8yf6gB3dio/aOfRKh7XuhvsUj48iESFJrGBXMAIAAAAAAY7sCDMcrUXvNuL6dO0m11WyijzXZvPIcOKob6IpC4PQVsACAAAAAAJOP+EHz6awDb1qK2bZQ3kTV7wsj5Daj/IGAWh4g7omAAAzM0AH0AAAAFZAAgAAAAAGUrIdKxOihwNmo6B+aG+Ag1qa0+iqdksHOjQj+Oy9bZBXMAIAAAAABwa5dbI2KmzBDNBTQBEkjZv4sPaeRkRNejcjdVymRFKQVsACAAAAAA4ml/nm0gJNTcJ4vuD+T2Qfq2fQZlibJp/j6MOGDrbHMAAzM1AH0AAAAFZAAgAAAAAOx89xV/hRk64/CkM9N2EMK6aldII0c8smdcsZ46NbP8BXMAIAAAAADBF6tfQ+7q9kTuLyuyrSnDgmrdmrXkdhl980i1KHuGHgVsACAAAAAACUqiFqHZdGbwAA+hN0YUE5zFg+H+dabIB4dj5/75W/YAAzM2AH0AAAAFZAAgAAAAAMkN0L1oQWXhjwn9rAdudcYeN8/5VdCKU8cmDt7BokjsBXMAIAAAAAAT62pGXoRwExe9uvgYOI0hg5tOxilrWfoEmT0SMglWJwVsACAAAAAAlVz4dhiprSbUero6JFfxzSJGclg63oAkAmgbSwbcYxIAAzM3AH0AAAAFZAAgAAAAANxfa4xCoaaB7k1C1RoH1LBhsCbN2yEq15BT9b+iqEC4BXMAIAAAAACAX9LV8Pemfw7NF0iB1/85NzM1Ef+1mUfyehacUVgobQVsACAAAAAAVq4xpbymLk0trPC/a2MvB39I7hRiX8EJsVSI5E5hSBkAAzM4AH0AAAAFZAAgAAAAAOYIYoWkX7dGuyKfi3XssUlc7u/gWzqrR9KMkikKVdmSBXMAIAAAAABVF2OYjRTGi9Tw8XCAwZWLpX35Yl271TlNWp6N/nROhAVsACAAAAAA0nWwYzXQ1+EkDvnGq+SMlq20z+j32Su+i/A95SggPb4AAzM5AH0AAAAFZAAgAAAAAIy0+bXZi10QC+q7oSOLXK5Fee7VEk/qHSXukfeVIfgzBXMAIAAAAAAQ3IIV/JQCHW95AEbH5zGIHtJqyuPjWPMIZ+VmQHlxEwVsACAAAAAAp0jYsyohKv9Pm+4k+DplEGbl9WLZpAJzitrcDj4CNsMAAzQwAH0AAAAFZAAgAAAAAL5SOJQ3LOhgdXJ5v086NNeAl1qonQnchObdpZJ1kHeEBXMAIAAAAAA+tEqTXODtik+ydJZSnUqXF9f18bPeze9eWtR7ExZJgQVsACAAAAAAbrkZCVgB9Qsp4IAbdf+bD4fT6Boqk5UtuA/zhNrh1y0AAzQxAH0AAAAFZAAgAAAAAKl8zcHJRDjSjJeV/
WvMxulW1zrTFtaeBy/aKKhadc6UBXMAIAAAAADBdWQl5SBIvtZZLIHszePwkO14W1mQ0izUk2Ov21cPNAVsACAAAAAAHErCYycpqiIcCZHdmPL1hi+ovLQk4TAvENpfLdTRamQAAzQyAH0AAAAFZAAgAAAAAFvotcNaoKnVt5CBCOPwjexFO0WGWuaIGL6H/6KSau+6BXMAIAAAAAD2y2mBN5xPu5PJoY2zcr0GnQDtHRBogA5+xzIxccE9fwVsACAAAAAAdS34xzJesnUfxLCcc1U7XzUqLy8MAzV/tcjbqaD3lkMAAzQzAH0AAAAFZAAgAAAAAPezU0/vNT4Q4YKbTbaeHqcwNLT+IjW/Y9QFpIooihjPBXMAIAAAAACj2x4O4rHter8ZnTws5LAP9jJ/6kk9C/V3vL50LoFZHAVsACAAAAAAQdBDF3747uCVP5lB/zr8VmzxJfTSZHBKeIgm5FyONXwAAzQ0AH0AAAAFZAAgAAAAAMqpayM2XotEFmm0gwQd9rIzApy0X+7HfOhNk6VU7F5lBXMAIAAAAACJR9+q5T9qFHXFNgGbZnPubG8rkO6cwWhzITQTmd6VgwVsACAAAAAAOZLQ6o7e4mVfDzbpQioa4d3RoTvqwgnbmc5Qh2wsZuoAAzQ1AH0AAAAFZAAgAAAAANCeyW+3oebaQk+aqxNVhAcT/BZ5nhsTVdKS3tMrLSvWBXMAIAAAAADxRFMDhkyuEc++WnndMfoUMLNL7T7rWoeblcrpSI6soQVsACAAAAAAdBuBMJ1lxt0DRq9pOZldQqchLs3B/W02txcMLD490FEAAzQ2AH0AAAAFZAAgAAAAAIbo5YBTxXM7HQhl7UP9NNgpPGFkBx871r1B65G47+K8BXMAIAAAAAC21dJSxnEhnxO5gzN5/34BL4von45e1meW92qowzb8fQVsACAAAAAAm3Hk2cvBN0ANaR5jzeZE5TsdxDvJCTOT1I01X7cNVaYAAzQ3AH0AAAAFZAAgAAAAABm/6pF96j26Jm7z5KkY1y33zcAEXLx2n0DwC03bs/ixBXMAIAAAAAD01OMvTZI/mqMgxIhA5nLs068mW+GKl3OW3ilf2D8+LgVsACAAAAAAaLvJDrqBESTNZSdcXsd+8GXPl8ZkUsGpeYuyYVv/kygAAzQ4AH0AAAAFZAAgAAAAAJ/D3+17gaQdkBqkL2wMwccdmCaVOtxzIkM8VyI4xI5zBXMAIAAAAAAggLVmkc5u+YzBR+oNE+XgLVp64fC6MzUb/Ilu/Jsw0AVsACAAAAAACz3HVKdWkx82/kGbVpcbAeZtsj2R5Zr0dEPfle4IErkAAzQ5AH0AAAAFZAAgAAAAAJMRyUW50oaTzspS6A3TUoXyC3gNYQoShUGPakMmeVZrBXMAIAAAAACona2Pqwt4U2PmFrtmu37jB9kQ/12okyAVtYa8TQkDiQVsACAAAAAAltJJKjCMyBTJ+4PkdDCPJdeX695P8P5h7WOZ+kmExMAAAzUwAH0AAAAFZAAgAAAAAByuYl8dBvfaZ0LO/81JW4hYypeNmvLMaxsIdvqMPrWoBXMAIAAAAABNddwobOUJzm9HOUD8BMZJqkNCUCqstHZkC76FIdNg9AVsACAAAAAAQQOkIQtkyNavqCnhQbNg3HfqrJdsAGaoxSJePJl1qXsAAzUxAH0AAAAFZAAgAAAAAHEzLtfmF/sBcYPPdj8867VmmQyU1xK9I/3Y0478azvABXMAIAAAAAAcmyFajZPnBTbO+oLInNwlApBocUekKkxz2hYFeSlQ+gVsACAAAAAAZ6IkrOVRcC8vSA6Vb4fPWZJrYexXhEabIuYIeXNsCSgAAzUyAH0AAAAFZAAgAAAAAJam7JYsZe2cN20ZYm2W3v1pisNt5PLiniMzymBLWyMtBXMAIAAAAABxCsKVMZMTn3n+R2L7pVz5nW804r8HcK0mCBw3jUXKXAVsACAAAAAA7j3JGnNtR64P4dJLeUoScFRGfa8ekjh3dvhw46sRFk0AAzUzAH0AAAAFZAAgAAAAAMXrXx0saZ+5gORmwM2FLuZG6iuO2YS+1IGPoAtDKoKBBXMAIAAAAADIQsxCr8CfFKaBcx8kIeSywnGh7JHjKRJ9vJd9x79y7wVsACAAAAAAcvBV+SykDYhmRFyVYwFYB9oBKBSHr55Jdz2cXeowsUQAAzU0AH0AAAAFZAAgAAAAACbzcUD3INSnCRspOKF7ubne74OK9L0FTZvi9Ay0JVDYBXMAIAAAAADPebVQH8Btk9rhBIoUOdSAdpPvz7qIY4UC2i6IGisSAQVsACAAAAAAiBunJi0mPnnXdnldiq+If8dcb/n6apHnaIFt+oyYO1kAAzU1AH0AAAAFZAAgAAAAACUc2CtD1MK/UTxtv+8iA9FoHEyTwdl43HKeSwDw2Lp5BXMAIAAAAACCIduIdw65bQMzRYRfjBJj62bc69T4QqH4QoWanwlvowVsACAAAAAAM0TV7S+aPVVzJOQ+cpSNKHTwyQ0mWa8tcHzfk3nR+9IAAzU2AH0AAAAFZAAgAAAAAHSaHWs/dnmI9sc7nB50VB2Bzs0kHapMHCQdyVEYY30TBXMAIAAAAACkV22lhEjWv/9/DubfHBAcwJggKI5mIbSK5L2nyqloqQVsACAAAAAAS19m7DccQxgryOsBJ3GsCs37yfQqNi1G+S6fCXpEhn4AAzU3AH0AAAAFZAAgAAAAAAL8jhNBG0KXXZhmZ0bPXtfgapJCB/AI+BEHB0eZ3C75BXMAIAAAAADHx/fPa639EBmGV5quLi8IQT600ifiKSOhTDOK19DnzwVsACAAAAAAlyLTDVkHxbayklD6Qymh3odIK1JHaOtps4f4HR+PcDgAAzU4AH0AAAAFZAAgAAAAAAxgeclNl09H7HvzD1oLwb2YpFca5eaX90uStYXHilqKBXMAIAAAAACMU5pSxzIzWlQxHyW170Xs9EhD1hURASQk+qkx7K5Y6AVsACAAAAAAJbMMwJfNftA7Xom8Bw/ghuZmSa3x12vTZxBUbV8m888AAzU5AH0AAAAFZAAgAAAAABmO7QD9vxWMmFjIHz13lyOeV6vHT6mYCsWxF7hb/yOjBXMAIAAAAACT9lmgkiqzuWG24afuzYiCeK9gmJqacmxAruIukd0xEAVsACAAAAAAZa0/FI/GkZR7CtX18Xg9Tn9zfxkD0UoaSt+pIO5t1t4AAzYwAH0AAAAFZAAgAAAAAB89SjLtDJkqEghRGyj6aQ/2qvWLNuMROoXmzbYbCMKMBXMAIAAAAAC8sywgND+CjhVTF6HnRQeay8y9/HnVzDI42dEPah28LQVsACAAAAAAoxv7UKh0RqUAWcOsQvU123zO1qZn73Xfib0qncZCB34AAzYxAH0AAAAFZAAgAAAAABN2alGq9Aats1mwERNGwL/fIwZSvVCe9/8XMHTFlpUpBXMAIAAAAACuDPjJgvvbBYhbLpjMiWUCsVppiYrhvR+yMysNPN8cZAVsACAAAAAAKpADjc4bzIZMi9Q/+oe0EMRJ
HYQt6dlo1x/lRquagqkAAzYyAH0AAAAFZAAgAAAAAL8YB6VAqGBiWD4CBv16IBscg5J7VQCTZu87n6pj+86KBXMAIAAAAAAmxm8e68geeyAdUjSMWBHzUjneVB0pG9TBXIoE6467hAVsACAAAAAAV76JZAlYpgC/Zl8awx2ArCg1uuyy2XVTSkp0wUMi/7UAAzYzAH0AAAAFZAAgAAAAAL4yLkCTV5Dmxa5toBu4JT8ge/cITAaURIOuFuOtFUkeBXMAIAAAAAAXoFNQOMGkAj7qEJP0wQafmFSXgWGeorDVbwyOxWLIsgVsACAAAAAAc4Un6dtIFe+AQ+RSfNWs3q63RTHhmyc+5GKRRdpWRv8AAzY0AH0AAAAFZAAgAAAAAEU8DoUp46YtYjNFS9kNXwdYxQ9IW27vCTb+VcqqfnKNBXMAIAAAAADe7vBOgYReE8X78k5ARuUnv4GmzPZzg6SbConf4L2G3wVsACAAAAAA78YHWVkp6HbZ0zS4UL2z/2pj9vPDcMDt7zTv6NcRsVsAAzY1AH0AAAAFZAAgAAAAAPa4yKTtkUtySuWo1ZQsp2QXtPb5SYqzA5vYDnS1P6c0BXMAIAAAAADKnF58R1sXlHlsHIvCBR3YWW/qk54z9CTDhZydkD1cOQVsACAAAAAAHW3ERalTFWKMzjuXF3nFh0pSrQxM/ojnPbPhc4v5MaQAAzY2AH0AAAAFZAAgAAAAAN5WJnMBmfgpuQPyonmY5X6OdRvuHw4nhsnGRnFAQ95VBXMAIAAAAACwftzu7KVV1rmGKwXtJjs3cJ1gE3apr8+N0SAg1F2cHwVsACAAAAAATDW0reyaCjbJuVLJzbSLx1OBuBoQu+090kgW4RurVacAAzY3AH0AAAAFZAAgAAAAACHvDsaPhoSb6DeGnKQ1QOpGYAgK82qpnqwcmzSeWaJHBXMAIAAAAABRq3C5+dOfnkAHM5Mg5hPB3O4jhwQlBgQWLA7Ph5bhgwVsACAAAAAAqkC8zYASvkVrp0pqmDyFCkPaDmD/ePAJpMuNOCBhni8AAzY4AH0AAAAFZAAgAAAAAOBePJvccPMJmy515KB1AkXF5Pi8NOG4V8psWy0SPRP+BXMAIAAAAAB3dOJG9xIDtEKCRzeNnPS3bFZepMj8UKBobKpSoCPqpgVsACAAAAAAPG3IxQVOdZrr509ggm5FKizWWoZPuVtOgOIGZ3m+pdEAAzY5AH0AAAAFZAAgAAAAABUvRrDQKEXLMdhnzXRdhiL6AGNs2TojPky+YVLXs+JnBXMAIAAAAAD1kYicbEEcPzD4QtuSYQQWDPq8fuUWGddpWayKn3dT9QVsACAAAAAA9+Sf7PbyFcY45hP9oTfjQiOUS3vEIAT8C0vOHymwYSUAAzcwAH0AAAAFZAAgAAAAAOvSnpujeKNen4pqc2HR63C5s5oJ1Vf4CsbKoYQvkwl5BXMAIAAAAACw2+vAMdibzd2YVVNfk81yXkFZP0WLJ82JBxJmXnYE+QVsACAAAAAArQ/E1ACyhK4ZyLqH9mNkCU7WClqRQTGyW9tciSGG/EMAAzcxAH0AAAAFZAAgAAAAAAo0xfGG7tJ3GWhgPVhW5Zn239nTD3PadShCNRc9TwdNBXMAIAAAAADZh243oOhenu0s/P/5KZLBDh9ADqKHtSWcXpO9D2sIjgVsACAAAAAAlgTPaoQKz+saU8rwCT3UiNOdG6hdpjzFx9GBn08ZkBEAABJjbQAAAAAAAAAAAAAQcGF5bG9hZElkAAAAAAAQZmlyc3RPcGVyYXRvcgABAAAAEnNwAAEAAAAAAAAAEHRmAAEAAAABbW4A////////7/8BbXgA////////738A", + "subType": "06" + } + } + } + }, + "limit": 1 + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDoubleNoPrecision", + "bsonType": "double", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + } + } + } + ] + } + } + } + }, + "commandName": "delete" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Double-FindOneAndUpdate.json b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Double-FindOneAndUpdate.json new file mode 100644 index 0000000000..ce1be99a3a --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Double-FindOneAndUpdate.json @@ -0,0 +1,1199 @@ +{ + "description": "fle2v2-Rangev2-Double-FindOneAndUpdate", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { 
+ "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted" + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll_unencrypted", + "database": "db_unencrypted", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDoubleNoPrecision", + "bsonType": "double", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + } + } + } + ] + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "FLE2 Range Double. FindOneAndUpdate.", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDoubleNoPrecision": { + "$numberDouble": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDoubleNoPrecision": { + "$numberDouble": "1" + } + } + }, + "object": "coll" + }, + { + "name": "findOneAndUpdate", + "arguments": { + "filter": { + "encryptedDoubleNoPrecision": { + "$gt": { + "$numberDouble": "0" + } + } + }, + "update": { + "$set": { + "encryptedDoubleNoPrecision": { + "$numberDouble": "2" + } + } + }, + "returnDocument": "Before" + }, + "object": "coll", + "expectResult": { + "_id": 1, + "encryptedDoubleNoPrecision": { + "$numberDouble": "1" + } + } + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": 0, + "encryptedDoubleNoPrecision": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "6YrBn2ofIw1b5ooakrLOwF41BWrps8OO0H9WH4/rtlE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "n+XAuFnP8Dov9TnhGFxNx0K/MnVM9WbJ7RouEu0ndO0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yRXojuVdn5GQtD97qYlaCL6cOLmZ7Cvcb3wFjkLUIdM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "DuIkdRPITRs55I4SZmgomAHCIsDQmXRhW8+MOznkzSk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SsBk+Et1lTbU+QRPx+xyJ/jMkmfG+QCvQEpip2YYrzA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "crCIzOd8KhHvvUlX7M1v9bhvU4pLdTc+X2SuqoKU5Ek=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "YOWdCw4UrqnxkAaVjqmC4sKQDMVMHEpFGnlxpxdaU6E=", + "subType": "00" + } + }, + { + "$binary": { + "base64": 
"M3SShp81Ff8tQ632qKbv9MUcN6wjDaBReI0VXNu6Xh4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "gzHlSPxpM0hT75kQvWFzGlOxKvDoiKQZOr19V6l2zXI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "s3JnppOGYw9SL2Q1kMAZs948v2F5PrpXjGei/HioDWs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "cG6+3Gk/zEH68P/uuuwiAUVCuyJwa1LeV+t29FlPPAo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dupdvR3AyJtM+g9NDKiaLVOtGca387JQp8w+V03m7Ig=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "JqEQc5svj2jTvZ6LLA5ivE+kTb/0aRemSEmxk4G7Zrg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "szcXXXKnob+p3SoM4yED2R920LeJ7cVsclPMFTe4CeI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "o1QoGVXmuBdHwHm7aCtGMlMVKrjFdYvJXpoq6uhIAZ0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Jfm5wPlqqLCJRGQIqRq2NGmpn7s0Vrih2H3YAOoI2YU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "zMHLb8ARbsYo8Ld05bqnGFf1Usha6EGb8QKwdSAyps0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yQdtq9lh5pugL7/i0Bj/PuZUUBUIzf+7wj1rl5y736w=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "wGWVZdO7qIuyDg/BqDgqjgoQ02h5YYgwXQB1oCin2NE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "by9HMLj6NTEpgztZ5HSN6GxImkXPcaFINYDzgZY33X8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tWo0vbasi7bXmn/MsOx13VC1IsWtpx/nYp0uj4iMzdA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tQQpndUYd5O87lOtrGjH3wl9VsOK0ray7RMasL90sBM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "cQjXEDCMsOpKLLf+vlTgIHA+cbSJdzqhbSX9Wvh95aA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "7yMpU48IxK9SzP2cx3VnTownGEwFmeFofuuFT97SuuY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "kSOx1kz0CmBgzKQHZlo65ZUY1DIv9A99JRm+Us2y6Ew=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ubQpdPBe6/xvtr+AcXdfYLSvYCR4ot0tivehkCsupb4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xal+iCJ6FTefRQToyoNksc9NCZShyn04NDGi4IYrcoM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "d7jU4iOK50xHxlkSifcxlZFCM46TSgQzoYivxG3HNLY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tJvl2nsBLBVzL3pp6sKWCL4UXeh3q/roYBJjSb74ve0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "OIUCaKRvIx9t1w6Hxlz1IcQTdPNCfdRNwnnTm10W+X0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "A9tvzsiElotOUVIB4CqfQp9mAwqvTM35YkmAR170aHA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "lI8gpK7hpb7c9x4RQugsxMnQay5LZJmwslZdvMx/dcE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dNCzh40U0XvdKnSDi3HRQOWQftEsDVqc4uUvsVFGoq8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "IP+iwEBWBwVVZIdpaMu8k5+soFCz+TZkYn3drKZ9grE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "pnqyh6e0y5svHkJDShlN9CHV0WvMBE4QbtJpQw5ZCXc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "elEl42tbVDoRTLjAhZUFEtXiut4b3PVhg/1ZLZSQdtE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "vHuu2FxwclMHqyE6JBYbTYgbEkB0dqb/JuaxsvfwsmY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xTf7NCe3Gf8QpE78HR5OknlLTKfs9J+RN9UZpH6fnso=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "XiWSasRnJAulGR6+LCVD3mwRObXylqYWR9jvpywq12c=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "MZMxEQ5ikx0PG1YFIExv0UnTZogsvgeOEZTpzvBDn4w=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yZMyMZBDrWbAhvnic7vvIYhmO9m5H2iuv0c8KNZrBzY=", + 
"subType": "00" + } + }, + { + "$binary": { + "base64": "xxM14hTPY5j0vvcK2C7YAEjzdsfUTFHozHC0hEo1bxI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "+01rqR1xVwkpGXcstbk1ItJqFVjH6Q8MGxEN3Cm9Y1A=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xOpLV0Z2VTRJ3iWtnWZcsyjXubTIkYWo31cO+HV1o1k=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "BWUOLqgLBqc5NwxVlSV5H3KFQPXbCp7mdo+jF+8cJqY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "fuQb1S6xZDGlrEbK+kI23aL53PP1PVNwqICnZNt9Yzg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SfscnoibFttahLdPVC4Ee+47ewGFKpDSU7M6HX19bKE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "rpSW2awybNVeKtat91VFxqbINoTfNhPfQAu+d73Xtf8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "9M/CP9ccOIIj2LLFmE0GFDO0Ban2wsNalEXfM6+h+1s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "WrEMG49l1ye4MhXs5ZS9tz8P6h+hDvthIg/2wW9ne1Q=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ImNhbfeyfH8qIEeA5ic0s3dAQBdzzTBS+CPsNih9vZ0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dWP33YDSn04UKJN2ogh2Rui0iW/0q2y18OCDRVcfyoo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "lYv0isAtfGh6H9tdp3cp2eHU7q2J+uk7QrgcxtK3w7Y=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "VGMoamB/+7zTOYcY/pqJc96xlv2PdW4hwsIAEIslTDQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yNeBWMF7BnD9wVwz2PgJsvWr77QiVvvWUvJF0+fqBug=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SfpvObJ+tJBXSvqeN7vlOfmhYign635lciYAJIjUtY8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dsen4NqjzVGjpjufiTMs3+gqeD09EbnuogPgxrJECwg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "pxCWVM3sn19NsFEpgHbgLa+PmYlhN3mMiP0Wk8kJhYw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "q11KNvJszjYIB9n9HcC+N4uz11a3eRj1L3BH9scKMDQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "A1PmkgcEToWh1JiVWE6mI5jUu7poxWWuCUt/cgRUUDc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "qJo3Hu4PJeanL7XEaWXO/n3YsodhZyd+MJOOmB9Kpd8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "BkBKLO8URFscfRY9Bav/1+L9mLohDgNr/MkZtGiraIs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "rZq5WA3Hx3xthOyHAJXK//f8pE2qbz7YKu3TIMp9GFY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "X07a/Lm80p5xd4RFs1dNmw+90tmPDPdGiAKVZkxd4zY=", + "subType": "00" + } + } + ] + }, + { + "_id": 1, + "encryptedDoubleNoPrecision": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "HI88j1zrIsFoijIXKybr9mYubNV5uVeODyLHFH4Ueco=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "wXVD/HSbBljko0jJcaxJ1nrzs2+pchLQqYR3vywS8SU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "KhscCh+tt/pp8lxtKZQSPPUU94RvJYPKG/sjtzIa4Ws=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "RISnuNrTTVNW5HnwCgQJ301pFw8DOcYrAMQIwVwjOkI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Ra5zukLh2boua0Bh74qA+mtIoixGXlsNsxiJqHtqdTI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "eqr0v+NNWXWszi9ni8qH58Q6gw5x737tJvH3lPaNHO4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "d42QupriWIwGrFAquXNFi0ehEuidIbHLFZtg1Sm2nN8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "2azRVxaaTIJKcgY2FU012gcyP8Y05cRDpfUaMnCBaQU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "3nlgkM4K/AAcHesRYYdEu24UGetHodVnVfHzw4yxZBM=", + "subType": "00" + } + }, + { + "$binary": { + 
"base64": "hqy91FNmAAac2zUaPO6eWFkx0/37rOWGrwXN+fzL0tU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "akX+fmscSDSF9pB5MPj56iaJPtohr0hfXNk/OPWsGv8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "1ZvUb10Q7cN4cNLktd5yNjqgtawsYnkbeVBZV6WuY/I=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "otCwtuKiY4hCyXvYzXvo10OcnzZppebo38KsAlq49QM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Mty8EscckeT/dhMfrPFyDbLnmMOcYRUQ3mLK4KTu6V8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tnvgLLkJINO7csREYu4dEVe1ICrBeu7OP+HdfoX3M2E=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "kOefsHgEVhkJ17UuP7Dxogy6sAQbzf1SFPKCj6XRlrQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "F+JQ79xavpaHdJzdhvwyHbzdZJLNHAymc/+67La3gao=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "NCZ9zp5rDRceENuSgAfTLEyKg0YgmXAhK0B8WSj7+Pw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "wL1CJ7cYR5slx8mHq++uMdjDfkt9037lQTUztEMF56M=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "txefkzTMITZE+XvvRFZ7QcgwDT/7m8jNmxRk4QBaoZI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "jFunW3v1tSYMyZtQQD28eEy9qqDp4Kqo7gMN29N4bfQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "QMO915KUiS3X3R1bU1YoafVM2s0NeHo3EjgTA9PnGwY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "nwdKJEXdilzvb7494vbuDJ+y6SrfJahza1dYIsHIWVI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "vpWMX+T/VXXajFo0UbuYjtp0AEzBU0Y+lP+ih2EQ7mg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "1lmzG0J1DhKDRhhq5y5Buygu4G8eV2X0t7kUY90EohM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SiKqpXqO0trwhFvBWK274hMklpCgMhNs/JY84yyn/NE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "7cPGPYCKPTay+ZR9Gx6oOueduOgaFrSuAXmNDpDHXdI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "4THEYvAkjs2Fh7FIe5LC45P4i4N0L7ob67UOVbhp6Nk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "B+UGsChLLZR7iqnt8yq91OgmTgwiUKTJhFxY4NT0O6c=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "X1uYwBCsCg1H+PnKdwtBqXlt0zKEURi8bOM940GcPfk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xYOgT5l7shlNXCwHlguovmDkcEnF8dXyYlTyYrgZ8GE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "vFMTZqV8bh1+gcKzTkXweMddJlgdUnwX0DWzUUaMok4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "4HI0y9FrtleZxZ7M6INdNhLelrQ2Rv/+ykWCBl+tMC8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "jpJ0bBE474OUkn1vUiLWumIBtYmwc7J5+LQU/nyeLQc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "jQTPeXZvdxY/DjtPfYfKUArIDsf0E9MVFy2O26sv1ec=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "QLLto0ExR2ZYMGqlyaMZc/hXFFTlwmgtKbiVq/xJIeI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yBJNviU1nchbGbhx6InXCVRXa90sEepz1EwbYuKXu2U=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "jpEf0vHxrPu9gTJutNXSi2g/2Mc4WXFEN7yHonZEb7A=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "E09kLFckMYwNuhggMxmPtwndyvIAx+Vl+b2CV6FP75s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "N+ue6/cLPb5NssmJCCeo18LlbKPz6r2z20AsnTKRvOo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yVQNZP8hhsvNGyDph2QP2qTNdXZTiIEVineKg+Qf33o=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "cSC9uI+9c5S8X+0G7amVyug1p0ZlgBsbEDYYyezBevQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": 
"1NpZGjoQzuQtekj80Rifxe9HbE08W07dfwxaFHaVn84=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "5Ghuq/8l11Ug9Uf/RTwf9On3OxOwIXUcb9soiy4J7/w=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "0LWKaEty6ywxLFhDaAqulqfMnYc+tgPfH4apyEeKg80=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "OwSthmCBtt6NIAoAh7aCbj82Yr/+9t8U7WuBQhFT3AQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "iYiyg6/1isqbMdvFPIGucu3cNM4NAZNtJhHpGZ4eM+c=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "waBgs8jWuGJPIF5zCRh6OmIyfK5GCBQgTMfmKSR2wyY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "1Jdtbe2BKJXPU2G9ywOrlODZ/cNYEQlKzAW3aMe1Hy4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xaLEnNUS/2ySerBpb9dN/D31t+wYcKekwTfkwtni0Mc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "bIVBrOhOvr6cL55Tr24+B+CC9MiG7U6K54aAr2IXXuw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6Cdq5wroGu2TEFnekuT7LhOpd/K/+PcipIljcHU9QL4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "K5l64vI4S/pLviLW6Pl0U3iQkI3ge0xg4RAHcEsyKJo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "bzhuvZ0Ls22yIOX+Hz51eAHlSuDbWR/e0u4EhfdpHbc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Qv+fr6uD4o0bZRp69QJCFL6zvn3G82c7L+N1IFzj7H0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "XAmISMbD3aEyQT+BQEphCKFNa0F0GDKFuhM9cGceKoQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "4VLCokntMfm1AogpUnYGvhV7nllWSo3mS3hVESMy+hA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xiXNLj/CipEH63Vb5cidi8q9X47EF4f3HtJSOH7mfM8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "4XlCYfYBjI9XA5zOSgTiEBYcZsdwyXL+f5XtH2xUIOc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "k6DfQy7ZYJIkEly2B5hjOZznL4NcgMkllZjJLb7yq7w=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ZzM6gwWesa3lxbZVZthpPFs2s3GV0RZREE2zOMhBRBo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "US+jeMeeOd7J0wR0efJtq2/18lcO8YFvhT4O3DeaonQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "b6iSxiI1FM9SzxuG1bHqGA1i4+3GOi0/SPW00XB4L7o=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "kn3LsxAVkzIZKK9I6fi0Cctr0yjXOYgaQWMCoj4hLpM=", + "subType": "00" + } + } + ] + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 0, + "encryptedDoubleNoPrecision": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDoubleNoPrecision", + "bsonType": "double", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + 
"$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encryptedDoubleNoPrecision": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDoubleNoPrecision", + "bsonType": "double", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "findAndModify": "default", + "query": { + "encryptedDoubleNoPrecision": { + "$gt": { + "$binary": { + "base64": "DbMkAAADcGF5bG9hZABXJAAABGcAQyQAAAMwAH0AAAAFZAAgAAAAAHgYoMGjEE6fAlAhICv0+doHcVX8CmMVxyq7+jlyGrvmBXMAIAAAAAC/5MQZgTHuIr/O5Z3mXPvqrom5JTQ8IeSpQGhO9sB+8gVsACAAAAAAuPSXVmJUAUpTQg/A9Bu1hYczZF58KEhVofakygbsvJQAAzEAfQAAAAVkACAAAAAA2kiWNvEc4zunJ1jzvuClFC9hjZMYruKCqAaxq+oY8EAFcwAgAAAAACofIS72Cm6s866UCk+evTH3CvKBj/uZd72sAL608rzTBWwAIAAAAADuCQ/M2xLeALF0UFZtJb22QGOhHmJv6xoO+kZIHcDeiAADMgB9AAAABWQAIAAAAABkfoBGmU3hjYBvQbjNW19kfXneBQsQQPRfUL3UAwI2cAVzACAAAAAAUpK2BUOqX/DGdX5YJniEZMWkofxHqeAbXceEGJxhp8AFbAAgAAAAAKUaLzIldNIZv6RHE+FwbMjzcNHqPESwF/37mm43VPrsAAMzAH0AAAAFZAAgAAAAAFNprhQ3ZwIcYbuzLolAT5n/vc14P9kUUQComDu6eFyKBXMAIAAAAAAcx9z9pk32YbPV/sfPZl9ALIEVsqoLXgqWLVK/tP+heAVsACAAAAAA/qxvuvJbAHwwhfrPVpmCFzNvg2cU/NXaWgqgYUZpgXwAAzQAfQAAAAVkACAAAAAAODI+pB2pCuB+YmNEUAgtMfNdt3DmSkrJ96gRzLphgb8FcwAgAAAAAAT7dewFDxUDECQ3zVq75/cUN4IP+zsqhkP5+czUwlJIBWwAIAAAAACFGeOtd5zBXTJ4JYonkn/HXZfHipUlqGwIRUcH/VTatwADNQB9AAAABWQAIAAAAACNAk+yTZ4Ewk1EnotQK8O3h1gg9I7pr9q2+4po1iJVgAVzACAAAAAAUj/LesmtEsgqYVzMJ67umVA11hJTdDXwbxDoQ71vWyUFbAAgAAAAABlnhpgTQ0WjLb5u0b/vEydrCeFjVynKd7aqb+UnvVLeAAM2AH0AAAAFZAAgAAAAAD/FIrGYFDjyYmVb7oTMVwweWP7A6F9LnyIuNO4MjBnXBXMAIAAAAACIZgJCQRZu7NhuNMyOqCn1tf+DfU1qm10TPCfj5JYV3wVsACAAAAAA5hmY4ptuNxULGf87SUFXQWGAONsL9U29duh8xqsHtxoAAzcAfQAAAAVkACAAAAAAciRW40ORJLVwchOEpz87Svb+5toAFM6LxDWv928ECwQFcwAgAAAAAN0dipyESIkszfjRzdDi8kAGaa2Hf4wrPAtiWwboZLuxBWwAIAAAAAANr4o/+l1OIbbaX5lZ3fQ/WIeOcEXjNI1F0WbSgQrzaQADOAB9AAAABWQAIAAAAACZqAyCzYQupJ95mrBJX54yIz9VY7I0WrxpNYElCI4dTQVzACAAAAAA/eyJb6d1xfE+jJlVXMTD3HS/NEYENPVKAuj56Dr2dSEFbAAgAAAAANkSt154Or/JKb31VvbZFV46RPgUp8ff/hcPORL7PpFBAAM5AH0AAAAFZAAgAAAAAI5bm3YO0Xgf0VT+qjVTTfvckecM3Cwqj7DTKZXf8/NXBXMAIAAAAAD/m+h8fBhWaHm6Ykuz0WX1xL4Eme3ErLObyEVJf8NCywVsACAAAAAAfb1VZZCqs2ivYbRzX4p5CtaCkKW+g20Pr57FWXzEZi8AAzEwAH0AAAAFZAAgAAAAANqo4+p6qdtCzcB4BX1wQ6llU7eFBnuu4MtZwp4B6mDlBXMAIAAAAAAGiz+VaukMZ+6IH4jtn4KWWdKK4/W+O+gRioQDrfzpMgVsACAAAAAAG4YYkTp80EKo59mlHExDodRQFR7njhR5dmISwUJ6ukAAAzExAH0AAAAFZAAgAAAAAPrFXmHP2Y4YAm7b/aqsdn/DPoDkv7B8egWkfe23XsM1BXMAIAAAAAAGhwpKAr7skeqHm3oseSbO7qKNhmYsuUrECBxJ5k+D2AVsACAAAAAAAqPQi9luYAu3GrFCEsVjd9z2zIDcp6SPTR2w6KQEr+IAAzEyAH0AAAAFZAAgAAAAABzjYxwAjXxXc0Uxv18rH8I3my0Aguow0kTwKyxbrm+cBXMAIAAAAADVbqJVr6IdokuhXkEtXF0C2gINLiAjMVN20lE20Vmp2QVsACAAAAAAD7K1Fx4gFaaizkIUrf+EGXQeG7QX1jadhGc6Ji471H8AAzEzAH0AAAAFZAAgAAAAAFMm2feF2fFCm/UC6AfIyepX/xJDSmnnolQIBnHcPmb5BXMAIAAAAABLI11kFrQoaNVZFmq/38aRNImPOjdJh0Lo6irI8M/AaAVsACAAAAAAOWul0oVqJ9CejD2RqphhTC98DJeRQy5EwbNerU2+4l8AAzE0AH0AAAAFZAAgAAAAAJvXB3KyNiNtQko4SSzo/9b2qmM2zU9CQTTDfLSBWMgRBXMAIAAAAA
AvjuVP7KsLRDeqVqRziTKpBrjVyqKiIbO9Gw8Wl2wFTAVsACAAAAAADlE+oc1ins+paNcaOZJhBlKlObDJ4VQORWjFYocM4LgAAzE1AH0AAAAFZAAgAAAAAPGdcxDiid8z8XYnfdDivNMYVPgBKdGOUw6UStU+48CdBXMAIAAAAAARj6g1Ap0eEfuCZ4X2TsEw+Djrhto3fA5nLwPaY0vCTgVsACAAAAAAoHqiwGOUkBu8SX5U1yHho+UIFdSN2MdQN5s6bQ0EsJYAAzE2AH0AAAAFZAAgAAAAAP5rGPrYGt3aKob5f/ldP0qrW7bmWvqnKY4QwdDWz400BXMAIAAAAADTQkW2ymaaf/bhteOOGmSrIR97bAnJx+yN3yMj1bTeewVsACAAAAAADyQnHGH2gF4w4L8axUsSTf6Ubk7L5/eoFOJk12MtZAoAAzE3AH0AAAAFZAAgAAAAAAlz6wJze5UkIxKpJOZFGCOf3v2KByWyI6NB6JM9wNcBBXMAIAAAAABUC7P/neUIHHoZtq0jFVBHY75tSFYr1Y5S16YN5XxC1QVsACAAAAAAgvxRbXDisNnLY3pfsjDdnFLtkvYUC4lhA68eBXc7KAwAAzE4AH0AAAAFZAAgAAAAAFJ8AtHcjia/9Y5pLEc3qVgH5xKiXw12G9Kn2A1EY8McBXMAIAAAAAAxe7Bdw7eUSBk/oAawa7uicTEDgXLymRNhBy1LAxhDvwVsACAAAAAAxKPaIBKVx3jTA+R/el7P7AZ7efrmTGjJs3Hj/YdMddwAAzE5AH0AAAAFZAAgAAAAAO8uwQUaKFb6vqR3Sv3Wn4QAonC2exOC9lGG1juqP5DtBXMAIAAAAABZf1KyJgQg8/Rf5c02DgDK2aQu0rNCOvaL60ohDHyY+gVsACAAAAAAqyEjfKC8lYoIfoXYHUqHZPoaA6EK5BAZy5dxXZmay4kAAzIwAH0AAAAFZAAgAAAAAE8YtqyRsGCeiR6hhiyisR/hccmK4nZqIMzO4lUBmEFzBXMAIAAAAAC1UYOSKqAeG1UJiKjWFVskRhuFKpj9Ezy+lICZvFlN5AVsACAAAAAA6Ct9nNMKyRazn1OKnRKagm746CGu+jyhbL1qJnZxGi0AAzIxAH0AAAAFZAAgAAAAAPhCrMausDx1QUIEqp9rUdRKyM6a9AAx7jQ3ILIu8wNIBXMAIAAAAACmH8lotGCiF2q9VQxhsS+7LAZv79VUAsOUALaGxE/EpAVsACAAAAAAnc1xCKfdvbUEc8F7XZqlNn1C+hZTtC0I9I3LL06iaNkAAzIyAH0AAAAFZAAgAAAAAOBi/GAYFcstMSJPgp3VkMiuuUUCrZytvqYaU8dwm8v2BXMAIAAAAACEZSZVyD3pKzGlbdwlYmWQhHHTV5SnNLknl2Gw8IaUTQVsACAAAAAAfsLZsEDcWSuNsIo/TD1ReyQW75HPMgmuKZuWFOLKRLoAAzIzAH0AAAAFZAAgAAAAAIQuup+YGfH3mflzWopN8J1X8o8a0d9CSGIvrA5HOzraBXMAIAAAAADYvNLURXsC2ITMqK14LABQBI+hZZ5wNf24JMcKLW+84AVsACAAAAAACzfjbTBH7IwDU91OqLAz94RFkoqBOkzKAqQb55gT4/MAAzI0AH0AAAAFZAAgAAAAAKsh0ADyOnVocFrOrf6MpTrNvAj8iaiE923DPryu124gBXMAIAAAAADg24a8NVE1GyScc6tmnTbmu5ulzO+896fE92lN08MeswVsACAAAAAAaPxcOIxnU7But88/yadOuDJDMcCywwrRitaxMODT4msAAzI1AH0AAAAFZAAgAAAAAKkVC2Y6HtRmv72tDnPUSjJBvse7SxLqnr09/Uuj9sVVBXMAIAAAAABYNFUkH7ylPMN+Bc3HWX1e0flGYNbtJNCY9SltJCW/UAVsACAAAAAAZYK/f9H4OeihmpiFMH7Wm7uLvs2s92zNA8wyrNZTsuMAAzI2AH0AAAAFZAAgAAAAADDggcwcb/Yn1Kk39sOHsv7BO/MfP3m/AJzjGH506Wf9BXMAIAAAAAAYZIsdjICS0+BDyRUPnrSAZfPrwtuMaEDEn0/ijLNQmAVsACAAAAAAGPnYVvo2ulO9z4LGd/69NAklfIcZqZvFX2KK0s+FcTUAAzI3AH0AAAAFZAAgAAAAAEWY7dEUOJBgjOoWVht1wLehsWAzB3rSOBtLgTuM2HC8BXMAIAAAAAAAoswiHRROurjwUW8u8D5EUT+67yvrgpB/j6PzBDAfVwVsACAAAAAA6NhRTYFL/Sz4tao7vpPjLNgAJ0FX6P/IyMW65qT6YsMAAzI4AH0AAAAFZAAgAAAAAPZaapeAUUFPA7JTCMOWHJa9lnPFh0/gXfAPjA1ezm4ZBXMAIAAAAACmJvLY2nivw7/b3DOKH/X7bBXjJwoowqb1GtEFO3OYgAVsACAAAAAA/JcUoyKacCB1NfmH8vYqC1f7rd13KShrQqV2r9QBP44AAzI5AH0AAAAFZAAgAAAAAK00u6jadxCZAiA+fTsPVDsnW5p5LCr4+kZZZOTDuZlfBXMAIAAAAAAote4zTEYMDgaaQbAdN8Dzv93ljPLdGjJzvnRn3KXgtQVsACAAAAAAxXd9Mh6R3mnJy8m7UfqMKi6oD5DlZpkaOz6bEjMOdiwAAzMwAH0AAAAFZAAgAAAAAFbgabdyymiEVYYwtJSWa7lfl/oYuj/SukzJeDOR6wPVBXMAIAAAAADAFGFjS1vPbN6mQEhkDYTD6V2V23Ys9gUEUMGNvMPkaAVsACAAAAAAL/D5Sze/ZoEanZLK0IeEkhgVkxEjMWVCfmJaD3a8uNIAAzMxAH0AAAAFZAAgAAAAABNMR6UBv2E627CqLtQ/eDYx7OEwQ7JrR4mSHFa1N8tLBXMAIAAAAAAxH4gucI4UmNVB7625C6hFSVCuIpJO3lusJlPuL8H5EQVsACAAAAAAVLHNg0OUVqZ7WGOP53BkTap9FOw9dr1P4J8HxqFqU04AAzMyAH0AAAAFZAAgAAAAAG8cd6WBneNunlqrQ2EmNf35W7OGObGq9WL4ePX+LUDmBXMAIAAAAAAjJ2+sX87NSis9hBsgb1QprVRnO7Bf+GObCGoUqyPE4wVsACAAAAAAs9c9SM49/pWmyUQKslpt3RTMBNSRppfNO0JBvUqHPg0AAzMzAH0AAAAFZAAgAAAAAFWOUGkUpy8yf6gB3dio/aOfRKh7XuhvsUj48iESFJrGBXMAIAAAAAAY7sCDMcrUXvNuL6dO0m11WyijzXZvPIcOKob6IpC4PQVsACAAAAAAJOP+EHz6awDb1qK2bZQ3kTV7wsj5Daj/IGAWh4g7omAAAzM0AH0AAAAFZAAgAAAAAGUrIdKxOihwNmo6B+aG+Ag1qa0+iqdksHOjQj+Oy9bZBXMAIAAAAABwa5dbI2KmzBDNBTQBEkjZv4sPaeRkRNejcjdVymRFKQVsACAAAAAA4ml/nm0gJNTcJ4vuD+T2Qfq2fQZlibJp/j6MOGDrbHMAAzM1AH0AAAAFZAAgA
AAAAOx89xV/hRk64/CkM9N2EMK6aldII0c8smdcsZ46NbP8BXMAIAAAAADBF6tfQ+7q9kTuLyuyrSnDgmrdmrXkdhl980i1KHuGHgVsACAAAAAACUqiFqHZdGbwAA+hN0YUE5zFg+H+dabIB4dj5/75W/YAAzM2AH0AAAAFZAAgAAAAAMkN0L1oQWXhjwn9rAdudcYeN8/5VdCKU8cmDt7BokjsBXMAIAAAAAAT62pGXoRwExe9uvgYOI0hg5tOxilrWfoEmT0SMglWJwVsACAAAAAAlVz4dhiprSbUero6JFfxzSJGclg63oAkAmgbSwbcYxIAAzM3AH0AAAAFZAAgAAAAANxfa4xCoaaB7k1C1RoH1LBhsCbN2yEq15BT9b+iqEC4BXMAIAAAAACAX9LV8Pemfw7NF0iB1/85NzM1Ef+1mUfyehacUVgobQVsACAAAAAAVq4xpbymLk0trPC/a2MvB39I7hRiX8EJsVSI5E5hSBkAAzM4AH0AAAAFZAAgAAAAAOYIYoWkX7dGuyKfi3XssUlc7u/gWzqrR9KMkikKVdmSBXMAIAAAAABVF2OYjRTGi9Tw8XCAwZWLpX35Yl271TlNWp6N/nROhAVsACAAAAAA0nWwYzXQ1+EkDvnGq+SMlq20z+j32Su+i/A95SggPb4AAzM5AH0AAAAFZAAgAAAAAIy0+bXZi10QC+q7oSOLXK5Fee7VEk/qHSXukfeVIfgzBXMAIAAAAAAQ3IIV/JQCHW95AEbH5zGIHtJqyuPjWPMIZ+VmQHlxEwVsACAAAAAAp0jYsyohKv9Pm+4k+DplEGbl9WLZpAJzitrcDj4CNsMAAzQwAH0AAAAFZAAgAAAAAL5SOJQ3LOhgdXJ5v086NNeAl1qonQnchObdpZJ1kHeEBXMAIAAAAAA+tEqTXODtik+ydJZSnUqXF9f18bPeze9eWtR7ExZJgQVsACAAAAAAbrkZCVgB9Qsp4IAbdf+bD4fT6Boqk5UtuA/zhNrh1y0AAzQxAH0AAAAFZAAgAAAAAKl8zcHJRDjSjJeV/WvMxulW1zrTFtaeBy/aKKhadc6UBXMAIAAAAADBdWQl5SBIvtZZLIHszePwkO14W1mQ0izUk2Ov21cPNAVsACAAAAAAHErCYycpqiIcCZHdmPL1hi+ovLQk4TAvENpfLdTRamQAAzQyAH0AAAAFZAAgAAAAAFvotcNaoKnVt5CBCOPwjexFO0WGWuaIGL6H/6KSau+6BXMAIAAAAAD2y2mBN5xPu5PJoY2zcr0GnQDtHRBogA5+xzIxccE9fwVsACAAAAAAdS34xzJesnUfxLCcc1U7XzUqLy8MAzV/tcjbqaD3lkMAAzQzAH0AAAAFZAAgAAAAAPezU0/vNT4Q4YKbTbaeHqcwNLT+IjW/Y9QFpIooihjPBXMAIAAAAACj2x4O4rHter8ZnTws5LAP9jJ/6kk9C/V3vL50LoFZHAVsACAAAAAAQdBDF3747uCVP5lB/zr8VmzxJfTSZHBKeIgm5FyONXwAAzQ0AH0AAAAFZAAgAAAAAMqpayM2XotEFmm0gwQd9rIzApy0X+7HfOhNk6VU7F5lBXMAIAAAAACJR9+q5T9qFHXFNgGbZnPubG8rkO6cwWhzITQTmd6VgwVsACAAAAAAOZLQ6o7e4mVfDzbpQioa4d3RoTvqwgnbmc5Qh2wsZuoAAzQ1AH0AAAAFZAAgAAAAANCeyW+3oebaQk+aqxNVhAcT/BZ5nhsTVdKS3tMrLSvWBXMAIAAAAADxRFMDhkyuEc++WnndMfoUMLNL7T7rWoeblcrpSI6soQVsACAAAAAAdBuBMJ1lxt0DRq9pOZldQqchLs3B/W02txcMLD490FEAAzQ2AH0AAAAFZAAgAAAAAIbo5YBTxXM7HQhl7UP9NNgpPGFkBx871r1B65G47+K8BXMAIAAAAAC21dJSxnEhnxO5gzN5/34BL4von45e1meW92qowzb8fQVsACAAAAAAm3Hk2cvBN0ANaR5jzeZE5TsdxDvJCTOT1I01X7cNVaYAAzQ3AH0AAAAFZAAgAAAAABm/6pF96j26Jm7z5KkY1y33zcAEXLx2n0DwC03bs/ixBXMAIAAAAAD01OMvTZI/mqMgxIhA5nLs068mW+GKl3OW3ilf2D8+LgVsACAAAAAAaLvJDrqBESTNZSdcXsd+8GXPl8ZkUsGpeYuyYVv/kygAAzQ4AH0AAAAFZAAgAAAAAJ/D3+17gaQdkBqkL2wMwccdmCaVOtxzIkM8VyI4xI5zBXMAIAAAAAAggLVmkc5u+YzBR+oNE+XgLVp64fC6MzUb/Ilu/Jsw0AVsACAAAAAACz3HVKdWkx82/kGbVpcbAeZtsj2R5Zr0dEPfle4IErkAAzQ5AH0AAAAFZAAgAAAAAJMRyUW50oaTzspS6A3TUoXyC3gNYQoShUGPakMmeVZrBXMAIAAAAACona2Pqwt4U2PmFrtmu37jB9kQ/12okyAVtYa8TQkDiQVsACAAAAAAltJJKjCMyBTJ+4PkdDCPJdeX695P8P5h7WOZ+kmExMAAAzUwAH0AAAAFZAAgAAAAAByuYl8dBvfaZ0LO/81JW4hYypeNmvLMaxsIdvqMPrWoBXMAIAAAAABNddwobOUJzm9HOUD8BMZJqkNCUCqstHZkC76FIdNg9AVsACAAAAAAQQOkIQtkyNavqCnhQbNg3HfqrJdsAGaoxSJePJl1qXsAAzUxAH0AAAAFZAAgAAAAAHEzLtfmF/sBcYPPdj8867VmmQyU1xK9I/3Y0478azvABXMAIAAAAAAcmyFajZPnBTbO+oLInNwlApBocUekKkxz2hYFeSlQ+gVsACAAAAAAZ6IkrOVRcC8vSA6Vb4fPWZJrYexXhEabIuYIeXNsCSgAAzUyAH0AAAAFZAAgAAAAAJam7JYsZe2cN20ZYm2W3v1pisNt5PLiniMzymBLWyMtBXMAIAAAAABxCsKVMZMTn3n+R2L7pVz5nW804r8HcK0mCBw3jUXKXAVsACAAAAAA7j3JGnNtR64P4dJLeUoScFRGfa8ekjh3dvhw46sRFk0AAzUzAH0AAAAFZAAgAAAAAMXrXx0saZ+5gORmwM2FLuZG6iuO2YS+1IGPoAtDKoKBBXMAIAAAAADIQsxCr8CfFKaBcx8kIeSywnGh7JHjKRJ9vJd9x79y7wVsACAAAAAAcvBV+SykDYhmRFyVYwFYB9oBKBSHr55Jdz2cXeowsUQAAzU0AH0AAAAFZAAgAAAAACbzcUD3INSnCRspOKF7ubne74OK9L0FTZvi9Ay0JVDYBXMAIAAAAADPebVQH8Btk9rhBIoUOdSAdpPvz7qIY4UC2i6IGisSAQVsACAAAAAAiBunJi0mPnnXdnldiq+If8dcb/n6apHnaIFt+oyYO1kAAzU1AH0AAAAFZAAgAAAAACUc2CtD1MK/UTxtv+8iA9FoHEyTwdl43HKeSwDw2Lp5BXMAIAAAAACCIduIdw65bQMzRYRfjBJj62bc69T4QqH4QoWanwlvowVsACAAAAAAM0TV
7S+aPVVzJOQ+cpSNKHTwyQ0mWa8tcHzfk3nR+9IAAzU2AH0AAAAFZAAgAAAAAHSaHWs/dnmI9sc7nB50VB2Bzs0kHapMHCQdyVEYY30TBXMAIAAAAACkV22lhEjWv/9/DubfHBAcwJggKI5mIbSK5L2nyqloqQVsACAAAAAAS19m7DccQxgryOsBJ3GsCs37yfQqNi1G+S6fCXpEhn4AAzU3AH0AAAAFZAAgAAAAAAL8jhNBG0KXXZhmZ0bPXtfgapJCB/AI+BEHB0eZ3C75BXMAIAAAAADHx/fPa639EBmGV5quLi8IQT600ifiKSOhTDOK19DnzwVsACAAAAAAlyLTDVkHxbayklD6Qymh3odIK1JHaOtps4f4HR+PcDgAAzU4AH0AAAAFZAAgAAAAAAxgeclNl09H7HvzD1oLwb2YpFca5eaX90uStYXHilqKBXMAIAAAAACMU5pSxzIzWlQxHyW170Xs9EhD1hURASQk+qkx7K5Y6AVsACAAAAAAJbMMwJfNftA7Xom8Bw/ghuZmSa3x12vTZxBUbV8m888AAzU5AH0AAAAFZAAgAAAAABmO7QD9vxWMmFjIHz13lyOeV6vHT6mYCsWxF7hb/yOjBXMAIAAAAACT9lmgkiqzuWG24afuzYiCeK9gmJqacmxAruIukd0xEAVsACAAAAAAZa0/FI/GkZR7CtX18Xg9Tn9zfxkD0UoaSt+pIO5t1t4AAzYwAH0AAAAFZAAgAAAAAB89SjLtDJkqEghRGyj6aQ/2qvWLNuMROoXmzbYbCMKMBXMAIAAAAAC8sywgND+CjhVTF6HnRQeay8y9/HnVzDI42dEPah28LQVsACAAAAAAoxv7UKh0RqUAWcOsQvU123zO1qZn73Xfib0qncZCB34AAzYxAH0AAAAFZAAgAAAAABN2alGq9Aats1mwERNGwL/fIwZSvVCe9/8XMHTFlpUpBXMAIAAAAACuDPjJgvvbBYhbLpjMiWUCsVppiYrhvR+yMysNPN8cZAVsACAAAAAAKpADjc4bzIZMi9Q/+oe0EMRJHYQt6dlo1x/lRquagqkAAzYyAH0AAAAFZAAgAAAAAL8YB6VAqGBiWD4CBv16IBscg5J7VQCTZu87n6pj+86KBXMAIAAAAAAmxm8e68geeyAdUjSMWBHzUjneVB0pG9TBXIoE6467hAVsACAAAAAAV76JZAlYpgC/Zl8awx2ArCg1uuyy2XVTSkp0wUMi/7UAAzYzAH0AAAAFZAAgAAAAAL4yLkCTV5Dmxa5toBu4JT8ge/cITAaURIOuFuOtFUkeBXMAIAAAAAAXoFNQOMGkAj7qEJP0wQafmFSXgWGeorDVbwyOxWLIsgVsACAAAAAAc4Un6dtIFe+AQ+RSfNWs3q63RTHhmyc+5GKRRdpWRv8AAzY0AH0AAAAFZAAgAAAAAEU8DoUp46YtYjNFS9kNXwdYxQ9IW27vCTb+VcqqfnKNBXMAIAAAAADe7vBOgYReE8X78k5ARuUnv4GmzPZzg6SbConf4L2G3wVsACAAAAAA78YHWVkp6HbZ0zS4UL2z/2pj9vPDcMDt7zTv6NcRsVsAAzY1AH0AAAAFZAAgAAAAAPa4yKTtkUtySuWo1ZQsp2QXtPb5SYqzA5vYDnS1P6c0BXMAIAAAAADKnF58R1sXlHlsHIvCBR3YWW/qk54z9CTDhZydkD1cOQVsACAAAAAAHW3ERalTFWKMzjuXF3nFh0pSrQxM/ojnPbPhc4v5MaQAAzY2AH0AAAAFZAAgAAAAAN5WJnMBmfgpuQPyonmY5X6OdRvuHw4nhsnGRnFAQ95VBXMAIAAAAACwftzu7KVV1rmGKwXtJjs3cJ1gE3apr8+N0SAg1F2cHwVsACAAAAAATDW0reyaCjbJuVLJzbSLx1OBuBoQu+090kgW4RurVacAAzY3AH0AAAAFZAAgAAAAACHvDsaPhoSb6DeGnKQ1QOpGYAgK82qpnqwcmzSeWaJHBXMAIAAAAABRq3C5+dOfnkAHM5Mg5hPB3O4jhwQlBgQWLA7Ph5bhgwVsACAAAAAAqkC8zYASvkVrp0pqmDyFCkPaDmD/ePAJpMuNOCBhni8AAzY4AH0AAAAFZAAgAAAAAOBePJvccPMJmy515KB1AkXF5Pi8NOG4V8psWy0SPRP+BXMAIAAAAAB3dOJG9xIDtEKCRzeNnPS3bFZepMj8UKBobKpSoCPqpgVsACAAAAAAPG3IxQVOdZrr509ggm5FKizWWoZPuVtOgOIGZ3m+pdEAAzY5AH0AAAAFZAAgAAAAABUvRrDQKEXLMdhnzXRdhiL6AGNs2TojPky+YVLXs+JnBXMAIAAAAAD1kYicbEEcPzD4QtuSYQQWDPq8fuUWGddpWayKn3dT9QVsACAAAAAA9+Sf7PbyFcY45hP9oTfjQiOUS3vEIAT8C0vOHymwYSUAAzcwAH0AAAAFZAAgAAAAAOvSnpujeKNen4pqc2HR63C5s5oJ1Vf4CsbKoYQvkwl5BXMAIAAAAACw2+vAMdibzd2YVVNfk81yXkFZP0WLJ82JBxJmXnYE+QVsACAAAAAArQ/E1ACyhK4ZyLqH9mNkCU7WClqRQTGyW9tciSGG/EMAAzcxAH0AAAAFZAAgAAAAAAo0xfGG7tJ3GWhgPVhW5Zn239nTD3PadShCNRc9TwdNBXMAIAAAAADZh243oOhenu0s/P/5KZLBDh9ADqKHtSWcXpO9D2sIjgVsACAAAAAAlgTPaoQKz+saU8rwCT3UiNOdG6hdpjzFx9GBn08ZkBEAABJjbQAAAAAAAAAAAAAQcGF5bG9hZElkAAAAAAAQZmlyc3RPcGVyYXRvcgABAAAAEnNwAAEAAAAAAAAAEHRmAAEAAAABbW4A////////7/8BbXgA////////738A", + "subType": "06" + } + } + } + }, + "update": { + "$set": { + "encryptedDoubleNoPrecision": { + "$$type": "binData" + } + } + }, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDoubleNoPrecision", + "bsonType": "double", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" 
+ } + } + } + ] + } + } + } + }, + "commandName": "findAndModify" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Double-InsertFind.json b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Double-InsertFind.json new file mode 100644 index 0000000000..cac8bcafea --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Double-InsertFind.json @@ -0,0 +1,1186 @@ +{ + "description": "fle2v2-Rangev2-Double-InsertFind", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted" + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll_unencrypted", + "database": "db_unencrypted", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDoubleNoPrecision", + "bsonType": "double", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + } + } + } + ] + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "FLE2 Range Double. 
Insert and Find.", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDoubleNoPrecision": { + "$numberDouble": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDoubleNoPrecision": { + "$numberDouble": "1" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDoubleNoPrecision": { + "$gt": { + "$numberDouble": "0" + } + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedDoubleNoPrecision": { + "$numberDouble": "1" + } + } + ] + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": 0, + "encryptedDoubleNoPrecision": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "6YrBn2ofIw1b5ooakrLOwF41BWrps8OO0H9WH4/rtlE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "n+XAuFnP8Dov9TnhGFxNx0K/MnVM9WbJ7RouEu0ndO0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yRXojuVdn5GQtD97qYlaCL6cOLmZ7Cvcb3wFjkLUIdM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "DuIkdRPITRs55I4SZmgomAHCIsDQmXRhW8+MOznkzSk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SsBk+Et1lTbU+QRPx+xyJ/jMkmfG+QCvQEpip2YYrzA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "crCIzOd8KhHvvUlX7M1v9bhvU4pLdTc+X2SuqoKU5Ek=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "YOWdCw4UrqnxkAaVjqmC4sKQDMVMHEpFGnlxpxdaU6E=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "M3SShp81Ff8tQ632qKbv9MUcN6wjDaBReI0VXNu6Xh4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "gzHlSPxpM0hT75kQvWFzGlOxKvDoiKQZOr19V6l2zXI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "s3JnppOGYw9SL2Q1kMAZs948v2F5PrpXjGei/HioDWs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "cG6+3Gk/zEH68P/uuuwiAUVCuyJwa1LeV+t29FlPPAo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dupdvR3AyJtM+g9NDKiaLVOtGca387JQp8w+V03m7Ig=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "JqEQc5svj2jTvZ6LLA5ivE+kTb/0aRemSEmxk4G7Zrg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "szcXXXKnob+p3SoM4yED2R920LeJ7cVsclPMFTe4CeI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "o1QoGVXmuBdHwHm7aCtGMlMVKrjFdYvJXpoq6uhIAZ0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Jfm5wPlqqLCJRGQIqRq2NGmpn7s0Vrih2H3YAOoI2YU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "zMHLb8ARbsYo8Ld05bqnGFf1Usha6EGb8QKwdSAyps0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yQdtq9lh5pugL7/i0Bj/PuZUUBUIzf+7wj1rl5y736w=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "wGWVZdO7qIuyDg/BqDgqjgoQ02h5YYgwXQB1oCin2NE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "by9HMLj6NTEpgztZ5HSN6GxImkXPcaFINYDzgZY33X8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tWo0vbasi7bXmn/MsOx13VC1IsWtpx/nYp0uj4iMzdA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tQQpndUYd5O87lOtrGjH3wl9VsOK0ray7RMasL90sBM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "cQjXEDCMsOpKLLf+vlTgIHA+cbSJdzqhbSX9Wvh95aA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "7yMpU48IxK9SzP2cx3VnTownGEwFmeFofuuFT97SuuY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "kSOx1kz0CmBgzKQHZlo65ZUY1DIv9A99JRm+Us2y6Ew=", + "subType": "00" + } + }, + { + "$binary": { + "base64": 
"ubQpdPBe6/xvtr+AcXdfYLSvYCR4ot0tivehkCsupb4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xal+iCJ6FTefRQToyoNksc9NCZShyn04NDGi4IYrcoM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "d7jU4iOK50xHxlkSifcxlZFCM46TSgQzoYivxG3HNLY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tJvl2nsBLBVzL3pp6sKWCL4UXeh3q/roYBJjSb74ve0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "OIUCaKRvIx9t1w6Hxlz1IcQTdPNCfdRNwnnTm10W+X0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "A9tvzsiElotOUVIB4CqfQp9mAwqvTM35YkmAR170aHA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "lI8gpK7hpb7c9x4RQugsxMnQay5LZJmwslZdvMx/dcE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dNCzh40U0XvdKnSDi3HRQOWQftEsDVqc4uUvsVFGoq8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "IP+iwEBWBwVVZIdpaMu8k5+soFCz+TZkYn3drKZ9grE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "pnqyh6e0y5svHkJDShlN9CHV0WvMBE4QbtJpQw5ZCXc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "elEl42tbVDoRTLjAhZUFEtXiut4b3PVhg/1ZLZSQdtE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "vHuu2FxwclMHqyE6JBYbTYgbEkB0dqb/JuaxsvfwsmY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xTf7NCe3Gf8QpE78HR5OknlLTKfs9J+RN9UZpH6fnso=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "XiWSasRnJAulGR6+LCVD3mwRObXylqYWR9jvpywq12c=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "MZMxEQ5ikx0PG1YFIExv0UnTZogsvgeOEZTpzvBDn4w=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yZMyMZBDrWbAhvnic7vvIYhmO9m5H2iuv0c8KNZrBzY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xxM14hTPY5j0vvcK2C7YAEjzdsfUTFHozHC0hEo1bxI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "+01rqR1xVwkpGXcstbk1ItJqFVjH6Q8MGxEN3Cm9Y1A=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xOpLV0Z2VTRJ3iWtnWZcsyjXubTIkYWo31cO+HV1o1k=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "BWUOLqgLBqc5NwxVlSV5H3KFQPXbCp7mdo+jF+8cJqY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "fuQb1S6xZDGlrEbK+kI23aL53PP1PVNwqICnZNt9Yzg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SfscnoibFttahLdPVC4Ee+47ewGFKpDSU7M6HX19bKE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "rpSW2awybNVeKtat91VFxqbINoTfNhPfQAu+d73Xtf8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "9M/CP9ccOIIj2LLFmE0GFDO0Ban2wsNalEXfM6+h+1s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "WrEMG49l1ye4MhXs5ZS9tz8P6h+hDvthIg/2wW9ne1Q=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ImNhbfeyfH8qIEeA5ic0s3dAQBdzzTBS+CPsNih9vZ0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dWP33YDSn04UKJN2ogh2Rui0iW/0q2y18OCDRVcfyoo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "lYv0isAtfGh6H9tdp3cp2eHU7q2J+uk7QrgcxtK3w7Y=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "VGMoamB/+7zTOYcY/pqJc96xlv2PdW4hwsIAEIslTDQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yNeBWMF7BnD9wVwz2PgJsvWr77QiVvvWUvJF0+fqBug=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SfpvObJ+tJBXSvqeN7vlOfmhYign635lciYAJIjUtY8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dsen4NqjzVGjpjufiTMs3+gqeD09EbnuogPgxrJECwg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "pxCWVM3sn19NsFEpgHbgLa+PmYlhN3mMiP0Wk8kJhYw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "q11KNvJszjYIB9n9HcC+N4uz11a3eRj1L3BH9scKMDQ=", + 
"subType": "00" + } + }, + { + "$binary": { + "base64": "A1PmkgcEToWh1JiVWE6mI5jUu7poxWWuCUt/cgRUUDc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "qJo3Hu4PJeanL7XEaWXO/n3YsodhZyd+MJOOmB9Kpd8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "BkBKLO8URFscfRY9Bav/1+L9mLohDgNr/MkZtGiraIs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "rZq5WA3Hx3xthOyHAJXK//f8pE2qbz7YKu3TIMp9GFY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "X07a/Lm80p5xd4RFs1dNmw+90tmPDPdGiAKVZkxd4zY=", + "subType": "00" + } + } + ] + }, + { + "_id": 1, + "encryptedDoubleNoPrecision": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "2FIZh/9N+NeJEQwxYIX5ikQT85xJzulBNReXk8PnG/s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "I93Md7QNPGmEEGYU1+VVCqBPBEvXdqHPtTJtMOn06Yk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "GecBFQ1PemlECWZWCl7f74vmsL6eB6mzQ9n6tK6FYfs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "QpjhZl+O1ORifgtCZuWAdcP6OKL7IZ2cA46v8FJcV28=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "FWXI/yZ1M+2fIboeMCDMlp+I2NwPQDtoM/wWselOPYw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "uk26nvN/LdRLaBphiBgIZzT0sSpoO1z0RdDWRm/xrSA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "hiiYSH1KZovAULc7rlmEU74wCjzDR+mm6ZnsgvFQjMw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "hRzvMvWPX0sJme+wck67lwbKDFaWOa+Eyef+JSdc1s4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "PSx5D+zqC9c295dguX4+EobT4IEzfffdfjzC8DWpB5Q=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "QzfXQCVTjPQv2h21v95HYPq8uCsVJ2tPnjv79gAaM9M=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "XcGDO/dlTcEMLqwcm55UmOqK+KpBmbzZO1LIzX7GPaQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Lf+o4E7YB5ynzUPC6KTyW0lj6Cg9oLIu1Sdd1ODHctA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "wAuVn02LAVo5Y+TUocvkoenFYWzpu38k0NmGZOsAjS4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yJGDtveLbbo/0HtCtiTSsvVI/0agg/U1bFaQ0yhK12o=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "KsEy0zgYcmkM+O/fWF9z3aJGIk22XCk+Aw96HB6JU68=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "p+AnMI5ZxdJMSIEJmXXya+FeH5yubmOdViwUO89j0Rc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "/jLix56jzeywBtNuGw55lCXyebQoSIhbful0hOKxKDY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "fvDvSPomtJsl1S3+8/tzFCE8scHIdJY5hB9CdTEsoFo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "oV5hOJzPXxfTuRdKIlF4uYEoMDuqH+G7/3qgndDr0PM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "3ALwcvLj3VOfgD6OqXAO13h1ZkOv46R6+Oy6SUKh53I=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "gxaB9FJj0IM+InhvAjwWaex3UIZ9SAnDiUd5WHSY/l0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "66NPvDygJzKJqddfNuDuNOpvGajjFRtvhkwfUkiYmXw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "1dWcQIocRAcO9XnXYqbhl83jc0RgjQpsrWd8dC27trg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "npos0Uf1DT3ztSCjPVY9EImlRnTHB1KLrvmVSqBQ/8E=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "TEI9qBx/tK1l1H0v1scMG8Srmtwo5VxWHADPBSlWrXk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "3wUN2ypQKoj+5ASkeIK9ycxhahVxyTmGopigoUAlyYs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "o/oksSnUS+nIq6ozWTbB5bJh+NoaPj8deAA23uxiWCk=", + "subType": "00" + } + }, + { + "$binary": { + 
"base64": "KExYPruhA31e8xuSwvfUfDcyY/H2Va6taUd0k4yFgLc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "/x+dNfxdd/lkx8Z8VZVfoYl7LPoaZ/iKEzZXBrAtIJc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "DE4cmjFLPqZlmRomO0qQiruUBtzoCe8ZdNRcfNH92pU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "M6EKNcLPw/iojAChgYUSieaBYWcbsjKtB94SaHOr8vk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "+qP49lDPeyhaduTvXJgtJEqHNEYANVu9Bg3Bxz7Td9w=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ruMrC2VIS+VKbJwCFb3bfkaLTju9nE+yPONV9s0M0Vo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "EbjDlSB5JKnDKff4d8hOmaOwJ7B9Q6NQFisLj+DPC+0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "C/yYOTB94edyqAbiQNu8/H7FoG3yRRjHDkMykz4+Mv0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "CBxqrejG+qQQq2YTd6iP/06kiu2CxxzBFaZK3Ofb1CM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "2ZOQ/fpho+AbDENWBZaln7wRoepIRdhyT648dr8O5cU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "EghIgEPz01+myPgj8oid+PgncvobvC7vjvG3THEEQ0M=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "92CysZYNF8riwAMhdrIPKxfODw9p07cKQy/Snn8XmVY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "VO0LeTBQmsEf7sCHzTnZwUPNTqRZ49R8V5E9XnZ/5N4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "exs8BQMJq7U6ZXYgIizT7XN+X/hOmmn4YEuzev9zgSI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "qHpS4k1I+gPniNp4CA8TY8lLN36vBYmgbKMFpbYMEqg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "+7lWKCKAWFw6gPZdHE6E8KIfI14/fSvtWUmllb5WLi0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "YiH/US0q6679hWblFDDKNqUjCgggoU8sUCssTIF1QbU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "YgwkKElEubNfvXL9hJxzqQUQtHiXN/OCGxNL1MUZZlM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "hZFST4INZTTuhvJlGJeMwlUAK270UCOTCDeBAnN4a7g=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "24I1Zw35AuGnK3CqJhbCwYb0IPuu5sCRrM5iyeITOLc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "vgD12JB4Q1S/kGPSQ1KOgp386KnG1GbM/5+60oRGcGw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "+wNE+OL+CB9d4AUJdVxd56jUJCAXmmk9fapuB2TAc4g=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "uhQh1B2Pe4RkNw/kPEcgaLenuikKoRf1iyfZhpXdodc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "eu8gjAUIp8ybO204AgeOq5v1neI1yljqy5v3I6lo1lM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "7QG6oVbASBAjrnCPxzzUNnuFSFNlKhbuBafkF8pr7Is=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "PUS1xb2oHSDTdYltutoSSxBiJ1NjxH3l2kA4P1CZLEs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "XPMh/JDC/O93gJJCwwgJDb8ssWZvRvezNmKmyn3nIfk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "jWz+KGwMk/GOvFAK2rOxF3OjxeZAWfmUQ1HGJ7icw4A=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "o7XbW68pc6flYigf3LW4WAGUWxpeqxaQLkHUhUR9RZ8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "nqR+g60+5U0okbqJadSqGgnC+j1JcP8rwMcfzOs2ACI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Hz43qVK95tSfbYFtaE/8fE97XMk1RiO8XpWjwZHB80o=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "noZUWlZ8M6KXU5rkifyo8/duw5IL7/fXbJvT7bNmW9k=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "WONVHCuPSanXDRQQ/3tmyJ0Vq+Lu/4hRaMUf0g0kSuw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": 
"UEaj6vQRoIghE8Movd8AGXhtwIOXlP4cBsECIUvE5Y8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "D3n2YcO8+PB4C8brDo7kxKjF9Y844rVkdRMLTgsQkrw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "C+YA0G9KjxZVaWwOMuh/dcnHnHAlYnbFrRl0IEpmsY0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "rUnmbmQanxrbFPYYrwyQ53x66OSt27yAvF+s48ezKDc=", + "subType": "00" + } + } + ] + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 0, + "encryptedDoubleNoPrecision": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDoubleNoPrecision", + "bsonType": "double", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encryptedDoubleNoPrecision": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDoubleNoPrecision", + "bsonType": "double", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "default", + "filter": { + "encryptedDoubleNoPrecision": { + "$gt": { + "$binary": { + "base64": 
"DbMkAAADcGF5bG9hZABXJAAABGcAQyQAAAMwAH0AAAAFZAAgAAAAAHgYoMGjEE6fAlAhICv0+doHcVX8CmMVxyq7+jlyGrvmBXMAIAAAAAC/5MQZgTHuIr/O5Z3mXPvqrom5JTQ8IeSpQGhO9sB+8gVsACAAAAAAuPSXVmJUAUpTQg/A9Bu1hYczZF58KEhVofakygbsvJQAAzEAfQAAAAVkACAAAAAA2kiWNvEc4zunJ1jzvuClFC9hjZMYruKCqAaxq+oY8EAFcwAgAAAAACofIS72Cm6s866UCk+evTH3CvKBj/uZd72sAL608rzTBWwAIAAAAADuCQ/M2xLeALF0UFZtJb22QGOhHmJv6xoO+kZIHcDeiAADMgB9AAAABWQAIAAAAABkfoBGmU3hjYBvQbjNW19kfXneBQsQQPRfUL3UAwI2cAVzACAAAAAAUpK2BUOqX/DGdX5YJniEZMWkofxHqeAbXceEGJxhp8AFbAAgAAAAAKUaLzIldNIZv6RHE+FwbMjzcNHqPESwF/37mm43VPrsAAMzAH0AAAAFZAAgAAAAAFNprhQ3ZwIcYbuzLolAT5n/vc14P9kUUQComDu6eFyKBXMAIAAAAAAcx9z9pk32YbPV/sfPZl9ALIEVsqoLXgqWLVK/tP+heAVsACAAAAAA/qxvuvJbAHwwhfrPVpmCFzNvg2cU/NXaWgqgYUZpgXwAAzQAfQAAAAVkACAAAAAAODI+pB2pCuB+YmNEUAgtMfNdt3DmSkrJ96gRzLphgb8FcwAgAAAAAAT7dewFDxUDECQ3zVq75/cUN4IP+zsqhkP5+czUwlJIBWwAIAAAAACFGeOtd5zBXTJ4JYonkn/HXZfHipUlqGwIRUcH/VTatwADNQB9AAAABWQAIAAAAACNAk+yTZ4Ewk1EnotQK8O3h1gg9I7pr9q2+4po1iJVgAVzACAAAAAAUj/LesmtEsgqYVzMJ67umVA11hJTdDXwbxDoQ71vWyUFbAAgAAAAABlnhpgTQ0WjLb5u0b/vEydrCeFjVynKd7aqb+UnvVLeAAM2AH0AAAAFZAAgAAAAAD/FIrGYFDjyYmVb7oTMVwweWP7A6F9LnyIuNO4MjBnXBXMAIAAAAACIZgJCQRZu7NhuNMyOqCn1tf+DfU1qm10TPCfj5JYV3wVsACAAAAAA5hmY4ptuNxULGf87SUFXQWGAONsL9U29duh8xqsHtxoAAzcAfQAAAAVkACAAAAAAciRW40ORJLVwchOEpz87Svb+5toAFM6LxDWv928ECwQFcwAgAAAAAN0dipyESIkszfjRzdDi8kAGaa2Hf4wrPAtiWwboZLuxBWwAIAAAAAANr4o/+l1OIbbaX5lZ3fQ/WIeOcEXjNI1F0WbSgQrzaQADOAB9AAAABWQAIAAAAACZqAyCzYQupJ95mrBJX54yIz9VY7I0WrxpNYElCI4dTQVzACAAAAAA/eyJb6d1xfE+jJlVXMTD3HS/NEYENPVKAuj56Dr2dSEFbAAgAAAAANkSt154Or/JKb31VvbZFV46RPgUp8ff/hcPORL7PpFBAAM5AH0AAAAFZAAgAAAAAI5bm3YO0Xgf0VT+qjVTTfvckecM3Cwqj7DTKZXf8/NXBXMAIAAAAAD/m+h8fBhWaHm6Ykuz0WX1xL4Eme3ErLObyEVJf8NCywVsACAAAAAAfb1VZZCqs2ivYbRzX4p5CtaCkKW+g20Pr57FWXzEZi8AAzEwAH0AAAAFZAAgAAAAANqo4+p6qdtCzcB4BX1wQ6llU7eFBnuu4MtZwp4B6mDlBXMAIAAAAAAGiz+VaukMZ+6IH4jtn4KWWdKK4/W+O+gRioQDrfzpMgVsACAAAAAAG4YYkTp80EKo59mlHExDodRQFR7njhR5dmISwUJ6ukAAAzExAH0AAAAFZAAgAAAAAPrFXmHP2Y4YAm7b/aqsdn/DPoDkv7B8egWkfe23XsM1BXMAIAAAAAAGhwpKAr7skeqHm3oseSbO7qKNhmYsuUrECBxJ5k+D2AVsACAAAAAAAqPQi9luYAu3GrFCEsVjd9z2zIDcp6SPTR2w6KQEr+IAAzEyAH0AAAAFZAAgAAAAABzjYxwAjXxXc0Uxv18rH8I3my0Aguow0kTwKyxbrm+cBXMAIAAAAADVbqJVr6IdokuhXkEtXF0C2gINLiAjMVN20lE20Vmp2QVsACAAAAAAD7K1Fx4gFaaizkIUrf+EGXQeG7QX1jadhGc6Ji471H8AAzEzAH0AAAAFZAAgAAAAAFMm2feF2fFCm/UC6AfIyepX/xJDSmnnolQIBnHcPmb5BXMAIAAAAABLI11kFrQoaNVZFmq/38aRNImPOjdJh0Lo6irI8M/AaAVsACAAAAAAOWul0oVqJ9CejD2RqphhTC98DJeRQy5EwbNerU2+4l8AAzE0AH0AAAAFZAAgAAAAAJvXB3KyNiNtQko4SSzo/9b2qmM2zU9CQTTDfLSBWMgRBXMAIAAAAAAvjuVP7KsLRDeqVqRziTKpBrjVyqKiIbO9Gw8Wl2wFTAVsACAAAAAADlE+oc1ins+paNcaOZJhBlKlObDJ4VQORWjFYocM4LgAAzE1AH0AAAAFZAAgAAAAAPGdcxDiid8z8XYnfdDivNMYVPgBKdGOUw6UStU+48CdBXMAIAAAAAARj6g1Ap0eEfuCZ4X2TsEw+Djrhto3fA5nLwPaY0vCTgVsACAAAAAAoHqiwGOUkBu8SX5U1yHho+UIFdSN2MdQN5s6bQ0EsJYAAzE2AH0AAAAFZAAgAAAAAP5rGPrYGt3aKob5f/ldP0qrW7bmWvqnKY4QwdDWz400BXMAIAAAAADTQkW2ymaaf/bhteOOGmSrIR97bAnJx+yN3yMj1bTeewVsACAAAAAADyQnHGH2gF4w4L8axUsSTf6Ubk7L5/eoFOJk12MtZAoAAzE3AH0AAAAFZAAgAAAAAAlz6wJze5UkIxKpJOZFGCOf3v2KByWyI6NB6JM9wNcBBXMAIAAAAABUC7P/neUIHHoZtq0jFVBHY75tSFYr1Y5S16YN5XxC1QVsACAAAAAAgvxRbXDisNnLY3pfsjDdnFLtkvYUC4lhA68eBXc7KAwAAzE4AH0AAAAFZAAgAAAAAFJ8AtHcjia/9Y5pLEc3qVgH5xKiXw12G9Kn2A1EY8McBXMAIAAAAAAxe7Bdw7eUSBk/oAawa7uicTEDgXLymRNhBy1LAxhDvwVsACAAAAAAxKPaIBKVx3jTA+R/el7P7AZ7efrmTGjJs3Hj/YdMddwAAzE5AH0AAAAFZAAgAAAAAO8uwQUaKFb6vqR3Sv3Wn4QAonC2exOC9lGG1juqP5DtBXMAIAAAAABZf1KyJgQg8/Rf5c02DgDK2aQu0rNCOvaL60ohDHyY+gVsACAAAAAAqyEjfKC8lYoIfoXYHUqHZPoaA6EK5BAZy5dxXZmay4kAAzIwAH0AAAAFZAAgAAAAAE8YtqyRsGCeiR6hhiyisR/hccmK4nZqIMzO4lUBmEFzBXMAIAAAAAC1UYOSKqAeG1UJiKjWFV
skRhuFKpj9Ezy+lICZvFlN5AVsACAAAAAA6Ct9nNMKyRazn1OKnRKagm746CGu+jyhbL1qJnZxGi0AAzIxAH0AAAAFZAAgAAAAAPhCrMausDx1QUIEqp9rUdRKyM6a9AAx7jQ3ILIu8wNIBXMAIAAAAACmH8lotGCiF2q9VQxhsS+7LAZv79VUAsOUALaGxE/EpAVsACAAAAAAnc1xCKfdvbUEc8F7XZqlNn1C+hZTtC0I9I3LL06iaNkAAzIyAH0AAAAFZAAgAAAAAOBi/GAYFcstMSJPgp3VkMiuuUUCrZytvqYaU8dwm8v2BXMAIAAAAACEZSZVyD3pKzGlbdwlYmWQhHHTV5SnNLknl2Gw8IaUTQVsACAAAAAAfsLZsEDcWSuNsIo/TD1ReyQW75HPMgmuKZuWFOLKRLoAAzIzAH0AAAAFZAAgAAAAAIQuup+YGfH3mflzWopN8J1X8o8a0d9CSGIvrA5HOzraBXMAIAAAAADYvNLURXsC2ITMqK14LABQBI+hZZ5wNf24JMcKLW+84AVsACAAAAAACzfjbTBH7IwDU91OqLAz94RFkoqBOkzKAqQb55gT4/MAAzI0AH0AAAAFZAAgAAAAAKsh0ADyOnVocFrOrf6MpTrNvAj8iaiE923DPryu124gBXMAIAAAAADg24a8NVE1GyScc6tmnTbmu5ulzO+896fE92lN08MeswVsACAAAAAAaPxcOIxnU7But88/yadOuDJDMcCywwrRitaxMODT4msAAzI1AH0AAAAFZAAgAAAAAKkVC2Y6HtRmv72tDnPUSjJBvse7SxLqnr09/Uuj9sVVBXMAIAAAAABYNFUkH7ylPMN+Bc3HWX1e0flGYNbtJNCY9SltJCW/UAVsACAAAAAAZYK/f9H4OeihmpiFMH7Wm7uLvs2s92zNA8wyrNZTsuMAAzI2AH0AAAAFZAAgAAAAADDggcwcb/Yn1Kk39sOHsv7BO/MfP3m/AJzjGH506Wf9BXMAIAAAAAAYZIsdjICS0+BDyRUPnrSAZfPrwtuMaEDEn0/ijLNQmAVsACAAAAAAGPnYVvo2ulO9z4LGd/69NAklfIcZqZvFX2KK0s+FcTUAAzI3AH0AAAAFZAAgAAAAAEWY7dEUOJBgjOoWVht1wLehsWAzB3rSOBtLgTuM2HC8BXMAIAAAAAAAoswiHRROurjwUW8u8D5EUT+67yvrgpB/j6PzBDAfVwVsACAAAAAA6NhRTYFL/Sz4tao7vpPjLNgAJ0FX6P/IyMW65qT6YsMAAzI4AH0AAAAFZAAgAAAAAPZaapeAUUFPA7JTCMOWHJa9lnPFh0/gXfAPjA1ezm4ZBXMAIAAAAACmJvLY2nivw7/b3DOKH/X7bBXjJwoowqb1GtEFO3OYgAVsACAAAAAA/JcUoyKacCB1NfmH8vYqC1f7rd13KShrQqV2r9QBP44AAzI5AH0AAAAFZAAgAAAAAK00u6jadxCZAiA+fTsPVDsnW5p5LCr4+kZZZOTDuZlfBXMAIAAAAAAote4zTEYMDgaaQbAdN8Dzv93ljPLdGjJzvnRn3KXgtQVsACAAAAAAxXd9Mh6R3mnJy8m7UfqMKi6oD5DlZpkaOz6bEjMOdiwAAzMwAH0AAAAFZAAgAAAAAFbgabdyymiEVYYwtJSWa7lfl/oYuj/SukzJeDOR6wPVBXMAIAAAAADAFGFjS1vPbN6mQEhkDYTD6V2V23Ys9gUEUMGNvMPkaAVsACAAAAAAL/D5Sze/ZoEanZLK0IeEkhgVkxEjMWVCfmJaD3a8uNIAAzMxAH0AAAAFZAAgAAAAABNMR6UBv2E627CqLtQ/eDYx7OEwQ7JrR4mSHFa1N8tLBXMAIAAAAAAxH4gucI4UmNVB7625C6hFSVCuIpJO3lusJlPuL8H5EQVsACAAAAAAVLHNg0OUVqZ7WGOP53BkTap9FOw9dr1P4J8HxqFqU04AAzMyAH0AAAAFZAAgAAAAAG8cd6WBneNunlqrQ2EmNf35W7OGObGq9WL4ePX+LUDmBXMAIAAAAAAjJ2+sX87NSis9hBsgb1QprVRnO7Bf+GObCGoUqyPE4wVsACAAAAAAs9c9SM49/pWmyUQKslpt3RTMBNSRppfNO0JBvUqHPg0AAzMzAH0AAAAFZAAgAAAAAFWOUGkUpy8yf6gB3dio/aOfRKh7XuhvsUj48iESFJrGBXMAIAAAAAAY7sCDMcrUXvNuL6dO0m11WyijzXZvPIcOKob6IpC4PQVsACAAAAAAJOP+EHz6awDb1qK2bZQ3kTV7wsj5Daj/IGAWh4g7omAAAzM0AH0AAAAFZAAgAAAAAGUrIdKxOihwNmo6B+aG+Ag1qa0+iqdksHOjQj+Oy9bZBXMAIAAAAABwa5dbI2KmzBDNBTQBEkjZv4sPaeRkRNejcjdVymRFKQVsACAAAAAA4ml/nm0gJNTcJ4vuD+T2Qfq2fQZlibJp/j6MOGDrbHMAAzM1AH0AAAAFZAAgAAAAAOx89xV/hRk64/CkM9N2EMK6aldII0c8smdcsZ46NbP8BXMAIAAAAADBF6tfQ+7q9kTuLyuyrSnDgmrdmrXkdhl980i1KHuGHgVsACAAAAAACUqiFqHZdGbwAA+hN0YUE5zFg+H+dabIB4dj5/75W/YAAzM2AH0AAAAFZAAgAAAAAMkN0L1oQWXhjwn9rAdudcYeN8/5VdCKU8cmDt7BokjsBXMAIAAAAAAT62pGXoRwExe9uvgYOI0hg5tOxilrWfoEmT0SMglWJwVsACAAAAAAlVz4dhiprSbUero6JFfxzSJGclg63oAkAmgbSwbcYxIAAzM3AH0AAAAFZAAgAAAAANxfa4xCoaaB7k1C1RoH1LBhsCbN2yEq15BT9b+iqEC4BXMAIAAAAACAX9LV8Pemfw7NF0iB1/85NzM1Ef+1mUfyehacUVgobQVsACAAAAAAVq4xpbymLk0trPC/a2MvB39I7hRiX8EJsVSI5E5hSBkAAzM4AH0AAAAFZAAgAAAAAOYIYoWkX7dGuyKfi3XssUlc7u/gWzqrR9KMkikKVdmSBXMAIAAAAABVF2OYjRTGi9Tw8XCAwZWLpX35Yl271TlNWp6N/nROhAVsACAAAAAA0nWwYzXQ1+EkDvnGq+SMlq20z+j32Su+i/A95SggPb4AAzM5AH0AAAAFZAAgAAAAAIy0+bXZi10QC+q7oSOLXK5Fee7VEk/qHSXukfeVIfgzBXMAIAAAAAAQ3IIV/JQCHW95AEbH5zGIHtJqyuPjWPMIZ+VmQHlxEwVsACAAAAAAp0jYsyohKv9Pm+4k+DplEGbl9WLZpAJzitrcDj4CNsMAAzQwAH0AAAAFZAAgAAAAAL5SOJQ3LOhgdXJ5v086NNeAl1qonQnchObdpZJ1kHeEBXMAIAAAAAA+tEqTXODtik+ydJZSnUqXF9f18bPeze9eWtR7ExZJgQVsACAAAAAAbrkZCVgB9Qsp4IAbdf+bD4fT6Boqk5UtuA/zhNrh1y0AAzQxAH0AAAAFZAAgAAAAAKl8zcHJRDjSjJeV/
WvMxulW1zrTFtaeBy/aKKhadc6UBXMAIAAAAADBdWQl5SBIvtZZLIHszePwkO14W1mQ0izUk2Ov21cPNAVsACAAAAAAHErCYycpqiIcCZHdmPL1hi+ovLQk4TAvENpfLdTRamQAAzQyAH0AAAAFZAAgAAAAAFvotcNaoKnVt5CBCOPwjexFO0WGWuaIGL6H/6KSau+6BXMAIAAAAAD2y2mBN5xPu5PJoY2zcr0GnQDtHRBogA5+xzIxccE9fwVsACAAAAAAdS34xzJesnUfxLCcc1U7XzUqLy8MAzV/tcjbqaD3lkMAAzQzAH0AAAAFZAAgAAAAAPezU0/vNT4Q4YKbTbaeHqcwNLT+IjW/Y9QFpIooihjPBXMAIAAAAACj2x4O4rHter8ZnTws5LAP9jJ/6kk9C/V3vL50LoFZHAVsACAAAAAAQdBDF3747uCVP5lB/zr8VmzxJfTSZHBKeIgm5FyONXwAAzQ0AH0AAAAFZAAgAAAAAMqpayM2XotEFmm0gwQd9rIzApy0X+7HfOhNk6VU7F5lBXMAIAAAAACJR9+q5T9qFHXFNgGbZnPubG8rkO6cwWhzITQTmd6VgwVsACAAAAAAOZLQ6o7e4mVfDzbpQioa4d3RoTvqwgnbmc5Qh2wsZuoAAzQ1AH0AAAAFZAAgAAAAANCeyW+3oebaQk+aqxNVhAcT/BZ5nhsTVdKS3tMrLSvWBXMAIAAAAADxRFMDhkyuEc++WnndMfoUMLNL7T7rWoeblcrpSI6soQVsACAAAAAAdBuBMJ1lxt0DRq9pOZldQqchLs3B/W02txcMLD490FEAAzQ2AH0AAAAFZAAgAAAAAIbo5YBTxXM7HQhl7UP9NNgpPGFkBx871r1B65G47+K8BXMAIAAAAAC21dJSxnEhnxO5gzN5/34BL4von45e1meW92qowzb8fQVsACAAAAAAm3Hk2cvBN0ANaR5jzeZE5TsdxDvJCTOT1I01X7cNVaYAAzQ3AH0AAAAFZAAgAAAAABm/6pF96j26Jm7z5KkY1y33zcAEXLx2n0DwC03bs/ixBXMAIAAAAAD01OMvTZI/mqMgxIhA5nLs068mW+GKl3OW3ilf2D8+LgVsACAAAAAAaLvJDrqBESTNZSdcXsd+8GXPl8ZkUsGpeYuyYVv/kygAAzQ4AH0AAAAFZAAgAAAAAJ/D3+17gaQdkBqkL2wMwccdmCaVOtxzIkM8VyI4xI5zBXMAIAAAAAAggLVmkc5u+YzBR+oNE+XgLVp64fC6MzUb/Ilu/Jsw0AVsACAAAAAACz3HVKdWkx82/kGbVpcbAeZtsj2R5Zr0dEPfle4IErkAAzQ5AH0AAAAFZAAgAAAAAJMRyUW50oaTzspS6A3TUoXyC3gNYQoShUGPakMmeVZrBXMAIAAAAACona2Pqwt4U2PmFrtmu37jB9kQ/12okyAVtYa8TQkDiQVsACAAAAAAltJJKjCMyBTJ+4PkdDCPJdeX695P8P5h7WOZ+kmExMAAAzUwAH0AAAAFZAAgAAAAAByuYl8dBvfaZ0LO/81JW4hYypeNmvLMaxsIdvqMPrWoBXMAIAAAAABNddwobOUJzm9HOUD8BMZJqkNCUCqstHZkC76FIdNg9AVsACAAAAAAQQOkIQtkyNavqCnhQbNg3HfqrJdsAGaoxSJePJl1qXsAAzUxAH0AAAAFZAAgAAAAAHEzLtfmF/sBcYPPdj8867VmmQyU1xK9I/3Y0478azvABXMAIAAAAAAcmyFajZPnBTbO+oLInNwlApBocUekKkxz2hYFeSlQ+gVsACAAAAAAZ6IkrOVRcC8vSA6Vb4fPWZJrYexXhEabIuYIeXNsCSgAAzUyAH0AAAAFZAAgAAAAAJam7JYsZe2cN20ZYm2W3v1pisNt5PLiniMzymBLWyMtBXMAIAAAAABxCsKVMZMTn3n+R2L7pVz5nW804r8HcK0mCBw3jUXKXAVsACAAAAAA7j3JGnNtR64P4dJLeUoScFRGfa8ekjh3dvhw46sRFk0AAzUzAH0AAAAFZAAgAAAAAMXrXx0saZ+5gORmwM2FLuZG6iuO2YS+1IGPoAtDKoKBBXMAIAAAAADIQsxCr8CfFKaBcx8kIeSywnGh7JHjKRJ9vJd9x79y7wVsACAAAAAAcvBV+SykDYhmRFyVYwFYB9oBKBSHr55Jdz2cXeowsUQAAzU0AH0AAAAFZAAgAAAAACbzcUD3INSnCRspOKF7ubne74OK9L0FTZvi9Ay0JVDYBXMAIAAAAADPebVQH8Btk9rhBIoUOdSAdpPvz7qIY4UC2i6IGisSAQVsACAAAAAAiBunJi0mPnnXdnldiq+If8dcb/n6apHnaIFt+oyYO1kAAzU1AH0AAAAFZAAgAAAAACUc2CtD1MK/UTxtv+8iA9FoHEyTwdl43HKeSwDw2Lp5BXMAIAAAAACCIduIdw65bQMzRYRfjBJj62bc69T4QqH4QoWanwlvowVsACAAAAAAM0TV7S+aPVVzJOQ+cpSNKHTwyQ0mWa8tcHzfk3nR+9IAAzU2AH0AAAAFZAAgAAAAAHSaHWs/dnmI9sc7nB50VB2Bzs0kHapMHCQdyVEYY30TBXMAIAAAAACkV22lhEjWv/9/DubfHBAcwJggKI5mIbSK5L2nyqloqQVsACAAAAAAS19m7DccQxgryOsBJ3GsCs37yfQqNi1G+S6fCXpEhn4AAzU3AH0AAAAFZAAgAAAAAAL8jhNBG0KXXZhmZ0bPXtfgapJCB/AI+BEHB0eZ3C75BXMAIAAAAADHx/fPa639EBmGV5quLi8IQT600ifiKSOhTDOK19DnzwVsACAAAAAAlyLTDVkHxbayklD6Qymh3odIK1JHaOtps4f4HR+PcDgAAzU4AH0AAAAFZAAgAAAAAAxgeclNl09H7HvzD1oLwb2YpFca5eaX90uStYXHilqKBXMAIAAAAACMU5pSxzIzWlQxHyW170Xs9EhD1hURASQk+qkx7K5Y6AVsACAAAAAAJbMMwJfNftA7Xom8Bw/ghuZmSa3x12vTZxBUbV8m888AAzU5AH0AAAAFZAAgAAAAABmO7QD9vxWMmFjIHz13lyOeV6vHT6mYCsWxF7hb/yOjBXMAIAAAAACT9lmgkiqzuWG24afuzYiCeK9gmJqacmxAruIukd0xEAVsACAAAAAAZa0/FI/GkZR7CtX18Xg9Tn9zfxkD0UoaSt+pIO5t1t4AAzYwAH0AAAAFZAAgAAAAAB89SjLtDJkqEghRGyj6aQ/2qvWLNuMROoXmzbYbCMKMBXMAIAAAAAC8sywgND+CjhVTF6HnRQeay8y9/HnVzDI42dEPah28LQVsACAAAAAAoxv7UKh0RqUAWcOsQvU123zO1qZn73Xfib0qncZCB34AAzYxAH0AAAAFZAAgAAAAABN2alGq9Aats1mwERNGwL/fIwZSvVCe9/8XMHTFlpUpBXMAIAAAAACuDPjJgvvbBYhbLpjMiWUCsVppiYrhvR+yMysNPN8cZAVsACAAAAAAKpADjc4bzIZMi9Q/+oe0EMRJ
HYQt6dlo1x/lRquagqkAAzYyAH0AAAAFZAAgAAAAAL8YB6VAqGBiWD4CBv16IBscg5J7VQCTZu87n6pj+86KBXMAIAAAAAAmxm8e68geeyAdUjSMWBHzUjneVB0pG9TBXIoE6467hAVsACAAAAAAV76JZAlYpgC/Zl8awx2ArCg1uuyy2XVTSkp0wUMi/7UAAzYzAH0AAAAFZAAgAAAAAL4yLkCTV5Dmxa5toBu4JT8ge/cITAaURIOuFuOtFUkeBXMAIAAAAAAXoFNQOMGkAj7qEJP0wQafmFSXgWGeorDVbwyOxWLIsgVsACAAAAAAc4Un6dtIFe+AQ+RSfNWs3q63RTHhmyc+5GKRRdpWRv8AAzY0AH0AAAAFZAAgAAAAAEU8DoUp46YtYjNFS9kNXwdYxQ9IW27vCTb+VcqqfnKNBXMAIAAAAADe7vBOgYReE8X78k5ARuUnv4GmzPZzg6SbConf4L2G3wVsACAAAAAA78YHWVkp6HbZ0zS4UL2z/2pj9vPDcMDt7zTv6NcRsVsAAzY1AH0AAAAFZAAgAAAAAPa4yKTtkUtySuWo1ZQsp2QXtPb5SYqzA5vYDnS1P6c0BXMAIAAAAADKnF58R1sXlHlsHIvCBR3YWW/qk54z9CTDhZydkD1cOQVsACAAAAAAHW3ERalTFWKMzjuXF3nFh0pSrQxM/ojnPbPhc4v5MaQAAzY2AH0AAAAFZAAgAAAAAN5WJnMBmfgpuQPyonmY5X6OdRvuHw4nhsnGRnFAQ95VBXMAIAAAAACwftzu7KVV1rmGKwXtJjs3cJ1gE3apr8+N0SAg1F2cHwVsACAAAAAATDW0reyaCjbJuVLJzbSLx1OBuBoQu+090kgW4RurVacAAzY3AH0AAAAFZAAgAAAAACHvDsaPhoSb6DeGnKQ1QOpGYAgK82qpnqwcmzSeWaJHBXMAIAAAAABRq3C5+dOfnkAHM5Mg5hPB3O4jhwQlBgQWLA7Ph5bhgwVsACAAAAAAqkC8zYASvkVrp0pqmDyFCkPaDmD/ePAJpMuNOCBhni8AAzY4AH0AAAAFZAAgAAAAAOBePJvccPMJmy515KB1AkXF5Pi8NOG4V8psWy0SPRP+BXMAIAAAAAB3dOJG9xIDtEKCRzeNnPS3bFZepMj8UKBobKpSoCPqpgVsACAAAAAAPG3IxQVOdZrr509ggm5FKizWWoZPuVtOgOIGZ3m+pdEAAzY5AH0AAAAFZAAgAAAAABUvRrDQKEXLMdhnzXRdhiL6AGNs2TojPky+YVLXs+JnBXMAIAAAAAD1kYicbEEcPzD4QtuSYQQWDPq8fuUWGddpWayKn3dT9QVsACAAAAAA9+Sf7PbyFcY45hP9oTfjQiOUS3vEIAT8C0vOHymwYSUAAzcwAH0AAAAFZAAgAAAAAOvSnpujeKNen4pqc2HR63C5s5oJ1Vf4CsbKoYQvkwl5BXMAIAAAAACw2+vAMdibzd2YVVNfk81yXkFZP0WLJ82JBxJmXnYE+QVsACAAAAAArQ/E1ACyhK4ZyLqH9mNkCU7WClqRQTGyW9tciSGG/EMAAzcxAH0AAAAFZAAgAAAAAAo0xfGG7tJ3GWhgPVhW5Zn239nTD3PadShCNRc9TwdNBXMAIAAAAADZh243oOhenu0s/P/5KZLBDh9ADqKHtSWcXpO9D2sIjgVsACAAAAAAlgTPaoQKz+saU8rwCT3UiNOdG6hdpjzFx9GBn08ZkBEAABJjbQAAAAAAAAAAAAAQcGF5bG9hZElkAAAAAAAQZmlyc3RPcGVyYXRvcgABAAAAEnNwAAEAAAAAAAAAEHRmAAEAAAABbW4A////////7/8BbXgA////////738A", + "subType": "06" + } + } + } + }, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDoubleNoPrecision", + "bsonType": "double", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + } + } + } + ] + } + } + } + }, + "commandName": "find" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Double-Update.json b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Double-Update.json new file mode 100644 index 0000000000..938657c91c --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Double-Update.json @@ -0,0 +1,1205 @@ +{ + "description": "fle2v2-Rangev2-Double-Update", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + 
} + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted" + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll_unencrypted", + "database": "db_unencrypted", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDoubleNoPrecision", + "bsonType": "double", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + } + } + } + ] + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "FLE2 Range Double. Update.", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDoubleNoPrecision": { + "$numberDouble": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDoubleNoPrecision": { + "$numberDouble": "1" + } + } + }, + "object": "coll" + }, + { + "name": "updateOne", + "arguments": { + "filter": { + "encryptedDoubleNoPrecision": { + "$gt": { + "$numberDouble": "0" + } + } + }, + "update": { + "$set": { + "encryptedDoubleNoPrecision": { + "$numberDouble": "2" + } + } + } + }, + "object": "coll", + "expectResult": { + "matchedCount": 1, + "modifiedCount": 1, + "upsertedCount": 0 + } + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": 0, + "encryptedDoubleNoPrecision": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "6YrBn2ofIw1b5ooakrLOwF41BWrps8OO0H9WH4/rtlE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "n+XAuFnP8Dov9TnhGFxNx0K/MnVM9WbJ7RouEu0ndO0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yRXojuVdn5GQtD97qYlaCL6cOLmZ7Cvcb3wFjkLUIdM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "DuIkdRPITRs55I4SZmgomAHCIsDQmXRhW8+MOznkzSk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SsBk+Et1lTbU+QRPx+xyJ/jMkmfG+QCvQEpip2YYrzA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "crCIzOd8KhHvvUlX7M1v9bhvU4pLdTc+X2SuqoKU5Ek=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "YOWdCw4UrqnxkAaVjqmC4sKQDMVMHEpFGnlxpxdaU6E=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "M3SShp81Ff8tQ632qKbv9MUcN6wjDaBReI0VXNu6Xh4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "gzHlSPxpM0hT75kQvWFzGlOxKvDoiKQZOr19V6l2zXI=", + "subType": 
"00" + } + }, + { + "$binary": { + "base64": "s3JnppOGYw9SL2Q1kMAZs948v2F5PrpXjGei/HioDWs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "cG6+3Gk/zEH68P/uuuwiAUVCuyJwa1LeV+t29FlPPAo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dupdvR3AyJtM+g9NDKiaLVOtGca387JQp8w+V03m7Ig=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "JqEQc5svj2jTvZ6LLA5ivE+kTb/0aRemSEmxk4G7Zrg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "szcXXXKnob+p3SoM4yED2R920LeJ7cVsclPMFTe4CeI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "o1QoGVXmuBdHwHm7aCtGMlMVKrjFdYvJXpoq6uhIAZ0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Jfm5wPlqqLCJRGQIqRq2NGmpn7s0Vrih2H3YAOoI2YU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "zMHLb8ARbsYo8Ld05bqnGFf1Usha6EGb8QKwdSAyps0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yQdtq9lh5pugL7/i0Bj/PuZUUBUIzf+7wj1rl5y736w=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "wGWVZdO7qIuyDg/BqDgqjgoQ02h5YYgwXQB1oCin2NE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "by9HMLj6NTEpgztZ5HSN6GxImkXPcaFINYDzgZY33X8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tWo0vbasi7bXmn/MsOx13VC1IsWtpx/nYp0uj4iMzdA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tQQpndUYd5O87lOtrGjH3wl9VsOK0ray7RMasL90sBM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "cQjXEDCMsOpKLLf+vlTgIHA+cbSJdzqhbSX9Wvh95aA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "7yMpU48IxK9SzP2cx3VnTownGEwFmeFofuuFT97SuuY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "kSOx1kz0CmBgzKQHZlo65ZUY1DIv9A99JRm+Us2y6Ew=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ubQpdPBe6/xvtr+AcXdfYLSvYCR4ot0tivehkCsupb4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xal+iCJ6FTefRQToyoNksc9NCZShyn04NDGi4IYrcoM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "d7jU4iOK50xHxlkSifcxlZFCM46TSgQzoYivxG3HNLY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tJvl2nsBLBVzL3pp6sKWCL4UXeh3q/roYBJjSb74ve0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "OIUCaKRvIx9t1w6Hxlz1IcQTdPNCfdRNwnnTm10W+X0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "A9tvzsiElotOUVIB4CqfQp9mAwqvTM35YkmAR170aHA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "lI8gpK7hpb7c9x4RQugsxMnQay5LZJmwslZdvMx/dcE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dNCzh40U0XvdKnSDi3HRQOWQftEsDVqc4uUvsVFGoq8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "IP+iwEBWBwVVZIdpaMu8k5+soFCz+TZkYn3drKZ9grE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "pnqyh6e0y5svHkJDShlN9CHV0WvMBE4QbtJpQw5ZCXc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "elEl42tbVDoRTLjAhZUFEtXiut4b3PVhg/1ZLZSQdtE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "vHuu2FxwclMHqyE6JBYbTYgbEkB0dqb/JuaxsvfwsmY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xTf7NCe3Gf8QpE78HR5OknlLTKfs9J+RN9UZpH6fnso=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "XiWSasRnJAulGR6+LCVD3mwRObXylqYWR9jvpywq12c=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "MZMxEQ5ikx0PG1YFIExv0UnTZogsvgeOEZTpzvBDn4w=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yZMyMZBDrWbAhvnic7vvIYhmO9m5H2iuv0c8KNZrBzY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xxM14hTPY5j0vvcK2C7YAEjzdsfUTFHozHC0hEo1bxI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": 
"+01rqR1xVwkpGXcstbk1ItJqFVjH6Q8MGxEN3Cm9Y1A=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xOpLV0Z2VTRJ3iWtnWZcsyjXubTIkYWo31cO+HV1o1k=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "BWUOLqgLBqc5NwxVlSV5H3KFQPXbCp7mdo+jF+8cJqY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "fuQb1S6xZDGlrEbK+kI23aL53PP1PVNwqICnZNt9Yzg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SfscnoibFttahLdPVC4Ee+47ewGFKpDSU7M6HX19bKE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "rpSW2awybNVeKtat91VFxqbINoTfNhPfQAu+d73Xtf8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "9M/CP9ccOIIj2LLFmE0GFDO0Ban2wsNalEXfM6+h+1s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "WrEMG49l1ye4MhXs5ZS9tz8P6h+hDvthIg/2wW9ne1Q=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ImNhbfeyfH8qIEeA5ic0s3dAQBdzzTBS+CPsNih9vZ0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dWP33YDSn04UKJN2ogh2Rui0iW/0q2y18OCDRVcfyoo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "lYv0isAtfGh6H9tdp3cp2eHU7q2J+uk7QrgcxtK3w7Y=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "VGMoamB/+7zTOYcY/pqJc96xlv2PdW4hwsIAEIslTDQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yNeBWMF7BnD9wVwz2PgJsvWr77QiVvvWUvJF0+fqBug=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SfpvObJ+tJBXSvqeN7vlOfmhYign635lciYAJIjUtY8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dsen4NqjzVGjpjufiTMs3+gqeD09EbnuogPgxrJECwg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "pxCWVM3sn19NsFEpgHbgLa+PmYlhN3mMiP0Wk8kJhYw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "q11KNvJszjYIB9n9HcC+N4uz11a3eRj1L3BH9scKMDQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "A1PmkgcEToWh1JiVWE6mI5jUu7poxWWuCUt/cgRUUDc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "qJo3Hu4PJeanL7XEaWXO/n3YsodhZyd+MJOOmB9Kpd8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "BkBKLO8URFscfRY9Bav/1+L9mLohDgNr/MkZtGiraIs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "rZq5WA3Hx3xthOyHAJXK//f8pE2qbz7YKu3TIMp9GFY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "X07a/Lm80p5xd4RFs1dNmw+90tmPDPdGiAKVZkxd4zY=", + "subType": "00" + } + } + ] + }, + { + "_id": 1, + "encryptedDoubleNoPrecision": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "HI88j1zrIsFoijIXKybr9mYubNV5uVeODyLHFH4Ueco=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "wXVD/HSbBljko0jJcaxJ1nrzs2+pchLQqYR3vywS8SU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "KhscCh+tt/pp8lxtKZQSPPUU94RvJYPKG/sjtzIa4Ws=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "RISnuNrTTVNW5HnwCgQJ301pFw8DOcYrAMQIwVwjOkI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Ra5zukLh2boua0Bh74qA+mtIoixGXlsNsxiJqHtqdTI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "eqr0v+NNWXWszi9ni8qH58Q6gw5x737tJvH3lPaNHO4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "d42QupriWIwGrFAquXNFi0ehEuidIbHLFZtg1Sm2nN8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "2azRVxaaTIJKcgY2FU012gcyP8Y05cRDpfUaMnCBaQU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "3nlgkM4K/AAcHesRYYdEu24UGetHodVnVfHzw4yxZBM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "hqy91FNmAAac2zUaPO6eWFkx0/37rOWGrwXN+fzL0tU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "akX+fmscSDSF9pB5MPj56iaJPtohr0hfXNk/OPWsGv8=", + 
"subType": "00" + } + }, + { + "$binary": { + "base64": "1ZvUb10Q7cN4cNLktd5yNjqgtawsYnkbeVBZV6WuY/I=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "otCwtuKiY4hCyXvYzXvo10OcnzZppebo38KsAlq49QM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Mty8EscckeT/dhMfrPFyDbLnmMOcYRUQ3mLK4KTu6V8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "tnvgLLkJINO7csREYu4dEVe1ICrBeu7OP+HdfoX3M2E=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "kOefsHgEVhkJ17UuP7Dxogy6sAQbzf1SFPKCj6XRlrQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "F+JQ79xavpaHdJzdhvwyHbzdZJLNHAymc/+67La3gao=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "NCZ9zp5rDRceENuSgAfTLEyKg0YgmXAhK0B8WSj7+Pw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "wL1CJ7cYR5slx8mHq++uMdjDfkt9037lQTUztEMF56M=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "txefkzTMITZE+XvvRFZ7QcgwDT/7m8jNmxRk4QBaoZI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "jFunW3v1tSYMyZtQQD28eEy9qqDp4Kqo7gMN29N4bfQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "QMO915KUiS3X3R1bU1YoafVM2s0NeHo3EjgTA9PnGwY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "nwdKJEXdilzvb7494vbuDJ+y6SrfJahza1dYIsHIWVI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "vpWMX+T/VXXajFo0UbuYjtp0AEzBU0Y+lP+ih2EQ7mg=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "1lmzG0J1DhKDRhhq5y5Buygu4G8eV2X0t7kUY90EohM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SiKqpXqO0trwhFvBWK274hMklpCgMhNs/JY84yyn/NE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "7cPGPYCKPTay+ZR9Gx6oOueduOgaFrSuAXmNDpDHXdI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "4THEYvAkjs2Fh7FIe5LC45P4i4N0L7ob67UOVbhp6Nk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "B+UGsChLLZR7iqnt8yq91OgmTgwiUKTJhFxY4NT0O6c=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "X1uYwBCsCg1H+PnKdwtBqXlt0zKEURi8bOM940GcPfk=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xYOgT5l7shlNXCwHlguovmDkcEnF8dXyYlTyYrgZ8GE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "vFMTZqV8bh1+gcKzTkXweMddJlgdUnwX0DWzUUaMok4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "4HI0y9FrtleZxZ7M6INdNhLelrQ2Rv/+ykWCBl+tMC8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "jpJ0bBE474OUkn1vUiLWumIBtYmwc7J5+LQU/nyeLQc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "jQTPeXZvdxY/DjtPfYfKUArIDsf0E9MVFy2O26sv1ec=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "QLLto0ExR2ZYMGqlyaMZc/hXFFTlwmgtKbiVq/xJIeI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yBJNviU1nchbGbhx6InXCVRXa90sEepz1EwbYuKXu2U=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "jpEf0vHxrPu9gTJutNXSi2g/2Mc4WXFEN7yHonZEb7A=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "E09kLFckMYwNuhggMxmPtwndyvIAx+Vl+b2CV6FP75s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "N+ue6/cLPb5NssmJCCeo18LlbKPz6r2z20AsnTKRvOo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "yVQNZP8hhsvNGyDph2QP2qTNdXZTiIEVineKg+Qf33o=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "cSC9uI+9c5S8X+0G7amVyug1p0ZlgBsbEDYYyezBevQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "1NpZGjoQzuQtekj80Rifxe9HbE08W07dfwxaFHaVn84=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "5Ghuq/8l11Ug9Uf/RTwf9On3OxOwIXUcb9soiy4J7/w=", + "subType": "00" + } + }, + { + "$binary": { + "base64": 
"0LWKaEty6ywxLFhDaAqulqfMnYc+tgPfH4apyEeKg80=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "OwSthmCBtt6NIAoAh7aCbj82Yr/+9t8U7WuBQhFT3AQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "iYiyg6/1isqbMdvFPIGucu3cNM4NAZNtJhHpGZ4eM+c=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "waBgs8jWuGJPIF5zCRh6OmIyfK5GCBQgTMfmKSR2wyY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "1Jdtbe2BKJXPU2G9ywOrlODZ/cNYEQlKzAW3aMe1Hy4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xaLEnNUS/2ySerBpb9dN/D31t+wYcKekwTfkwtni0Mc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "bIVBrOhOvr6cL55Tr24+B+CC9MiG7U6K54aAr2IXXuw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6Cdq5wroGu2TEFnekuT7LhOpd/K/+PcipIljcHU9QL4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "K5l64vI4S/pLviLW6Pl0U3iQkI3ge0xg4RAHcEsyKJo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "bzhuvZ0Ls22yIOX+Hz51eAHlSuDbWR/e0u4EhfdpHbc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Qv+fr6uD4o0bZRp69QJCFL6zvn3G82c7L+N1IFzj7H0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "XAmISMbD3aEyQT+BQEphCKFNa0F0GDKFuhM9cGceKoQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "4VLCokntMfm1AogpUnYGvhV7nllWSo3mS3hVESMy+hA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "xiXNLj/CipEH63Vb5cidi8q9X47EF4f3HtJSOH7mfM8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "4XlCYfYBjI9XA5zOSgTiEBYcZsdwyXL+f5XtH2xUIOc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "k6DfQy7ZYJIkEly2B5hjOZznL4NcgMkllZjJLb7yq7w=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ZzM6gwWesa3lxbZVZthpPFs2s3GV0RZREE2zOMhBRBo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "US+jeMeeOd7J0wR0efJtq2/18lcO8YFvhT4O3DeaonQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "b6iSxiI1FM9SzxuG1bHqGA1i4+3GOi0/SPW00XB4L7o=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "kn3LsxAVkzIZKK9I6fi0Cctr0yjXOYgaQWMCoj4hLpM=", + "subType": "00" + } + } + ] + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 0, + "encryptedDoubleNoPrecision": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDoubleNoPrecision", + "bsonType": "double", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + 
"_id": 1, + "encryptedDoubleNoPrecision": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDoubleNoPrecision", + "bsonType": "double", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "update": "default", + "ordered": true, + "updates": [ + { + "q": { + "encryptedDoubleNoPrecision": { + "$gt": { + "$binary": { + "base64": "DbMkAAADcGF5bG9hZABXJAAABGcAQyQAAAMwAH0AAAAFZAAgAAAAAHgYoMGjEE6fAlAhICv0+doHcVX8CmMVxyq7+jlyGrvmBXMAIAAAAAC/5MQZgTHuIr/O5Z3mXPvqrom5JTQ8IeSpQGhO9sB+8gVsACAAAAAAuPSXVmJUAUpTQg/A9Bu1hYczZF58KEhVofakygbsvJQAAzEAfQAAAAVkACAAAAAA2kiWNvEc4zunJ1jzvuClFC9hjZMYruKCqAaxq+oY8EAFcwAgAAAAACofIS72Cm6s866UCk+evTH3CvKBj/uZd72sAL608rzTBWwAIAAAAADuCQ/M2xLeALF0UFZtJb22QGOhHmJv6xoO+kZIHcDeiAADMgB9AAAABWQAIAAAAABkfoBGmU3hjYBvQbjNW19kfXneBQsQQPRfUL3UAwI2cAVzACAAAAAAUpK2BUOqX/DGdX5YJniEZMWkofxHqeAbXceEGJxhp8AFbAAgAAAAAKUaLzIldNIZv6RHE+FwbMjzcNHqPESwF/37mm43VPrsAAMzAH0AAAAFZAAgAAAAAFNprhQ3ZwIcYbuzLolAT5n/vc14P9kUUQComDu6eFyKBXMAIAAAAAAcx9z9pk32YbPV/sfPZl9ALIEVsqoLXgqWLVK/tP+heAVsACAAAAAA/qxvuvJbAHwwhfrPVpmCFzNvg2cU/NXaWgqgYUZpgXwAAzQAfQAAAAVkACAAAAAAODI+pB2pCuB+YmNEUAgtMfNdt3DmSkrJ96gRzLphgb8FcwAgAAAAAAT7dewFDxUDECQ3zVq75/cUN4IP+zsqhkP5+czUwlJIBWwAIAAAAACFGeOtd5zBXTJ4JYonkn/HXZfHipUlqGwIRUcH/VTatwADNQB9AAAABWQAIAAAAACNAk+yTZ4Ewk1EnotQK8O3h1gg9I7pr9q2+4po1iJVgAVzACAAAAAAUj/LesmtEsgqYVzMJ67umVA11hJTdDXwbxDoQ71vWyUFbAAgAAAAABlnhpgTQ0WjLb5u0b/vEydrCeFjVynKd7aqb+UnvVLeAAM2AH0AAAAFZAAgAAAAAD/FIrGYFDjyYmVb7oTMVwweWP7A6F9LnyIuNO4MjBnXBXMAIAAAAACIZgJCQRZu7NhuNMyOqCn1tf+DfU1qm10TPCfj5JYV3wVsACAAAAAA5hmY4ptuNxULGf87SUFXQWGAONsL9U29duh8xqsHtxoAAzcAfQAAAAVkACAAAAAAciRW40ORJLVwchOEpz87Svb+5toAFM6LxDWv928ECwQFcwAgAAAAAN0dipyESIkszfjRzdDi8kAGaa2Hf4wrPAtiWwboZLuxBWwAIAAAAAANr4o/+l1OIbbaX5lZ3fQ/WIeOcEXjNI1F0WbSgQrzaQADOAB9AAAABWQAIAAAAACZqAyCzYQupJ95mrBJX54yIz9VY7I0WrxpNYElCI4dTQVzACAAAAAA/eyJb6d1xfE+jJlVXMTD3HS/NEYENPVKAuj56Dr2dSEFbAAgAAAAANkSt154Or/JKb31VvbZFV46RPgUp8ff/hcPORL7PpFBAAM5AH0AAAAFZAAgAAAAAI5bm3YO0Xgf0VT+qjVTTfvckecM3Cwqj7DTKZXf8/NXBXMAIAAAAAD/m+h8fBhWaHm6Ykuz0WX1xL4Eme3ErLObyEVJf8NCywVsACAAAAAAfb1VZZCqs2ivYbRzX4p5CtaCkKW+g20Pr57FWXzEZi8AAzEwAH0AAAAFZAAgAAAAANqo4+p6qdtCzcB4BX1wQ6llU7eFBnuu4MtZwp4B6mDlBXMAIAAAAAAGiz+VaukMZ+6IH4jtn4KWWdKK4/W+O+gRioQDrfzpMgVsACAAAAAAG4YYkTp80EKo59mlHExDodRQFR7njhR5dmISwUJ6ukAAAzExAH0AAAAFZAAgAAAAAPrFXmHP2Y4YAm7b/aqsdn/DPoDkv7B8egWkfe23XsM1BXMAIAAAAAAGhwpKAr7skeqHm3oseSbO7qKNhmYsuUrECBxJ5k+D2AVsACAAAAAAAqPQi9luYAu3GrFCEsVjd9z2zIDcp6SPTR2w6KQEr+IAAzEyAH0AAAAFZAAgAAAAABzjYxwAjXxXc0Uxv18rH8I3my0Aguow0kTwKyxbrm+cBXMAIAAAAADVbqJVr6IdokuhXkEtXF0C2gINLiAjMVN20lE20Vmp2QVsACAAAAAAD7K1Fx4gFaaizkIUrf+EGXQeG7QX1jadhGc6Ji471H8AAzEzAH0AAAAFZAAgAAAAAFMm2feF2fFCm/UC6AfIyepX/xJDSmnnolQIBnHcPmb5BXMAIAAAAABLI11kFrQoaNVZFmq/38aRNImPOjdJh0Lo6irI8M/AaAVsACAAAAAAOWul0oVqJ9CejD2RqphhTC98DJeRQy5EwbNerU2+4l8AAzE0AH0AAAAFZAAgAAAAAJvXB3KyNiNtQko4SSzo/9b2qmM2zU9CQTTDfLSBWMgRBXMAIAAAAAAvjuVP7KsLRDeqVqRziTKpBrjVyqKiIbO9Gw8Wl2wFTAVsACAAAAAADlE+oc1ins+paNcaOZJhBlKlObDJ4VQORWjFYocM4LgAAzE1AH0AAAAFZAAgAAAAAPGdcxDiid8z8XYnfdDivNMYVPgBKdGOUw6UStU+48CdBXMAIAAAAAARj6g1Ap0eEfuCZ4X2TsE
w+Djrhto3fA5nLwPaY0vCTgVsACAAAAAAoHqiwGOUkBu8SX5U1yHho+UIFdSN2MdQN5s6bQ0EsJYAAzE2AH0AAAAFZAAgAAAAAP5rGPrYGt3aKob5f/ldP0qrW7bmWvqnKY4QwdDWz400BXMAIAAAAADTQkW2ymaaf/bhteOOGmSrIR97bAnJx+yN3yMj1bTeewVsACAAAAAADyQnHGH2gF4w4L8axUsSTf6Ubk7L5/eoFOJk12MtZAoAAzE3AH0AAAAFZAAgAAAAAAlz6wJze5UkIxKpJOZFGCOf3v2KByWyI6NB6JM9wNcBBXMAIAAAAABUC7P/neUIHHoZtq0jFVBHY75tSFYr1Y5S16YN5XxC1QVsACAAAAAAgvxRbXDisNnLY3pfsjDdnFLtkvYUC4lhA68eBXc7KAwAAzE4AH0AAAAFZAAgAAAAAFJ8AtHcjia/9Y5pLEc3qVgH5xKiXw12G9Kn2A1EY8McBXMAIAAAAAAxe7Bdw7eUSBk/oAawa7uicTEDgXLymRNhBy1LAxhDvwVsACAAAAAAxKPaIBKVx3jTA+R/el7P7AZ7efrmTGjJs3Hj/YdMddwAAzE5AH0AAAAFZAAgAAAAAO8uwQUaKFb6vqR3Sv3Wn4QAonC2exOC9lGG1juqP5DtBXMAIAAAAABZf1KyJgQg8/Rf5c02DgDK2aQu0rNCOvaL60ohDHyY+gVsACAAAAAAqyEjfKC8lYoIfoXYHUqHZPoaA6EK5BAZy5dxXZmay4kAAzIwAH0AAAAFZAAgAAAAAE8YtqyRsGCeiR6hhiyisR/hccmK4nZqIMzO4lUBmEFzBXMAIAAAAAC1UYOSKqAeG1UJiKjWFVskRhuFKpj9Ezy+lICZvFlN5AVsACAAAAAA6Ct9nNMKyRazn1OKnRKagm746CGu+jyhbL1qJnZxGi0AAzIxAH0AAAAFZAAgAAAAAPhCrMausDx1QUIEqp9rUdRKyM6a9AAx7jQ3ILIu8wNIBXMAIAAAAACmH8lotGCiF2q9VQxhsS+7LAZv79VUAsOUALaGxE/EpAVsACAAAAAAnc1xCKfdvbUEc8F7XZqlNn1C+hZTtC0I9I3LL06iaNkAAzIyAH0AAAAFZAAgAAAAAOBi/GAYFcstMSJPgp3VkMiuuUUCrZytvqYaU8dwm8v2BXMAIAAAAACEZSZVyD3pKzGlbdwlYmWQhHHTV5SnNLknl2Gw8IaUTQVsACAAAAAAfsLZsEDcWSuNsIo/TD1ReyQW75HPMgmuKZuWFOLKRLoAAzIzAH0AAAAFZAAgAAAAAIQuup+YGfH3mflzWopN8J1X8o8a0d9CSGIvrA5HOzraBXMAIAAAAADYvNLURXsC2ITMqK14LABQBI+hZZ5wNf24JMcKLW+84AVsACAAAAAACzfjbTBH7IwDU91OqLAz94RFkoqBOkzKAqQb55gT4/MAAzI0AH0AAAAFZAAgAAAAAKsh0ADyOnVocFrOrf6MpTrNvAj8iaiE923DPryu124gBXMAIAAAAADg24a8NVE1GyScc6tmnTbmu5ulzO+896fE92lN08MeswVsACAAAAAAaPxcOIxnU7But88/yadOuDJDMcCywwrRitaxMODT4msAAzI1AH0AAAAFZAAgAAAAAKkVC2Y6HtRmv72tDnPUSjJBvse7SxLqnr09/Uuj9sVVBXMAIAAAAABYNFUkH7ylPMN+Bc3HWX1e0flGYNbtJNCY9SltJCW/UAVsACAAAAAAZYK/f9H4OeihmpiFMH7Wm7uLvs2s92zNA8wyrNZTsuMAAzI2AH0AAAAFZAAgAAAAADDggcwcb/Yn1Kk39sOHsv7BO/MfP3m/AJzjGH506Wf9BXMAIAAAAAAYZIsdjICS0+BDyRUPnrSAZfPrwtuMaEDEn0/ijLNQmAVsACAAAAAAGPnYVvo2ulO9z4LGd/69NAklfIcZqZvFX2KK0s+FcTUAAzI3AH0AAAAFZAAgAAAAAEWY7dEUOJBgjOoWVht1wLehsWAzB3rSOBtLgTuM2HC8BXMAIAAAAAAAoswiHRROurjwUW8u8D5EUT+67yvrgpB/j6PzBDAfVwVsACAAAAAA6NhRTYFL/Sz4tao7vpPjLNgAJ0FX6P/IyMW65qT6YsMAAzI4AH0AAAAFZAAgAAAAAPZaapeAUUFPA7JTCMOWHJa9lnPFh0/gXfAPjA1ezm4ZBXMAIAAAAACmJvLY2nivw7/b3DOKH/X7bBXjJwoowqb1GtEFO3OYgAVsACAAAAAA/JcUoyKacCB1NfmH8vYqC1f7rd13KShrQqV2r9QBP44AAzI5AH0AAAAFZAAgAAAAAK00u6jadxCZAiA+fTsPVDsnW5p5LCr4+kZZZOTDuZlfBXMAIAAAAAAote4zTEYMDgaaQbAdN8Dzv93ljPLdGjJzvnRn3KXgtQVsACAAAAAAxXd9Mh6R3mnJy8m7UfqMKi6oD5DlZpkaOz6bEjMOdiwAAzMwAH0AAAAFZAAgAAAAAFbgabdyymiEVYYwtJSWa7lfl/oYuj/SukzJeDOR6wPVBXMAIAAAAADAFGFjS1vPbN6mQEhkDYTD6V2V23Ys9gUEUMGNvMPkaAVsACAAAAAAL/D5Sze/ZoEanZLK0IeEkhgVkxEjMWVCfmJaD3a8uNIAAzMxAH0AAAAFZAAgAAAAABNMR6UBv2E627CqLtQ/eDYx7OEwQ7JrR4mSHFa1N8tLBXMAIAAAAAAxH4gucI4UmNVB7625C6hFSVCuIpJO3lusJlPuL8H5EQVsACAAAAAAVLHNg0OUVqZ7WGOP53BkTap9FOw9dr1P4J8HxqFqU04AAzMyAH0AAAAFZAAgAAAAAG8cd6WBneNunlqrQ2EmNf35W7OGObGq9WL4ePX+LUDmBXMAIAAAAAAjJ2+sX87NSis9hBsgb1QprVRnO7Bf+GObCGoUqyPE4wVsACAAAAAAs9c9SM49/pWmyUQKslpt3RTMBNSRppfNO0JBvUqHPg0AAzMzAH0AAAAFZAAgAAAAAFWOUGkUpy8yf6gB3dio/aOfRKh7XuhvsUj48iESFJrGBXMAIAAAAAAY7sCDMcrUXvNuL6dO0m11WyijzXZvPIcOKob6IpC4PQVsACAAAAAAJOP+EHz6awDb1qK2bZQ3kTV7wsj5Daj/IGAWh4g7omAAAzM0AH0AAAAFZAAgAAAAAGUrIdKxOihwNmo6B+aG+Ag1qa0+iqdksHOjQj+Oy9bZBXMAIAAAAABwa5dbI2KmzBDNBTQBEkjZv4sPaeRkRNejcjdVymRFKQVsACAAAAAA4ml/nm0gJNTcJ4vuD+T2Qfq2fQZlibJp/j6MOGDrbHMAAzM1AH0AAAAFZAAgAAAAAOx89xV/hRk64/CkM9N2EMK6aldII0c8smdcsZ46NbP8BXMAIAAAAADBF6tfQ+7q9kTuLyuyrSnDgmrdmrXkdhl980i1KHuGHgVsACAAAAAACUqiFqHZdGbwAA+hN0YUE5zFg+H+dabIB4dj5/75W/YAAzM2AH0AAAAFZAAgAAAAAMkN0L1oQWXhjwn9rA
dudcYeN8/5VdCKU8cmDt7BokjsBXMAIAAAAAAT62pGXoRwExe9uvgYOI0hg5tOxilrWfoEmT0SMglWJwVsACAAAAAAlVz4dhiprSbUero6JFfxzSJGclg63oAkAmgbSwbcYxIAAzM3AH0AAAAFZAAgAAAAANxfa4xCoaaB7k1C1RoH1LBhsCbN2yEq15BT9b+iqEC4BXMAIAAAAACAX9LV8Pemfw7NF0iB1/85NzM1Ef+1mUfyehacUVgobQVsACAAAAAAVq4xpbymLk0trPC/a2MvB39I7hRiX8EJsVSI5E5hSBkAAzM4AH0AAAAFZAAgAAAAAOYIYoWkX7dGuyKfi3XssUlc7u/gWzqrR9KMkikKVdmSBXMAIAAAAABVF2OYjRTGi9Tw8XCAwZWLpX35Yl271TlNWp6N/nROhAVsACAAAAAA0nWwYzXQ1+EkDvnGq+SMlq20z+j32Su+i/A95SggPb4AAzM5AH0AAAAFZAAgAAAAAIy0+bXZi10QC+q7oSOLXK5Fee7VEk/qHSXukfeVIfgzBXMAIAAAAAAQ3IIV/JQCHW95AEbH5zGIHtJqyuPjWPMIZ+VmQHlxEwVsACAAAAAAp0jYsyohKv9Pm+4k+DplEGbl9WLZpAJzitrcDj4CNsMAAzQwAH0AAAAFZAAgAAAAAL5SOJQ3LOhgdXJ5v086NNeAl1qonQnchObdpZJ1kHeEBXMAIAAAAAA+tEqTXODtik+ydJZSnUqXF9f18bPeze9eWtR7ExZJgQVsACAAAAAAbrkZCVgB9Qsp4IAbdf+bD4fT6Boqk5UtuA/zhNrh1y0AAzQxAH0AAAAFZAAgAAAAAKl8zcHJRDjSjJeV/WvMxulW1zrTFtaeBy/aKKhadc6UBXMAIAAAAADBdWQl5SBIvtZZLIHszePwkO14W1mQ0izUk2Ov21cPNAVsACAAAAAAHErCYycpqiIcCZHdmPL1hi+ovLQk4TAvENpfLdTRamQAAzQyAH0AAAAFZAAgAAAAAFvotcNaoKnVt5CBCOPwjexFO0WGWuaIGL6H/6KSau+6BXMAIAAAAAD2y2mBN5xPu5PJoY2zcr0GnQDtHRBogA5+xzIxccE9fwVsACAAAAAAdS34xzJesnUfxLCcc1U7XzUqLy8MAzV/tcjbqaD3lkMAAzQzAH0AAAAFZAAgAAAAAPezU0/vNT4Q4YKbTbaeHqcwNLT+IjW/Y9QFpIooihjPBXMAIAAAAACj2x4O4rHter8ZnTws5LAP9jJ/6kk9C/V3vL50LoFZHAVsACAAAAAAQdBDF3747uCVP5lB/zr8VmzxJfTSZHBKeIgm5FyONXwAAzQ0AH0AAAAFZAAgAAAAAMqpayM2XotEFmm0gwQd9rIzApy0X+7HfOhNk6VU7F5lBXMAIAAAAACJR9+q5T9qFHXFNgGbZnPubG8rkO6cwWhzITQTmd6VgwVsACAAAAAAOZLQ6o7e4mVfDzbpQioa4d3RoTvqwgnbmc5Qh2wsZuoAAzQ1AH0AAAAFZAAgAAAAANCeyW+3oebaQk+aqxNVhAcT/BZ5nhsTVdKS3tMrLSvWBXMAIAAAAADxRFMDhkyuEc++WnndMfoUMLNL7T7rWoeblcrpSI6soQVsACAAAAAAdBuBMJ1lxt0DRq9pOZldQqchLs3B/W02txcMLD490FEAAzQ2AH0AAAAFZAAgAAAAAIbo5YBTxXM7HQhl7UP9NNgpPGFkBx871r1B65G47+K8BXMAIAAAAAC21dJSxnEhnxO5gzN5/34BL4von45e1meW92qowzb8fQVsACAAAAAAm3Hk2cvBN0ANaR5jzeZE5TsdxDvJCTOT1I01X7cNVaYAAzQ3AH0AAAAFZAAgAAAAABm/6pF96j26Jm7z5KkY1y33zcAEXLx2n0DwC03bs/ixBXMAIAAAAAD01OMvTZI/mqMgxIhA5nLs068mW+GKl3OW3ilf2D8+LgVsACAAAAAAaLvJDrqBESTNZSdcXsd+8GXPl8ZkUsGpeYuyYVv/kygAAzQ4AH0AAAAFZAAgAAAAAJ/D3+17gaQdkBqkL2wMwccdmCaVOtxzIkM8VyI4xI5zBXMAIAAAAAAggLVmkc5u+YzBR+oNE+XgLVp64fC6MzUb/Ilu/Jsw0AVsACAAAAAACz3HVKdWkx82/kGbVpcbAeZtsj2R5Zr0dEPfle4IErkAAzQ5AH0AAAAFZAAgAAAAAJMRyUW50oaTzspS6A3TUoXyC3gNYQoShUGPakMmeVZrBXMAIAAAAACona2Pqwt4U2PmFrtmu37jB9kQ/12okyAVtYa8TQkDiQVsACAAAAAAltJJKjCMyBTJ+4PkdDCPJdeX695P8P5h7WOZ+kmExMAAAzUwAH0AAAAFZAAgAAAAAByuYl8dBvfaZ0LO/81JW4hYypeNmvLMaxsIdvqMPrWoBXMAIAAAAABNddwobOUJzm9HOUD8BMZJqkNCUCqstHZkC76FIdNg9AVsACAAAAAAQQOkIQtkyNavqCnhQbNg3HfqrJdsAGaoxSJePJl1qXsAAzUxAH0AAAAFZAAgAAAAAHEzLtfmF/sBcYPPdj8867VmmQyU1xK9I/3Y0478azvABXMAIAAAAAAcmyFajZPnBTbO+oLInNwlApBocUekKkxz2hYFeSlQ+gVsACAAAAAAZ6IkrOVRcC8vSA6Vb4fPWZJrYexXhEabIuYIeXNsCSgAAzUyAH0AAAAFZAAgAAAAAJam7JYsZe2cN20ZYm2W3v1pisNt5PLiniMzymBLWyMtBXMAIAAAAABxCsKVMZMTn3n+R2L7pVz5nW804r8HcK0mCBw3jUXKXAVsACAAAAAA7j3JGnNtR64P4dJLeUoScFRGfa8ekjh3dvhw46sRFk0AAzUzAH0AAAAFZAAgAAAAAMXrXx0saZ+5gORmwM2FLuZG6iuO2YS+1IGPoAtDKoKBBXMAIAAAAADIQsxCr8CfFKaBcx8kIeSywnGh7JHjKRJ9vJd9x79y7wVsACAAAAAAcvBV+SykDYhmRFyVYwFYB9oBKBSHr55Jdz2cXeowsUQAAzU0AH0AAAAFZAAgAAAAACbzcUD3INSnCRspOKF7ubne74OK9L0FTZvi9Ay0JVDYBXMAIAAAAADPebVQH8Btk9rhBIoUOdSAdpPvz7qIY4UC2i6IGisSAQVsACAAAAAAiBunJi0mPnnXdnldiq+If8dcb/n6apHnaIFt+oyYO1kAAzU1AH0AAAAFZAAgAAAAACUc2CtD1MK/UTxtv+8iA9FoHEyTwdl43HKeSwDw2Lp5BXMAIAAAAACCIduIdw65bQMzRYRfjBJj62bc69T4QqH4QoWanwlvowVsACAAAAAAM0TV7S+aPVVzJOQ+cpSNKHTwyQ0mWa8tcHzfk3nR+9IAAzU2AH0AAAAFZAAgAAAAAHSaHWs/dnmI9sc7nB50VB2Bzs0kHapMHCQdyVEYY30TBXMAIAAAAACkV22lhEjWv/9/DubfHBAcwJggKI5mIbSK5L2nyqloqQVsACAAAAAAS19m7DccQxgryOsBJ3GsCs37y
fQqNi1G+S6fCXpEhn4AAzU3AH0AAAAFZAAgAAAAAAL8jhNBG0KXXZhmZ0bPXtfgapJCB/AI+BEHB0eZ3C75BXMAIAAAAADHx/fPa639EBmGV5quLi8IQT600ifiKSOhTDOK19DnzwVsACAAAAAAlyLTDVkHxbayklD6Qymh3odIK1JHaOtps4f4HR+PcDgAAzU4AH0AAAAFZAAgAAAAAAxgeclNl09H7HvzD1oLwb2YpFca5eaX90uStYXHilqKBXMAIAAAAACMU5pSxzIzWlQxHyW170Xs9EhD1hURASQk+qkx7K5Y6AVsACAAAAAAJbMMwJfNftA7Xom8Bw/ghuZmSa3x12vTZxBUbV8m888AAzU5AH0AAAAFZAAgAAAAABmO7QD9vxWMmFjIHz13lyOeV6vHT6mYCsWxF7hb/yOjBXMAIAAAAACT9lmgkiqzuWG24afuzYiCeK9gmJqacmxAruIukd0xEAVsACAAAAAAZa0/FI/GkZR7CtX18Xg9Tn9zfxkD0UoaSt+pIO5t1t4AAzYwAH0AAAAFZAAgAAAAAB89SjLtDJkqEghRGyj6aQ/2qvWLNuMROoXmzbYbCMKMBXMAIAAAAAC8sywgND+CjhVTF6HnRQeay8y9/HnVzDI42dEPah28LQVsACAAAAAAoxv7UKh0RqUAWcOsQvU123zO1qZn73Xfib0qncZCB34AAzYxAH0AAAAFZAAgAAAAABN2alGq9Aats1mwERNGwL/fIwZSvVCe9/8XMHTFlpUpBXMAIAAAAACuDPjJgvvbBYhbLpjMiWUCsVppiYrhvR+yMysNPN8cZAVsACAAAAAAKpADjc4bzIZMi9Q/+oe0EMRJHYQt6dlo1x/lRquagqkAAzYyAH0AAAAFZAAgAAAAAL8YB6VAqGBiWD4CBv16IBscg5J7VQCTZu87n6pj+86KBXMAIAAAAAAmxm8e68geeyAdUjSMWBHzUjneVB0pG9TBXIoE6467hAVsACAAAAAAV76JZAlYpgC/Zl8awx2ArCg1uuyy2XVTSkp0wUMi/7UAAzYzAH0AAAAFZAAgAAAAAL4yLkCTV5Dmxa5toBu4JT8ge/cITAaURIOuFuOtFUkeBXMAIAAAAAAXoFNQOMGkAj7qEJP0wQafmFSXgWGeorDVbwyOxWLIsgVsACAAAAAAc4Un6dtIFe+AQ+RSfNWs3q63RTHhmyc+5GKRRdpWRv8AAzY0AH0AAAAFZAAgAAAAAEU8DoUp46YtYjNFS9kNXwdYxQ9IW27vCTb+VcqqfnKNBXMAIAAAAADe7vBOgYReE8X78k5ARuUnv4GmzPZzg6SbConf4L2G3wVsACAAAAAA78YHWVkp6HbZ0zS4UL2z/2pj9vPDcMDt7zTv6NcRsVsAAzY1AH0AAAAFZAAgAAAAAPa4yKTtkUtySuWo1ZQsp2QXtPb5SYqzA5vYDnS1P6c0BXMAIAAAAADKnF58R1sXlHlsHIvCBR3YWW/qk54z9CTDhZydkD1cOQVsACAAAAAAHW3ERalTFWKMzjuXF3nFh0pSrQxM/ojnPbPhc4v5MaQAAzY2AH0AAAAFZAAgAAAAAN5WJnMBmfgpuQPyonmY5X6OdRvuHw4nhsnGRnFAQ95VBXMAIAAAAACwftzu7KVV1rmGKwXtJjs3cJ1gE3apr8+N0SAg1F2cHwVsACAAAAAATDW0reyaCjbJuVLJzbSLx1OBuBoQu+090kgW4RurVacAAzY3AH0AAAAFZAAgAAAAACHvDsaPhoSb6DeGnKQ1QOpGYAgK82qpnqwcmzSeWaJHBXMAIAAAAABRq3C5+dOfnkAHM5Mg5hPB3O4jhwQlBgQWLA7Ph5bhgwVsACAAAAAAqkC8zYASvkVrp0pqmDyFCkPaDmD/ePAJpMuNOCBhni8AAzY4AH0AAAAFZAAgAAAAAOBePJvccPMJmy515KB1AkXF5Pi8NOG4V8psWy0SPRP+BXMAIAAAAAB3dOJG9xIDtEKCRzeNnPS3bFZepMj8UKBobKpSoCPqpgVsACAAAAAAPG3IxQVOdZrr509ggm5FKizWWoZPuVtOgOIGZ3m+pdEAAzY5AH0AAAAFZAAgAAAAABUvRrDQKEXLMdhnzXRdhiL6AGNs2TojPky+YVLXs+JnBXMAIAAAAAD1kYicbEEcPzD4QtuSYQQWDPq8fuUWGddpWayKn3dT9QVsACAAAAAA9+Sf7PbyFcY45hP9oTfjQiOUS3vEIAT8C0vOHymwYSUAAzcwAH0AAAAFZAAgAAAAAOvSnpujeKNen4pqc2HR63C5s5oJ1Vf4CsbKoYQvkwl5BXMAIAAAAACw2+vAMdibzd2YVVNfk81yXkFZP0WLJ82JBxJmXnYE+QVsACAAAAAArQ/E1ACyhK4ZyLqH9mNkCU7WClqRQTGyW9tciSGG/EMAAzcxAH0AAAAFZAAgAAAAAAo0xfGG7tJ3GWhgPVhW5Zn239nTD3PadShCNRc9TwdNBXMAIAAAAADZh243oOhenu0s/P/5KZLBDh9ADqKHtSWcXpO9D2sIjgVsACAAAAAAlgTPaoQKz+saU8rwCT3UiNOdG6hdpjzFx9GBn08ZkBEAABJjbQAAAAAAAAAAAAAQcGF5bG9hZElkAAAAAAAQZmlyc3RPcGVyYXRvcgABAAAAEnNwAAEAAAAAAAAAEHRmAAEAAAABbW4A////////7/8BbXgA////////738A", + "subType": "06" + } + } + } + }, + "u": { + "$set": { + "encryptedDoubleNoPrecision": { + "$$type": "binData" + } + } + }, + "multi": false, + "upsert": false + } + ], + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDoubleNoPrecision", + "bsonType": "double", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + } + } + } + ] + } + } + }, + "$db": "default" + }, + "commandName": "update" + } + } + ] + } + ] + } + ] +} diff --git 
a/test/client-side-encryption/spec/unified/fle2v2-Rangev2-DoublePrecision-Aggregate.json b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-DoublePrecision-Aggregate.json new file mode 100644 index 0000000000..2046630a7b --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-DoublePrecision-Aggregate.json @@ -0,0 +1,643 @@ +{ + "description": "fle2v2-Rangev2-DoublePrecision-Aggregate", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted" + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll_unencrypted", + "database": "db_unencrypted", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDoublePrecision", + "bsonType": "double", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberDouble": "0.0" + }, + "max": { + "$numberDouble": "200.0" + }, + "precision": { + "$numberInt": "2" + } + } + } + ] + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "FLE2 Range DoublePrecision. 
Aggregate.", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDoublePrecision": { + "$numberDouble": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDoublePrecision": { + "$numberDouble": "1" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDoublePrecision": { + "$gt": { + "$numberDouble": "0" + } + } + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedDoublePrecision": { + "$numberDouble": "1" + } + } + ] + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": 0, + "encryptedDoublePrecision": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "Dri0CXmL78L2DOgk9w0DwxHOMGMzih7m6l59vgy+WWo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "x7GR49EN0t3WXQDihkrbonK7qNIBYC87tpL/XEUyIYc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "JfYUqWF+OoGjiYkRI4L5iPlF+T1Eleul7Fki22jp4Qc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "q1RyGfIgsaQHoZFRw+DD28V26rN5hweApPLwExncvT8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "L2PFeKGvLS6C+DLudR6fGlBq3ERPvjWvRyNRIA2HVb0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "CWxaNqL3iP1yCixDkcmf9bmW3E5VeN8TJkg1jJe528s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "+vC6araOEo+fpW7PSIP40/EnzBCj1d2N10Jr3rrXJJM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6SV63Mf51Z6A6p2X3rCnJKCu6ku3Oeb45mBYbz+IoAo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "RjBYT2h3ZAoHxhf8DU6/dFbDkEBZp0IxREcsRTu2MXs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "b7d8mRzD1kI1tdc7uNL+YAUonJ6pODLsRLkArfEKSkM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Xg8C1/A0KJaXOw4i+26Rv03/CydaaunOzXh0CIT+gn8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "UoKUDw2wJYToUCcFaIs03YQSTksYR0MIOTJllwODqKc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "c/5cwAT0C5jber2xlJnWD3a5tVDy0nRtr5HG02hoFOY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "wSUrRXavAGaajNeqC5mEUH1K67oYl5Wy9RNIzKjwLAM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6vrp4wWDtHEgHWR99I70WVDzevg1Fk/Pw5U8gUDa0OU=", + "subType": "00" + } + } + ] + }, + { + "_id": 1, + "encryptedDoublePrecision": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "mVZb+Ra0EYjQ4Zrh9X//E2T8MRj7NMqm5GUJXhRrBEI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "FA74j21GUEJb1DJBOpR9nVnjaDZnd8yAQNuaW9Qi26g=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "kJv//KVkbrobIBf+QeWC5jxn20mx/P0R1N6aCSMgKM8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "zB+Whi9IUUGxfLEe+lGuIzLX4LFbIhaIAm5lRk65QTc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ybO1QU3CgvhO8JgRXH+HxKszWcpl5aGDYYVa75fHa1g=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "X3Y3eSAbbMg//JgiHHiFpYOpV61t8kkDexI+CQyitH4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SlNHXyqVFGDPrX/2ppwog6l4pwj3PKda2TkZbqgfSfA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "McjV8xwTF3xI7863DYOBdyvIv6UpzThl6v9vBRk05bI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "MgwakFvPyBlwqFTbhWUF79URJQWFoJTGotlEVSPPUsQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": 
"DyBERpMSD5lEM5Nhpcn4WGgxgn/mkUVJp+PYSLX5jsE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "I43iazc0xj1WVbYB/V+uTL/tughN1bBlxh1iypBnNsA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "wjOBa/ATMuOywFmuPgC0GF/oeLqu0Z7eK5udzkTPbis=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "gRQVwiR+m+0Vg8ZDXqrQQcVnTyobwCXNaA4BCJVXtMc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "WUZ6huwx0ZbLb0R00uiC9FOJzsUocUN8qE5+YRenkvQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "7s79aKEuPgQcS/YPOOVcYNZvHIo7FFsWtFCrnDKXefA=", + "subType": "00" + } + } + ] + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 0, + "encryptedDoublePrecision": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDoublePrecision", + "bsonType": "double", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberDouble": "0.0" + }, + "max": { + "$numberDouble": "200.0" + }, + "precision": { + "$numberInt": "2" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encryptedDoublePrecision": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDoublePrecision", + "bsonType": "double", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberDouble": "0.0" + }, + "max": { + "$numberDouble": "200.0" + }, + "precision": { + "$numberInt": "2" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "aggregate": "default", + "pipeline": [ + { + "$match": { + "encryptedDoublePrecision": { + "$gt": { + "$binary": { + "base64": 
"DQYKAAADcGF5bG9hZACiCQAABGcAjgkAAAMwAH0AAAAFZAAgAAAAAHdJ2Vnb4MMzqVYVssjSdDy8XU4GVzMTfGifGETgQ2mYBXMAIAAAAAD7cFfKJGIXo6PjyeX2ria02CckW7dWFDoY/3FyBdm1NQVsACAAAAAAhEPSNv4M023A3hzNFuy83+hIKuZ2mKRY954N++aEOBUAAzEAfQAAAAVkACAAAAAAlmvfDrZoydUet4eCVMq7z6a58Ea+1HLJOWxN5lNcrWEFcwAgAAAAAEBo5AWZyC41b9ayjWNQSL4iYEAIwR/JG+ssN8bdoK9RBWwAIAAAAACEndE0SLxFSElOrNnqeX0EPmgDio3udZjVREy4JLS3sQADMgB9AAAABWQAIAAAAABbiLaoxAA6rinMJw1hC8ZUiq6UU1AQaPFn/py/Y06WuQVzACAAAAAAhtDasFkvYE7SCNu1je/hxdE9TJtAvvH3NtdEbKzNbCUFbAAgAAAAAIGepU1RSCF8sWODHEpKglsoqw3VBBH4a/URGxgGzbq2AAMzAH0AAAAFZAAgAAAAALORWwSr+tYNxcil2KIGSbNhTHvcPbdj+rLVQNx21S/KBXMAIAAAAAD6diZBkPEJ1cQy06LAxdbNK8Nlxbb44fH4Wk3Y3260nQVsACAAAAAA1eYAZBFHlDiaDAljWi8blGQ2nvvZa5AO5doeo0SFZsgAAzQAfQAAAAVkACAAAAAAG5XMK96PjClNlUvg82j4pMY1YxsznZfj4uNweD394FoFcwAgAAAAAKHgQLdGJHkrfFg9nB93Ac+3VgBw6aU44MTkKIQ91dZoBWwAIAAAAAAPxXmi+SDJ+40A0KdwfRczexlZQrHjIA+D3oUB0EY9tAADNQB9AAAABWQAIAAAAAA6M++b9I0YFemmWBAWAE3glu2Ah3Ta1FBxAQEIWS0toAVzACAAAAAANXYTqPf1Y6X3Ns6YQIX0C3FKCyWUo+Kk+fNcQvc0WSoFbAAgAAAAAA+uJUw1ICYgyeygSRe206VTWVtUnhdci3iHbyP5YtEVAAM2AH0AAAAFZAAgAAAAAKl8bV1riH/uyJ+X0HHd3+18k2cJl2dQFXCdoagutFcaBXMAIAAAAABm8F2Ew9f0VOABdcF+lP0Bi+zWvEUPniWgrxPq/Sx3uwVsACAAAAAAJfFErjZ6BPhsw5LjJLqNtKDLJ4zV0eIZppQpd9b0wZoAAzcAfQAAAAVkACAAAAAAsYZD8JEP6kYsPncFnNZwJxhu4YtUTKPNcjHtv67H+rYFcwAgAAAAAI4LqZcRkvbs/2F62Flu0pixNcor4WmBD0DHGaf039wLBWwAIAAAAAD4wUR3xd9lKltcqqo8LYvdMQWzCRobkV/ppKB/yn5dUgADOAB9AAAABWQAIAAAAAC0vdAi+dmoIXvZ5LqUqvyKV9/tHqSI2SWiSJO5pTnA2wVzACAAAAAAS2qvf9fvfVUH5WtsVxjxmskpGjYTQV34LwvQQw1y9wIFbAAgAAAAAE0+FKuK7HxbypvCeEJzMTcjOWE0ScYOlTBMUNlIv55hAAM5AH0AAAAFZAAgAAAAAH31lb/srBcrOXkzddCwAnclsR5/3QijEVgECs2JjOWBBXMAIAAAAABg7+prDT73YcCvLE5QbuIrqGcjLc5pQD2Miq0d29yrxgVsACAAAAAAetRiPwDSFWBzpWSWkOKWM6fKStRJ8SyObnpc79ux8p0AAzEwAH0AAAAFZAAgAAAAAOK8brUuc2onBNDRtfYMR736dHj4dQqXod8JG7tAMTsDBXMAIAAAAAAW6SrGAL6Bx0s7ZlsYULFfOAiYIGhEWu6md3r+Rk40awVsACAAAAAAIHYXP8RLcCboUmHN3+OlnEw1DxaLSnbTB9PdF228fFAAAzExAH0AAAAFZAAgAAAAAFdthRhe2Q8CvxGIhjTJZv0Lk97GkHciTPxZ/mckLoNaBXMAIAAAAAAqOxsAr23LOVB0DIHbPf9UDJJRFXY2YoKbjhRqw5psbQVsACAAAAAA0G2GD8ZQjDBntjLpW4rqwKRS6HiUjL03g1N6chANozcAAzEyAH0AAAAFZAAgAAAAAMWymwwbvIeMqmnKWWifUqoCxOsdpnonM2qdLPyjqJO/BXMAIAAAAAB6IDmmpUhBD2zpRj8/y/kmOSXcjuIU14sNh6GKSsg2uwVsACAAAAAAWMFPNOk3EMSQDS9JGPSMIQP0oNGVugxXKKUrIPPlhHgAAzEzAH0AAAAFZAAgAAAAAPcLmtq+V1e+MRlZ7NHq1+mrRVBQje5zj685ZvdsfKvSBXMAIAAAAABdHz/3w2k5km97QN9m7oLFYJaVJneNlMboIlz5yUASQAVsACAAAAAAWbp8JVJnx8fEVAJFa7WMfMa7wXeP5M3C8MX20J/i9n0AAzE0AH0AAAAFZAAgAAAAAJaRYmo8zqI2BEUzdSwp4tVRpPmVWsfydkYN3UHh6TMuBXMAIAAAAAAeD6mDnQeLlbC9i0sVgE8+RH6y+e94OJQ0tJ0PvblVSgVsACAAAAAAWp4jvretbDEsqEMzP/WLTnwOiJwCtfrCiB6m8k+yEMoAAzE1AH0AAAAFZAAgAAAAAAZZ538coNPwyRjhEwr5P8Xw32oWOJF+R+nfCGgy2qO3BXMAIAAAAACOPLnJlKwGNPDBReRKnHfteq0wFb3ezhrc7BVXs8RUHwVsACAAAAAA+lGesNk3+SyB/60rSvdQ2aN2vfJPR7llJVhufGTNhHkAAzE2AH0AAAAFZAAgAAAAAFH9l9GGA1I52atJV5jNUf1lx8jBjoEoVoME97v5GFJiBXMAIAAAAAC1qH3Kd78Dr9NGbw7y9D/XYBwv5h1LLO8la5OU7g8UkQVsACAAAAAArZ6atJCYrVfHB8dSNPOFf6nnDADBMJcIEj8ljPvxHp8AAzE3AH0AAAAFZAAgAAAAADtbVEI2tdkrowEMdkacD2w0Y3T3Ofi7PH6HmA6sP0c/BXMAIAAAAADuBSROnZHA+NgUPH8d0LnWFiDsM2bY8bzjC1+elSsIygVsACAAAAAAR0G2m+uANoWknkr/NerFcG+fECVxNIs0cqbY1t/U/0MAAzE4AH0AAAAFZAAgAAAAAAh3WpeMVlikPFYj9hLj+fmIqVt6omCSF75W3TPExyWpBXMAIAAAAAAsQkRmwqeVj2gGE03orb6PtrIzDt6dDU3hgSQi8E2wKgVsACAAAAAA3GHaRE2RAcaBRd8VzmYzWeBD2Gmy91eTK1k8YdWObZcAABJjbQAAAAAAAAAAAAAQcGF5bG9hZElkAAAAAAAQZmlyc3RPcGVyYXRvcgABAAAAEnNwAAEAAAAAAAAAEHBuAAIAAAAQdGYAAQAAAAFtbgAAAAAAAAAAAAFteAAAAAAAAABpQAA=", + "subType": "06" + } + } + } + } + } + ], + "cursor": {}, + "encryptionInformation": { + "type": 1, + "schema": { + 
"default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDoublePrecision", + "bsonType": "double", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberDouble": "0.0" + }, + "max": { + "$numberDouble": "200.0" + }, + "precision": { + "$numberInt": "2" + } + } + } + ] + } + } + } + }, + "commandName": "aggregate" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-Rangev2-DoublePrecision-Correctness.json b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-DoublePrecision-Correctness.json new file mode 100644 index 0000000000..939a12c9f8 --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-DoublePrecision-Correctness.json @@ -0,0 +1,1418 @@ +{ + "description": "fle2v2-Rangev2-DoublePrecision-Correctness", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDoublePrecision", + "bsonType": "double", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberDouble": "0.0" + }, + "max": { + "$numberDouble": "200.0" + }, + "precision": { + "$numberInt": "2" + } + } + } + ] + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "Find with $gt", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDoublePrecision": { + "$numberDouble": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + 
"encryptedDoublePrecision": { + "$numberDouble": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDoublePrecision": { + "$gt": { + "$numberDouble": "0.0" + } + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedDoublePrecision": { + "$numberDouble": "1.0" + } + } + ] + } + ] + }, + { + "description": "Find with $gte", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDoublePrecision": { + "$numberDouble": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDoublePrecision": { + "$numberDouble": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDoublePrecision": { + "$gte": { + "$numberDouble": "0.0" + } + } + }, + "sort": { + "_id": 1 + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDoublePrecision": { + "$numberDouble": "0.0" + } + }, + { + "_id": 1, + "encryptedDoublePrecision": { + "$numberDouble": "1.0" + } + } + ] + } + ] + }, + { + "description": "Find with $gt with no results", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDoublePrecision": { + "$numberDouble": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDoublePrecision": { + "$numberDouble": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDoublePrecision": { + "$gt": { + "$numberDouble": "1.0" + } + } + } + }, + "object": "coll", + "expectResult": [] + } + ] + }, + { + "description": "Find with $lt", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDoublePrecision": { + "$numberDouble": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDoublePrecision": { + "$numberDouble": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDoublePrecision": { + "$lt": { + "$numberDouble": "1.0" + } + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDoublePrecision": { + "$numberDouble": "0.0" + } + } + ] + } + ] + }, + { + "description": "Find with $lte", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDoublePrecision": { + "$numberDouble": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDoublePrecision": { + "$numberDouble": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDoublePrecision": { + "$lte": { + "$numberDouble": "1.0" + } + } + }, + "sort": { + "_id": 1 + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDoublePrecision": { + "$numberDouble": "0.0" + } + }, + { + "_id": 1, + "encryptedDoublePrecision": { + "$numberDouble": "1.0" + } + } + ] + } + ] + }, + { + "description": "Find with $lt below min", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDoublePrecision": { + "$numberDouble": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDoublePrecision": { + "$numberDouble": "1.0" + } + } + }, 
+ "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDoublePrecision": { + "$lt": { + "$numberDouble": "0.0" + } + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "must be greater than the range minimum" + } + } + ] + }, + { + "description": "Find with $gt above max", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDoublePrecision": { + "$numberDouble": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDoublePrecision": { + "$numberDouble": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDoublePrecision": { + "$gt": { + "$numberDouble": "200.0" + } + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "must be less than the range max" + } + } + ] + }, + { + "description": "Find with $gt and $lt", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDoublePrecision": { + "$numberDouble": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDoublePrecision": { + "$numberDouble": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDoublePrecision": { + "$gt": { + "$numberDouble": "0.0" + }, + "$lt": { + "$numberDouble": "2.0" + } + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedDoublePrecision": { + "$numberDouble": "1.0" + } + } + ] + } + ] + }, + { + "description": "Find with equality", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDoublePrecision": { + "$numberDouble": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDoublePrecision": { + "$numberDouble": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDoublePrecision": { + "$numberDouble": "0.0" + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDoublePrecision": { + "$numberDouble": "0.0" + } + } + ] + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDoublePrecision": { + "$numberDouble": "1.0" + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedDoublePrecision": { + "$numberDouble": "1.0" + } + } + ] + } + ] + }, + { + "description": "Find with full range", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDoublePrecision": { + "$numberDouble": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDoublePrecision": { + "$numberDouble": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDoublePrecision": { + "$gte": { + "$numberDouble": "0.0" + }, + "$lte": { + "$numberDouble": "200.0" + } + } + }, + "sort": { + "_id": 1 + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDoublePrecision": { + "$numberDouble": "0.0" + } + }, + { + "_id": 1, + "encryptedDoublePrecision": { + "$numberDouble": "1.0" + } + } + ] + } + ] + }, + { + "description": "Find with $in", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDoublePrecision": { + "$numberDouble": 
"0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDoublePrecision": { + "$numberDouble": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDoublePrecision": { + "$in": [ + { + "$numberDouble": "0.0" + } + ] + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDoublePrecision": { + "$numberDouble": "0.0" + } + } + ] + } + ] + }, + { + "description": "Insert out of range", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDoublePrecision": { + "$numberDouble": "-1" + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "value must be greater than or equal to the minimum value" + } + } + ] + }, + { + "description": "Insert min and max", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDoublePrecision": { + "$numberDouble": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 200, + "encryptedDoublePrecision": { + "$numberDouble": "200.0" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": {}, + "sort": { + "_id": 1 + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDoublePrecision": { + "$numberDouble": "0.0" + } + }, + { + "_id": 200, + "encryptedDoublePrecision": { + "$numberDouble": "200.0" + } + } + ] + } + ] + }, + { + "description": "Aggregate with $gte", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDoublePrecision": { + "$numberDouble": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDoublePrecision": { + "$numberDouble": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDoublePrecision": { + "$gte": { + "$numberDouble": "0.0" + } + } + } + }, + { + "$sort": { + "_id": 1 + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDoublePrecision": { + "$numberDouble": "0.0" + } + }, + { + "_id": 1, + "encryptedDoublePrecision": { + "$numberDouble": "1.0" + } + } + ] + } + ] + }, + { + "description": "Aggregate with $gt with no results", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDoublePrecision": { + "$numberDouble": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDoublePrecision": { + "$numberDouble": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDoublePrecision": { + "$gt": { + "$numberDouble": "1.0" + } + } + } + } + ] + }, + "object": "coll", + "expectResult": [] + } + ] + }, + { + "description": "Aggregate with $lt", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDoublePrecision": { + "$numberDouble": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDoublePrecision": { + "$numberDouble": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDoublePrecision": { + "$lt": { + "$numberDouble": "1.0" + } + } + } + } + ] 
+ }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDoublePrecision": { + "$numberDouble": "0.0" + } + } + ] + } + ] + }, + { + "description": "Aggregate with $lte", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDoublePrecision": { + "$numberDouble": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDoublePrecision": { + "$numberDouble": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDoublePrecision": { + "$lte": { + "$numberDouble": "1.0" + } + } + } + }, + { + "$sort": { + "_id": 1 + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDoublePrecision": { + "$numberDouble": "0.0" + } + }, + { + "_id": 1, + "encryptedDoublePrecision": { + "$numberDouble": "1.0" + } + } + ] + } + ] + }, + { + "description": "Aggregate with $lt below min", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDoublePrecision": { + "$numberDouble": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDoublePrecision": { + "$numberDouble": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDoublePrecision": { + "$lt": { + "$numberDouble": "0.0" + } + } + } + } + ] + }, + "object": "coll", + "expectError": { + "errorContains": "must be greater than the range minimum" + } + } + ] + }, + { + "description": "Aggregate with $gt above max", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDoublePrecision": { + "$numberDouble": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDoublePrecision": { + "$numberDouble": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDoublePrecision": { + "$gt": { + "$numberDouble": "200.0" + } + } + } + } + ] + }, + "object": "coll", + "expectError": { + "errorContains": "must be less than the range max" + } + } + ] + }, + { + "description": "Aggregate with $gt and $lt", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDoublePrecision": { + "$numberDouble": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDoublePrecision": { + "$numberDouble": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDoublePrecision": { + "$gt": { + "$numberDouble": "0.0" + }, + "$lt": { + "$numberDouble": "2.0" + } + } + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedDoublePrecision": { + "$numberDouble": "1.0" + } + } + ] + } + ] + }, + { + "description": "Aggregate with equality", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDoublePrecision": { + "$numberDouble": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDoublePrecision": { + "$numberDouble": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { 
+ "$match": { + "encryptedDoublePrecision": { + "$numberDouble": "0.0" + } + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDoublePrecision": { + "$numberDouble": "0.0" + } + } + ] + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDoublePrecision": { + "$numberDouble": "1.0" + } + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedDoublePrecision": { + "$numberDouble": "1.0" + } + } + ] + } + ] + }, + { + "description": "Aggregate with full range", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDoublePrecision": { + "$numberDouble": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDoublePrecision": { + "$numberDouble": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDoublePrecision": { + "$gte": { + "$numberDouble": "0.0" + }, + "$lte": { + "$numberDouble": "200.0" + } + } + } + }, + { + "$sort": { + "_id": 1 + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDoublePrecision": { + "$numberDouble": "0.0" + } + }, + { + "_id": 1, + "encryptedDoublePrecision": { + "$numberDouble": "1.0" + } + } + ] + } + ] + }, + { + "description": "Aggregate with $in", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDoublePrecision": { + "$numberDouble": "0.0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDoublePrecision": { + "$numberDouble": "1.0" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedDoublePrecision": { + "$in": [ + { + "$numberDouble": "0.0" + } + ] + } + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedDoublePrecision": { + "$numberDouble": "0.0" + } + } + ] + } + ] + }, + { + "description": "Wrong type: Insert Int", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDoublePrecision": { + "$numberInt": "0" + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "Cannot encrypt element" + } + } + ] + }, + { + "description": "Wrong type: Find Int", + "operations": [ + { + "name": "find", + "arguments": { + "filter": { + "encryptedDoublePrecision": { + "$gte": { + "$numberInt": "0" + } + } + }, + "sort": { + "_id": 1 + } + }, + "object": "coll", + "expectError": { + "errorContains": "field type is not supported" + } + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-Rangev2-DoublePrecision-Delete.json b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-DoublePrecision-Delete.json new file mode 100644 index 0000000000..db615d6fe3 --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-DoublePrecision-Delete.json @@ -0,0 +1,537 @@ +{ + "description": "fle2v2-Rangev2-DoublePrecision-Delete", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": 
"Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted" + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll_unencrypted", + "database": "db_unencrypted", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDoublePrecision", + "bsonType": "double", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberDouble": "0.0" + }, + "max": { + "$numberDouble": "200.0" + }, + "precision": { + "$numberInt": "2" + } + } + } + ] + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "FLE2 Range DoublePrecision. 
Delete.", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDoublePrecision": { + "$numberDouble": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDoublePrecision": { + "$numberDouble": "1" + } + } + }, + "object": "coll" + }, + { + "name": "deleteOne", + "arguments": { + "filter": { + "encryptedDoublePrecision": { + "$gt": { + "$numberDouble": "0" + } + } + } + }, + "object": "coll", + "expectResult": { + "deletedCount": 1 + } + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": 0, + "encryptedDoublePrecision": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "Dri0CXmL78L2DOgk9w0DwxHOMGMzih7m6l59vgy+WWo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "x7GR49EN0t3WXQDihkrbonK7qNIBYC87tpL/XEUyIYc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "JfYUqWF+OoGjiYkRI4L5iPlF+T1Eleul7Fki22jp4Qc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "q1RyGfIgsaQHoZFRw+DD28V26rN5hweApPLwExncvT8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "L2PFeKGvLS6C+DLudR6fGlBq3ERPvjWvRyNRIA2HVb0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "CWxaNqL3iP1yCixDkcmf9bmW3E5VeN8TJkg1jJe528s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "+vC6araOEo+fpW7PSIP40/EnzBCj1d2N10Jr3rrXJJM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6SV63Mf51Z6A6p2X3rCnJKCu6ku3Oeb45mBYbz+IoAo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "RjBYT2h3ZAoHxhf8DU6/dFbDkEBZp0IxREcsRTu2MXs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "b7d8mRzD1kI1tdc7uNL+YAUonJ6pODLsRLkArfEKSkM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Xg8C1/A0KJaXOw4i+26Rv03/CydaaunOzXh0CIT+gn8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "UoKUDw2wJYToUCcFaIs03YQSTksYR0MIOTJllwODqKc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "c/5cwAT0C5jber2xlJnWD3a5tVDy0nRtr5HG02hoFOY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "wSUrRXavAGaajNeqC5mEUH1K67oYl5Wy9RNIzKjwLAM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6vrp4wWDtHEgHWR99I70WVDzevg1Fk/Pw5U8gUDa0OU=", + "subType": "00" + } + } + ] + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 0, + "encryptedDoublePrecision": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDoublePrecision", + "bsonType": "double", + "queries": { + "queryType": "range", + 
"contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberDouble": "0.0" + }, + "max": { + "$numberDouble": "200.0" + }, + "precision": { + "$numberInt": "2" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encryptedDoublePrecision": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDoublePrecision", + "bsonType": "double", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberDouble": "0.0" + }, + "max": { + "$numberDouble": "200.0" + }, + "precision": { + "$numberInt": "2" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "delete": "default", + "deletes": [ + { + "q": { + "encryptedDoublePrecision": { + "$gt": { + "$binary": { + "base64": "DQYKAAADcGF5bG9hZACiCQAABGcAjgkAAAMwAH0AAAAFZAAgAAAAAHdJ2Vnb4MMzqVYVssjSdDy8XU4GVzMTfGifGETgQ2mYBXMAIAAAAAD7cFfKJGIXo6PjyeX2ria02CckW7dWFDoY/3FyBdm1NQVsACAAAAAAhEPSNv4M023A3hzNFuy83+hIKuZ2mKRY954N++aEOBUAAzEAfQAAAAVkACAAAAAAlmvfDrZoydUet4eCVMq7z6a58Ea+1HLJOWxN5lNcrWEFcwAgAAAAAEBo5AWZyC41b9ayjWNQSL4iYEAIwR/JG+ssN8bdoK9RBWwAIAAAAACEndE0SLxFSElOrNnqeX0EPmgDio3udZjVREy4JLS3sQADMgB9AAAABWQAIAAAAABbiLaoxAA6rinMJw1hC8ZUiq6UU1AQaPFn/py/Y06WuQVzACAAAAAAhtDasFkvYE7SCNu1je/hxdE9TJtAvvH3NtdEbKzNbCUFbAAgAAAAAIGepU1RSCF8sWODHEpKglsoqw3VBBH4a/URGxgGzbq2AAMzAH0AAAAFZAAgAAAAALORWwSr+tYNxcil2KIGSbNhTHvcPbdj+rLVQNx21S/KBXMAIAAAAAD6diZBkPEJ1cQy06LAxdbNK8Nlxbb44fH4Wk3Y3260nQVsACAAAAAA1eYAZBFHlDiaDAljWi8blGQ2nvvZa5AO5doeo0SFZsgAAzQAfQAAAAVkACAAAAAAG5XMK96PjClNlUvg82j4pMY1YxsznZfj4uNweD394FoFcwAgAAAAAKHgQLdGJHkrfFg9nB93Ac+3VgBw6aU44MTkKIQ91dZoBWwAIAAAAAAPxXmi+SDJ+40A0KdwfRczexlZQrHjIA+D3oUB0EY9tAADNQB9AAAABWQAIAAAAAA6M++b9I0YFemmWBAWAE3glu2Ah3Ta1FBxAQEIWS0toAVzACAAAAAANXYTqPf1Y6X3Ns6YQIX0C3FKCyWUo+Kk+fNcQvc0WSoFbAAgAAAAAA+uJUw1ICYgyeygSRe206VTWVtUnhdci3iHbyP5YtEVAAM2AH0AAAAFZAAgAAAAAKl8bV1riH/uyJ+X0HHd3+18k2cJl2dQFXCdoagutFcaBXMAIAAAAABm8F2Ew9f0VOABdcF+lP0Bi+zWvEUPniWgrxPq/Sx3uwVsACAAAAAAJfFErjZ6BPhsw5LjJLqNtKDLJ4zV0eIZppQpd9b0wZoAAzcAfQAAAAVkACAAAAAAsYZD8JEP6kYsPncFnNZwJxhu4YtUTKPNcjHtv67H+rYFcwAgAAAAAI4LqZcRkvbs/2F62Flu0pixNcor4WmBD0DHGaf039wLBWwAIAAAAAD4wUR3xd9lKltcqqo8LYvdMQWzCRobkV/ppKB/yn5dUgADOAB9AAAABWQAIAAAAAC0vdAi+dmoIXvZ5LqUqvyKV9/tHqSI2SWiSJO5pTnA2wVzACAAAAAAS2qvf9fvfVUH5WtsVxjxmskpGjYTQV34LwvQQw1y9wIFbAAgAAAAAE0+FKuK7HxbypvCeEJzMTcjOWE0ScYOlTBMUNlIv55hAAM5AH0AAAAFZAAgAAAAAH31lb/srBcrOXkzddCwAnclsR5/3QijEVgECs2JjOWBBXMAIAAAAABg7+prDT73YcCvLE5QbuIrqGcjLc5pQD2Miq0d29yrxgVsACAAAAAAetRiPwDSFWBzpWSWkOKWM6fKStRJ8SyObnpc79ux8p0AAzEwAH0AAAAFZAAgAAAAAOK8brUuc2onBNDRtfYMR736dHj4dQqXod8JG7tAMTsDBXMAIAAAAAAW6SrGAL6Bx0s7ZlsYULFfOAiYIGhEWu6md3r+Rk40awVsACAAAAAAIHYXP8RLcCboUmHN3+OlnEw1DxaLSnbTB9PdF228fFAAAzExAH0AAAAFZAAgAAAAAFdthRhe2Q8CvxGIhjTJZv0Lk97GkHciTPxZ/mckLoNaBXMAIAAAAAAqOxsAr23LOVB0DIHbPf9UDJJRFXY2YoKbjhRqw5psbQVsACAAAAAA0G2GD8ZQjDBntjLpW4rqwKRS6HiUjL03g1N6chANozcAAzEyAH0AAAAFZAAgAAAAAMWymwwbvIeMqmnKWWifUqoCxOsdpnonM2qdLPyjqJO/BXMAIAAAAAB6IDmmpUhBD2zpRj8/y/kmOSXcju
IU14sNh6GKSsg2uwVsACAAAAAAWMFPNOk3EMSQDS9JGPSMIQP0oNGVugxXKKUrIPPlhHgAAzEzAH0AAAAFZAAgAAAAAPcLmtq+V1e+MRlZ7NHq1+mrRVBQje5zj685ZvdsfKvSBXMAIAAAAABdHz/3w2k5km97QN9m7oLFYJaVJneNlMboIlz5yUASQAVsACAAAAAAWbp8JVJnx8fEVAJFa7WMfMa7wXeP5M3C8MX20J/i9n0AAzE0AH0AAAAFZAAgAAAAAJaRYmo8zqI2BEUzdSwp4tVRpPmVWsfydkYN3UHh6TMuBXMAIAAAAAAeD6mDnQeLlbC9i0sVgE8+RH6y+e94OJQ0tJ0PvblVSgVsACAAAAAAWp4jvretbDEsqEMzP/WLTnwOiJwCtfrCiB6m8k+yEMoAAzE1AH0AAAAFZAAgAAAAAAZZ538coNPwyRjhEwr5P8Xw32oWOJF+R+nfCGgy2qO3BXMAIAAAAACOPLnJlKwGNPDBReRKnHfteq0wFb3ezhrc7BVXs8RUHwVsACAAAAAA+lGesNk3+SyB/60rSvdQ2aN2vfJPR7llJVhufGTNhHkAAzE2AH0AAAAFZAAgAAAAAFH9l9GGA1I52atJV5jNUf1lx8jBjoEoVoME97v5GFJiBXMAIAAAAAC1qH3Kd78Dr9NGbw7y9D/XYBwv5h1LLO8la5OU7g8UkQVsACAAAAAArZ6atJCYrVfHB8dSNPOFf6nnDADBMJcIEj8ljPvxHp8AAzE3AH0AAAAFZAAgAAAAADtbVEI2tdkrowEMdkacD2w0Y3T3Ofi7PH6HmA6sP0c/BXMAIAAAAADuBSROnZHA+NgUPH8d0LnWFiDsM2bY8bzjC1+elSsIygVsACAAAAAAR0G2m+uANoWknkr/NerFcG+fECVxNIs0cqbY1t/U/0MAAzE4AH0AAAAFZAAgAAAAAAh3WpeMVlikPFYj9hLj+fmIqVt6omCSF75W3TPExyWpBXMAIAAAAAAsQkRmwqeVj2gGE03orb6PtrIzDt6dDU3hgSQi8E2wKgVsACAAAAAA3GHaRE2RAcaBRd8VzmYzWeBD2Gmy91eTK1k8YdWObZcAABJjbQAAAAAAAAAAAAAQcGF5bG9hZElkAAAAAAAQZmlyc3RPcGVyYXRvcgABAAAAEnNwAAEAAAAAAAAAEHBuAAIAAAAQdGYAAQAAAAFtbgAAAAAAAAAAAAFteAAAAAAAAABpQAA=", + "subType": "06" + } + } + } + }, + "limit": 1 + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDoublePrecision", + "bsonType": "double", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberDouble": "0.0" + }, + "max": { + "$numberDouble": "200.0" + }, + "precision": { + "$numberInt": "2" + } + } + } + ] + } + } + } + }, + "commandName": "delete" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-Rangev2-DoublePrecision-FindOneAndUpdate.json b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-DoublePrecision-FindOneAndUpdate.json new file mode 100644 index 0000000000..a8f87596e8 --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-DoublePrecision-FindOneAndUpdate.json @@ -0,0 +1,647 @@ +{ + "description": "fle2v2-Rangev2-DoublePrecision-FindOneAndUpdate", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted" + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll_unencrypted", + "database": "db_unencrypted", + "collectionName": 
"default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDoublePrecision", + "bsonType": "double", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberDouble": "0.0" + }, + "max": { + "$numberDouble": "200.0" + }, + "precision": { + "$numberInt": "2" + } + } + } + ] + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "FLE2 Range DoublePrecision. FindOneAndUpdate.", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDoublePrecision": { + "$numberDouble": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDoublePrecision": { + "$numberDouble": "1" + } + } + }, + "object": "coll" + }, + { + "name": "findOneAndUpdate", + "arguments": { + "filter": { + "encryptedDoublePrecision": { + "$gt": { + "$numberDouble": "0" + } + } + }, + "update": { + "$set": { + "encryptedDoublePrecision": { + "$numberDouble": "2" + } + } + }, + "returnDocument": "Before" + }, + "object": "coll", + "expectResult": { + "_id": 1, + "encryptedDoublePrecision": { + "$numberDouble": "1" + } + } + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": 0, + "encryptedDoublePrecision": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "Dri0CXmL78L2DOgk9w0DwxHOMGMzih7m6l59vgy+WWo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "x7GR49EN0t3WXQDihkrbonK7qNIBYC87tpL/XEUyIYc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "JfYUqWF+OoGjiYkRI4L5iPlF+T1Eleul7Fki22jp4Qc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "q1RyGfIgsaQHoZFRw+DD28V26rN5hweApPLwExncvT8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "L2PFeKGvLS6C+DLudR6fGlBq3ERPvjWvRyNRIA2HVb0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "CWxaNqL3iP1yCixDkcmf9bmW3E5VeN8TJkg1jJe528s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "+vC6araOEo+fpW7PSIP40/EnzBCj1d2N10Jr3rrXJJM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6SV63Mf51Z6A6p2X3rCnJKCu6ku3Oeb45mBYbz+IoAo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "RjBYT2h3ZAoHxhf8DU6/dFbDkEBZp0IxREcsRTu2MXs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "b7d8mRzD1kI1tdc7uNL+YAUonJ6pODLsRLkArfEKSkM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Xg8C1/A0KJaXOw4i+26Rv03/CydaaunOzXh0CIT+gn8=", + 
"subType": "00" + } + }, + { + "$binary": { + "base64": "UoKUDw2wJYToUCcFaIs03YQSTksYR0MIOTJllwODqKc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "c/5cwAT0C5jber2xlJnWD3a5tVDy0nRtr5HG02hoFOY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "wSUrRXavAGaajNeqC5mEUH1K67oYl5Wy9RNIzKjwLAM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6vrp4wWDtHEgHWR99I70WVDzevg1Fk/Pw5U8gUDa0OU=", + "subType": "00" + } + } + ] + }, + { + "_id": 1, + "encryptedDoublePrecision": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "V6knyt7Zq2CG3++l75UtBx2m32iGAPjHiAe439Bf02w=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "F08nMDWDZc+DbWM7XCEJNNCEYyinRmrvGP7EWhmp4is=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "cXH4688amcDc8kZOJq4UP8cE3R58Zl7e+Qo/1jyspps=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "uURBxvTp3FBCVkd+LPqyuY7d6rMW6SGIJQEPY/wtkZI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "jG3hax1L3RBp9t38vUt53FsBxgr/+Si/vVISpAylYpE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "kwtIW8MhH9Ky5xNjBx8gFA/SHh2YVphie7g5FGBzals=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "FHflwFuEMu4xX0ZApHi+pdlBH+oevAtXckCUb5Wv0xU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "0OKSXELxPP85SBVwDGf3LtMEQCJ8TTkFUl/+6jlkdb0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "25j9sQXZCihCmHKvTHgaBsAVZFcGPn7JjHdrCGlwyyw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "uEw0lpQtBppR3vqV9j9+NQRSBF1BzZukb8c9IhyWvxc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "zVhZ7Q59O087ji49oMJvBIgeir2oqvUpnh4p53GcTow=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dowrzKs+qJhRMZyKDbhjXbuX43FbmUKOaw9I8YlOZDw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ep5B6cska6THLIF7Mn3tn3RvV9EiwLSt0eZM/CLRUDc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "URNp/YmmDh5wIZUfAzzgPyJeMNiVx9PMsz52DZRujGY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "wlM4IAQhhKQEzoVqS8b1Ddd50GB95OFb9LnzOwyjCP4=", + "subType": "00" + } + } + ] + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 0, + "encryptedDoublePrecision": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDoublePrecision", + "bsonType": "double", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberDouble": "0.0" + }, + "max": { + "$numberDouble": "200.0" + }, + "precision": { + 
"$numberInt": "2" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encryptedDoublePrecision": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDoublePrecision", + "bsonType": "double", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberDouble": "0.0" + }, + "max": { + "$numberDouble": "200.0" + }, + "precision": { + "$numberInt": "2" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "findAndModify": "default", + "query": { + "encryptedDoublePrecision": { + "$gt": { + "$binary": { + "base64": "DQYKAAADcGF5bG9hZACiCQAABGcAjgkAAAMwAH0AAAAFZAAgAAAAAHdJ2Vnb4MMzqVYVssjSdDy8XU4GVzMTfGifGETgQ2mYBXMAIAAAAAD7cFfKJGIXo6PjyeX2ria02CckW7dWFDoY/3FyBdm1NQVsACAAAAAAhEPSNv4M023A3hzNFuy83+hIKuZ2mKRY954N++aEOBUAAzEAfQAAAAVkACAAAAAAlmvfDrZoydUet4eCVMq7z6a58Ea+1HLJOWxN5lNcrWEFcwAgAAAAAEBo5AWZyC41b9ayjWNQSL4iYEAIwR/JG+ssN8bdoK9RBWwAIAAAAACEndE0SLxFSElOrNnqeX0EPmgDio3udZjVREy4JLS3sQADMgB9AAAABWQAIAAAAABbiLaoxAA6rinMJw1hC8ZUiq6UU1AQaPFn/py/Y06WuQVzACAAAAAAhtDasFkvYE7SCNu1je/hxdE9TJtAvvH3NtdEbKzNbCUFbAAgAAAAAIGepU1RSCF8sWODHEpKglsoqw3VBBH4a/URGxgGzbq2AAMzAH0AAAAFZAAgAAAAALORWwSr+tYNxcil2KIGSbNhTHvcPbdj+rLVQNx21S/KBXMAIAAAAAD6diZBkPEJ1cQy06LAxdbNK8Nlxbb44fH4Wk3Y3260nQVsACAAAAAA1eYAZBFHlDiaDAljWi8blGQ2nvvZa5AO5doeo0SFZsgAAzQAfQAAAAVkACAAAAAAG5XMK96PjClNlUvg82j4pMY1YxsznZfj4uNweD394FoFcwAgAAAAAKHgQLdGJHkrfFg9nB93Ac+3VgBw6aU44MTkKIQ91dZoBWwAIAAAAAAPxXmi+SDJ+40A0KdwfRczexlZQrHjIA+D3oUB0EY9tAADNQB9AAAABWQAIAAAAAA6M++b9I0YFemmWBAWAE3glu2Ah3Ta1FBxAQEIWS0toAVzACAAAAAANXYTqPf1Y6X3Ns6YQIX0C3FKCyWUo+Kk+fNcQvc0WSoFbAAgAAAAAA+uJUw1ICYgyeygSRe206VTWVtUnhdci3iHbyP5YtEVAAM2AH0AAAAFZAAgAAAAAKl8bV1riH/uyJ+X0HHd3+18k2cJl2dQFXCdoagutFcaBXMAIAAAAABm8F2Ew9f0VOABdcF+lP0Bi+zWvEUPniWgrxPq/Sx3uwVsACAAAAAAJfFErjZ6BPhsw5LjJLqNtKDLJ4zV0eIZppQpd9b0wZoAAzcAfQAAAAVkACAAAAAAsYZD8JEP6kYsPncFnNZwJxhu4YtUTKPNcjHtv67H+rYFcwAgAAAAAI4LqZcRkvbs/2F62Flu0pixNcor4WmBD0DHGaf039wLBWwAIAAAAAD4wUR3xd9lKltcqqo8LYvdMQWzCRobkV/ppKB/yn5dUgADOAB9AAAABWQAIAAAAAC0vdAi+dmoIXvZ5LqUqvyKV9/tHqSI2SWiSJO5pTnA2wVzACAAAAAAS2qvf9fvfVUH5WtsVxjxmskpGjYTQV34LwvQQw1y9wIFbAAgAAAAAE0+FKuK7HxbypvCeEJzMTcjOWE0ScYOlTBMUNlIv55hAAM5AH0AAAAFZAAgAAAAAH31lb/srBcrOXkzddCwAnclsR5/3QijEVgECs2JjOWBBXMAIAAAAABg7+prDT73YcCvLE5QbuIrqGcjLc5pQD2Miq0d29yrxgVsACAAAAAAetRiPwDSFWBzpWSWkOKWM6fKStRJ8SyObnpc79ux8p0AAzEwAH0AAAAFZAAgAAAAAOK8brUuc2onBNDRtfYMR736dHj4dQqXod8JG7tAMTsDBXMAIAAAAAAW6SrGAL6Bx0s7ZlsYULFfOAiYIGhEWu6md3r+Rk40awVsACAAAAAAIHYXP8RLcCboUmHN3+OlnEw1DxaLSnbTB9PdF228fFAAAzExAH0AAAAFZAAgAAAAAFdthRhe2Q8CvxGIhjTJZv0Lk97GkHciTPxZ/mckLoNaBXMAIAAAAAAqOxsAr23LOVB0DIHbPf9UDJJRFXY2YoKbjhRqw5psbQVsACAAAAAA0G2GD8ZQjDBntjLpW4rqwKRS6HiUjL03g1N6chANozcAAzEyAH0AAAAFZAAgAAAAAMWymwwbvIeMqmnKWWifUqoCxOsdpnonM2qdLPyjqJO/BXMAIAAAAAB6IDmmpUhBD2zpRj8/y/kmOSXcjuIU14sNh6GKSsg2uwVsACAAAAAAWMFPNOk3EMSQDS9JGPSMIQP0oNGVugxXKKUrIPPlhHgAAzEzAH0AAAAFZAAgAAAAAPcLmtq+V1e+MRlZ7NHq1+mrRVBQje5zj685ZvdsfKvSBXMAIAAAAABdHz/3w2k5km97QN9m7oLFYJaVJneNlMboIlz5yUASQAVsACAAAAAAWbp8JVJnx8fEVAJFa7WMfMa7wXeP5M3C8MX20J/i
9n0AAzE0AH0AAAAFZAAgAAAAAJaRYmo8zqI2BEUzdSwp4tVRpPmVWsfydkYN3UHh6TMuBXMAIAAAAAAeD6mDnQeLlbC9i0sVgE8+RH6y+e94OJQ0tJ0PvblVSgVsACAAAAAAWp4jvretbDEsqEMzP/WLTnwOiJwCtfrCiB6m8k+yEMoAAzE1AH0AAAAFZAAgAAAAAAZZ538coNPwyRjhEwr5P8Xw32oWOJF+R+nfCGgy2qO3BXMAIAAAAACOPLnJlKwGNPDBReRKnHfteq0wFb3ezhrc7BVXs8RUHwVsACAAAAAA+lGesNk3+SyB/60rSvdQ2aN2vfJPR7llJVhufGTNhHkAAzE2AH0AAAAFZAAgAAAAAFH9l9GGA1I52atJV5jNUf1lx8jBjoEoVoME97v5GFJiBXMAIAAAAAC1qH3Kd78Dr9NGbw7y9D/XYBwv5h1LLO8la5OU7g8UkQVsACAAAAAArZ6atJCYrVfHB8dSNPOFf6nnDADBMJcIEj8ljPvxHp8AAzE3AH0AAAAFZAAgAAAAADtbVEI2tdkrowEMdkacD2w0Y3T3Ofi7PH6HmA6sP0c/BXMAIAAAAADuBSROnZHA+NgUPH8d0LnWFiDsM2bY8bzjC1+elSsIygVsACAAAAAAR0G2m+uANoWknkr/NerFcG+fECVxNIs0cqbY1t/U/0MAAzE4AH0AAAAFZAAgAAAAAAh3WpeMVlikPFYj9hLj+fmIqVt6omCSF75W3TPExyWpBXMAIAAAAAAsQkRmwqeVj2gGE03orb6PtrIzDt6dDU3hgSQi8E2wKgVsACAAAAAA3GHaRE2RAcaBRd8VzmYzWeBD2Gmy91eTK1k8YdWObZcAABJjbQAAAAAAAAAAAAAQcGF5bG9hZElkAAAAAAAQZmlyc3RPcGVyYXRvcgABAAAAEnNwAAEAAAAAAAAAEHBuAAIAAAAQdGYAAQAAAAFtbgAAAAAAAAAAAAFteAAAAAAAAABpQAA=", + "subType": "06" + } + } + } + }, + "update": { + "$set": { + "encryptedDoublePrecision": { + "$$type": "binData" + } + } + }, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDoublePrecision", + "bsonType": "double", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberDouble": "0.0" + }, + "max": { + "$numberDouble": "200.0" + }, + "precision": { + "$numberInt": "2" + } + } + } + ] + } + } + } + }, + "commandName": "findAndModify" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-Rangev2-DoublePrecision-InsertFind.json b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-DoublePrecision-InsertFind.json new file mode 100644 index 0000000000..5e4aa5f1e0 --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-DoublePrecision-InsertFind.json @@ -0,0 +1,634 @@ +{ + "description": "fle2v2-Rangev2-DoublePrecision-InsertFind", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted" + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll_unencrypted", + "database": "db_unencrypted", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + 
"$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDoublePrecision", + "bsonType": "double", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberDouble": "0.0" + }, + "max": { + "$numberDouble": "200.0" + }, + "precision": { + "$numberInt": "2" + } + } + } + ] + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "FLE2 Range DoublePrecision. Insert and Find.", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDoublePrecision": { + "$numberDouble": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDoublePrecision": { + "$numberDouble": "1" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedDoublePrecision": { + "$gt": { + "$numberDouble": "0" + } + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedDoublePrecision": { + "$numberDouble": "1" + } + } + ] + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": 0, + "encryptedDoublePrecision": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "Dri0CXmL78L2DOgk9w0DwxHOMGMzih7m6l59vgy+WWo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "x7GR49EN0t3WXQDihkrbonK7qNIBYC87tpL/XEUyIYc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "JfYUqWF+OoGjiYkRI4L5iPlF+T1Eleul7Fki22jp4Qc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "q1RyGfIgsaQHoZFRw+DD28V26rN5hweApPLwExncvT8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "L2PFeKGvLS6C+DLudR6fGlBq3ERPvjWvRyNRIA2HVb0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "CWxaNqL3iP1yCixDkcmf9bmW3E5VeN8TJkg1jJe528s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "+vC6araOEo+fpW7PSIP40/EnzBCj1d2N10Jr3rrXJJM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6SV63Mf51Z6A6p2X3rCnJKCu6ku3Oeb45mBYbz+IoAo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "RjBYT2h3ZAoHxhf8DU6/dFbDkEBZp0IxREcsRTu2MXs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "b7d8mRzD1kI1tdc7uNL+YAUonJ6pODLsRLkArfEKSkM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Xg8C1/A0KJaXOw4i+26Rv03/CydaaunOzXh0CIT+gn8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "UoKUDw2wJYToUCcFaIs03YQSTksYR0MIOTJllwODqKc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "c/5cwAT0C5jber2xlJnWD3a5tVDy0nRtr5HG02hoFOY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "wSUrRXavAGaajNeqC5mEUH1K67oYl5Wy9RNIzKjwLAM=", + "subType": 
"00" + } + }, + { + "$binary": { + "base64": "6vrp4wWDtHEgHWR99I70WVDzevg1Fk/Pw5U8gUDa0OU=", + "subType": "00" + } + } + ] + }, + { + "_id": 1, + "encryptedDoublePrecision": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "mVZb+Ra0EYjQ4Zrh9X//E2T8MRj7NMqm5GUJXhRrBEI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "FA74j21GUEJb1DJBOpR9nVnjaDZnd8yAQNuaW9Qi26g=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "kJv//KVkbrobIBf+QeWC5jxn20mx/P0R1N6aCSMgKM8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "zB+Whi9IUUGxfLEe+lGuIzLX4LFbIhaIAm5lRk65QTc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ybO1QU3CgvhO8JgRXH+HxKszWcpl5aGDYYVa75fHa1g=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "X3Y3eSAbbMg//JgiHHiFpYOpV61t8kkDexI+CQyitH4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SlNHXyqVFGDPrX/2ppwog6l4pwj3PKda2TkZbqgfSfA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "McjV8xwTF3xI7863DYOBdyvIv6UpzThl6v9vBRk05bI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "MgwakFvPyBlwqFTbhWUF79URJQWFoJTGotlEVSPPUsQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "DyBERpMSD5lEM5Nhpcn4WGgxgn/mkUVJp+PYSLX5jsE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "I43iazc0xj1WVbYB/V+uTL/tughN1bBlxh1iypBnNsA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "wjOBa/ATMuOywFmuPgC0GF/oeLqu0Z7eK5udzkTPbis=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "gRQVwiR+m+0Vg8ZDXqrQQcVnTyobwCXNaA4BCJVXtMc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "WUZ6huwx0ZbLb0R00uiC9FOJzsUocUN8qE5+YRenkvQ=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "7s79aKEuPgQcS/YPOOVcYNZvHIo7FFsWtFCrnDKXefA=", + "subType": "00" + } + } + ] + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 0, + "encryptedDoublePrecision": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDoublePrecision", + "bsonType": "double", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberDouble": "0.0" + }, + "max": { + "$numberDouble": "200.0" + }, + "precision": { + "$numberInt": "2" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encryptedDoublePrecision": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": 
{ + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDoublePrecision", + "bsonType": "double", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberDouble": "0.0" + }, + "max": { + "$numberDouble": "200.0" + }, + "precision": { + "$numberInt": "2" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "default", + "filter": { + "encryptedDoublePrecision": { + "$gt": { + "$binary": { + "base64": "DQYKAAADcGF5bG9hZACiCQAABGcAjgkAAAMwAH0AAAAFZAAgAAAAAHdJ2Vnb4MMzqVYVssjSdDy8XU4GVzMTfGifGETgQ2mYBXMAIAAAAAD7cFfKJGIXo6PjyeX2ria02CckW7dWFDoY/3FyBdm1NQVsACAAAAAAhEPSNv4M023A3hzNFuy83+hIKuZ2mKRY954N++aEOBUAAzEAfQAAAAVkACAAAAAAlmvfDrZoydUet4eCVMq7z6a58Ea+1HLJOWxN5lNcrWEFcwAgAAAAAEBo5AWZyC41b9ayjWNQSL4iYEAIwR/JG+ssN8bdoK9RBWwAIAAAAACEndE0SLxFSElOrNnqeX0EPmgDio3udZjVREy4JLS3sQADMgB9AAAABWQAIAAAAABbiLaoxAA6rinMJw1hC8ZUiq6UU1AQaPFn/py/Y06WuQVzACAAAAAAhtDasFkvYE7SCNu1je/hxdE9TJtAvvH3NtdEbKzNbCUFbAAgAAAAAIGepU1RSCF8sWODHEpKglsoqw3VBBH4a/URGxgGzbq2AAMzAH0AAAAFZAAgAAAAALORWwSr+tYNxcil2KIGSbNhTHvcPbdj+rLVQNx21S/KBXMAIAAAAAD6diZBkPEJ1cQy06LAxdbNK8Nlxbb44fH4Wk3Y3260nQVsACAAAAAA1eYAZBFHlDiaDAljWi8blGQ2nvvZa5AO5doeo0SFZsgAAzQAfQAAAAVkACAAAAAAG5XMK96PjClNlUvg82j4pMY1YxsznZfj4uNweD394FoFcwAgAAAAAKHgQLdGJHkrfFg9nB93Ac+3VgBw6aU44MTkKIQ91dZoBWwAIAAAAAAPxXmi+SDJ+40A0KdwfRczexlZQrHjIA+D3oUB0EY9tAADNQB9AAAABWQAIAAAAAA6M++b9I0YFemmWBAWAE3glu2Ah3Ta1FBxAQEIWS0toAVzACAAAAAANXYTqPf1Y6X3Ns6YQIX0C3FKCyWUo+Kk+fNcQvc0WSoFbAAgAAAAAA+uJUw1ICYgyeygSRe206VTWVtUnhdci3iHbyP5YtEVAAM2AH0AAAAFZAAgAAAAAKl8bV1riH/uyJ+X0HHd3+18k2cJl2dQFXCdoagutFcaBXMAIAAAAABm8F2Ew9f0VOABdcF+lP0Bi+zWvEUPniWgrxPq/Sx3uwVsACAAAAAAJfFErjZ6BPhsw5LjJLqNtKDLJ4zV0eIZppQpd9b0wZoAAzcAfQAAAAVkACAAAAAAsYZD8JEP6kYsPncFnNZwJxhu4YtUTKPNcjHtv67H+rYFcwAgAAAAAI4LqZcRkvbs/2F62Flu0pixNcor4WmBD0DHGaf039wLBWwAIAAAAAD4wUR3xd9lKltcqqo8LYvdMQWzCRobkV/ppKB/yn5dUgADOAB9AAAABWQAIAAAAAC0vdAi+dmoIXvZ5LqUqvyKV9/tHqSI2SWiSJO5pTnA2wVzACAAAAAAS2qvf9fvfVUH5WtsVxjxmskpGjYTQV34LwvQQw1y9wIFbAAgAAAAAE0+FKuK7HxbypvCeEJzMTcjOWE0ScYOlTBMUNlIv55hAAM5AH0AAAAFZAAgAAAAAH31lb/srBcrOXkzddCwAnclsR5/3QijEVgECs2JjOWBBXMAIAAAAABg7+prDT73YcCvLE5QbuIrqGcjLc5pQD2Miq0d29yrxgVsACAAAAAAetRiPwDSFWBzpWSWkOKWM6fKStRJ8SyObnpc79ux8p0AAzEwAH0AAAAFZAAgAAAAAOK8brUuc2onBNDRtfYMR736dHj4dQqXod8JG7tAMTsDBXMAIAAAAAAW6SrGAL6Bx0s7ZlsYULFfOAiYIGhEWu6md3r+Rk40awVsACAAAAAAIHYXP8RLcCboUmHN3+OlnEw1DxaLSnbTB9PdF228fFAAAzExAH0AAAAFZAAgAAAAAFdthRhe2Q8CvxGIhjTJZv0Lk97GkHciTPxZ/mckLoNaBXMAIAAAAAAqOxsAr23LOVB0DIHbPf9UDJJRFXY2YoKbjhRqw5psbQVsACAAAAAA0G2GD8ZQjDBntjLpW4rqwKRS6HiUjL03g1N6chANozcAAzEyAH0AAAAFZAAgAAAAAMWymwwbvIeMqmnKWWifUqoCxOsdpnonM2qdLPyjqJO/BXMAIAAAAAB6IDmmpUhBD2zpRj8/y/kmOSXcjuIU14sNh6GKSsg2uwVsACAAAAAAWMFPNOk3EMSQDS9JGPSMIQP0oNGVugxXKKUrIPPlhHgAAzEzAH0AAAAFZAAgAAAAAPcLmtq+V1e+MRlZ7NHq1+mrRVBQje5zj685ZvdsfKvSBXMAIAAAAABdHz/3w2k5km97QN9m7oLFYJaVJneNlMboIlz5yUASQAVsACAAAAAAWbp8JVJnx8fEVAJFa7WMfMa7wXeP5M3C8MX20J/i9n0AAzE0AH0AAAAFZAAgAAAAAJaRYmo8zqI2BEUzdSwp4tVRpPmVWsfydkYN3UHh6TMuBXMAIAAAAAAeD6mDnQeLlbC9i0sVgE8+RH6y+e94OJQ0tJ0PvblVSgVsACAAAAAAWp4jvretbDEsqEMzP/WLTnwOiJwCtfrCiB6m8k+yEMoAAzE1AH0AAAAFZAAgAAAAAAZZ538coNPwyRjhEwr5P8Xw32oWOJF+R+nfCGgy2qO3BXMAIAAAAACOPLnJlKwGNPDBReRKnHfteq0wFb3ezhrc7BVXs8RUHwVsACAAAAAA+lGesNk3+SyB/60rSvdQ2aN2vfJPR7ll
JVhufGTNhHkAAzE2AH0AAAAFZAAgAAAAAFH9l9GGA1I52atJV5jNUf1lx8jBjoEoVoME97v5GFJiBXMAIAAAAAC1qH3Kd78Dr9NGbw7y9D/XYBwv5h1LLO8la5OU7g8UkQVsACAAAAAArZ6atJCYrVfHB8dSNPOFf6nnDADBMJcIEj8ljPvxHp8AAzE3AH0AAAAFZAAgAAAAADtbVEI2tdkrowEMdkacD2w0Y3T3Ofi7PH6HmA6sP0c/BXMAIAAAAADuBSROnZHA+NgUPH8d0LnWFiDsM2bY8bzjC1+elSsIygVsACAAAAAAR0G2m+uANoWknkr/NerFcG+fECVxNIs0cqbY1t/U/0MAAzE4AH0AAAAFZAAgAAAAAAh3WpeMVlikPFYj9hLj+fmIqVt6omCSF75W3TPExyWpBXMAIAAAAAAsQkRmwqeVj2gGE03orb6PtrIzDt6dDU3hgSQi8E2wKgVsACAAAAAA3GHaRE2RAcaBRd8VzmYzWeBD2Gmy91eTK1k8YdWObZcAABJjbQAAAAAAAAAAAAAQcGF5bG9hZElkAAAAAAAQZmlyc3RPcGVyYXRvcgABAAAAEnNwAAEAAAAAAAAAEHBuAAIAAAAQdGYAAQAAAAFtbgAAAAAAAAAAAAFteAAAAAAAAABpQAA=", + "subType": "06" + } + } + } + }, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDoublePrecision", + "bsonType": "double", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberDouble": "0.0" + }, + "max": { + "$numberDouble": "200.0" + }, + "precision": { + "$numberInt": "2" + } + } + } + ] + } + } + } + }, + "commandName": "find" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-Rangev2-DoublePrecision-Update.json b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-DoublePrecision-Update.json new file mode 100644 index 0000000000..10cae6be89 --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-DoublePrecision-Update.json @@ -0,0 +1,653 @@ +{ + "description": "fle2v2-Rangev2-DoublePrecision-Update", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted" + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll_unencrypted", + "database": "db_unencrypted", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDoublePrecision", + "bsonType": "double", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberDouble": "0.0" + }, + "max": { + "$numberDouble": "200.0" + }, + "precision": { + "$numberInt": "2" + } + } + } + ] + } + } + }, 
+ { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "FLE2 Range DoublePrecision. Update.", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedDoublePrecision": { + "$numberDouble": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedDoublePrecision": { + "$numberDouble": "1" + } + } + }, + "object": "coll" + }, + { + "name": "updateOne", + "arguments": { + "filter": { + "encryptedDoublePrecision": { + "$gt": { + "$numberDouble": "0" + } + } + }, + "update": { + "$set": { + "encryptedDoublePrecision": { + "$numberDouble": "2" + } + } + } + }, + "object": "coll", + "expectResult": { + "matchedCount": 1, + "modifiedCount": 1, + "upsertedCount": 0 + } + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": 0, + "encryptedDoublePrecision": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "Dri0CXmL78L2DOgk9w0DwxHOMGMzih7m6l59vgy+WWo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "x7GR49EN0t3WXQDihkrbonK7qNIBYC87tpL/XEUyIYc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "JfYUqWF+OoGjiYkRI4L5iPlF+T1Eleul7Fki22jp4Qc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "q1RyGfIgsaQHoZFRw+DD28V26rN5hweApPLwExncvT8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "L2PFeKGvLS6C+DLudR6fGlBq3ERPvjWvRyNRIA2HVb0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "CWxaNqL3iP1yCixDkcmf9bmW3E5VeN8TJkg1jJe528s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "+vC6araOEo+fpW7PSIP40/EnzBCj1d2N10Jr3rrXJJM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6SV63Mf51Z6A6p2X3rCnJKCu6ku3Oeb45mBYbz+IoAo=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "RjBYT2h3ZAoHxhf8DU6/dFbDkEBZp0IxREcsRTu2MXs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "b7d8mRzD1kI1tdc7uNL+YAUonJ6pODLsRLkArfEKSkM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "Xg8C1/A0KJaXOw4i+26Rv03/CydaaunOzXh0CIT+gn8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "UoKUDw2wJYToUCcFaIs03YQSTksYR0MIOTJllwODqKc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "c/5cwAT0C5jber2xlJnWD3a5tVDy0nRtr5HG02hoFOY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "wSUrRXavAGaajNeqC5mEUH1K67oYl5Wy9RNIzKjwLAM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6vrp4wWDtHEgHWR99I70WVDzevg1Fk/Pw5U8gUDa0OU=", + "subType": "00" + } + } + ] + }, + { + "_id": 1, + "encryptedDoublePrecision": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "V6knyt7Zq2CG3++l75UtBx2m32iGAPjHiAe439Bf02w=", + "subType": "00" + } + }, + { + "$binary": { + "base64": 
"F08nMDWDZc+DbWM7XCEJNNCEYyinRmrvGP7EWhmp4is=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "cXH4688amcDc8kZOJq4UP8cE3R58Zl7e+Qo/1jyspps=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "uURBxvTp3FBCVkd+LPqyuY7d6rMW6SGIJQEPY/wtkZI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "jG3hax1L3RBp9t38vUt53FsBxgr/+Si/vVISpAylYpE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "kwtIW8MhH9Ky5xNjBx8gFA/SHh2YVphie7g5FGBzals=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "FHflwFuEMu4xX0ZApHi+pdlBH+oevAtXckCUb5Wv0xU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "0OKSXELxPP85SBVwDGf3LtMEQCJ8TTkFUl/+6jlkdb0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "25j9sQXZCihCmHKvTHgaBsAVZFcGPn7JjHdrCGlwyyw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "uEw0lpQtBppR3vqV9j9+NQRSBF1BzZukb8c9IhyWvxc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "zVhZ7Q59O087ji49oMJvBIgeir2oqvUpnh4p53GcTow=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "dowrzKs+qJhRMZyKDbhjXbuX43FbmUKOaw9I8YlOZDw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ep5B6cska6THLIF7Mn3tn3RvV9EiwLSt0eZM/CLRUDc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "URNp/YmmDh5wIZUfAzzgPyJeMNiVx9PMsz52DZRujGY=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "wlM4IAQhhKQEzoVqS8b1Ddd50GB95OFb9LnzOwyjCP4=", + "subType": "00" + } + } + ] + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 0, + "encryptedDoublePrecision": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDoublePrecision", + "bsonType": "double", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberDouble": "0.0" + }, + "max": { + "$numberDouble": "200.0" + }, + "precision": { + "$numberInt": "2" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encryptedDoublePrecision": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDoublePrecision", + "bsonType": "double", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" 
+ }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberDouble": "0.0" + }, + "max": { + "$numberDouble": "200.0" + }, + "precision": { + "$numberInt": "2" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "update": "default", + "ordered": true, + "updates": [ + { + "q": { + "encryptedDoublePrecision": { + "$gt": { + "$binary": { + "base64": "DQYKAAADcGF5bG9hZACiCQAABGcAjgkAAAMwAH0AAAAFZAAgAAAAAHdJ2Vnb4MMzqVYVssjSdDy8XU4GVzMTfGifGETgQ2mYBXMAIAAAAAD7cFfKJGIXo6PjyeX2ria02CckW7dWFDoY/3FyBdm1NQVsACAAAAAAhEPSNv4M023A3hzNFuy83+hIKuZ2mKRY954N++aEOBUAAzEAfQAAAAVkACAAAAAAlmvfDrZoydUet4eCVMq7z6a58Ea+1HLJOWxN5lNcrWEFcwAgAAAAAEBo5AWZyC41b9ayjWNQSL4iYEAIwR/JG+ssN8bdoK9RBWwAIAAAAACEndE0SLxFSElOrNnqeX0EPmgDio3udZjVREy4JLS3sQADMgB9AAAABWQAIAAAAABbiLaoxAA6rinMJw1hC8ZUiq6UU1AQaPFn/py/Y06WuQVzACAAAAAAhtDasFkvYE7SCNu1je/hxdE9TJtAvvH3NtdEbKzNbCUFbAAgAAAAAIGepU1RSCF8sWODHEpKglsoqw3VBBH4a/URGxgGzbq2AAMzAH0AAAAFZAAgAAAAALORWwSr+tYNxcil2KIGSbNhTHvcPbdj+rLVQNx21S/KBXMAIAAAAAD6diZBkPEJ1cQy06LAxdbNK8Nlxbb44fH4Wk3Y3260nQVsACAAAAAA1eYAZBFHlDiaDAljWi8blGQ2nvvZa5AO5doeo0SFZsgAAzQAfQAAAAVkACAAAAAAG5XMK96PjClNlUvg82j4pMY1YxsznZfj4uNweD394FoFcwAgAAAAAKHgQLdGJHkrfFg9nB93Ac+3VgBw6aU44MTkKIQ91dZoBWwAIAAAAAAPxXmi+SDJ+40A0KdwfRczexlZQrHjIA+D3oUB0EY9tAADNQB9AAAABWQAIAAAAAA6M++b9I0YFemmWBAWAE3glu2Ah3Ta1FBxAQEIWS0toAVzACAAAAAANXYTqPf1Y6X3Ns6YQIX0C3FKCyWUo+Kk+fNcQvc0WSoFbAAgAAAAAA+uJUw1ICYgyeygSRe206VTWVtUnhdci3iHbyP5YtEVAAM2AH0AAAAFZAAgAAAAAKl8bV1riH/uyJ+X0HHd3+18k2cJl2dQFXCdoagutFcaBXMAIAAAAABm8F2Ew9f0VOABdcF+lP0Bi+zWvEUPniWgrxPq/Sx3uwVsACAAAAAAJfFErjZ6BPhsw5LjJLqNtKDLJ4zV0eIZppQpd9b0wZoAAzcAfQAAAAVkACAAAAAAsYZD8JEP6kYsPncFnNZwJxhu4YtUTKPNcjHtv67H+rYFcwAgAAAAAI4LqZcRkvbs/2F62Flu0pixNcor4WmBD0DHGaf039wLBWwAIAAAAAD4wUR3xd9lKltcqqo8LYvdMQWzCRobkV/ppKB/yn5dUgADOAB9AAAABWQAIAAAAAC0vdAi+dmoIXvZ5LqUqvyKV9/tHqSI2SWiSJO5pTnA2wVzACAAAAAAS2qvf9fvfVUH5WtsVxjxmskpGjYTQV34LwvQQw1y9wIFbAAgAAAAAE0+FKuK7HxbypvCeEJzMTcjOWE0ScYOlTBMUNlIv55hAAM5AH0AAAAFZAAgAAAAAH31lb/srBcrOXkzddCwAnclsR5/3QijEVgECs2JjOWBBXMAIAAAAABg7+prDT73YcCvLE5QbuIrqGcjLc5pQD2Miq0d29yrxgVsACAAAAAAetRiPwDSFWBzpWSWkOKWM6fKStRJ8SyObnpc79ux8p0AAzEwAH0AAAAFZAAgAAAAAOK8brUuc2onBNDRtfYMR736dHj4dQqXod8JG7tAMTsDBXMAIAAAAAAW6SrGAL6Bx0s7ZlsYULFfOAiYIGhEWu6md3r+Rk40awVsACAAAAAAIHYXP8RLcCboUmHN3+OlnEw1DxaLSnbTB9PdF228fFAAAzExAH0AAAAFZAAgAAAAAFdthRhe2Q8CvxGIhjTJZv0Lk97GkHciTPxZ/mckLoNaBXMAIAAAAAAqOxsAr23LOVB0DIHbPf9UDJJRFXY2YoKbjhRqw5psbQVsACAAAAAA0G2GD8ZQjDBntjLpW4rqwKRS6HiUjL03g1N6chANozcAAzEyAH0AAAAFZAAgAAAAAMWymwwbvIeMqmnKWWifUqoCxOsdpnonM2qdLPyjqJO/BXMAIAAAAAB6IDmmpUhBD2zpRj8/y/kmOSXcjuIU14sNh6GKSsg2uwVsACAAAAAAWMFPNOk3EMSQDS9JGPSMIQP0oNGVugxXKKUrIPPlhHgAAzEzAH0AAAAFZAAgAAAAAPcLmtq+V1e+MRlZ7NHq1+mrRVBQje5zj685ZvdsfKvSBXMAIAAAAABdHz/3w2k5km97QN9m7oLFYJaVJneNlMboIlz5yUASQAVsACAAAAAAWbp8JVJnx8fEVAJFa7WMfMa7wXeP5M3C8MX20J/i9n0AAzE0AH0AAAAFZAAgAAAAAJaRYmo8zqI2BEUzdSwp4tVRpPmVWsfydkYN3UHh6TMuBXMAIAAAAAAeD6mDnQeLlbC9i0sVgE8+RH6y+e94OJQ0tJ0PvblVSgVsACAAAAAAWp4jvretbDEsqEMzP/WLTnwOiJwCtfrCiB6m8k+yEMoAAzE1AH0AAAAFZAAgAAAAAAZZ538coNPwyRjhEwr5P8Xw32oWOJF+R+nfCGgy2qO3BXMAIAAAAACOPLnJlKwGNPDBReRKnHfteq0wFb3ezhrc7BVXs8RUHwVsACAAAAAA+lGesNk3+SyB/60rSvdQ2aN2vfJPR7llJVhufGTNhHkAAzE2AH0AAAAFZAAgAAAAAFH9l9GGA1I52atJV5jNUf1lx8jBjoEoVoME97v5GFJiBXMAIAAAAAC1qH3Kd78Dr9NGbw7y9D/XYBwv5h1LLO8la5OU7g8UkQVsACAAAAAArZ6atJCYrVfHB8dSNPOFf6nnDADBMJcIEj8ljPvxHp8AAzE3AH0AAAAFZAAgAAAAADtbVEI2tdkrowEMdkacD2w0Y3T3Ofi7PH6HmA6sP0c/BXMAIAAAAADuBSROnZHA+NgUPH8d0LnWFiDsM2bY8bzjC1+elSsIygVsACAAAAAAR0G2m+uANoWknkr/N
erFcG+fECVxNIs0cqbY1t/U/0MAAzE4AH0AAAAFZAAgAAAAAAh3WpeMVlikPFYj9hLj+fmIqVt6omCSF75W3TPExyWpBXMAIAAAAAAsQkRmwqeVj2gGE03orb6PtrIzDt6dDU3hgSQi8E2wKgVsACAAAAAA3GHaRE2RAcaBRd8VzmYzWeBD2Gmy91eTK1k8YdWObZcAABJjbQAAAAAAAAAAAAAQcGF5bG9hZElkAAAAAAAQZmlyc3RPcGVyYXRvcgABAAAAEnNwAAEAAAAAAAAAEHBuAAIAAAAQdGYAAQAAAAFtbgAAAAAAAAAAAAFteAAAAAAAAABpQAA=", + "subType": "06" + } + } + } + }, + "u": { + "$set": { + "encryptedDoublePrecision": { + "$$type": "binData" + } + } + }, + "multi": false, + "upsert": false + } + ], + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedDoublePrecision", + "bsonType": "double", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberDouble": "0.0" + }, + "max": { + "$numberDouble": "200.0" + }, + "precision": { + "$numberInt": "2" + } + } + } + ] + } + } + }, + "$db": "default" + }, + "commandName": "update" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Int-Aggregate.json b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Int-Aggregate.json new file mode 100644 index 0000000000..77a8f43e9c --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Int-Aggregate.json @@ -0,0 +1,547 @@ +{ + "description": "fle2v2-Rangev2-Int-Aggregate", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted" + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll_unencrypted", + "database": "db_unencrypted", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedInt", + "bsonType": "int", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberInt": "0" + }, + "max": { + "$numberInt": "200" + } + } + } + ] + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": 
"sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "FLE2 Range Int. Aggregate.", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedInt": { + "$numberInt": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedInt": { + "$numberInt": "1" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedInt": { + "$gt": { + "$numberInt": "0" + } + } + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedInt": { + "$numberInt": "1" + } + } + ] + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": 0, + "encryptedInt": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "RjBYT2h3ZAoHxhf8DU6/dFbDkEBZp0IxREcsRTu2MXs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "x7GR49EN0t3WXQDihkrbonK7qNIBYC87tpL/XEUyIYc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "JfYUqWF+OoGjiYkRI4L5iPlF+T1Eleul7Fki22jp4Qc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "q1RyGfIgsaQHoZFRw+DD28V26rN5hweApPLwExncvT8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "L2PFeKGvLS6C+DLudR6fGlBq3ERPvjWvRyNRIA2HVb0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "CWxaNqL3iP1yCixDkcmf9bmW3E5VeN8TJkg1jJe528s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "+vC6araOEo+fpW7PSIP40/EnzBCj1d2N10Jr3rrXJJM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6SV63Mf51Z6A6p2X3rCnJKCu6ku3Oeb45mBYbz+IoAo=", + "subType": "00" + } + } + ] + }, + { + "_id": 1, + "encryptedInt": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "25j9sQXZCihCmHKvTHgaBsAVZFcGPn7JjHdrCGlwyyw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "FA74j21GUEJb1DJBOpR9nVnjaDZnd8yAQNuaW9Qi26g=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "kJv//KVkbrobIBf+QeWC5jxn20mx/P0R1N6aCSMgKM8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "zB+Whi9IUUGxfLEe+lGuIzLX4LFbIhaIAm5lRk65QTc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ybO1QU3CgvhO8JgRXH+HxKszWcpl5aGDYYVa75fHa1g=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "X3Y3eSAbbMg//JgiHHiFpYOpV61t8kkDexI+CQyitH4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SlNHXyqVFGDPrX/2ppwog6l4pwj3PKda2TkZbqgfSfA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "McjV8xwTF3xI7863DYOBdyvIv6UpzThl6v9vBRk05bI=", + "subType": "00" + } + } + ] + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + 
"subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 0, + "encryptedInt": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedInt", + "bsonType": "int", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberInt": "0" + }, + "max": { + "$numberInt": "200" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encryptedInt": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedInt", + "bsonType": "int", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberInt": "0" + }, + "max": { + "$numberInt": "200" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "aggregate": "default", + "pipeline": [ + { + "$match": { + "encryptedInt": { + "$gt": { + "$binary": { + "base64": 
"DW0FAAADcGF5bG9hZAAZBQAABGcABQUAAAMwAH0AAAAFZAAgAAAAALGGQ/CRD+pGLD53BZzWcCcYbuGLVEyjzXIx7b+ux/q2BXMAIAAAAACOC6mXEZL27P9hethZbtKYsTXKK+FpgQ9Axxmn9N/cCwVsACAAAAAA+MFEd8XfZSpbXKqqPC2L3TEFswkaG5Ff6aSgf8p+XVIAAzEAfQAAAAVkACAAAAAAtL3QIvnZqCF72eS6lKr8ilff7R6kiNklokiTuaU5wNsFcwAgAAAAAEtqr3/X731VB+VrbFcY8ZrJKRo2E0Fd+C8L0EMNcvcCBWwAIAAAAABNPhSriux8W8qbwnhCczE3IzlhNEnGDpUwTFDZSL+eYQADMgB9AAAABWQAIAAAAAB99ZW/7KwXKzl5M3XQsAJ3JbEef90IoxFYBArNiYzlgQVzACAAAAAAYO/qaw0+92HAryxOUG7iK6hnIy3OaUA9jIqtHdvcq8YFbAAgAAAAAHrUYj8A0hVgc6VklpDiljOnykrUSfEsjm56XO/bsfKdAAMzAH0AAAAFZAAgAAAAAOK8brUuc2onBNDRtfYMR736dHj4dQqXod8JG7tAMTsDBXMAIAAAAAAW6SrGAL6Bx0s7ZlsYULFfOAiYIGhEWu6md3r+Rk40awVsACAAAAAAIHYXP8RLcCboUmHN3+OlnEw1DxaLSnbTB9PdF228fFAAAzQAfQAAAAVkACAAAAAAV22FGF7ZDwK/EYiGNMlm/QuT3saQdyJM/Fn+ZyQug1oFcwAgAAAAACo7GwCvbcs5UHQMgds9/1QMklEVdjZigpuOFGrDmmxtBWwAIAAAAADQbYYPxlCMMGe2MulbiurApFLoeJSMvTeDU3pyEA2jNwADNQB9AAAABWQAIAAAAADFspsMG7yHjKppyllon1KqAsTrHaZ6JzNqnSz8o6iTvwVzACAAAAAAeiA5pqVIQQ9s6UY/P8v5Jjkl3I7iFNeLDYehikrINrsFbAAgAAAAAFjBTzTpNxDEkA0vSRj0jCED9KDRlboMVyilKyDz5YR4AAM2AH0AAAAFZAAgAAAAAPcLmtq+V1e+MRlZ7NHq1+mrRVBQje5zj685ZvdsfKvSBXMAIAAAAABdHz/3w2k5km97QN9m7oLFYJaVJneNlMboIlz5yUASQAVsACAAAAAAWbp8JVJnx8fEVAJFa7WMfMa7wXeP5M3C8MX20J/i9n0AAzcAfQAAAAVkACAAAAAAYfLwnoxK6XAGQrJFy8+TIJoq38ldBaO75h4zA4ZX5tQFcwAgAAAAAC2wk8UcJH5X5XGnDBYmel6srpBkzBhHtt3Jw1u5TSJ1BWwAIAAAAAA9/YU9eI3D7QbXKIw/3/gzWJ6MZrCYhG0j1wNKgRQp5wADOAB9AAAABWQAIAAAAADGvyrtKkIcaV17ynZA7b2k5Pz6OhvxdWNkDvDWJIja8wVzACAAAAAAOLypVKNxf/wR1G8OZjUUsTQzDYeNNhhITxGMSp7euS4FbAAgAAAAAA9EsxoV1B2DcQ1NJRwuxXnvVR+vkD0wbbDYEI/zFEnDAAM5AH0AAAAFZAAgAAAAAEocREw1L0g+roFUchJI2Yd0M0ME2bnErNUYnpyJP1SqBXMAIAAAAAAcE2/JK/8MoSeOchIuAkKh1X3ImoA7p8ujAZIfvIDo6QVsACAAAAAA+W0+zgLr85/PD7P9a94wk6MgNgrizx/XU9aCxAkp1IwAABJjbQAAAAAAAAAAAAAQcGF5bG9hZElkAAAAAAAQZmlyc3RPcGVyYXRvcgABAAAAEnNwAAEAAAAAAAAAEHRmAAEAAAAQbW4AAAAAABBteADIAAAAAA==", + "subType": "06" + } + } + } + } + } + ], + "cursor": {}, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedInt", + "bsonType": "int", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberInt": "0" + }, + "max": { + "$numberInt": "200" + } + } + } + ] + } + } + } + }, + "commandName": "aggregate" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Int-Correctness.json b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Int-Correctness.json new file mode 100644 index 0000000000..dde5ec371b --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Int-Correctness.json @@ -0,0 +1,1412 @@ +{ + "description": "fle2v2-Rangev2-Int-Correctness", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, 
+ { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedInt", + "bsonType": "int", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberInt": "0" + }, + "max": { + "$numberInt": "200" + } + } + } + ] + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "Find with $gt", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedInt": { + "$numberInt": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedInt": { + "$numberInt": "1" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedInt": { + "$gt": { + "$numberInt": "0" + } + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedInt": { + "$numberInt": "1" + } + } + ] + } + ] + }, + { + "description": "Find with $gte", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedInt": { + "$numberInt": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedInt": { + "$numberInt": "1" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedInt": { + "$gte": { + "$numberInt": "0" + } + } + }, + "sort": { + "_id": 1 + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedInt": { + "$numberInt": "0" + } + }, + { + "_id": 1, + "encryptedInt": { + "$numberInt": "1" + } + } + ] + } + ] + }, + { + "description": "Find with $gt with no results", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedInt": { + "$numberInt": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedInt": { + "$numberInt": "1" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedInt": { + "$gt": { + "$numberInt": "1" + } + } + } + }, + "object": "coll", + "expectResult": [] + } + ] + }, + { + "description": "Find with $lt", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedInt": { + "$numberInt": "0" + } + } + }, + "object": "coll" + }, + { + "name": 
"insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedInt": { + "$numberInt": "1" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedInt": { + "$lt": { + "$numberInt": "1" + } + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedInt": { + "$numberInt": "0" + } + } + ] + } + ] + }, + { + "description": "Find with $lte", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedInt": { + "$numberInt": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedInt": { + "$numberInt": "1" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedInt": { + "$lte": { + "$numberInt": "1" + } + } + }, + "sort": { + "_id": 1 + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedInt": { + "$numberInt": "0" + } + }, + { + "_id": 1, + "encryptedInt": { + "$numberInt": "1" + } + } + ] + } + ] + }, + { + "description": "Find with $lt below min", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedInt": { + "$numberInt": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedInt": { + "$numberInt": "1" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedInt": { + "$lt": { + "$numberInt": "0" + } + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "must be greater than the range minimum" + } + } + ] + }, + { + "description": "Find with $gt above max", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedInt": { + "$numberInt": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedInt": { + "$numberInt": "1" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedInt": { + "$gt": { + "$numberInt": "200" + } + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "must be less than the range maximum" + } + } + ] + }, + { + "description": "Find with $gt and $lt", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedInt": { + "$numberInt": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedInt": { + "$numberInt": "1" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedInt": { + "$gt": { + "$numberInt": "0" + }, + "$lt": { + "$numberInt": "2" + } + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedInt": { + "$numberInt": "1" + } + } + ] + } + ] + }, + { + "description": "Find with equality", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedInt": { + "$numberInt": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedInt": { + "$numberInt": "1" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedInt": { + "$numberInt": "0" + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedInt": { + "$numberInt": "0" + } + } + ] + }, + { + "name": "find", + "arguments": { + "filter": { + 
"encryptedInt": { + "$numberInt": "1" + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedInt": { + "$numberInt": "1" + } + } + ] + } + ] + }, + { + "description": "Find with full range", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedInt": { + "$numberInt": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedInt": { + "$numberInt": "1" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedInt": { + "$gte": { + "$numberInt": "0" + }, + "$lte": { + "$numberInt": "200" + } + } + }, + "sort": { + "_id": 1 + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedInt": { + "$numberInt": "0" + } + }, + { + "_id": 1, + "encryptedInt": { + "$numberInt": "1" + } + } + ] + } + ] + }, + { + "description": "Find with $in", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedInt": { + "$numberInt": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedInt": { + "$numberInt": "1" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedInt": { + "$in": [ + { + "$numberInt": "0" + } + ] + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedInt": { + "$numberInt": "0" + } + } + ] + } + ] + }, + { + "description": "Insert out of range", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedInt": { + "$numberInt": "-1" + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "value must be greater than or equal to the minimum value" + } + } + ] + }, + { + "description": "Insert min and max", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedInt": { + "$numberInt": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 200, + "encryptedInt": { + "$numberInt": "200" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": {}, + "sort": { + "_id": 1 + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedInt": { + "$numberInt": "0" + } + }, + { + "_id": 200, + "encryptedInt": { + "$numberInt": "200" + } + } + ] + } + ] + }, + { + "description": "Aggregate with $gte", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedInt": { + "$numberInt": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedInt": { + "$numberInt": "1" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedInt": { + "$gte": { + "$numberInt": "0" + } + } + } + }, + { + "$sort": { + "_id": 1 + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedInt": { + "$numberInt": "0" + } + }, + { + "_id": 1, + "encryptedInt": { + "$numberInt": "1" + } + } + ] + } + ] + }, + { + "description": "Aggregate with $gt with no results", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedInt": { + "$numberInt": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedInt": { + "$numberInt": "1" + } 
+ } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedInt": { + "$gt": { + "$numberInt": "1" + } + } + } + } + ] + }, + "object": "coll", + "expectResult": [] + } + ] + }, + { + "description": "Aggregate with $lt", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedInt": { + "$numberInt": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedInt": { + "$numberInt": "1" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedInt": { + "$lt": { + "$numberInt": "1" + } + } + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedInt": { + "$numberInt": "0" + } + } + ] + } + ] + }, + { + "description": "Aggregate with $lte", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedInt": { + "$numberInt": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedInt": { + "$numberInt": "1" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedInt": { + "$lte": { + "$numberInt": "1" + } + } + } + }, + { + "$sort": { + "_id": 1 + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedInt": { + "$numberInt": "0" + } + }, + { + "_id": 1, + "encryptedInt": { + "$numberInt": "1" + } + } + ] + } + ] + }, + { + "description": "Aggregate with $lt below min", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedInt": { + "$numberInt": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedInt": { + "$numberInt": "1" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedInt": { + "$lt": { + "$numberInt": "0" + } + } + } + } + ] + }, + "object": "coll", + "expectError": { + "errorContains": "must be greater than the range minimum" + } + } + ] + }, + { + "description": "Aggregate with $gt above max", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedInt": { + "$numberInt": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedInt": { + "$numberInt": "1" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedInt": { + "$gt": { + "$numberInt": "200" + } + } + } + } + ] + }, + "object": "coll", + "expectError": { + "errorContains": "must be less than the range maximum" + } + } + ] + }, + { + "description": "Aggregate with $gt and $lt", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedInt": { + "$numberInt": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedInt": { + "$numberInt": "1" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedInt": { + "$gt": { + "$numberInt": "0" + }, + "$lt": { + "$numberInt": "2" + } + } + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedInt": { + "$numberInt": "1" + } + } + ] 
+ } + ] + }, + { + "description": "Aggregate with equality", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedInt": { + "$numberInt": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedInt": { + "$numberInt": "1" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedInt": { + "$numberInt": "0" + } + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedInt": { + "$numberInt": "0" + } + } + ] + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedInt": { + "$numberInt": "1" + } + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedInt": { + "$numberInt": "1" + } + } + ] + } + ] + }, + { + "description": "Aggregate with full range", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedInt": { + "$numberInt": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedInt": { + "$numberInt": "1" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedInt": { + "$gte": { + "$numberInt": "0" + }, + "$lte": { + "$numberInt": "200" + } + } + } + }, + { + "$sort": { + "_id": 1 + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedInt": { + "$numberInt": "0" + } + }, + { + "_id": 1, + "encryptedInt": { + "$numberInt": "1" + } + } + ] + } + ] + }, + { + "description": "Aggregate with $in", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedInt": { + "$numberInt": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedInt": { + "$numberInt": "1" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedInt": { + "$in": [ + { + "$numberInt": "0" + } + ] + } + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedInt": { + "$numberInt": "0" + } + } + ] + } + ] + }, + { + "description": "Wrong type: Insert Double", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedInt": { + "$numberDouble": "0" + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "Cannot encrypt element" + } + } + ] + }, + { + "description": "Wrong type: Find Double", + "operations": [ + { + "name": "find", + "arguments": { + "filter": { + "encryptedInt": { + "$gte": { + "$numberDouble": "0" + } + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "field type is not supported" + } + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Int-Delete.json b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Int-Delete.json new file mode 100644 index 0000000000..1c54c6e0f6 --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Int-Delete.json @@ -0,0 +1,483 @@ +{ + "description": "fle2v2-Rangev2-Int-Delete", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": 
"client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted" + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll_unencrypted", + "database": "db_unencrypted", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedInt", + "bsonType": "int", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberInt": "0" + }, + "max": { + "$numberInt": "200" + } + } + } + ] + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "FLE2 Range Int. 
Delete.", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedInt": { + "$numberInt": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedInt": { + "$numberInt": "1" + } + } + }, + "object": "coll" + }, + { + "name": "deleteOne", + "arguments": { + "filter": { + "encryptedInt": { + "$gt": { + "$numberInt": "0" + } + } + } + }, + "object": "coll", + "expectResult": { + "deletedCount": 1 + } + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": 0, + "encryptedInt": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "RjBYT2h3ZAoHxhf8DU6/dFbDkEBZp0IxREcsRTu2MXs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "x7GR49EN0t3WXQDihkrbonK7qNIBYC87tpL/XEUyIYc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "JfYUqWF+OoGjiYkRI4L5iPlF+T1Eleul7Fki22jp4Qc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "q1RyGfIgsaQHoZFRw+DD28V26rN5hweApPLwExncvT8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "L2PFeKGvLS6C+DLudR6fGlBq3ERPvjWvRyNRIA2HVb0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "CWxaNqL3iP1yCixDkcmf9bmW3E5VeN8TJkg1jJe528s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "+vC6araOEo+fpW7PSIP40/EnzBCj1d2N10Jr3rrXJJM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6SV63Mf51Z6A6p2X3rCnJKCu6ku3Oeb45mBYbz+IoAo=", + "subType": "00" + } + } + ] + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 0, + "encryptedInt": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedInt", + "bsonType": "int", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberInt": "0" + }, + "max": { + "$numberInt": "200" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encryptedInt": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedInt", + "bsonType": "int", + "queries": { + "queryType": "range", + "contention": { + 
"$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberInt": "0" + }, + "max": { + "$numberInt": "200" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "delete": "default", + "deletes": [ + { + "q": { + "encryptedInt": { + "$gt": { + "$binary": { + "base64": "DW0FAAADcGF5bG9hZAAZBQAABGcABQUAAAMwAH0AAAAFZAAgAAAAALGGQ/CRD+pGLD53BZzWcCcYbuGLVEyjzXIx7b+ux/q2BXMAIAAAAACOC6mXEZL27P9hethZbtKYsTXKK+FpgQ9Axxmn9N/cCwVsACAAAAAA+MFEd8XfZSpbXKqqPC2L3TEFswkaG5Ff6aSgf8p+XVIAAzEAfQAAAAVkACAAAAAAtL3QIvnZqCF72eS6lKr8ilff7R6kiNklokiTuaU5wNsFcwAgAAAAAEtqr3/X731VB+VrbFcY8ZrJKRo2E0Fd+C8L0EMNcvcCBWwAIAAAAABNPhSriux8W8qbwnhCczE3IzlhNEnGDpUwTFDZSL+eYQADMgB9AAAABWQAIAAAAAB99ZW/7KwXKzl5M3XQsAJ3JbEef90IoxFYBArNiYzlgQVzACAAAAAAYO/qaw0+92HAryxOUG7iK6hnIy3OaUA9jIqtHdvcq8YFbAAgAAAAAHrUYj8A0hVgc6VklpDiljOnykrUSfEsjm56XO/bsfKdAAMzAH0AAAAFZAAgAAAAAOK8brUuc2onBNDRtfYMR736dHj4dQqXod8JG7tAMTsDBXMAIAAAAAAW6SrGAL6Bx0s7ZlsYULFfOAiYIGhEWu6md3r+Rk40awVsACAAAAAAIHYXP8RLcCboUmHN3+OlnEw1DxaLSnbTB9PdF228fFAAAzQAfQAAAAVkACAAAAAAV22FGF7ZDwK/EYiGNMlm/QuT3saQdyJM/Fn+ZyQug1oFcwAgAAAAACo7GwCvbcs5UHQMgds9/1QMklEVdjZigpuOFGrDmmxtBWwAIAAAAADQbYYPxlCMMGe2MulbiurApFLoeJSMvTeDU3pyEA2jNwADNQB9AAAABWQAIAAAAADFspsMG7yHjKppyllon1KqAsTrHaZ6JzNqnSz8o6iTvwVzACAAAAAAeiA5pqVIQQ9s6UY/P8v5Jjkl3I7iFNeLDYehikrINrsFbAAgAAAAAFjBTzTpNxDEkA0vSRj0jCED9KDRlboMVyilKyDz5YR4AAM2AH0AAAAFZAAgAAAAAPcLmtq+V1e+MRlZ7NHq1+mrRVBQje5zj685ZvdsfKvSBXMAIAAAAABdHz/3w2k5km97QN9m7oLFYJaVJneNlMboIlz5yUASQAVsACAAAAAAWbp8JVJnx8fEVAJFa7WMfMa7wXeP5M3C8MX20J/i9n0AAzcAfQAAAAVkACAAAAAAYfLwnoxK6XAGQrJFy8+TIJoq38ldBaO75h4zA4ZX5tQFcwAgAAAAAC2wk8UcJH5X5XGnDBYmel6srpBkzBhHtt3Jw1u5TSJ1BWwAIAAAAAA9/YU9eI3D7QbXKIw/3/gzWJ6MZrCYhG0j1wNKgRQp5wADOAB9AAAABWQAIAAAAADGvyrtKkIcaV17ynZA7b2k5Pz6OhvxdWNkDvDWJIja8wVzACAAAAAAOLypVKNxf/wR1G8OZjUUsTQzDYeNNhhITxGMSp7euS4FbAAgAAAAAA9EsxoV1B2DcQ1NJRwuxXnvVR+vkD0wbbDYEI/zFEnDAAM5AH0AAAAFZAAgAAAAAEocREw1L0g+roFUchJI2Yd0M0ME2bnErNUYnpyJP1SqBXMAIAAAAAAcE2/JK/8MoSeOchIuAkKh1X3ImoA7p8ujAZIfvIDo6QVsACAAAAAA+W0+zgLr85/PD7P9a94wk6MgNgrizx/XU9aCxAkp1IwAABJjbQAAAAAAAAAAAAAQcGF5bG9hZElkAAAAAAAQZmlyc3RPcGVyYXRvcgABAAAAEnNwAAEAAAAAAAAAEHRmAAEAAAAQbW4AAAAAABBteADIAAAAAA==", + "subType": "06" + } + } + } + }, + "limit": 1 + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedInt", + "bsonType": "int", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberInt": "0" + }, + "max": { + "$numberInt": "200" + } + } + } + ] + } + } + } + }, + "commandName": "delete" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Int-FindOneAndUpdate.json b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Int-FindOneAndUpdate.json new file mode 100644 index 0000000000..265a0c6f0d --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Int-FindOneAndUpdate.json @@ -0,0 +1,551 @@ +{ + "description": "fle2v2-Rangev2-Int-FindOneAndUpdate", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], 
+ "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted" + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll_unencrypted", + "database": "db_unencrypted", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedInt", + "bsonType": "int", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberInt": "0" + }, + "max": { + "$numberInt": "200" + } + } + } + ] + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "FLE2 Range Int. 
FindOneAndUpdate.", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedInt": { + "$numberInt": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedInt": { + "$numberInt": "1" + } + } + }, + "object": "coll" + }, + { + "name": "findOneAndUpdate", + "arguments": { + "filter": { + "encryptedInt": { + "$gt": { + "$numberInt": "0" + } + } + }, + "update": { + "$set": { + "encryptedInt": { + "$numberInt": "2" + } + } + }, + "returnDocument": "Before" + }, + "object": "coll", + "expectResult": { + "_id": 1, + "encryptedInt": { + "$numberInt": "1" + } + } + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": 0, + "encryptedInt": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "RjBYT2h3ZAoHxhf8DU6/dFbDkEBZp0IxREcsRTu2MXs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "x7GR49EN0t3WXQDihkrbonK7qNIBYC87tpL/XEUyIYc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "JfYUqWF+OoGjiYkRI4L5iPlF+T1Eleul7Fki22jp4Qc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "q1RyGfIgsaQHoZFRw+DD28V26rN5hweApPLwExncvT8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "L2PFeKGvLS6C+DLudR6fGlBq3ERPvjWvRyNRIA2HVb0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "CWxaNqL3iP1yCixDkcmf9bmW3E5VeN8TJkg1jJe528s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "+vC6araOEo+fpW7PSIP40/EnzBCj1d2N10Jr3rrXJJM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6SV63Mf51Z6A6p2X3rCnJKCu6ku3Oeb45mBYbz+IoAo=", + "subType": "00" + } + } + ] + }, + { + "_id": 1, + "encryptedInt": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "hyDcE6QQjPrYJaIS/n7evEZFYcm31Tj89CpEYGF45cI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "F08nMDWDZc+DbWM7XCEJNNCEYyinRmrvGP7EWhmp4is=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "cXH4688amcDc8kZOJq4UP8cE3R58Zl7e+Qo/1jyspps=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "uURBxvTp3FBCVkd+LPqyuY7d6rMW6SGIJQEPY/wtkZI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "jG3hax1L3RBp9t38vUt53FsBxgr/+Si/vVISpAylYpE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "kwtIW8MhH9Ky5xNjBx8gFA/SHh2YVphie7g5FGBzals=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "FHflwFuEMu4xX0ZApHi+pdlBH+oevAtXckCUb5Wv0xU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ty4cnzJdAlbQKnh7px3GEYjBnvO+jIOaKjoTRDtmh3M=", + "subType": "00" + } + } + ] + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 0, + "encryptedInt": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": 
"enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedInt", + "bsonType": "int", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberInt": "0" + }, + "max": { + "$numberInt": "200" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encryptedInt": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedInt", + "bsonType": "int", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberInt": "0" + }, + "max": { + "$numberInt": "200" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "findAndModify": "default", + "query": { + "encryptedInt": { + "$gt": { + "$binary": { + "base64": "DW0FAAADcGF5bG9hZAAZBQAABGcABQUAAAMwAH0AAAAFZAAgAAAAALGGQ/CRD+pGLD53BZzWcCcYbuGLVEyjzXIx7b+ux/q2BXMAIAAAAACOC6mXEZL27P9hethZbtKYsTXKK+FpgQ9Axxmn9N/cCwVsACAAAAAA+MFEd8XfZSpbXKqqPC2L3TEFswkaG5Ff6aSgf8p+XVIAAzEAfQAAAAVkACAAAAAAtL3QIvnZqCF72eS6lKr8ilff7R6kiNklokiTuaU5wNsFcwAgAAAAAEtqr3/X731VB+VrbFcY8ZrJKRo2E0Fd+C8L0EMNcvcCBWwAIAAAAABNPhSriux8W8qbwnhCczE3IzlhNEnGDpUwTFDZSL+eYQADMgB9AAAABWQAIAAAAAB99ZW/7KwXKzl5M3XQsAJ3JbEef90IoxFYBArNiYzlgQVzACAAAAAAYO/qaw0+92HAryxOUG7iK6hnIy3OaUA9jIqtHdvcq8YFbAAgAAAAAHrUYj8A0hVgc6VklpDiljOnykrUSfEsjm56XO/bsfKdAAMzAH0AAAAFZAAgAAAAAOK8brUuc2onBNDRtfYMR736dHj4dQqXod8JG7tAMTsDBXMAIAAAAAAW6SrGAL6Bx0s7ZlsYULFfOAiYIGhEWu6md3r+Rk40awVsACAAAAAAIHYXP8RLcCboUmHN3+OlnEw1DxaLSnbTB9PdF228fFAAAzQAfQAAAAVkACAAAAAAV22FGF7ZDwK/EYiGNMlm/QuT3saQdyJM/Fn+ZyQug1oFcwAgAAAAACo7GwCvbcs5UHQMgds9/1QMklEVdjZigpuOFGrDmmxtBWwAIAAAAADQbYYPxlCMMGe2MulbiurApFLoeJSMvTeDU3pyEA2jNwADNQB9AAAABWQAIAAAAADFspsMG7yHjKppyllon1KqAsTrHaZ6JzNqnSz8o6iTvwVzACAAAAAAeiA5pqVIQQ9s6UY/P8v5Jjkl3I7iFNeLDYehikrINrsFbAAgAAAAAFjBTzTpNxDEkA0vSRj0jCED9KDRlboMVyilKyDz5YR4AAM2AH0AAAAFZAAgAAAAAPcLmtq+V1e+MRlZ7NHq1+mrRVBQje5zj685ZvdsfKvSBXMAIAAAAABdHz/3w2k5km97QN9m7oLFYJaVJneNlMboIlz5yUASQAVsACAAAAAAWbp8JVJnx8fEVAJFa7WMfMa7wXeP5M3C8MX20J/i9n0AAzcAfQAAAAVkACAAAAAAYfLwnoxK6XAGQrJFy8+TIJoq38ldBaO75h4zA4ZX5tQFcwAgAAAAAC2wk8UcJH5X5XGnDBYmel6srpBkzBhHtt3Jw1u5TSJ1BWwAIAAAAAA9/YU9eI3D7QbXKIw/3/gzWJ6MZrCYhG0j1wNKgRQp5wADOAB9AAAABWQAIAAAAADGvyrtKkIcaV17ynZA7b2k5Pz6OhvxdWNkDvDWJIja8wVzACAAAAAAOLypVKNxf/wR1G8OZjUUsTQzDYeNNhhITxGMSp7euS4FbAAgAAAAAA9EsxoV1B2DcQ1NJRwuxXnvVR+vkD0wbbDYEI/zFEnDAAM5AH0AAAAFZAAgAAAAAEocREw1L0g+roFUchJI2Yd0M0ME2bnErNUYnpyJP1SqBXMAIAAAAAAcE2/JK/8MoSeOchIuAkKh1X3ImoA7p8ujAZIfvIDo6QVsACAAAAAA+W0+zgLr85/PD7P9a94wk6MgNgrizx/XU9aCxAkp1IwAABJjbQAAAAAAAAAAAAAQcGF5bG9hZElkAAAAAAAQZmlyc3RPcGVyYXRvcgABAAAAEnNwAAEAAAAAAAAAEHRmAAEAAAAQbW4AAAAAABBteADIAAAAAA==", + "subType": "06" + } + } + } + }, + "update": { + "$set": { + "encryptedInt": { + "$$type": "binData" + } + } + }, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + 
"escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedInt", + "bsonType": "int", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberInt": "0" + }, + "max": { + "$numberInt": "200" + } + } + } + ] + } + } + } + }, + "commandName": "findAndModify" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Int-InsertFind.json b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Int-InsertFind.json new file mode 100644 index 0000000000..08b6d2c2a5 --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Int-InsertFind.json @@ -0,0 +1,538 @@ +{ + "description": "fle2v2-Rangev2-Int-InsertFind", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted" + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll_unencrypted", + "database": "db_unencrypted", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedInt", + "bsonType": "int", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberInt": "0" + }, + "max": { + "$numberInt": "200" + } + } + } + ] + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "FLE2 Range Int. 
Insert and Find.", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedInt": { + "$numberInt": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedInt": { + "$numberInt": "1" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedInt": { + "$gt": { + "$numberInt": "0" + } + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedInt": { + "$numberInt": "1" + } + } + ] + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": 0, + "encryptedInt": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "RjBYT2h3ZAoHxhf8DU6/dFbDkEBZp0IxREcsRTu2MXs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "x7GR49EN0t3WXQDihkrbonK7qNIBYC87tpL/XEUyIYc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "JfYUqWF+OoGjiYkRI4L5iPlF+T1Eleul7Fki22jp4Qc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "q1RyGfIgsaQHoZFRw+DD28V26rN5hweApPLwExncvT8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "L2PFeKGvLS6C+DLudR6fGlBq3ERPvjWvRyNRIA2HVb0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "CWxaNqL3iP1yCixDkcmf9bmW3E5VeN8TJkg1jJe528s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "+vC6araOEo+fpW7PSIP40/EnzBCj1d2N10Jr3rrXJJM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6SV63Mf51Z6A6p2X3rCnJKCu6ku3Oeb45mBYbz+IoAo=", + "subType": "00" + } + } + ] + }, + { + "_id": 1, + "encryptedInt": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "25j9sQXZCihCmHKvTHgaBsAVZFcGPn7JjHdrCGlwyyw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "FA74j21GUEJb1DJBOpR9nVnjaDZnd8yAQNuaW9Qi26g=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "kJv//KVkbrobIBf+QeWC5jxn20mx/P0R1N6aCSMgKM8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "zB+Whi9IUUGxfLEe+lGuIzLX4LFbIhaIAm5lRk65QTc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ybO1QU3CgvhO8JgRXH+HxKszWcpl5aGDYYVa75fHa1g=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "X3Y3eSAbbMg//JgiHHiFpYOpV61t8kkDexI+CQyitH4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SlNHXyqVFGDPrX/2ppwog6l4pwj3PKda2TkZbqgfSfA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "McjV8xwTF3xI7863DYOBdyvIv6UpzThl6v9vBRk05bI=", + "subType": "00" + } + } + ] + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 0, + "encryptedInt": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { 
+ "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedInt", + "bsonType": "int", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberInt": "0" + }, + "max": { + "$numberInt": "200" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encryptedInt": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedInt", + "bsonType": "int", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberInt": "0" + }, + "max": { + "$numberInt": "200" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "default", + "filter": { + "encryptedInt": { + "$gt": { + "$binary": { + "base64": "DW0FAAADcGF5bG9hZAAZBQAABGcABQUAAAMwAH0AAAAFZAAgAAAAALGGQ/CRD+pGLD53BZzWcCcYbuGLVEyjzXIx7b+ux/q2BXMAIAAAAACOC6mXEZL27P9hethZbtKYsTXKK+FpgQ9Axxmn9N/cCwVsACAAAAAA+MFEd8XfZSpbXKqqPC2L3TEFswkaG5Ff6aSgf8p+XVIAAzEAfQAAAAVkACAAAAAAtL3QIvnZqCF72eS6lKr8ilff7R6kiNklokiTuaU5wNsFcwAgAAAAAEtqr3/X731VB+VrbFcY8ZrJKRo2E0Fd+C8L0EMNcvcCBWwAIAAAAABNPhSriux8W8qbwnhCczE3IzlhNEnGDpUwTFDZSL+eYQADMgB9AAAABWQAIAAAAAB99ZW/7KwXKzl5M3XQsAJ3JbEef90IoxFYBArNiYzlgQVzACAAAAAAYO/qaw0+92HAryxOUG7iK6hnIy3OaUA9jIqtHdvcq8YFbAAgAAAAAHrUYj8A0hVgc6VklpDiljOnykrUSfEsjm56XO/bsfKdAAMzAH0AAAAFZAAgAAAAAOK8brUuc2onBNDRtfYMR736dHj4dQqXod8JG7tAMTsDBXMAIAAAAAAW6SrGAL6Bx0s7ZlsYULFfOAiYIGhEWu6md3r+Rk40awVsACAAAAAAIHYXP8RLcCboUmHN3+OlnEw1DxaLSnbTB9PdF228fFAAAzQAfQAAAAVkACAAAAAAV22FGF7ZDwK/EYiGNMlm/QuT3saQdyJM/Fn+ZyQug1oFcwAgAAAAACo7GwCvbcs5UHQMgds9/1QMklEVdjZigpuOFGrDmmxtBWwAIAAAAADQbYYPxlCMMGe2MulbiurApFLoeJSMvTeDU3pyEA2jNwADNQB9AAAABWQAIAAAAADFspsMG7yHjKppyllon1KqAsTrHaZ6JzNqnSz8o6iTvwVzACAAAAAAeiA5pqVIQQ9s6UY/P8v5Jjkl3I7iFNeLDYehikrINrsFbAAgAAAAAFjBTzTpNxDEkA0vSRj0jCED9KDRlboMVyilKyDz5YR4AAM2AH0AAAAFZAAgAAAAAPcLmtq+V1e+MRlZ7NHq1+mrRVBQje5zj685ZvdsfKvSBXMAIAAAAABdHz/3w2k5km97QN9m7oLFYJaVJneNlMboIlz5yUASQAVsACAAAAAAWbp8JVJnx8fEVAJFa7WMfMa7wXeP5M3C8MX20J/i9n0AAzcAfQAAAAVkACAAAAAAYfLwnoxK6XAGQrJFy8+TIJoq38ldBaO75h4zA4ZX5tQFcwAgAAAAAC2wk8UcJH5X5XGnDBYmel6srpBkzBhHtt3Jw1u5TSJ1BWwAIAAAAAA9/YU9eI3D7QbXKIw/3/gzWJ6MZrCYhG0j1wNKgRQp5wADOAB9AAAABWQAIAAAAADGvyrtKkIcaV17ynZA7b2k5Pz6OhvxdWNkDvDWJIja8wVzACAAAAAAOLypVKNxf/wR1G8OZjUUsTQzDYeNNhhITxGMSp7euS4FbAAgAAAAAA9EsxoV1B2DcQ1NJRwuxXnvVR+vkD0wbbDYEI/zFEnDAAM5AH0AAAAFZAAgAAAAAEocREw1L0g+roFUchJI2Yd0M0ME2bnErNUYnpyJP1SqBXMAIAAAAAAcE2/JK/8MoSeOchIuAkKh1X3ImoA7p8ujAZIfvIDo6QVsACAAAAAA+W0+zgLr85/PD7P9a94wk6MgNgrizx/XU9aCxAkp1IwAABJjbQAAAAAAAAAAAAAQcGF5bG9hZElkAAAAAAAQZmlyc3RPcGVyYXRvcgABAAAAEnNwAAEAAAAAAAAAEHRmAAEAAAAQbW4AAAAAABBteADIAAAAAA==", + "subType": "06" + } + } + } + }, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": 
"encryptedInt", + "bsonType": "int", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberInt": "0" + }, + "max": { + "$numberInt": "200" + } + } + } + ] + } + } + } + }, + "commandName": "find" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Int-Update.json b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Int-Update.json new file mode 100644 index 0000000000..9f28f768bb --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Int-Update.json @@ -0,0 +1,557 @@ +{ + "description": "fle2v2-Rangev2-Int-Update", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted" + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll_unencrypted", + "database": "db_unencrypted", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedInt", + "bsonType": "int", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberInt": "0" + }, + "max": { + "$numberInt": "200" + } + } + } + ] + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "FLE2 Range Int. 
Update.", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedInt": { + "$numberInt": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedInt": { + "$numberInt": "1" + } + } + }, + "object": "coll" + }, + { + "name": "updateOne", + "arguments": { + "filter": { + "encryptedInt": { + "$gt": { + "$numberInt": "0" + } + } + }, + "update": { + "$set": { + "encryptedInt": { + "$numberInt": "2" + } + } + } + }, + "object": "coll", + "expectResult": { + "matchedCount": 1, + "modifiedCount": 1, + "upsertedCount": 0 + } + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": 0, + "encryptedInt": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "RjBYT2h3ZAoHxhf8DU6/dFbDkEBZp0IxREcsRTu2MXs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "x7GR49EN0t3WXQDihkrbonK7qNIBYC87tpL/XEUyIYc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "JfYUqWF+OoGjiYkRI4L5iPlF+T1Eleul7Fki22jp4Qc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "q1RyGfIgsaQHoZFRw+DD28V26rN5hweApPLwExncvT8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "L2PFeKGvLS6C+DLudR6fGlBq3ERPvjWvRyNRIA2HVb0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "CWxaNqL3iP1yCixDkcmf9bmW3E5VeN8TJkg1jJe528s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "+vC6araOEo+fpW7PSIP40/EnzBCj1d2N10Jr3rrXJJM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6SV63Mf51Z6A6p2X3rCnJKCu6ku3Oeb45mBYbz+IoAo=", + "subType": "00" + } + } + ] + }, + { + "_id": 1, + "encryptedInt": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "hyDcE6QQjPrYJaIS/n7evEZFYcm31Tj89CpEYGF45cI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "F08nMDWDZc+DbWM7XCEJNNCEYyinRmrvGP7EWhmp4is=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "cXH4688amcDc8kZOJq4UP8cE3R58Zl7e+Qo/1jyspps=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "uURBxvTp3FBCVkd+LPqyuY7d6rMW6SGIJQEPY/wtkZI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "jG3hax1L3RBp9t38vUt53FsBxgr/+Si/vVISpAylYpE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "kwtIW8MhH9Ky5xNjBx8gFA/SHh2YVphie7g5FGBzals=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "FHflwFuEMu4xX0ZApHi+pdlBH+oevAtXckCUb5Wv0xU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ty4cnzJdAlbQKnh7px3GEYjBnvO+jIOaKjoTRDtmh3M=", + "subType": "00" + } + } + ] + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 0, + "encryptedInt": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + 
"ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedInt", + "bsonType": "int", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberInt": "0" + }, + "max": { + "$numberInt": "200" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encryptedInt": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedInt", + "bsonType": "int", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberInt": "0" + }, + "max": { + "$numberInt": "200" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "update": "default", + "ordered": true, + "updates": [ + { + "q": { + "encryptedInt": { + "$gt": { + "$binary": { + "base64": "DW0FAAADcGF5bG9hZAAZBQAABGcABQUAAAMwAH0AAAAFZAAgAAAAALGGQ/CRD+pGLD53BZzWcCcYbuGLVEyjzXIx7b+ux/q2BXMAIAAAAACOC6mXEZL27P9hethZbtKYsTXKK+FpgQ9Axxmn9N/cCwVsACAAAAAA+MFEd8XfZSpbXKqqPC2L3TEFswkaG5Ff6aSgf8p+XVIAAzEAfQAAAAVkACAAAAAAtL3QIvnZqCF72eS6lKr8ilff7R6kiNklokiTuaU5wNsFcwAgAAAAAEtqr3/X731VB+VrbFcY8ZrJKRo2E0Fd+C8L0EMNcvcCBWwAIAAAAABNPhSriux8W8qbwnhCczE3IzlhNEnGDpUwTFDZSL+eYQADMgB9AAAABWQAIAAAAAB99ZW/7KwXKzl5M3XQsAJ3JbEef90IoxFYBArNiYzlgQVzACAAAAAAYO/qaw0+92HAryxOUG7iK6hnIy3OaUA9jIqtHdvcq8YFbAAgAAAAAHrUYj8A0hVgc6VklpDiljOnykrUSfEsjm56XO/bsfKdAAMzAH0AAAAFZAAgAAAAAOK8brUuc2onBNDRtfYMR736dHj4dQqXod8JG7tAMTsDBXMAIAAAAAAW6SrGAL6Bx0s7ZlsYULFfOAiYIGhEWu6md3r+Rk40awVsACAAAAAAIHYXP8RLcCboUmHN3+OlnEw1DxaLSnbTB9PdF228fFAAAzQAfQAAAAVkACAAAAAAV22FGF7ZDwK/EYiGNMlm/QuT3saQdyJM/Fn+ZyQug1oFcwAgAAAAACo7GwCvbcs5UHQMgds9/1QMklEVdjZigpuOFGrDmmxtBWwAIAAAAADQbYYPxlCMMGe2MulbiurApFLoeJSMvTeDU3pyEA2jNwADNQB9AAAABWQAIAAAAADFspsMG7yHjKppyllon1KqAsTrHaZ6JzNqnSz8o6iTvwVzACAAAAAAeiA5pqVIQQ9s6UY/P8v5Jjkl3I7iFNeLDYehikrINrsFbAAgAAAAAFjBTzTpNxDEkA0vSRj0jCED9KDRlboMVyilKyDz5YR4AAM2AH0AAAAFZAAgAAAAAPcLmtq+V1e+MRlZ7NHq1+mrRVBQje5zj685ZvdsfKvSBXMAIAAAAABdHz/3w2k5km97QN9m7oLFYJaVJneNlMboIlz5yUASQAVsACAAAAAAWbp8JVJnx8fEVAJFa7WMfMa7wXeP5M3C8MX20J/i9n0AAzcAfQAAAAVkACAAAAAAYfLwnoxK6XAGQrJFy8+TIJoq38ldBaO75h4zA4ZX5tQFcwAgAAAAAC2wk8UcJH5X5XGnDBYmel6srpBkzBhHtt3Jw1u5TSJ1BWwAIAAAAAA9/YU9eI3D7QbXKIw/3/gzWJ6MZrCYhG0j1wNKgRQp5wADOAB9AAAABWQAIAAAAADGvyrtKkIcaV17ynZA7b2k5Pz6OhvxdWNkDvDWJIja8wVzACAAAAAAOLypVKNxf/wR1G8OZjUUsTQzDYeNNhhITxGMSp7euS4FbAAgAAAAAA9EsxoV1B2DcQ1NJRwuxXnvVR+vkD0wbbDYEI/zFEnDAAM5AH0AAAAFZAAgAAAAAEocREw1L0g+roFUchJI2Yd0M0ME2bnErNUYnpyJP1SqBXMAIAAAAAAcE2/JK/8MoSeOchIuAkKh1X3ImoA7p8ujAZIfvIDo6QVsACAAAAAA+W0+zgLr85/PD7P9a94wk6MgNgrizx/XU9aCxAkp1IwAABJjbQAAAAAAAAAAAAAQcGF5bG9hZElkAAAAAAAQZmlyc3RPcGVyYXRvcgABAAAAEnNwAAEAAAAAAAAAEHRmAAEAAAAQbW4AAAAAABBteADIAAAAAA==", + "subType": "06" + } + } + } + }, + "u": { + "$set": { + "encryptedInt": { + "$$type": "binData" + } + } + }, + "multi": false, + "upsert": false + } + ], + "encryptionInformation": { + "type": 1, + 
"schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedInt", + "bsonType": "int", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberInt": "0" + }, + "max": { + "$numberInt": "200" + } + } + } + ] + } + } + }, + "$db": "default" + }, + "commandName": "update" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Long-Aggregate.json b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Long-Aggregate.json new file mode 100644 index 0000000000..01ff139a55 --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Long-Aggregate.json @@ -0,0 +1,547 @@ +{ + "description": "fle2v2-Rangev2-Long-Aggregate", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted" + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll_unencrypted", + "database": "db_unencrypted", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedLong", + "bsonType": "long", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberLong": "0" + }, + "max": { + "$numberLong": "200" + } + } + } + ] + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "FLE2 Range Long. 
Aggregate.", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedLong": { + "$numberLong": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedLong": { + "$numberLong": "1" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedLong": { + "$gt": { + "$numberLong": "0" + } + } + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedLong": { + "$numberLong": "1" + } + } + ] + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": 0, + "encryptedLong": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "RjBYT2h3ZAoHxhf8DU6/dFbDkEBZp0IxREcsRTu2MXs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "x7GR49EN0t3WXQDihkrbonK7qNIBYC87tpL/XEUyIYc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "JfYUqWF+OoGjiYkRI4L5iPlF+T1Eleul7Fki22jp4Qc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "q1RyGfIgsaQHoZFRw+DD28V26rN5hweApPLwExncvT8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "L2PFeKGvLS6C+DLudR6fGlBq3ERPvjWvRyNRIA2HVb0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "CWxaNqL3iP1yCixDkcmf9bmW3E5VeN8TJkg1jJe528s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "+vC6araOEo+fpW7PSIP40/EnzBCj1d2N10Jr3rrXJJM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6SV63Mf51Z6A6p2X3rCnJKCu6ku3Oeb45mBYbz+IoAo=", + "subType": "00" + } + } + ] + }, + { + "_id": 1, + "encryptedLong": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "25j9sQXZCihCmHKvTHgaBsAVZFcGPn7JjHdrCGlwyyw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "FA74j21GUEJb1DJBOpR9nVnjaDZnd8yAQNuaW9Qi26g=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "kJv//KVkbrobIBf+QeWC5jxn20mx/P0R1N6aCSMgKM8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "zB+Whi9IUUGxfLEe+lGuIzLX4LFbIhaIAm5lRk65QTc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ybO1QU3CgvhO8JgRXH+HxKszWcpl5aGDYYVa75fHa1g=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "X3Y3eSAbbMg//JgiHHiFpYOpV61t8kkDexI+CQyitH4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SlNHXyqVFGDPrX/2ppwog6l4pwj3PKda2TkZbqgfSfA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "McjV8xwTF3xI7863DYOBdyvIv6UpzThl6v9vBRk05bI=", + "subType": "00" + } + } + ] + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 0, + "encryptedLong": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + 
"fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedLong", + "bsonType": "long", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberLong": "0" + }, + "max": { + "$numberLong": "200" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encryptedLong": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedLong", + "bsonType": "long", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberLong": "0" + }, + "max": { + "$numberLong": "200" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "aggregate": "default", + "pipeline": [ + { + "$match": { + "encryptedLong": { + "$gt": { + "$binary": { + "base64": "DXUFAAADcGF5bG9hZAAZBQAABGcABQUAAAMwAH0AAAAFZAAgAAAAALGGQ/CRD+pGLD53BZzWcCcYbuGLVEyjzXIx7b+ux/q2BXMAIAAAAACOC6mXEZL27P9hethZbtKYsTXKK+FpgQ9Axxmn9N/cCwVsACAAAAAA+MFEd8XfZSpbXKqqPC2L3TEFswkaG5Ff6aSgf8p+XVIAAzEAfQAAAAVkACAAAAAAtL3QIvnZqCF72eS6lKr8ilff7R6kiNklokiTuaU5wNsFcwAgAAAAAEtqr3/X731VB+VrbFcY8ZrJKRo2E0Fd+C8L0EMNcvcCBWwAIAAAAABNPhSriux8W8qbwnhCczE3IzlhNEnGDpUwTFDZSL+eYQADMgB9AAAABWQAIAAAAAB99ZW/7KwXKzl5M3XQsAJ3JbEef90IoxFYBArNiYzlgQVzACAAAAAAYO/qaw0+92HAryxOUG7iK6hnIy3OaUA9jIqtHdvcq8YFbAAgAAAAAHrUYj8A0hVgc6VklpDiljOnykrUSfEsjm56XO/bsfKdAAMzAH0AAAAFZAAgAAAAAOK8brUuc2onBNDRtfYMR736dHj4dQqXod8JG7tAMTsDBXMAIAAAAAAW6SrGAL6Bx0s7ZlsYULFfOAiYIGhEWu6md3r+Rk40awVsACAAAAAAIHYXP8RLcCboUmHN3+OlnEw1DxaLSnbTB9PdF228fFAAAzQAfQAAAAVkACAAAAAAV22FGF7ZDwK/EYiGNMlm/QuT3saQdyJM/Fn+ZyQug1oFcwAgAAAAACo7GwCvbcs5UHQMgds9/1QMklEVdjZigpuOFGrDmmxtBWwAIAAAAADQbYYPxlCMMGe2MulbiurApFLoeJSMvTeDU3pyEA2jNwADNQB9AAAABWQAIAAAAADFspsMG7yHjKppyllon1KqAsTrHaZ6JzNqnSz8o6iTvwVzACAAAAAAeiA5pqVIQQ9s6UY/P8v5Jjkl3I7iFNeLDYehikrINrsFbAAgAAAAAFjBTzTpNxDEkA0vSRj0jCED9KDRlboMVyilKyDz5YR4AAM2AH0AAAAFZAAgAAAAAPcLmtq+V1e+MRlZ7NHq1+mrRVBQje5zj685ZvdsfKvSBXMAIAAAAABdHz/3w2k5km97QN9m7oLFYJaVJneNlMboIlz5yUASQAVsACAAAAAAWbp8JVJnx8fEVAJFa7WMfMa7wXeP5M3C8MX20J/i9n0AAzcAfQAAAAVkACAAAAAAYfLwnoxK6XAGQrJFy8+TIJoq38ldBaO75h4zA4ZX5tQFcwAgAAAAAC2wk8UcJH5X5XGnDBYmel6srpBkzBhHtt3Jw1u5TSJ1BWwAIAAAAAA9/YU9eI3D7QbXKIw/3/gzWJ6MZrCYhG0j1wNKgRQp5wADOAB9AAAABWQAIAAAAADGvyrtKkIcaV17ynZA7b2k5Pz6OhvxdWNkDvDWJIja8wVzACAAAAAAOLypVKNxf/wR1G8OZjUUsTQzDYeNNhhITxGMSp7euS4FbAAgAAAAAA9EsxoV1B2DcQ1NJRwuxXnvVR+vkD0wbbDYEI/zFEnDAAM5AH0AAAAFZAAgAAAAAEocREw1L0g+roFUchJI2Yd0M0ME2bnErNUYnpyJP1SqBXMAIAAAAAAcE2/JK/8MoSeOchIuAkKh1X3ImoA7p8ujAZIfvIDo6QVsACAAAAAA+W0+zgLr85/PD7P9a94wk6MgNgrizx/XU9aCxAkp1IwAABJjbQAAAAAAAAAAAAAQcGF5bG9hZElkAAAAAAAQZmlyc3RPcGVyYXRvcgABAAAAEnNwAAEAAAAAAAAAEHRmAAEAAAASbW4AAAAAAAAAAAASbXgAyAAAAAAAAAAA", + "subType": "06" + } + } + } + } + } + ], + "cursor": {}, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + 
"keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedLong", + "bsonType": "long", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberLong": "0" + }, + "max": { + "$numberLong": "200" + } + } + } + ] + } + } + } + }, + "commandName": "aggregate" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Long-Correctness.json b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Long-Correctness.json new file mode 100644 index 0000000000..cc5388b1f0 --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Long-Correctness.json @@ -0,0 +1,1412 @@ +{ + "description": "fle2v2-Rangev2-Long-Correctness", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedLong", + "bsonType": "long", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberLong": "0" + }, + "max": { + "$numberLong": "200" + } + } + } + ] + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "Find with $gt", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedLong": { + "$numberLong": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedLong": { + "$numberLong": "1" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedLong": { + "$gt": { + "$numberLong": "0" + } + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedLong": { + "$numberLong": "1" + } + } + ] + 
} + ] + }, + { + "description": "Find with $gte", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedLong": { + "$numberLong": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedLong": { + "$numberLong": "1" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedLong": { + "$gte": { + "$numberLong": "0" + } + } + }, + "sort": { + "_id": 1 + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedLong": { + "$numberLong": "0" + } + }, + { + "_id": 1, + "encryptedLong": { + "$numberLong": "1" + } + } + ] + } + ] + }, + { + "description": "Find with $gt with no results", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedLong": { + "$numberLong": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedLong": { + "$numberLong": "1" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedLong": { + "$gt": { + "$numberLong": "1" + } + } + } + }, + "object": "coll", + "expectResult": [] + } + ] + }, + { + "description": "Find with $lt", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedLong": { + "$numberLong": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedLong": { + "$numberLong": "1" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedLong": { + "$lt": { + "$numberLong": "1" + } + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedLong": { + "$numberLong": "0" + } + } + ] + } + ] + }, + { + "description": "Find with $lte", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedLong": { + "$numberLong": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedLong": { + "$numberLong": "1" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedLong": { + "$lte": { + "$numberLong": "1" + } + } + }, + "sort": { + "_id": 1 + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedLong": { + "$numberLong": "0" + } + }, + { + "_id": 1, + "encryptedLong": { + "$numberLong": "1" + } + } + ] + } + ] + }, + { + "description": "Find with $lt below min", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedLong": { + "$numberLong": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedLong": { + "$numberLong": "1" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedLong": { + "$lt": { + "$numberLong": "0" + } + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "must be greater than the range minimum" + } + } + ] + }, + { + "description": "Find with $gt above max", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedLong": { + "$numberLong": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedLong": { + "$numberLong": "1" + } + } + }, + "object": "coll" + }, + { + 
"name": "find", + "arguments": { + "filter": { + "encryptedLong": { + "$gt": { + "$numberLong": "200" + } + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "must be less than the range maximum" + } + } + ] + }, + { + "description": "Find with $gt and $lt", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedLong": { + "$numberLong": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedLong": { + "$numberLong": "1" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedLong": { + "$gt": { + "$numberLong": "0" + }, + "$lt": { + "$numberLong": "2" + } + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedLong": { + "$numberLong": "1" + } + } + ] + } + ] + }, + { + "description": "Find with equality", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedLong": { + "$numberLong": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedLong": { + "$numberLong": "1" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedLong": { + "$numberLong": "0" + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedLong": { + "$numberLong": "0" + } + } + ] + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedLong": { + "$numberLong": "1" + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedLong": { + "$numberLong": "1" + } + } + ] + } + ] + }, + { + "description": "Find with full range", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedLong": { + "$numberLong": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedLong": { + "$numberLong": "1" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedLong": { + "$gte": { + "$numberLong": "0" + }, + "$lte": { + "$numberLong": "200" + } + } + }, + "sort": { + "_id": 1 + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedLong": { + "$numberLong": "0" + } + }, + { + "_id": 1, + "encryptedLong": { + "$numberLong": "1" + } + } + ] + } + ] + }, + { + "description": "Find with $in", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedLong": { + "$numberLong": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedLong": { + "$numberLong": "1" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedLong": { + "$in": [ + { + "$numberLong": "0" + } + ] + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedLong": { + "$numberLong": "0" + } + } + ] + } + ] + }, + { + "description": "Insert out of range", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedLong": { + "$numberLong": "-1" + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "value must be greater than or equal to the minimum value" + } + } + ] + }, + { + "description": "Insert min and max", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedLong": { + "$numberLong": 
"0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 200, + "encryptedLong": { + "$numberLong": "200" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": {}, + "sort": { + "_id": 1 + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedLong": { + "$numberLong": "0" + } + }, + { + "_id": 200, + "encryptedLong": { + "$numberLong": "200" + } + } + ] + } + ] + }, + { + "description": "Aggregate with $gte", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedLong": { + "$numberLong": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedLong": { + "$numberLong": "1" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedLong": { + "$gte": { + "$numberLong": "0" + } + } + } + }, + { + "$sort": { + "_id": 1 + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedLong": { + "$numberLong": "0" + } + }, + { + "_id": 1, + "encryptedLong": { + "$numberLong": "1" + } + } + ] + } + ] + }, + { + "description": "Aggregate with $gt with no results", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedLong": { + "$numberLong": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedLong": { + "$numberLong": "1" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedLong": { + "$gt": { + "$numberLong": "1" + } + } + } + } + ] + }, + "object": "coll", + "expectResult": [] + } + ] + }, + { + "description": "Aggregate with $lt", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedLong": { + "$numberLong": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedLong": { + "$numberLong": "1" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedLong": { + "$lt": { + "$numberLong": "1" + } + } + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedLong": { + "$numberLong": "0" + } + } + ] + } + ] + }, + { + "description": "Aggregate with $lte", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedLong": { + "$numberLong": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedLong": { + "$numberLong": "1" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedLong": { + "$lte": { + "$numberLong": "1" + } + } + } + }, + { + "$sort": { + "_id": 1 + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedLong": { + "$numberLong": "0" + } + }, + { + "_id": 1, + "encryptedLong": { + "$numberLong": "1" + } + } + ] + } + ] + }, + { + "description": "Aggregate with $lt below min", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedLong": { + "$numberLong": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedLong": { + "$numberLong": "1" + } + } + }, + 
"object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedLong": { + "$lt": { + "$numberLong": "0" + } + } + } + } + ] + }, + "object": "coll", + "expectError": { + "errorContains": "must be greater than the range minimum" + } + } + ] + }, + { + "description": "Aggregate with $gt above max", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedLong": { + "$numberLong": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedLong": { + "$numberLong": "1" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedLong": { + "$gt": { + "$numberLong": "200" + } + } + } + } + ] + }, + "object": "coll", + "expectError": { + "errorContains": "must be less than the range maximum" + } + } + ] + }, + { + "description": "Aggregate with $gt and $lt", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedLong": { + "$numberLong": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedLong": { + "$numberLong": "1" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedLong": { + "$gt": { + "$numberLong": "0" + }, + "$lt": { + "$numberLong": "2" + } + } + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedLong": { + "$numberLong": "1" + } + } + ] + } + ] + }, + { + "description": "Aggregate with equality", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedLong": { + "$numberLong": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedLong": { + "$numberLong": "1" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedLong": { + "$numberLong": "0" + } + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedLong": { + "$numberLong": "0" + } + } + ] + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedLong": { + "$numberLong": "1" + } + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedLong": { + "$numberLong": "1" + } + } + ] + } + ] + }, + { + "description": "Aggregate with full range", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedLong": { + "$numberLong": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedLong": { + "$numberLong": "1" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedLong": { + "$gte": { + "$numberLong": "0" + }, + "$lte": { + "$numberLong": "200" + } + } + } + }, + { + "$sort": { + "_id": 1 + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedLong": { + "$numberLong": "0" + } + }, + { + "_id": 1, + "encryptedLong": { + "$numberLong": "1" + } + } + ] + } + ] + }, + { + "description": "Aggregate with $in", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedLong": { + "$numberLong": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + 
"arguments": { + "document": { + "_id": 1, + "encryptedLong": { + "$numberLong": "1" + } + } + }, + "object": "coll" + }, + { + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$match": { + "encryptedLong": { + "$in": [ + { + "$numberLong": "0" + } + ] + } + } + } + ] + }, + "object": "coll", + "expectResult": [ + { + "_id": 0, + "encryptedLong": { + "$numberLong": "0" + } + } + ] + } + ] + }, + { + "description": "Wrong type: Insert Double", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedLong": { + "$numberDouble": "0" + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "Cannot encrypt element" + } + } + ] + }, + { + "description": "Wrong type: Find Double", + "operations": [ + { + "name": "find", + "arguments": { + "filter": { + "encryptedLong": { + "$gte": { + "$numberDouble": "0" + } + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "field type is not supported" + } + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Long-Delete.json b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Long-Delete.json new file mode 100644 index 0000000000..0a8580110c --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Long-Delete.json @@ -0,0 +1,483 @@ +{ + "description": "fle2v2-Rangev2-Long-Delete", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted" + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll_unencrypted", + "database": "db_unencrypted", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedLong", + "bsonType": "long", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberLong": "0" + }, + "max": { + "$numberLong": "200" + } + } + } + ] + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + 
"$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "FLE2 Range Long. Delete.", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedLong": { + "$numberLong": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedLong": { + "$numberLong": "1" + } + } + }, + "object": "coll" + }, + { + "name": "deleteOne", + "arguments": { + "filter": { + "encryptedLong": { + "$gt": { + "$numberLong": "0" + } + } + } + }, + "object": "coll", + "expectResult": { + "deletedCount": 1 + } + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": 0, + "encryptedLong": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "RjBYT2h3ZAoHxhf8DU6/dFbDkEBZp0IxREcsRTu2MXs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "x7GR49EN0t3WXQDihkrbonK7qNIBYC87tpL/XEUyIYc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "JfYUqWF+OoGjiYkRI4L5iPlF+T1Eleul7Fki22jp4Qc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "q1RyGfIgsaQHoZFRw+DD28V26rN5hweApPLwExncvT8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "L2PFeKGvLS6C+DLudR6fGlBq3ERPvjWvRyNRIA2HVb0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "CWxaNqL3iP1yCixDkcmf9bmW3E5VeN8TJkg1jJe528s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "+vC6araOEo+fpW7PSIP40/EnzBCj1d2N10Jr3rrXJJM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6SV63Mf51Z6A6p2X3rCnJKCu6ku3Oeb45mBYbz+IoAo=", + "subType": "00" + } + } + ] + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 0, + "encryptedLong": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedLong", + "bsonType": "long", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberLong": "0" + }, + "max": { + "$numberLong": "200" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encryptedLong": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": 
"enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedLong", + "bsonType": "long", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberLong": "0" + }, + "max": { + "$numberLong": "200" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "delete": "default", + "deletes": [ + { + "q": { + "encryptedLong": { + "$gt": { + "$binary": { + "base64": "DXUFAAADcGF5bG9hZAAZBQAABGcABQUAAAMwAH0AAAAFZAAgAAAAALGGQ/CRD+pGLD53BZzWcCcYbuGLVEyjzXIx7b+ux/q2BXMAIAAAAACOC6mXEZL27P9hethZbtKYsTXKK+FpgQ9Axxmn9N/cCwVsACAAAAAA+MFEd8XfZSpbXKqqPC2L3TEFswkaG5Ff6aSgf8p+XVIAAzEAfQAAAAVkACAAAAAAtL3QIvnZqCF72eS6lKr8ilff7R6kiNklokiTuaU5wNsFcwAgAAAAAEtqr3/X731VB+VrbFcY8ZrJKRo2E0Fd+C8L0EMNcvcCBWwAIAAAAABNPhSriux8W8qbwnhCczE3IzlhNEnGDpUwTFDZSL+eYQADMgB9AAAABWQAIAAAAAB99ZW/7KwXKzl5M3XQsAJ3JbEef90IoxFYBArNiYzlgQVzACAAAAAAYO/qaw0+92HAryxOUG7iK6hnIy3OaUA9jIqtHdvcq8YFbAAgAAAAAHrUYj8A0hVgc6VklpDiljOnykrUSfEsjm56XO/bsfKdAAMzAH0AAAAFZAAgAAAAAOK8brUuc2onBNDRtfYMR736dHj4dQqXod8JG7tAMTsDBXMAIAAAAAAW6SrGAL6Bx0s7ZlsYULFfOAiYIGhEWu6md3r+Rk40awVsACAAAAAAIHYXP8RLcCboUmHN3+OlnEw1DxaLSnbTB9PdF228fFAAAzQAfQAAAAVkACAAAAAAV22FGF7ZDwK/EYiGNMlm/QuT3saQdyJM/Fn+ZyQug1oFcwAgAAAAACo7GwCvbcs5UHQMgds9/1QMklEVdjZigpuOFGrDmmxtBWwAIAAAAADQbYYPxlCMMGe2MulbiurApFLoeJSMvTeDU3pyEA2jNwADNQB9AAAABWQAIAAAAADFspsMG7yHjKppyllon1KqAsTrHaZ6JzNqnSz8o6iTvwVzACAAAAAAeiA5pqVIQQ9s6UY/P8v5Jjkl3I7iFNeLDYehikrINrsFbAAgAAAAAFjBTzTpNxDEkA0vSRj0jCED9KDRlboMVyilKyDz5YR4AAM2AH0AAAAFZAAgAAAAAPcLmtq+V1e+MRlZ7NHq1+mrRVBQje5zj685ZvdsfKvSBXMAIAAAAABdHz/3w2k5km97QN9m7oLFYJaVJneNlMboIlz5yUASQAVsACAAAAAAWbp8JVJnx8fEVAJFa7WMfMa7wXeP5M3C8MX20J/i9n0AAzcAfQAAAAVkACAAAAAAYfLwnoxK6XAGQrJFy8+TIJoq38ldBaO75h4zA4ZX5tQFcwAgAAAAAC2wk8UcJH5X5XGnDBYmel6srpBkzBhHtt3Jw1u5TSJ1BWwAIAAAAAA9/YU9eI3D7QbXKIw/3/gzWJ6MZrCYhG0j1wNKgRQp5wADOAB9AAAABWQAIAAAAADGvyrtKkIcaV17ynZA7b2k5Pz6OhvxdWNkDvDWJIja8wVzACAAAAAAOLypVKNxf/wR1G8OZjUUsTQzDYeNNhhITxGMSp7euS4FbAAgAAAAAA9EsxoV1B2DcQ1NJRwuxXnvVR+vkD0wbbDYEI/zFEnDAAM5AH0AAAAFZAAgAAAAAEocREw1L0g+roFUchJI2Yd0M0ME2bnErNUYnpyJP1SqBXMAIAAAAAAcE2/JK/8MoSeOchIuAkKh1X3ImoA7p8ujAZIfvIDo6QVsACAAAAAA+W0+zgLr85/PD7P9a94wk6MgNgrizx/XU9aCxAkp1IwAABJjbQAAAAAAAAAAAAAQcGF5bG9hZElkAAAAAAAQZmlyc3RPcGVyYXRvcgABAAAAEnNwAAEAAAAAAAAAEHRmAAEAAAASbW4AAAAAAAAAAAASbXgAyAAAAAAAAAAA", + "subType": "06" + } + } + } + }, + "limit": 1 + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedLong", + "bsonType": "long", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberLong": "0" + }, + "max": { + "$numberLong": "200" + } + } + } + ] + } + } + } + }, + "commandName": "delete" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Long-FindOneAndUpdate.json b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Long-FindOneAndUpdate.json new file mode 100644 index 0000000000..f014e1a4ac --- /dev/null +++ 
b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Long-FindOneAndUpdate.json @@ -0,0 +1,551 @@ +{ + "description": "fle2v2-Rangev2-Long-FindOneAndUpdate", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted" + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll_unencrypted", + "database": "db_unencrypted", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedLong", + "bsonType": "long", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberLong": "0" + }, + "max": { + "$numberLong": "200" + } + } + } + ] + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "FLE2 Range Long. 
FindOneAndUpdate.", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedLong": { + "$numberLong": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedLong": { + "$numberLong": "1" + } + } + }, + "object": "coll" + }, + { + "name": "findOneAndUpdate", + "arguments": { + "filter": { + "encryptedLong": { + "$gt": { + "$numberLong": "0" + } + } + }, + "update": { + "$set": { + "encryptedLong": { + "$numberLong": "2" + } + } + }, + "returnDocument": "Before" + }, + "object": "coll", + "expectResult": { + "_id": 1, + "encryptedLong": { + "$numberLong": "1" + } + } + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": 0, + "encryptedLong": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "RjBYT2h3ZAoHxhf8DU6/dFbDkEBZp0IxREcsRTu2MXs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "x7GR49EN0t3WXQDihkrbonK7qNIBYC87tpL/XEUyIYc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "JfYUqWF+OoGjiYkRI4L5iPlF+T1Eleul7Fki22jp4Qc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "q1RyGfIgsaQHoZFRw+DD28V26rN5hweApPLwExncvT8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "L2PFeKGvLS6C+DLudR6fGlBq3ERPvjWvRyNRIA2HVb0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "CWxaNqL3iP1yCixDkcmf9bmW3E5VeN8TJkg1jJe528s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "+vC6araOEo+fpW7PSIP40/EnzBCj1d2N10Jr3rrXJJM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6SV63Mf51Z6A6p2X3rCnJKCu6ku3Oeb45mBYbz+IoAo=", + "subType": "00" + } + } + ] + }, + { + "_id": 1, + "encryptedLong": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "hyDcE6QQjPrYJaIS/n7evEZFYcm31Tj89CpEYGF45cI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "F08nMDWDZc+DbWM7XCEJNNCEYyinRmrvGP7EWhmp4is=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "cXH4688amcDc8kZOJq4UP8cE3R58Zl7e+Qo/1jyspps=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "uURBxvTp3FBCVkd+LPqyuY7d6rMW6SGIJQEPY/wtkZI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "jG3hax1L3RBp9t38vUt53FsBxgr/+Si/vVISpAylYpE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "kwtIW8MhH9Ky5xNjBx8gFA/SHh2YVphie7g5FGBzals=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "FHflwFuEMu4xX0ZApHi+pdlBH+oevAtXckCUb5Wv0xU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ty4cnzJdAlbQKnh7px3GEYjBnvO+jIOaKjoTRDtmh3M=", + "subType": "00" + } + } + ] + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 0, + "encryptedLong": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + 
"escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedLong", + "bsonType": "long", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberLong": "0" + }, + "max": { + "$numberLong": "200" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encryptedLong": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedLong", + "bsonType": "long", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberLong": "0" + }, + "max": { + "$numberLong": "200" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "findAndModify": "default", + "query": { + "encryptedLong": { + "$gt": { + "$binary": { + "base64": "DXUFAAADcGF5bG9hZAAZBQAABGcABQUAAAMwAH0AAAAFZAAgAAAAALGGQ/CRD+pGLD53BZzWcCcYbuGLVEyjzXIx7b+ux/q2BXMAIAAAAACOC6mXEZL27P9hethZbtKYsTXKK+FpgQ9Axxmn9N/cCwVsACAAAAAA+MFEd8XfZSpbXKqqPC2L3TEFswkaG5Ff6aSgf8p+XVIAAzEAfQAAAAVkACAAAAAAtL3QIvnZqCF72eS6lKr8ilff7R6kiNklokiTuaU5wNsFcwAgAAAAAEtqr3/X731VB+VrbFcY8ZrJKRo2E0Fd+C8L0EMNcvcCBWwAIAAAAABNPhSriux8W8qbwnhCczE3IzlhNEnGDpUwTFDZSL+eYQADMgB9AAAABWQAIAAAAAB99ZW/7KwXKzl5M3XQsAJ3JbEef90IoxFYBArNiYzlgQVzACAAAAAAYO/qaw0+92HAryxOUG7iK6hnIy3OaUA9jIqtHdvcq8YFbAAgAAAAAHrUYj8A0hVgc6VklpDiljOnykrUSfEsjm56XO/bsfKdAAMzAH0AAAAFZAAgAAAAAOK8brUuc2onBNDRtfYMR736dHj4dQqXod8JG7tAMTsDBXMAIAAAAAAW6SrGAL6Bx0s7ZlsYULFfOAiYIGhEWu6md3r+Rk40awVsACAAAAAAIHYXP8RLcCboUmHN3+OlnEw1DxaLSnbTB9PdF228fFAAAzQAfQAAAAVkACAAAAAAV22FGF7ZDwK/EYiGNMlm/QuT3saQdyJM/Fn+ZyQug1oFcwAgAAAAACo7GwCvbcs5UHQMgds9/1QMklEVdjZigpuOFGrDmmxtBWwAIAAAAADQbYYPxlCMMGe2MulbiurApFLoeJSMvTeDU3pyEA2jNwADNQB9AAAABWQAIAAAAADFspsMG7yHjKppyllon1KqAsTrHaZ6JzNqnSz8o6iTvwVzACAAAAAAeiA5pqVIQQ9s6UY/P8v5Jjkl3I7iFNeLDYehikrINrsFbAAgAAAAAFjBTzTpNxDEkA0vSRj0jCED9KDRlboMVyilKyDz5YR4AAM2AH0AAAAFZAAgAAAAAPcLmtq+V1e+MRlZ7NHq1+mrRVBQje5zj685ZvdsfKvSBXMAIAAAAABdHz/3w2k5km97QN9m7oLFYJaVJneNlMboIlz5yUASQAVsACAAAAAAWbp8JVJnx8fEVAJFa7WMfMa7wXeP5M3C8MX20J/i9n0AAzcAfQAAAAVkACAAAAAAYfLwnoxK6XAGQrJFy8+TIJoq38ldBaO75h4zA4ZX5tQFcwAgAAAAAC2wk8UcJH5X5XGnDBYmel6srpBkzBhHtt3Jw1u5TSJ1BWwAIAAAAAA9/YU9eI3D7QbXKIw/3/gzWJ6MZrCYhG0j1wNKgRQp5wADOAB9AAAABWQAIAAAAADGvyrtKkIcaV17ynZA7b2k5Pz6OhvxdWNkDvDWJIja8wVzACAAAAAAOLypVKNxf/wR1G8OZjUUsTQzDYeNNhhITxGMSp7euS4FbAAgAAAAAA9EsxoV1B2DcQ1NJRwuxXnvVR+vkD0wbbDYEI/zFEnDAAM5AH0AAAAFZAAgAAAAAEocREw1L0g+roFUchJI2Yd0M0ME2bnErNUYnpyJP1SqBXMAIAAAAAAcE2/JK/8MoSeOchIuAkKh1X3ImoA7p8ujAZIfvIDo6QVsACAAAAAA+W0+zgLr85/PD7P9a94wk6MgNgrizx/XU9aCxAkp1IwAABJjbQAAAAAAAAAAAAAQcGF5bG9hZElkAAAAAAAQZmlyc3RPcGVyYXRvcgABAAAAEnNwAAEAAAAAAAAAEHRmAAEAAAASbW4AAAAAAAAAAAASbXgAyAAAAAAAAAAA", + "subType": "06" + } + } + } + }, + "update": { + "$set": { + "encryptedLong": { + "$$type": "binData" + } + } + }, + "encryptionInformation": { + "type": 1, + "schema": { + 
"default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedLong", + "bsonType": "long", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberLong": "0" + }, + "max": { + "$numberLong": "200" + } + } + } + ] + } + } + } + }, + "commandName": "findAndModify" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Long-InsertFind.json b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Long-InsertFind.json new file mode 100644 index 0000000000..2896df0032 --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Long-InsertFind.json @@ -0,0 +1,538 @@ +{ + "description": "fle2v2-Rangev2-Long-InsertFind", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted" + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll_unencrypted", + "database": "db_unencrypted", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedLong", + "bsonType": "long", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberLong": "0" + }, + "max": { + "$numberLong": "200" + } + } + } + ] + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "FLE2 Range Long. 
Insert and Find.", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedLong": { + "$numberLong": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedLong": { + "$numberLong": "1" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedLong": { + "$gt": { + "$numberLong": "0" + } + } + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encryptedLong": { + "$numberLong": "1" + } + } + ] + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": 0, + "encryptedLong": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "RjBYT2h3ZAoHxhf8DU6/dFbDkEBZp0IxREcsRTu2MXs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "x7GR49EN0t3WXQDihkrbonK7qNIBYC87tpL/XEUyIYc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "JfYUqWF+OoGjiYkRI4L5iPlF+T1Eleul7Fki22jp4Qc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "q1RyGfIgsaQHoZFRw+DD28V26rN5hweApPLwExncvT8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "L2PFeKGvLS6C+DLudR6fGlBq3ERPvjWvRyNRIA2HVb0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "CWxaNqL3iP1yCixDkcmf9bmW3E5VeN8TJkg1jJe528s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "+vC6araOEo+fpW7PSIP40/EnzBCj1d2N10Jr3rrXJJM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6SV63Mf51Z6A6p2X3rCnJKCu6ku3Oeb45mBYbz+IoAo=", + "subType": "00" + } + } + ] + }, + { + "_id": 1, + "encryptedLong": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "25j9sQXZCihCmHKvTHgaBsAVZFcGPn7JjHdrCGlwyyw=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "FA74j21GUEJb1DJBOpR9nVnjaDZnd8yAQNuaW9Qi26g=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "kJv//KVkbrobIBf+QeWC5jxn20mx/P0R1N6aCSMgKM8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "zB+Whi9IUUGxfLEe+lGuIzLX4LFbIhaIAm5lRk65QTc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ybO1QU3CgvhO8JgRXH+HxKszWcpl5aGDYYVa75fHa1g=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "X3Y3eSAbbMg//JgiHHiFpYOpV61t8kkDexI+CQyitH4=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "SlNHXyqVFGDPrX/2ppwog6l4pwj3PKda2TkZbqgfSfA=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "McjV8xwTF3xI7863DYOBdyvIv6UpzThl6v9vBRk05bI=", + "subType": "00" + } + } + ] + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 0, + "encryptedLong": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + 
"$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedLong", + "bsonType": "long", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberLong": "0" + }, + "max": { + "$numberLong": "200" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encryptedLong": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedLong", + "bsonType": "long", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberLong": "0" + }, + "max": { + "$numberLong": "200" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "default", + "filter": { + "encryptedLong": { + "$gt": { + "$binary": { + "base64": "DXUFAAADcGF5bG9hZAAZBQAABGcABQUAAAMwAH0AAAAFZAAgAAAAALGGQ/CRD+pGLD53BZzWcCcYbuGLVEyjzXIx7b+ux/q2BXMAIAAAAACOC6mXEZL27P9hethZbtKYsTXKK+FpgQ9Axxmn9N/cCwVsACAAAAAA+MFEd8XfZSpbXKqqPC2L3TEFswkaG5Ff6aSgf8p+XVIAAzEAfQAAAAVkACAAAAAAtL3QIvnZqCF72eS6lKr8ilff7R6kiNklokiTuaU5wNsFcwAgAAAAAEtqr3/X731VB+VrbFcY8ZrJKRo2E0Fd+C8L0EMNcvcCBWwAIAAAAABNPhSriux8W8qbwnhCczE3IzlhNEnGDpUwTFDZSL+eYQADMgB9AAAABWQAIAAAAAB99ZW/7KwXKzl5M3XQsAJ3JbEef90IoxFYBArNiYzlgQVzACAAAAAAYO/qaw0+92HAryxOUG7iK6hnIy3OaUA9jIqtHdvcq8YFbAAgAAAAAHrUYj8A0hVgc6VklpDiljOnykrUSfEsjm56XO/bsfKdAAMzAH0AAAAFZAAgAAAAAOK8brUuc2onBNDRtfYMR736dHj4dQqXod8JG7tAMTsDBXMAIAAAAAAW6SrGAL6Bx0s7ZlsYULFfOAiYIGhEWu6md3r+Rk40awVsACAAAAAAIHYXP8RLcCboUmHN3+OlnEw1DxaLSnbTB9PdF228fFAAAzQAfQAAAAVkACAAAAAAV22FGF7ZDwK/EYiGNMlm/QuT3saQdyJM/Fn+ZyQug1oFcwAgAAAAACo7GwCvbcs5UHQMgds9/1QMklEVdjZigpuOFGrDmmxtBWwAIAAAAADQbYYPxlCMMGe2MulbiurApFLoeJSMvTeDU3pyEA2jNwADNQB9AAAABWQAIAAAAADFspsMG7yHjKppyllon1KqAsTrHaZ6JzNqnSz8o6iTvwVzACAAAAAAeiA5pqVIQQ9s6UY/P8v5Jjkl3I7iFNeLDYehikrINrsFbAAgAAAAAFjBTzTpNxDEkA0vSRj0jCED9KDRlboMVyilKyDz5YR4AAM2AH0AAAAFZAAgAAAAAPcLmtq+V1e+MRlZ7NHq1+mrRVBQje5zj685ZvdsfKvSBXMAIAAAAABdHz/3w2k5km97QN9m7oLFYJaVJneNlMboIlz5yUASQAVsACAAAAAAWbp8JVJnx8fEVAJFa7WMfMa7wXeP5M3C8MX20J/i9n0AAzcAfQAAAAVkACAAAAAAYfLwnoxK6XAGQrJFy8+TIJoq38ldBaO75h4zA4ZX5tQFcwAgAAAAAC2wk8UcJH5X5XGnDBYmel6srpBkzBhHtt3Jw1u5TSJ1BWwAIAAAAAA9/YU9eI3D7QbXKIw/3/gzWJ6MZrCYhG0j1wNKgRQp5wADOAB9AAAABWQAIAAAAADGvyrtKkIcaV17ynZA7b2k5Pz6OhvxdWNkDvDWJIja8wVzACAAAAAAOLypVKNxf/wR1G8OZjUUsTQzDYeNNhhITxGMSp7euS4FbAAgAAAAAA9EsxoV1B2DcQ1NJRwuxXnvVR+vkD0wbbDYEI/zFEnDAAM5AH0AAAAFZAAgAAAAAEocREw1L0g+roFUchJI2Yd0M0ME2bnErNUYnpyJP1SqBXMAIAAAAAAcE2/JK/8MoSeOchIuAkKh1X3ImoA7p8ujAZIfvIDo6QVsACAAAAAA+W0+zgLr85/PD7P9a94wk6MgNgrizx/XU9aCxAkp1IwAABJjbQAAAAAAAAAAAAAQcGF5bG9hZElkAAAAAAAQZmlyc3RPcGVyYXRvcgABAAAAEnNwAAEAAAAAAAAAEHRmAAEAAAASbW4AAAAAAAAAAAASbXgAyAAAAAAAAAAA", + "subType": "06" + } + } + } + }, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": 
"04" + } + }, + "path": "encryptedLong", + "bsonType": "long", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberLong": "0" + }, + "max": { + "$numberLong": "200" + } + } + } + ] + } + } + } + }, + "commandName": "find" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Long-Update.json b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Long-Update.json new file mode 100644 index 0000000000..4f8cd1d80d --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-Long-Update.json @@ -0,0 +1,557 @@ +{ + "description": "fle2v2-Rangev2-Long-Update", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted" + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll_unencrypted", + "database": "db_unencrypted", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedLong", + "bsonType": "long", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberLong": "0" + }, + "max": { + "$numberLong": "200" + } + } + } + ] + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "FLE2 Range Long. 
Update.", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedLong": { + "$numberLong": "0" + } + } + }, + "object": "coll" + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedLong": { + "$numberLong": "1" + } + } + }, + "object": "coll" + }, + { + "name": "updateOne", + "arguments": { + "filter": { + "encryptedLong": { + "$gt": { + "$numberLong": "0" + } + } + }, + "update": { + "$set": { + "encryptedLong": { + "$numberLong": "2" + } + } + } + }, + "object": "coll", + "expectResult": { + "matchedCount": 1, + "modifiedCount": 1, + "upsertedCount": 0 + } + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": 0, + "encryptedLong": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "RjBYT2h3ZAoHxhf8DU6/dFbDkEBZp0IxREcsRTu2MXs=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "x7GR49EN0t3WXQDihkrbonK7qNIBYC87tpL/XEUyIYc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "JfYUqWF+OoGjiYkRI4L5iPlF+T1Eleul7Fki22jp4Qc=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "q1RyGfIgsaQHoZFRw+DD28V26rN5hweApPLwExncvT8=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "L2PFeKGvLS6C+DLudR6fGlBq3ERPvjWvRyNRIA2HVb0=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "CWxaNqL3iP1yCixDkcmf9bmW3E5VeN8TJkg1jJe528s=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "+vC6araOEo+fpW7PSIP40/EnzBCj1d2N10Jr3rrXJJM=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "6SV63Mf51Z6A6p2X3rCnJKCu6ku3Oeb45mBYbz+IoAo=", + "subType": "00" + } + } + ] + }, + { + "_id": 1, + "encryptedLong": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "hyDcE6QQjPrYJaIS/n7evEZFYcm31Tj89CpEYGF45cI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "F08nMDWDZc+DbWM7XCEJNNCEYyinRmrvGP7EWhmp4is=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "cXH4688amcDc8kZOJq4UP8cE3R58Zl7e+Qo/1jyspps=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "uURBxvTp3FBCVkd+LPqyuY7d6rMW6SGIJQEPY/wtkZI=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "jG3hax1L3RBp9t38vUt53FsBxgr/+Si/vVISpAylYpE=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "kwtIW8MhH9Ky5xNjBx8gFA/SHh2YVphie7g5FGBzals=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "FHflwFuEMu4xX0ZApHi+pdlBH+oevAtXckCUb5Wv0xU=", + "subType": "00" + } + }, + { + "$binary": { + "base64": "ty4cnzJdAlbQKnh7px3GEYjBnvO+jIOaKjoTRDtmh3M=", + "subType": "00" + } + } + ] + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 0, + "encryptedLong": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + 
"ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedLong", + "bsonType": "long", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberLong": "0" + }, + "max": { + "$numberLong": "200" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encryptedLong": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedLong", + "bsonType": "long", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberLong": "0" + }, + "max": { + "$numberLong": "200" + } + } + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "update": "default", + "ordered": true, + "updates": [ + { + "q": { + "encryptedLong": { + "$gt": { + "$binary": { + "base64": "DXUFAAADcGF5bG9hZAAZBQAABGcABQUAAAMwAH0AAAAFZAAgAAAAALGGQ/CRD+pGLD53BZzWcCcYbuGLVEyjzXIx7b+ux/q2BXMAIAAAAACOC6mXEZL27P9hethZbtKYsTXKK+FpgQ9Axxmn9N/cCwVsACAAAAAA+MFEd8XfZSpbXKqqPC2L3TEFswkaG5Ff6aSgf8p+XVIAAzEAfQAAAAVkACAAAAAAtL3QIvnZqCF72eS6lKr8ilff7R6kiNklokiTuaU5wNsFcwAgAAAAAEtqr3/X731VB+VrbFcY8ZrJKRo2E0Fd+C8L0EMNcvcCBWwAIAAAAABNPhSriux8W8qbwnhCczE3IzlhNEnGDpUwTFDZSL+eYQADMgB9AAAABWQAIAAAAAB99ZW/7KwXKzl5M3XQsAJ3JbEef90IoxFYBArNiYzlgQVzACAAAAAAYO/qaw0+92HAryxOUG7iK6hnIy3OaUA9jIqtHdvcq8YFbAAgAAAAAHrUYj8A0hVgc6VklpDiljOnykrUSfEsjm56XO/bsfKdAAMzAH0AAAAFZAAgAAAAAOK8brUuc2onBNDRtfYMR736dHj4dQqXod8JG7tAMTsDBXMAIAAAAAAW6SrGAL6Bx0s7ZlsYULFfOAiYIGhEWu6md3r+Rk40awVsACAAAAAAIHYXP8RLcCboUmHN3+OlnEw1DxaLSnbTB9PdF228fFAAAzQAfQAAAAVkACAAAAAAV22FGF7ZDwK/EYiGNMlm/QuT3saQdyJM/Fn+ZyQug1oFcwAgAAAAACo7GwCvbcs5UHQMgds9/1QMklEVdjZigpuOFGrDmmxtBWwAIAAAAADQbYYPxlCMMGe2MulbiurApFLoeJSMvTeDU3pyEA2jNwADNQB9AAAABWQAIAAAAADFspsMG7yHjKppyllon1KqAsTrHaZ6JzNqnSz8o6iTvwVzACAAAAAAeiA5pqVIQQ9s6UY/P8v5Jjkl3I7iFNeLDYehikrINrsFbAAgAAAAAFjBTzTpNxDEkA0vSRj0jCED9KDRlboMVyilKyDz5YR4AAM2AH0AAAAFZAAgAAAAAPcLmtq+V1e+MRlZ7NHq1+mrRVBQje5zj685ZvdsfKvSBXMAIAAAAABdHz/3w2k5km97QN9m7oLFYJaVJneNlMboIlz5yUASQAVsACAAAAAAWbp8JVJnx8fEVAJFa7WMfMa7wXeP5M3C8MX20J/i9n0AAzcAfQAAAAVkACAAAAAAYfLwnoxK6XAGQrJFy8+TIJoq38ldBaO75h4zA4ZX5tQFcwAgAAAAAC2wk8UcJH5X5XGnDBYmel6srpBkzBhHtt3Jw1u5TSJ1BWwAIAAAAAA9/YU9eI3D7QbXKIw/3/gzWJ6MZrCYhG0j1wNKgRQp5wADOAB9AAAABWQAIAAAAADGvyrtKkIcaV17ynZA7b2k5Pz6OhvxdWNkDvDWJIja8wVzACAAAAAAOLypVKNxf/wR1G8OZjUUsTQzDYeNNhhITxGMSp7euS4FbAAgAAAAAA9EsxoV1B2DcQ1NJRwuxXnvVR+vkD0wbbDYEI/zFEnDAAM5AH0AAAAFZAAgAAAAAEocREw1L0g+roFUchJI2Yd0M0ME2bnErNUYnpyJP1SqBXMAIAAAAAAcE2/JK/8MoSeOchIuAkKh1X3ImoA7p8ujAZIfvIDo6QVsACAAAAAA+W0+zgLr85/PD7P9a94wk6MgNgrizx/XU9aCxAkp1IwAABJjbQAAAAAAAAAAAAAQcGF5bG9hZElkAAAAAAAQZmlyc3RPcGVyYXRvcgABAAAAEnNwAAEAAAAAAAAAEHRmAAEAAAASbW4AAAAAAAAAAAASbXgAyAAAAAAAAAAA", + "subType": "06" + } + } + } + }, + "u": { + "$set": { + "encryptedLong": { + "$$type": "binData" + } + } + }, + "multi": false, + "upsert": false + } + ], + "encryptionInformation": { + 
"type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedLong", + "bsonType": "long", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberInt": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberLong": "0" + }, + "max": { + "$numberLong": "200" + } + } + } + ] + } + } + }, + "$db": "default" + }, + "commandName": "update" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-Rangev2-WrongType.json b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-WrongType.json new file mode 100644 index 0000000000..03681947ce --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-Rangev2-WrongType.json @@ -0,0 +1,204 @@ +{ + "description": "fle2v2-Rangev2-WrongType", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "8.0.0", + "maxServerVersion": "8.99.99", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted" + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll_unencrypted", + "database": "db_unencrypted", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedInt", + "bsonType": "int", + "queries": { + "queryType": "range", + "contention": { + "$numberLong": "0" + }, + "trimFactor": { + "$numberLong": "1" + }, + "sparsity": { + "$numberLong": "1" + }, + "min": { + "$numberInt": "0" + }, + "max": { + "$numberInt": "200" + } + } + } + ] + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "Wrong type: Insert Double", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": 
{ + "_id": 0, + "encryptedInt": { + "$numberDouble": "0" + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "Cannot encrypt element" + } + } + ] + }, + { + "description": "Wrong type: Find Double", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 0, + "encryptedInt": { + "$numberInt": "0" + } + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "encryptedInt": { + "$gte": { + "$numberDouble": "0" + } + } + }, + "sort": { + "_id": 1 + } + }, + "object": "coll", + "expectError": { + "errorContains": "field type is not supported" + } + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-Update.json b/test/client-side-encryption/spec/unified/fle2v2-Update.json new file mode 100644 index 0000000000..9c39c4d83d --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-Update.json @@ -0,0 +1,633 @@ +{ + "description": "fle2v2-Update", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "7.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted" + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll_unencrypted", + "database": "db_unencrypted", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedIndexed", + "bsonType": "string", + "queries": { + "queryType": "equality", + "contention": { + "$numberLong": "0" + } + } + }, + { + "keyId": { + "$binary": { + "base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedUnindexed", + "bsonType": "string" + } + ] + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "Update can query an FLE2 indexed field", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedIndexed": "value123" + } 
+ }, + "object": "coll" + }, + { + "name": "updateOne", + "arguments": { + "filter": { + "encryptedIndexed": "value123" + }, + "update": { + "$set": { + "foo": "bar" + } + } + }, + "object": "coll", + "expectResult": { + "matchedCount": 1, + "modifiedCount": 1, + "upsertedCount": 0 + } + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": 1, + "encryptedIndexed": { + "$$type": "binData" + }, + "foo": "bar", + "__safeContent__": [ + { + "$binary": { + "base64": "ThpoKfQ8AkOzkFfNC1+9PF0pY2nIzfXvRdxQgjkNbBw=", + "subType": "00" + } + } + ] + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encryptedIndexed": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedIndexed", + "bsonType": "string", + "queries": { + "queryType": "equality", + "contention": { + "$numberLong": "0" + } + } + }, + { + "keyId": { + "$binary": { + "base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedUnindexed", + "bsonType": "string" + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "update": "default", + "updates": [ + { + "q": { + "encryptedIndexed": { + "$eq": { + "$binary": { + "base64": "DIkAAAAFZAAgAAAAAPtVteJQAlgb2YMa/+7YWH00sbQPyt7L6Rb8OwBdMmL2BXMAIAAAAAAd44hgVKnEnTFlwNVC14oyc9OZOTspeymusqkRQj57nAVsACAAAAAAaZ9s3G+4znfxStxeOZwcZy1OhzjMGc5hjmdMN+b/w6kSY20AAAAAAAAAAAAA", + "subType": "06" + } + } + } + }, + "u": { + "$set": { + "foo": "bar" + } + }, + "multi": false, + "upsert": false + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedIndexed", + "bsonType": "string", + "queries": { + "queryType": "equality", + "contention": { + "$numberLong": "0" + } + } + }, + { + "keyId": { + "$binary": { + "base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedUnindexed", + "bsonType": "string" + } + ] + } + } + } + }, + "commandName": "update" + } + } + ] + } + ] + }, + { + "description": "Update can modify an FLE2 indexed field", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedIndexed": "value123" + } + }, + "object": "coll" + }, + { + "name": "updateOne", + "arguments": { + "filter": { + "encryptedIndexed": "value123" + }, + "update": { + "$set": { + 
"encryptedIndexed": "value456" + } + } + }, + "object": "coll", + "expectResult": { + "matchedCount": 1, + "modifiedCount": 1, + "upsertedCount": 0 + } + }, + { + "name": "find", + "arguments": { + "filter": { + "_id": 1 + } + }, + "object": "coll", + "expectResult": [ + { + "encryptedIndexed": "value456" + } + ] + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": 1, + "encryptedIndexed": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "rhe7/w8Ob8Unl44rGr/moScx6m5VODQnscDhF4Nkn6g=", + "subType": "00" + } + } + ] + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encryptedIndexed": { + "$$type": "binData" + } + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedIndexed", + "bsonType": "string", + "queries": { + "queryType": "equality", + "contention": { + "$numberLong": "0" + } + } + }, + { + "keyId": { + "$binary": { + "base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedUnindexed", + "bsonType": "string" + } + ] + } + } + } + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "update": "default", + "updates": [ + { + "q": { + "encryptedIndexed": { + "$eq": { + "$binary": { + "base64": "DIkAAAAFZAAgAAAAAPtVteJQAlgb2YMa/+7YWH00sbQPyt7L6Rb8OwBdMmL2BXMAIAAAAAAd44hgVKnEnTFlwNVC14oyc9OZOTspeymusqkRQj57nAVsACAAAAAAaZ9s3G+4znfxStxeOZwcZy1OhzjMGc5hjmdMN+b/w6kSY20AAAAAAAAAAAAA", + "subType": "06" + } + } + } + }, + "u": { + "$set": { + "encryptedIndexed": { + "$$type": "binData" + } + } + }, + "multi": false, + "upsert": false + } + ], + "ordered": true, + "encryptionInformation": { + "type": 1, + "schema": { + "default.default": { + "escCollection": "enxcol_.default.esc", + "ecocCollection": "enxcol_.default.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedIndexed", + "bsonType": "string", + "queries": { + "queryType": "equality", + "contention": { + "$numberLong": "0" + } + } + }, + { + "keyId": { + "$binary": { + "base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedUnindexed", + "bsonType": "string" + } + ] + } + } + } + }, + "commandName": "update" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "default", + "filter": { + "_id": { + "$eq": 1 + } + } + }, + "commandName": "find" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/fle2v2-validatorAndPartialFieldExpression.json 
b/test/client-side-encryption/spec/unified/fle2v2-validatorAndPartialFieldExpression.json new file mode 100644 index 0000000000..54cc60a3b1 --- /dev/null +++ b/test/client-side-encryption/spec/unified/fle2v2-validatorAndPartialFieldExpression.json @@ -0,0 +1,304 @@ +{ + "description": "fle2v2-validatorAndPartialFieldExpression", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "7.0.0", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ], + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + }, + "encryptedFieldsMap": { + "default.encryptedCollection": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedIndexed", + "bsonType": "string", + "queries": { + "queryType": "equality", + "contention": { + "$numberLong": "0" + } + } + }, + { + "keyId": { + "$binary": { + "base64": "q83vqxI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedUnindexed", + "bsonType": "string" + } + ] + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [] + } + ], + "tests": [ + { + "description": "create with a validator on an unencrypted field is OK", + "operations": [ + { + "name": "dropCollection", + "object": "db", + "arguments": { + "collection": "encryptedCollection" + } + }, + { + "name": "createCollection", + "object": "db", + "arguments": { + "collection": "encryptedCollection", + "validator": { + "unencrypted_string": "foo" + } + } + }, + { + "name": "assertCollectionExists", + "object": "coll", + "arguments": { + "databaseName": "default", + "collectionName": "encryptedCollection" + } + } + ] + }, + { + "description": "create with a validator on an encrypted field is an error", + "operations": [ + { + "name": "dropCollection", + "object": "db", + "arguments": { + "collection": "encryptedCollection" + } + }, + { + "name": "createCollection", + "object": "db", + "arguments": { + "collection": "encryptedCollection", + "validator": { + "encryptedIndexed": "foo" + } + }, + "expectError": { + "errorContains": "Comparison to encrypted fields not supported" + } + } + ] + }, + { + "description": "collMod with a validator on an unencrypted field is OK", + "operations": [ + { + "name": "dropCollection", + "object": "db", + "arguments": { + "collection": "encryptedCollection" + } + }, + { + "name": "createCollection", + "object": "db", + "arguments": { + "collection": "encryptedCollection" + } + }, + { + "name": "runCommand", + "object": "db", + "arguments": { + "commandName": "collMod", + "command": { + "collMod": "encryptedCollection", + "validator": { + "unencrypted_string": "foo" + } + } + } + } + ] + }, + { + "description": "collMod with a validator on an encrypted field is an error", + "operations": [ + { + "name": "dropCollection", + "object": "db", + "arguments": { + "collection": "encryptedCollection" + } + }, + { + "name": "createCollection", + 
"object": "db", + "arguments": { + "collection": "encryptedCollection" + } + }, + { + "name": "runCommand", + "object": "db", + "arguments": { + "commandName": "collMod", + "command": { + "collMod": "encryptedCollection", + "validator": { + "encryptedIndexed": "foo" + } + } + }, + "expectError": { + "errorContains": "Comparison to encrypted fields not supported" + } + } + ] + }, + { + "description": "createIndexes with a partialFilterExpression on an unencrypted field is OK", + "operations": [ + { + "name": "dropCollection", + "object": "db", + "arguments": { + "collection": "encryptedCollection" + } + }, + { + "name": "createCollection", + "object": "db", + "arguments": { + "collection": "encryptedCollection" + } + }, + { + "name": "runCommand", + "object": "db", + "arguments": { + "commandName": "createIndexes", + "command": { + "createIndexes": "encryptedCollection", + "indexes": [ + { + "name": "name", + "key": { + "name": 1 + }, + "partialFilterExpression": { + "unencrypted_string": "foo" + } + } + ] + } + } + }, + { + "name": "assertIndexExists", + "object": "coll", + "arguments": { + "databaseName": "default", + "collectionName": "encryptedCollection", + "indexName": "name" + } + } + ] + }, + { + "description": "createIndexes with a partialFilterExpression on an encrypted field is an error", + "operations": [ + { + "name": "dropCollection", + "object": "db", + "arguments": { + "collection": "encryptedCollection" + } + }, + { + "name": "createCollection", + "object": "db", + "arguments": { + "collection": "encryptedCollection" + } + }, + { + "name": "runCommand", + "object": "db", + "arguments": { + "commandName": "createIndexes", + "command": { + "createIndexes": "encryptedCollection", + "indexes": [ + { + "name": "name", + "key": { + "name": 1 + }, + "partialFilterExpression": { + "encryptedIndexed": "foo" + } + } + ] + } + }, + "expectError": { + "errorContains": "Comparison to encrypted fields not supported" + } + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/gcpKMS.json b/test/client-side-encryption/spec/unified/gcpKMS.json new file mode 100644 index 0000000000..6468b5b6ce --- /dev/null +++ b/test/client-side-encryption/spec/unified/gcpKMS.json @@ -0,0 +1,292 @@ +{ + "description": "gcpKMS", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "4.1.10", + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "gcp": { + "email": { + "$$placeholder": 1 + }, + "privateKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "validator": { + "$jsonSchema": { + "properties": { + "encrypted_string_aws": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + }, + "encrypted_string_azure": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AZURE+AAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": 
"AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + }, + "encrypted_string_gcp": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "GCP+AAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + }, + "encrypted_string_local": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + }, + "encrypted_string_kmip": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "dBHpr8aITfeBQ15grpbLpQ==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + }, + "encrypted_string_kmip_delegated": { + "encrypt": { + "keyId": [ + { + "$uuid": "7411e9af-c688-4df7-8143-5e60ae96cba6" + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + } + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "GCP+AAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "CiQAIgLj0WyktnB4dfYHo5SLZ41K4ASQrjJUaSzl5vvVH0G12G0SiQEAjlV8XPlbnHDEDFbdTO4QIe8ER2/172U1ouLazG0ysDtFFIlSvWX5ZnZUrRMmp/R2aJkzLXEt/zf8Mn4Lfm+itnjgo5R9K4pmPNvvPKNZX5C16lrPT+aA+rd+zXFSmlMg3i5jnxvTdLHhg3G7Q/Uv1ZIJskKt95bzLoe0tUVzRWMYXLIEcohnQg==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1601574333107" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1601574333107" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "gcp", + "projectId": "devprod-drivers", + "location": "global", + "keyRing": "key-ring-csfle", + "keyName": "key-name-csfle" + }, + "keyAltNames": [ + "altname", + "gcp_altname" + ] + } + ] + } + ], + "tests": [ + { + "description": "Insert a document with auto encryption using GCP KMS provider", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encrypted_string_gcp": "string0" + } + }, + "object": "coll" + } + ], + "outcome": [ + { + "documents": [ + { + "_id": 1, + "encrypted_string_gcp": { + "$binary": { + "base64": "ARgj/gAAAAAAAAAAAAAAAAACwFd+Y5Ojw45GUXNvbcIpN9YkRdoHDHkR4kssdn0tIMKlDQOLFkWFY9X07IRlXsxPD8DcTiKnl6XINK28vhcGlg==", + "subType": "06" + } + } + } + ], + "collectionName": "default", + "databaseName": "default" + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "GCP+AAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault" + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encrypted_string_gcp": { + "$binary": { + "base64": "ARgj/gAAAAAAAAAAAAAAAAACwFd+Y5Ojw45GUXNvbcIpN9YkRdoHDHkR4kssdn0tIMKlDQOLFkWFY9X07IRlXsxPD8DcTiKnl6XINK28vhcGlg==", + "subType": "06" + } + } + } + ], + "ordered": true + }, + "commandName": "insert" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/getMore.json 
b/test/client-side-encryption/spec/unified/getMore.json new file mode 100644 index 0000000000..adaa59b01e --- /dev/null +++ b/test/client-side-encryption/spec/unified/getMore.json @@ -0,0 +1,321 @@ +{ + "description": "getMore", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "4.1.10", + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + } + }, + { + "_id": 2, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACDdw4KFz3ZLquhsbt7RmDjD0N67n0uSXx7IGnQNCLeIKvot6s/ouI21Eo84IOtb6lhwUNPlSEBNY0/hbszWAKJg==", + "subType": "06" + } + } + }, + { + "_id": 3, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACQ76HWOut3DZtQuV90hp1aaCpZn95vZIaWmn+wrBehcEtcFwyJlBdlyzDzZTWPZCPgiFq72Wvh6Y7VbpU9NAp3A==", + "subType": "06" + } + } + } + ], + "createOptions": { + "validator": { + "$jsonSchema": { + "properties": { + "encrypted_w_altname": { + "encrypt": { + "keyId": "/altname", + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + }, + "random": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string_equivalent": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + } + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "status": 1, + "_id": { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + }, + "masterKey": { + "provider": "aws", + "key": "arn:aws:kms:us-east-1:579766882180:key/89fcc2c4-08b0-4bd9-9f25-e30687b580d0", + "region": "us-east-1" + }, + "updateDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyMaterial": { + "$binary": { + "base64": "AQICAHhQNmWG2CzOm1dq3kWLM+iDUZhEqnhJwH9wZVpuZ94A8gEqnsxXlR51T5EbEVezUqqKAAAAwjCBvwYJKoZIhvcNAQcGoIGxMIGuAgEAMIGoBgkqhkiG9w0BBwEwHgYJYIZIAWUDBAEuMBEEDHa4jo6yp0Z18KgbUgIBEIB74sKxWtV8/YHje5lv5THTl0HIbhSwM6EqRlmBiFFatmEWaeMk4tO4xBX65eq670I5TWPSLMzpp8ncGHMmvHqRajNBnmFtbYxN3E3/WjxmdbOOe+OXpnGJPcGsftc7cB2shRfA4lICPnE26+oVNXT6p0Lo20nY5XC7jyCO", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyAltNames": [ + 
"altname", + "another_altname" + ] + } + ] + } + ], + "tests": [ + { + "description": "getMore with encryption", + "operations": [ + { + "name": "find", + "arguments": { + "batchSize": 2, + "filter": {} + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encrypted_string": "string0" + }, + { + "_id": 2, + "encrypted_string": "string1" + }, + { + "_id": 3, + "encrypted_string": "string2" + } + ] + } + ], + "outcome": [ + { + "documents": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + } + }, + { + "_id": 2, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACDdw4KFz3ZLquhsbt7RmDjD0N67n0uSXx7IGnQNCLeIKvot6s/ouI21Eo84IOtb6lhwUNPlSEBNY0/hbszWAKJg==", + "subType": "06" + } + } + }, + { + "_id": 3, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACQ76HWOut3DZtQuV90hp1aaCpZn95vZIaWmn+wrBehcEtcFwyJlBdlyzDzZTWPZCPgiFq72Wvh6Y7VbpU9NAp3A==", + "subType": "06" + } + } + } + ], + "collectionName": "default", + "databaseName": "default" + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "default", + "batchSize": 2 + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "getMore": { + "$$type": [ + "int", + "long" + ] + }, + "collection": "default", + "batchSize": 2 + }, + "commandName": "getMore" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/insert.json b/test/client-side-encryption/spec/unified/insert.json new file mode 100644 index 0000000000..23e4e6c2ae --- /dev/null +++ b/test/client-side-encryption/spec/unified/insert.json @@ -0,0 +1,421 @@ +{ + "description": "insert", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "4.1.10", + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted" + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll_unencrypted", + "database": "db_unencrypted", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "validator": { + "$jsonSchema": { + "properties": { + "encrypted_w_altname": { + 
"encrypt": { + "keyId": "/altname", + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + }, + "random": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string_equivalent": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + } + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "status": 1, + "_id": { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + }, + "masterKey": { + "provider": "aws", + "key": "arn:aws:kms:us-east-1:579766882180:key/89fcc2c4-08b0-4bd9-9f25-e30687b580d0", + "region": "us-east-1" + }, + "updateDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyMaterial": { + "$binary": { + "base64": "AQICAHhQNmWG2CzOm1dq3kWLM+iDUZhEqnhJwH9wZVpuZ94A8gEqnsxXlR51T5EbEVezUqqKAAAAwjCBvwYJKoZIhvcNAQcGoIGxMIGuAgEAMIGoBgkqhkiG9w0BBwEwHgYJYIZIAWUDBAEuMBEEDHa4jo6yp0Z18KgbUgIBEIB74sKxWtV8/YHje5lv5THTl0HIbhSwM6EqRlmBiFFatmEWaeMk4tO4xBX65eq670I5TWPSLMzpp8ncGHMmvHqRajNBnmFtbYxN3E3/WjxmdbOOe+OXpnGJPcGsftc7cB2shRfA4lICPnE26+oVNXT6p0Lo20nY5XC7jyCO", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyAltNames": [ + "altname", + "another_altname" + ] + } + ] + } + ], + "tests": [ + { + "description": "insertOne with encryption", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encrypted_string": "string0", + "random": "abc" + } + }, + "object": "coll" + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + }, + "random": { + "$$type": "binData" + } + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + }, + "random": { + "$$type": "binData" + } + } + ], + "ordered": true + }, + "commandName": "insert" + } + } + ] + } + ] + }, + { + "description": "insertMany with encryption", + "operations": [ + { + 
"name": "insertMany", + "arguments": { + "documents": [ + { + "_id": 1, + "encrypted_string": "string0", + "random": "abc" + }, + { + "_id": 2, + "encrypted_string": "string1" + } + ] + }, + "object": "coll" + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + }, + "random": { + "$$type": "binData" + } + }, + { + "_id": 2, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACDdw4KFz3ZLquhsbt7RmDjD0N67n0uSXx7IGnQNCLeIKvot6s/ouI21Eo84IOtb6lhwUNPlSEBNY0/hbszWAKJg==", + "subType": "06" + } + } + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + }, + "random": { + "$$type": "binData" + } + }, + { + "_id": 2, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACDdw4KFz3ZLquhsbt7RmDjD0N67n0uSXx7IGnQNCLeIKvot6s/ouI21Eo84IOtb6lhwUNPlSEBNY0/hbszWAKJg==", + "subType": "06" + } + } + } + ], + "ordered": true + }, + "commandName": "insert" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/keyAltName.json b/test/client-side-encryption/spec/unified/keyAltName.json new file mode 100644 index 0000000000..826f43df22 --- /dev/null +++ b/test/client-side-encryption/spec/unified/keyAltName.json @@ -0,0 +1,299 @@ +{ + "description": "keyAltName", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "4.1.10", + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted" + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll_unencrypted", + "database": "db_unencrypted", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "validator": { + "$jsonSchema": { + "properties": { + "encrypted_w_altname": { + "encrypt": { + "keyId": "/altname", + 
"bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + }, + "random": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string_equivalent": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + } + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "status": 1, + "_id": { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + }, + "masterKey": { + "provider": "aws", + "key": "arn:aws:kms:us-east-1:579766882180:key/89fcc2c4-08b0-4bd9-9f25-e30687b580d0", + "region": "us-east-1" + }, + "updateDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyMaterial": { + "$binary": { + "base64": "AQICAHhQNmWG2CzOm1dq3kWLM+iDUZhEqnhJwH9wZVpuZ94A8gEqnsxXlR51T5EbEVezUqqKAAAAwjCBvwYJKoZIhvcNAQcGoIGxMIGuAgEAMIGoBgkqhkiG9w0BBwEwHgYJYIZIAWUDBAEuMBEEDHa4jo6yp0Z18KgbUgIBEIB74sKxWtV8/YHje5lv5THTl0HIbhSwM6EqRlmBiFFatmEWaeMk4tO4xBX65eq670I5TWPSLMzpp8ncGHMmvHqRajNBnmFtbYxN3E3/WjxmdbOOe+OXpnGJPcGsftc7cB2shRfA4lICPnE26+oVNXT6p0Lo20nY5XC7jyCO", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyAltNames": [ + "altname", + "another_altname" + ] + } + ] + } + ], + "tests": [ + { + "description": "Insert with encryption using key alt name", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encrypted_w_altname": "string0", + "altname": "altname" + } + }, + "object": "coll" + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": 1, + "encrypted_w_altname": { + "$$type": "binData" + }, + "altname": "altname" + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [] + } + }, + { + "keyAltNames": { + "$in": [ + "altname" + ] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encrypted_w_altname": { + "$$type": "binData" + }, + "altname": "altname" + } + ], + "ordered": true + }, + "commandName": "insert" + } + } + ] + } + ] + }, + { + "description": "Replace with key alt name fails", + "operations": [ + { + "name": "updateOne", + "arguments": { + "filter": {}, + "update": { + "$set": { + "encrypted_w_altname": "string0" + } + }, + "upsert": true + }, + "object": "coll", + "expectError": { + "errorContains": "A non-static (JSONPointer) keyId is not supported" + } + } + ], + "outcome": [ + { + "documents": [], + "collectionName": "default", + "databaseName": "default" + } + ] + } + ] +} diff --git 
a/test/client-side-encryption/spec/unified/keyCache.json b/test/client-side-encryption/spec/unified/keyCache.json new file mode 100644 index 0000000000..a39701e286 --- /dev/null +++ b/test/client-side-encryption/spec/unified/keyCache.json @@ -0,0 +1,198 @@ +{ + "description": "keyCache-explicit", + "schemaVersion": "1.22", + "runOnRequirements": [ + { + "csfle": true + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "clientEncryption": { + "id": "clientEncryption0", + "clientEncryptionOpts": { + "keyVaultClient": "client0", + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "OCTP9uKPPmvuqpHlqq83gPk4U6rUPxKVRRyVtrjFmVjdoa4Xzm1SzUbr7aIhNI42czkUBmrCtZKF31eaaJnxEBkqf0RFukA9Mo3NEHQWgAQ2cn9duOcRbaFUQo2z0/rB" + } + }, + "keyExpirationMS": 1 + } + } + }, + { + "database": { + "id": "database0", + "client": "client0", + "databaseName": "keyvault" + } + }, + { + "collection": { + "id": "collection0", + "database": "database0", + "collectionName": "datakeys" + } + } + ], + "initialData": [ + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "a+YWzdygTAG62/cNUkqZiQ==", + "subType": "04" + } + }, + "keyAltNames": [], + "keyMaterial": { + "$binary": { + "base64": "iocBkhO3YBokiJ+FtxDTS71/qKXQ7tSWhWbcnFTXBcMjarsepvALeJ5li+SdUd9ePuatjidxAdMo7vh1V2ZESLMkQWdpPJ9PaJjA67gKQKbbbB4Ik5F2uKjULvrMBnFNVRMup4JNUwWFQJpqbfMveXnUVcD06+pUpAkml/f+DSXrV3e5rxciiNVtz03dAG8wJrsKsFXWj6vTjFhsfknyBA==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "decrypt, wait, and decrypt again", + "operations": [ + { + "name": "decrypt", + "object": "clientEncryption0", + "arguments": { + "value": { + "$binary": { + "base64": "AWvmFs3coEwButv3DVJKmYkCJ6lUzRX9R28WNlw5uyndb+8gurA+p8q14s7GZ04K2ZvghieRlAr5UwZbow3PMq27u5EIhDDczwBFcbdP1amllw==", + "subType": "06" + } + } + }, + "expectResult": "foobar" + }, + { + "name": "wait", + "object": "testRunner", + "arguments": { + "ms": 50 + } + }, + { + "name": "decrypt", + "object": "clientEncryption0", + "arguments": { + "value": { + "$binary": { + "base64": "AWvmFs3coEwButv3DVJKmYkCJ6lUzRX9R28WNlw5uyndb+8gurA+p8q14s7GZ04K2ZvghieRlAr5UwZbow3PMq27u5EIhDDczwBFcbdP1amllw==", + "subType": "06" + } + } + }, + "expectResult": "foobar" + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "a+YWzdygTAG62/cNUkqZiQ==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + } + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "a+YWzdygTAG62/cNUkqZiQ==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + } + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/kmipKMS.json b/test/client-side-encryption/spec/unified/kmipKMS.json new file mode 100644 index 
0000000000..e19f85882b --- /dev/null +++ b/test/client-side-encryption/spec/unified/kmipKMS.json @@ -0,0 +1,415 @@ +{ + "description": "kmipKMS", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "4.1.10", + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "kmip": { + "endpoint": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "validator": { + "$jsonSchema": { + "properties": { + "encrypted_string_aws": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + }, + "encrypted_string_azure": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AZURE+AAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + }, + "encrypted_string_gcp": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "GCP+AAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + }, + "encrypted_string_local": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + }, + "encrypted_string_kmip": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "dBHpr8aITfeBQ15grpbLpQ==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + }, + "encrypted_string_kmip_delegated": { + "encrypt": { + "keyId": [ + { + "$uuid": "7411e9af-c688-4df7-8143-5e60ae96cba6" + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + } + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "dBHpr8aITfeBQ15grpbLpQ==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "eUYDyB0HuWb+lQgUwO+6qJQyTTDTY2gp9FbemL7ZFo0pvr0x6rm6Ff9OVUTGH6HyMKipaeHdiIJU1dzsLwvqKvi7Beh+U4iaIWX/K0oEg1GOsJc0+Z/in8gNHbGUYLmycHViM3LES3kdt7FdFSUl5rEBHrM71yoNEXImz17QJWMGOuT4x6yoi2pvnaRJwfrI4DjpmnnTrDMac92jgZehbg==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1634220190041" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1634220190041" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "kmip", + "keyId": "1" + }, + "keyAltNames": [ + "altname", + "kmip_altname" + ] + }, + { + "_id": { + "$uuid": "7411e9af-c688-4df7-8143-5e60ae96cba6" + }, + "keyMaterial": { + "$binary": { + "base64": "5TLMFWlguBWe5GUESTvOVtkdBsCrynhnV72XRyZ66/nk+EP9/1oEp1t1sg0+vwCTqULHjBiUE6DRx2mYD/Eup1+u2Jgz9/+1sV1drXeOPALNPkSgiZiDbIb67zRi+wTABEcKcegJH+FhmSGxwUoQAiHCsCbcvia5P8tN1lt98YQ=", + "subType": "00" + } + }, + "creationDate": { + "$date": { + 
"$numberLong": "1634220190041" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1634220190041" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "kmip", + "delegated": true, + "keyId": "11" + }, + "keyAltNames": [ + "delegated" + ] + } + ] + } + ], + "tests": [ + { + "description": "Insert a document with auto encryption using KMIP KMS provider", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encrypted_string_kmip": "string0" + } + }, + "object": "coll" + } + ], + "outcome": [ + { + "documents": [ + { + "_id": 1, + "encrypted_string_kmip": { + "$binary": { + "base64": "AXQR6a/GiE33gUNeYK6Wy6UCKCwtKFIsL8eKObDVxvqGupJNUk7kXswHhB7G5j/C1D+6no+Asra0KgSU43bTL3ooIBLVyIzbV5CDJYqzAsa4WQ==", + "subType": "06" + } + } + } + ], + "collectionName": "default", + "databaseName": "default" + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "dBHpr8aITfeBQ15grpbLpQ==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault" + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encrypted_string_kmip": { + "$binary": { + "base64": "AXQR6a/GiE33gUNeYK6Wy6UCKCwtKFIsL8eKObDVxvqGupJNUk7kXswHhB7G5j/C1D+6no+Asra0KgSU43bTL3ooIBLVyIzbV5CDJYqzAsa4WQ==", + "subType": "06" + } + } + } + ], + "ordered": true + }, + "commandName": "insert" + } + } + ] + } + ] + }, + { + "description": "Insert a document with auto encryption using KMIP delegated KMS provider", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encrypted_string_kmip_delegated": "string0" + } + }, + "object": "coll" + } + ], + "outcome": [ + { + "documents": [ + { + "_id": 1, + "encrypted_string_kmip_delegated": { + "$binary": { + "base64": "AXQR6a/GiE33gUNeYK6Wy6YCkB+8NVfAAjIbvLqyXIg6g1a8tXrym92DPoqmxpcdQyH0vQM3aFNMz7tZwQBimKs29ztZV/LWjM633HhO5ACl9A==", + "subType": "06" + } + } + } + ], + "collectionName": "default", + "databaseName": "default" + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$uuid": "7411e9af-c688-4df7-8143-5e60ae96cba6" + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault" + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encrypted_string_kmip_delegated": { + "$binary": { + "base64": "AXQR6a/GiE33gUNeYK6Wy6YCkB+8NVfAAjIbvLqyXIg6g1a8tXrym92DPoqmxpcdQyH0vQM3aFNMz7tZwQBimKs29ztZV/LWjM633HhO5ACl9A==", + "subType": "06" + } + } + } + ], + "ordered": true + }, + "commandName": "insert" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/localKMS.json b/test/client-side-encryption/spec/unified/localKMS.json new file mode 100644 index 0000000000..03b8486484 --- /dev/null +++ 
b/test/client-side-encryption/spec/unified/localKMS.json @@ -0,0 +1,261 @@ +{ + "description": "localKMS", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "4.1.10", + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + }, + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted" + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll_unencrypted", + "database": "db_unencrypted", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "validator": { + "$jsonSchema": { + "properties": { + "encrypted_string": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + }, + "random": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + } + }, + "bsonType": "object" + } + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "Ce9HSz/HKKGkIt4uyy+jDuKGA+rLC2cycykMo6vc8jXxqa1UVDYHWq1r+vZKbnnSRBfB981akzRKZCFpC05CTyFqDhXv6OnMjpG97OZEREGIsHEYiJkBW0jJJvfLLgeLsEpBzsro9FztGGXASxyxFRZFhXvHxyiLOKrdWfs7X1O/iK3pEoHMx6uSNSfUOgbebLfIqW7TO++iQS5g1xovXA==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } + } + ] + } + ], + "tests": [ + { + "description": "Insert a document with auto encryption using local KMS provider", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encrypted_string": "string0", + "random": "abc" + } + }, + "object": "coll" + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACV/+zJmpqMU47yxS/xIVAviGi7wHDuFwaULAixEAoIh0xHz73UYOM3D8D44gcJn67EROjbz4ITpYzzlCJovDL0Q==", + "subType": "06" + } + }, + "random": { + "$$type": "binData" + } + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": 
"datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault" + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACV/+zJmpqMU47yxS/xIVAviGi7wHDuFwaULAixEAoIh0xHz73UYOM3D8D44gcJn67EROjbz4ITpYzzlCJovDL0Q==", + "subType": "06" + } + }, + "random": { + "$$type": "binData" + } + } + ], + "ordered": true + }, + "commandName": "insert" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/localSchema.json b/test/client-side-encryption/spec/unified/localSchema.json new file mode 100644 index 0000000000..685ee39d7c --- /dev/null +++ b/test/client-side-encryption/spec/unified/localSchema.json @@ -0,0 +1,337 @@ +{ + "description": "localSchema", + "schemaVersion": "1.23", + "runOnRequirements": [ + { + "minServerVersion": "4.1.10", + "csfle": true + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "schemaMap": { + "default.default": { + "properties": { + "encrypted_w_altname": { + "encrypt": { + "keyId": "/altname", + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "OyQRAeK7QlWMr0E2xWapYg==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + }, + "random": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "OyQRAeK7QlWMr0E2xWapYg==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string_equivalent": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "OyQRAeK7QlWMr0E2xWapYg==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + } + }, + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "client": { + "id": "client1", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "schemaMap": { + "default.default": { + "properties": { + "test": { + "bsonType": "string" + } + }, + "bsonType": "object", + "required": [ + "test" + ] + } + }, + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "encryptedDB", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "encryptedColl", + "database": "encryptedDB", + "collectionName": "default" + } + }, + { + "database": { + "id": "encryptedDB2", + "client": "client1", + "databaseName": "default" + } + }, + { + "collection": { + "id": "encryptedColl2", + "database": "encryptedDB2", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "status": 1, + "_id": { + "$binary": { + "base64": "OyQRAeK7QlWMr0E2xWapYg==", + "subType": "04" + } + }, + "masterKey": { + "provider": "aws", + 
"key": "arn:aws:kms:us-east-1:579766882180:key/89fcc2c4-08b0-4bd9-9f25-e30687b580d0", + "region": "us-east-1" + }, + "updateDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyMaterial": { + "$binary": { + "base64": "AQICAHhQNmWG2CzOm1dq3kWLM+iDUZhEqnhJwH9wZVpuZ94A8gEqnsxXlR51T5EbEVezUqqKAAAAwjCBvwYJKoZIhvcNAQcGoIGxMIGuAgEAMIGoBgkqhkiG9w0BBwEwHgYJYIZIAWUDBAEuMBEEDHa4jo6yp0Z18KgbUgIBEIB74sKxWtV8/YHje5lv5THTl0HIbhSwM6EqRlmBiFFatmEWaeMk4tO4xBX65eq670I5TWPSLMzpp8ncGHMmvHqRajNBnmFtbYxN3E3/WjxmdbOOe+OXpnGJPcGsftc7cB2shRfA4lICPnE26+oVNXT6p0Lo20nY5XC7jyCO", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyAltNames": [ + "altname", + "another_altname" + ] + } + ] + }, + { + "databaseName": "default", + "collectionName": "default", + "documents": [] + } + ], + "tests": [ + { + "description": "A local schema should override", + "operations": [ + { + "object": "encryptedColl", + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encrypted_string": "string0" + } + } + }, + { + "object": "encryptedColl", + "name": "find", + "arguments": { + "filter": { + "_id": 1 + } + }, + "expectResult": [ + { + "_id": 1, + "encrypted_string": "string0" + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "databaseName": "keyvault", + "commandName": "find", + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "OyQRAeK7QlWMr0E2xWapYg==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "readConcern": { + "level": "majority" + } + } + } + }, + { + "commandStartedEvent": { + "commandName": "insert", + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": "ATskEQHiu0JVjK9BNsVmqWIClDjVEWlpmVRN76InSQuFW2piVbYFkh0QhZCKyx9DdvFBUG+FWluh0kXyhdq3b2Vt/nqNWjXn2y0+JPhrc4W+wQ==", + "subType": "06" + } + } + } + ], + "ordered": true + } + } + }, + { + "commandStartedEvent": { + "commandName": "find", + "command": { + "find": "default", + "filter": { + "_id": 1 + } + } + } + } + ] + } + ], + "outcome": [ + { + "collectionName": "default", + "databaseName": "default", + "documents": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": "ATskEQHiu0JVjK9BNsVmqWIClDjVEWlpmVRN76InSQuFW2piVbYFkh0QhZCKyx9DdvFBUG+FWluh0kXyhdq3b2Vt/nqNWjXn2y0+JPhrc4W+wQ==", + "subType": "06" + } + } + } + ] + } + ] + }, + { + "description": "A local schema with no encryption is an error", + "operations": [ + { + "object": "encryptedColl2", + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encrypted_string": "string0" + } + }, + "expectError": { + "isError": true, + "errorContains": "JSON schema keyword 'required' is only allowed with a remote schema" + } + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/malformedCiphertext.json b/test/client-side-encryption/spec/unified/malformedCiphertext.json new file mode 100644 index 0000000000..550928f1e0 --- /dev/null +++ b/test/client-side-encryption/spec/unified/malformedCiphertext.json @@ -0,0 +1,241 @@ +{ + "description": "malformedCiphertext", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "4.1.10", + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "schemaMap": { + 
"default.default": { + "properties": { + "encrypted_w_altname": { + "encrypt": { + "keyId": "/altname", + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + }, + "random": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string_equivalent": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + } + }, + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACDdw4KFz3ZLquhsbt7RmDjD0N67n0uSXx7IGnQNCLeIKvot6s/ouI21Eo84IOtb6lhwUNPlSEBNY0/hbszWAKJg==", + "subType": "00" + } + } + }, + { + "_id": 2, + "encrypted_string": { + "$binary": { + "base64": "AQ==", + "subType": "06" + } + } + }, + { + "_id": 3, + "encrypted_string": { + "$binary": { + "base64": "AQAAa2V2aW4gYWxiZXJ0c29uCg==", + "subType": "06" + } + } + } + ] + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "status": 1, + "_id": { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + }, + "masterKey": { + "provider": "aws", + "key": "arn:aws:kms:us-east-1:579766882180:key/89fcc2c4-08b0-4bd9-9f25-e30687b580d0", + "region": "us-east-1" + }, + "updateDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyMaterial": { + "$binary": { + "base64": "AQICAHhQNmWG2CzOm1dq3kWLM+iDUZhEqnhJwH9wZVpuZ94A8gEqnsxXlR51T5EbEVezUqqKAAAAwjCBvwYJKoZIhvcNAQcGoIGxMIGuAgEAMIGoBgkqhkiG9w0BBwEwHgYJYIZIAWUDBAEuMBEEDHa4jo6yp0Z18KgbUgIBEIB74sKxWtV8/YHje5lv5THTl0HIbhSwM6EqRlmBiFFatmEWaeMk4tO4xBX65eq670I5TWPSLMzpp8ncGHMmvHqRajNBnmFtbYxN3E3/WjxmdbOOe+OXpnGJPcGsftc7cB2shRfA4lICPnE26+oVNXT6p0Lo20nY5XC7jyCO", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyAltNames": [ + "altname", + "another_altname" + ] + } + ] + } + ], + "tests": [ + { + "description": "Wrong subtype", + "operations": [ + { + "name": "find", + "arguments": { + "filter": { + "_id": 1 + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACDdw4KFz3ZLquhsbt7RmDjD0N67n0uSXx7IGnQNCLeIKvot6s/ouI21Eo84IOtb6lhwUNPlSEBNY0/hbszWAKJg==", + "subType": "00" + } + } + } + ] + } + ] + }, + { + "description": "Empty data", + "operations": [ + { + "name": "find", + "arguments": { + "filter": { + "_id": 2 + } + }, + "object": "coll", + "expectError": { + "errorContains": "malformed ciphertext" + } + } + ] + }, + { + "description": "Malformed data", + "operations": [ 
+ { + "name": "find", + "arguments": { + "filter": { + "_id": 3 + } + }, + "object": "coll", + "expectError": { + "errorContains": "not all keys requested were satisfied" + } + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/maxWireVersion.json b/test/client-side-encryption/spec/unified/maxWireVersion.json new file mode 100644 index 0000000000..f7a5f0b7db --- /dev/null +++ b/test/client-side-encryption/spec/unified/maxWireVersion.json @@ -0,0 +1,108 @@ +{ + "description": "maxWireVersion", + "schemaVersion": "1.23", + "runOnRequirements": [ + { + "maxServerVersion": "4.0.99", + "csfle": true + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + }, + "extraOptions": { + "mongocryptdBypassSpawn": true + } + } + } + }, + { + "database": { + "id": "database0", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "collection0", + "database": "database0", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "status": 1, + "_id": { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + }, + "masterKey": { + "provider": "aws", + "key": "arn:aws:kms:us-east-1:579766882180:key/89fcc2c4-08b0-4bd9-9f25-e30687b580d0", + "region": "us-east-1" + }, + "updateDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyMaterial": { + "$binary": { + "base64": "AQICAHhQNmWG2CzOm1dq3kWLM+iDUZhEqnhJwH9wZVpuZ94A8gEqnsxXlR51T5EbEVezUqqKAAAAwjCBvwYJKoZIhvcNAQcGoIGxMIGuAgEAMIGoBgkqhkiG9w0BBwEwHgYJYIZIAWUDBAEuMBEEDHa4jo6yp0Z18KgbUgIBEIB74sKxWtV8/YHje5lv5THTl0HIbhSwM6EqRlmBiFFatmEWaeMk4tO4xBX65eq670I5TWPSLMzpp8ncGHMmvHqRajNBnmFtbYxN3E3/WjxmdbOOe+OXpnGJPcGsftc7cB2shRfA4lICPnE26+oVNXT6p0Lo20nY5XC7jyCO", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyAltNames": [ + "altname", + "another_altname" + ] + } + ] + } + ], + "tests": [ + { + "description": "operation fails with maxWireVersion < 8", + "operations": [ + { + "name": "insertOne", + "object": "collection0", + "arguments": { + "document": { + "encrypted_string": "string0" + } + }, + "expectError": { + "errorContains": "Auto-encryption requires a minimum MongoDB version of 4.2" + } + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/missingKey.json b/test/client-side-encryption/spec/unified/missingKey.json new file mode 100644 index 0000000000..af0fd5812a --- /dev/null +++ b/test/client-side-encryption/spec/unified/missingKey.json @@ -0,0 +1,233 @@ +{ + "description": "missingKey", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "4.1.10", + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.different", + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + 
"collectionName": "default", + "documents": [], + "createOptions": { + "validator": { + "$jsonSchema": { + "properties": { + "encrypted_w_altname": { + "encrypt": { + "keyId": "/altname", + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + }, + "random": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string_equivalent": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + } + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "status": 1, + "_id": { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + }, + "masterKey": { + "provider": "aws", + "key": "arn:aws:kms:us-east-1:579766882180:key/89fcc2c4-08b0-4bd9-9f25-e30687b580d0", + "region": "us-east-1" + }, + "updateDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyMaterial": { + "$binary": { + "base64": "AQICAHhQNmWG2CzOm1dq3kWLM+iDUZhEqnhJwH9wZVpuZ94A8gEqnsxXlR51T5EbEVezUqqKAAAAwjCBvwYJKoZIhvcNAQcGoIGxMIGuAgEAMIGoBgkqhkiG9w0BBwEwHgYJYIZIAWUDBAEuMBEEDHa4jo6yp0Z18KgbUgIBEIB74sKxWtV8/YHje5lv5THTl0HIbhSwM6EqRlmBiFFatmEWaeMk4tO4xBX65eq670I5TWPSLMzpp8ncGHMmvHqRajNBnmFtbYxN3E3/WjxmdbOOe+OXpnGJPcGsftc7cB2shRfA4lICPnE26+oVNXT6p0Lo20nY5XC7jyCO", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyAltNames": [ + "altname", + "another_altname" + ] + } + ] + } + ], + "tests": [ + { + "description": "Insert with encryption on a missing key", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encrypted_string": "string0", + "random": "abc" + } + }, + "object": "coll", + "expectError": { + "errorContains": "not all keys requested were satisfied" + } + } + ], + "outcome": [ + { + "documents": [], + "collectionName": "default", + "databaseName": "default" + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "different", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/namedKMS.json b/test/client-side-encryption/spec/unified/namedKMS.json new file mode 100644 index 0000000000..5e203865fd --- /dev/null +++ b/test/client-side-encryption/spec/unified/namedKMS.json @@ -0,0 +1,241 @@ +{ + "description": "namedKMS", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "4.1.10", + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { 
+ "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local:name2": { + "key": "local+name2+YUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [], + "createOptions": { + "validator": { + "$jsonSchema": { + "properties": { + "encrypted_string": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "local+name2+AAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + } + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "local+name2+AAAAAAAAAA==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "DX3iUuOlBsx6wBX9UZ3v/qXk1HNeBace2J+h/JwsDdF/vmSXLZ1l1VmZYIcpVFy6ODhdbzLjd4pNgg9wcm4etYig62KNkmtZ0/s1tAL5VsuW/s7/3PYnYGznZTFhLjIVcOH/RNoRj2eQb/sRTyivL85wePEpAU/JzuBj6qO9Y5txQgs1k0J3aNy10R9aQ8kC1NuSSpLAIXwE6DlNDDJXhw==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local:name2" + } + } + ] + } + ], + "tests": [ + { + "description": "Automatically encrypt and decrypt with a named KMS provider", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encrypted_string": "string0" + } + }, + "object": "coll" + }, + { + "name": "find", + "arguments": { + "filter": { + "_id": 1 + } + }, + "object": "coll", + "expectResult": [ + { + "_id": 1, + "encrypted_string": "string0" + } + ] + } + ], + "outcome": [ + { + "documents": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": "AZaHGpfp2pntvgAAAAAAAAAC07sFvTQ0I4O2U49hpr4HezaK44Ivluzv5ntQBTYHDlAJMLyRMyB6Dl+UGHBgqhHe/Xw+pcT9XdiUoOJYAx9g+w==", + "subType": "06" + } + } + } + ], + "collectionName": "default", + "databaseName": "default" + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "local+name2+AAAAAAAAAA==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": "AZaHGpfp2pntvgAAAAAAAAAC07sFvTQ0I4O2U49hpr4HezaK44Ivluzv5ntQBTYHDlAJMLyRMyB6Dl+UGHBgqhHe/Xw+pcT9XdiUoOJYAx9g+w==", + "subType": "06" + } + } + } + ], + "ordered": true + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "default", + "filter": { + "_id": 1 + } + }, + "commandName": "find" + } + } + 
] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/noSchema.json b/test/client-side-encryption/spec/unified/noSchema.json new file mode 100644 index 0000000000..c18afa4ed4 --- /dev/null +++ b/test/client-side-encryption/spec/unified/noSchema.json @@ -0,0 +1,115 @@ +{ + "description": "noSchema", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "4.1.10", + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "unencrypted" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "unencrypted", + "documents": [] + } + ], + "tests": [ + { + "description": "Insert on an unencrypted collection", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1 + } + }, + "object": "coll" + } + ], + "outcome": [ + { + "documents": [ + { + "_id": 1 + } + ], + "collectionName": "unencrypted", + "databaseName": "default" + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "unencrypted" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "unencrypted", + "documents": [ + { + "_id": 1 + } + ], + "ordered": true + }, + "commandName": "insert" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/replaceOne.json b/test/client-side-encryption/spec/unified/replaceOne.json new file mode 100644 index 0000000000..a093e238ba --- /dev/null +++ b/test/client-side-encryption/spec/unified/replaceOne.json @@ -0,0 +1,316 @@ +{ + "description": "replaceOne", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "4.1.10", + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted" + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll_unencrypted", + "database": "db_unencrypted", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + } + } + ], + "createOptions": { + "validator": { + "$jsonSchema": { + "properties": { + "encrypted_w_altname": { + 
"encrypt": { + "keyId": "/altname", + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + }, + "random": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string_equivalent": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + } + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "status": 1, + "_id": { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + }, + "masterKey": { + "provider": "aws", + "key": "arn:aws:kms:us-east-1:579766882180:key/89fcc2c4-08b0-4bd9-9f25-e30687b580d0", + "region": "us-east-1" + }, + "updateDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyMaterial": { + "$binary": { + "base64": "AQICAHhQNmWG2CzOm1dq3kWLM+iDUZhEqnhJwH9wZVpuZ94A8gEqnsxXlR51T5EbEVezUqqKAAAAwjCBvwYJKoZIhvcNAQcGoIGxMIGuAgEAMIGoBgkqhkiG9w0BBwEwHgYJYIZIAWUDBAEuMBEEDHa4jo6yp0Z18KgbUgIBEIB74sKxWtV8/YHje5lv5THTl0HIbhSwM6EqRlmBiFFatmEWaeMk4tO4xBX65eq670I5TWPSLMzpp8ncGHMmvHqRajNBnmFtbYxN3E3/WjxmdbOOe+OXpnGJPcGsftc7cB2shRfA4lICPnE26+oVNXT6p0Lo20nY5XC7jyCO", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyAltNames": [ + "altname", + "another_altname" + ] + } + ] + } + ], + "tests": [ + { + "description": "replaceOne with encryption", + "operations": [ + { + "name": "replaceOne", + "arguments": { + "filter": { + "encrypted_string": "string0" + }, + "replacement": { + "encrypted_string": "string1", + "random": "abc" + } + }, + "object": "coll", + "expectResult": { + "matchedCount": 1, + "modifiedCount": 1, + "upsertedCount": 0 + } + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACDdw4KFz3ZLquhsbt7RmDjD0N67n0uSXx7IGnQNCLeIKvot6s/ouI21Eo84IOtb6lhwUNPlSEBNY0/hbszWAKJg==", + "subType": "06" + } + }, + "random": { + "$$type": "binData" + } + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "update": "default", + "updates": [ + { + "q": { + "encrypted_string": { + "$eq": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + } + } + }, + "u": { + "encrypted_string": { + "$binary": { + 
"base64": "AQAAAAAAAAAAAAAAAAAAAAACDdw4KFz3ZLquhsbt7RmDjD0N67n0uSXx7IGnQNCLeIKvot6s/ouI21Eo84IOtb6lhwUNPlSEBNY0/hbszWAKJg==", + "subType": "06" + } + }, + "random": { + "$$type": "binData" + } + }, + "multi": false, + "upsert": false + } + ], + "ordered": true + }, + "commandName": "update" + } + } + ] + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/timeoutMS.json b/test/client-side-encryption/spec/unified/timeoutMS.json new file mode 100644 index 0000000000..98dc50e98a --- /dev/null +++ b/test/client-side-encryption/spec/unified/timeoutMS.json @@ -0,0 +1,270 @@ +{ + "description": "timeoutMS", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "4.4", + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ], + "uriOptions": { + "timeoutMS": 500 + } + } + }, + { + "client": { + "id": "client1", + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "cse-timeouts-db" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "cse-timeouts-coll" + } + } + ], + "initialData": [ + { + "databaseName": "cse-timeouts-db", + "collectionName": "cse-timeouts-coll", + "documents": [], + "createOptions": { + "validator": { + "$jsonSchema": { + "properties": { + "encrypted_w_altname": { + "encrypt": { + "keyId": "/altname", + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + }, + "random": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string_equivalent": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + } + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "status": 1, + "_id": { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + }, + "masterKey": { + "provider": "aws", + "key": "arn:aws:kms:us-east-1:579766882180:key/89fcc2c4-08b0-4bd9-9f25-e30687b580d0", + "region": "us-east-1" + }, + "updateDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyMaterial": { + "$binary": { + "base64": "AQICAHhQNmWG2CzOm1dq3kWLM+iDUZhEqnhJwH9wZVpuZ94A8gEqnsxXlR51T5EbEVezUqqKAAAAwjCBvwYJKoZIhvcNAQcGoIGxMIGuAgEAMIGoBgkqhkiG9w0BBwEwHgYJYIZIAWUDBAEuMBEEDHa4jo6yp0Z18KgbUgIBEIB74sKxWtV8/YHje5lv5THTl0HIbhSwM6EqRlmBiFFatmEWaeMk4tO4xBX65eq670I5TWPSLMzpp8ncGHMmvHqRajNBnmFtbYxN3E3/WjxmdbOOe+OXpnGJPcGsftc7cB2shRfA4lICPnE26+oVNXT6p0Lo20nY5XC7jyCO", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyAltNames": [ + "altname", + "another_altname" + ] + } + ] + } + ], + "tests": [ + { + 
"description": "timeoutMS applied to listCollections to get collection schema", + "operations": [ + { + "name": "failPoint", + "object": "testRunner", + "arguments": { + "client": "client1", + "failPoint": { + "configureFailPoint": "failCommand", + "mode": { + "times": 1 + }, + "data": { + "failCommands": [ + "listCollections" + ], + "blockConnection": true, + "blockTimeMS": 600 + } + } + } + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encrypted_string": "string0", + "random": "abc" + } + }, + "object": "coll", + "expectError": { + "isTimeoutError": true + } + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "cse-timeouts-coll" + }, + "maxTimeMS": { + "$$type": [ + "int", + "long" + ] + } + }, + "commandName": "listCollections" + } + } + ] + } + ] + }, + { + "description": "remaining timeoutMS applied to find to get keyvault data", + "operations": [ + { + "name": "failPoint", + "object": "testRunner", + "arguments": { + "client": "client1", + "failPoint": { + "configureFailPoint": "failCommand", + "mode": { + "times": 2 + }, + "data": { + "failCommands": [ + "listCollections", + "find" + ], + "blockConnection": true, + "blockTimeMS": 300 + } + } + } + }, + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encrypted_string": "string0", + "random": "abc" + } + }, + "object": "coll", + "expectError": { + "isTimeoutError": true + } + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/types.json b/test/client-side-encryption/spec/unified/types.json new file mode 100644 index 0000000000..3bb49f2a64 --- /dev/null +++ b/test/client-side-encryption/spec/unified/types.json @@ -0,0 +1,2262 @@ +{ + "description": "types", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "4.1.10", + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "schemaMap": { + "default.default": { + "properties": { + "encrypted_objectId": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "objectId", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + } + }, + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db0", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll0", + "database": "db0", + "collectionName": "default" + } + }, + { + "client": { + "id": "client1", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "schemaMap": { + "default.default": { + "properties": { + "encrypted_symbol": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "symbol", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + } + }, + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db1", + "client": "client1", + "databaseName": "default" + } + }, + 
{ + "collection": { + "id": "coll1", + "database": "db1", + "collectionName": "default" + } + }, + { + "client": { + "id": "client2", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "schemaMap": { + "default.default": { + "properties": { + "encrypted_int": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "int", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + } + }, + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db2", + "client": "client2", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll2", + "database": "db2", + "collectionName": "default" + } + }, + { + "client": { + "id": "client3", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "schemaMap": { + "default.default": { + "properties": { + "encrypted_double": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "double", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + } + }, + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db3", + "client": "client3", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll3", + "database": "db3", + "collectionName": "default" + } + }, + { + "client": { + "id": "client4", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "schemaMap": { + "default.default": { + "properties": { + "encrypted_decimal": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "decimal", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + } + }, + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db4", + "client": "client4", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll4", + "database": "db4", + "collectionName": "default" + } + }, + { + "client": { + "id": "client5", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "schemaMap": { + "default.default": { + "properties": { + "encrypted_binData": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "binData", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + } + }, + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db5", + "client": "client5", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll5", + "database": "db5", + "collectionName": "default" + } + }, + { + "client": { + "id": "client6", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "schemaMap": { + "default.default": { + "properties": { + 
"encrypted_javascript": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "javascript", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + } + }, + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db6", + "client": "client6", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll6", + "database": "db6", + "collectionName": "default" + } + }, + { + "client": { + "id": "client7", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "schemaMap": { + "default.default": { + "properties": { + "encrypted_javascriptWithScope": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "javascriptWithScope", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + } + }, + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db7", + "client": "client7", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll7", + "database": "db7", + "collectionName": "default" + } + }, + { + "client": { + "id": "client8", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "schemaMap": { + "default.default": { + "properties": { + "encrypted_object": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "object", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + } + }, + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db8", + "client": "client8", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll8", + "database": "db8", + "collectionName": "default" + } + }, + { + "client": { + "id": "client9", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "schemaMap": { + "default.default": { + "properties": { + "encrypted_timestamp": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "timestamp", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + } + }, + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db9", + "client": "client9", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll9", + "database": "db9", + "collectionName": "default" + } + }, + { + "client": { + "id": "client10", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "schemaMap": { + "default.default": { + "properties": { + "encrypted_regex": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "regex", + "algorithm": 
"AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + } + }, + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db10", + "client": "client10", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll10", + "database": "db10", + "collectionName": "default" + } + }, + { + "client": { + "id": "client11", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "schemaMap": { + "default.default": { + "properties": { + "encrypted_date": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "date", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + } + }, + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db11", + "client": "client11", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll11", + "database": "db11", + "collectionName": "default" + } + }, + { + "client": { + "id": "client12", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "schemaMap": { + "default.default": { + "properties": { + "encrypted_minKey": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "minKey", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + } + }, + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db12", + "client": "client12", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll12", + "database": "db12", + "collectionName": "default" + } + }, + { + "client": { + "id": "client13", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "schemaMap": { + "default.default": { + "properties": { + "encrypted_maxKey": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "maxKey", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + } + }, + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db13", + "client": "client13", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll13", + "database": "db13", + "collectionName": "default" + } + }, + { + "client": { + "id": "client14", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "schemaMap": { + "default.default": { + "properties": { + "encrypted_undefined": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "undefined", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + } + }, + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + 
"observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db14", + "client": "client14", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll14", + "database": "db14", + "collectionName": "default" + } + }, + { + "client": { + "id": "client15", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "schemaMap": { + "default.default": { + "properties": { + "encrypted_array": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "array", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + } + }, + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db15", + "client": "client15", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll15", + "database": "db15", + "collectionName": "default" + } + }, + { + "client": { + "id": "client16", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "schemaMap": { + "default.default": { + "properties": { + "encrypted_bool": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "bool", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + } + }, + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db16", + "client": "client16", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll16", + "database": "db16", + "collectionName": "default" + } + }, + { + "client": { + "id": "client17", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "schemaMap": { + "default.default": { + "properties": { + "encrypted_null": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "null", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + } + }, + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db17", + "client": "client17", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll17", + "database": "db17", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [] + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "status": 1, + "_id": { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + }, + "masterKey": { + "provider": "aws", + "key": "arn:aws:kms:us-east-1:579766882180:key/89fcc2c4-08b0-4bd9-9f25-e30687b580d0", + "region": "us-east-1" + }, + "updateDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyMaterial": { + "$binary": { + "base64": 
"AQICAHhQNmWG2CzOm1dq3kWLM+iDUZhEqnhJwH9wZVpuZ94A8gEqnsxXlR51T5EbEVezUqqKAAAAwjCBvwYJKoZIhvcNAQcGoIGxMIGuAgEAMIGoBgkqhkiG9w0BBwEwHgYJYIZIAWUDBAEuMBEEDHa4jo6yp0Z18KgbUgIBEIB74sKxWtV8/YHje5lv5THTl0HIbhSwM6EqRlmBiFFatmEWaeMk4tO4xBX65eq670I5TWPSLMzpp8ncGHMmvHqRajNBnmFtbYxN3E3/WjxmdbOOe+OXpnGJPcGsftc7cB2shRfA4lICPnE26+oVNXT6p0Lo20nY5XC7jyCO", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyAltNames": [ + "altname", + "another_altname" + ] + } + ] + } + ], + "tests": [ + { + "description": "type=objectId", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encrypted_objectId": { + "$oid": "AAAAAAAAAAAAAAAAAAAAAAAA" + } + } + }, + "object": "coll0" + }, + { + "name": "find", + "object": "coll0", + "arguments": { + "filter": { + "_id": 1 + } + }, + "expectResult": [ + { + "_id": 1, + "encrypted_objectId": { + "$oid": "AAAAAAAAAAAAAAAAAAAAAAAA" + } + } + ] + } + ], + "outcome": [ + { + "documents": [ + { + "_id": 1, + "encrypted_objectId": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAAHmkTPqvzfHMWpvS1mEsrjOxVQ2dyihEgIFWD5E0eNEsiMBQsC0GuvjdqYRL5DHLFI1vKuGek7EYYp0Qyii/tHqA==", + "subType": "06" + } + } + } + ], + "collectionName": "default", + "databaseName": "default" + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encrypted_objectId": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAAHmkTPqvzfHMWpvS1mEsrjOxVQ2dyihEgIFWD5E0eNEsiMBQsC0GuvjdqYRL5DHLFI1vKuGek7EYYp0Qyii/tHqA==", + "subType": "06" + } + } + } + ], + "ordered": true + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "default", + "filter": { + "_id": 1 + } + }, + "commandName": "find" + } + } + ] + } + ] + }, + { + "description": "type=symbol", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encrypted_symbol": { + "$symbol": "test" + } + } + }, + "object": "coll1" + }, + { + "name": "find", + "object": "coll1", + "arguments": { + "filter": { + "_id": 1 + } + }, + "expectResult": [ + { + "_id": 1, + "encrypted_symbol": { + "$symbol": "test" + } + } + ] + } + ], + "outcome": [ + { + "documents": [ + { + "_id": 1, + "encrypted_symbol": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAAOOmvDmWjcuKsSCO7U/7t9HJ8eI73B6wduyMbdkvn7n7V4uTJes/j+BTtneSdyG2JHKHGkevWAJSIU2XoO66BSXw==", + "subType": "06" + } + } + } + ], + "collectionName": "default", + "databaseName": "default" + } + ], + "expectEvents": [ + { + "client": "client1", + "events": [ + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encrypted_symbol": { + "$binary": { + "base64": 
"AQAAAAAAAAAAAAAAAAAAAAAOOmvDmWjcuKsSCO7U/7t9HJ8eI73B6wduyMbdkvn7n7V4uTJes/j+BTtneSdyG2JHKHGkevWAJSIU2XoO66BSXw==", + "subType": "06" + } + } + } + ], + "ordered": true + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "default", + "filter": { + "_id": 1 + } + }, + "commandName": "find" + } + } + ] + } + ] + }, + { + "description": "type=int", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encrypted_int": { + "$numberInt": "123" + } + } + }, + "object": "coll2" + }, + { + "name": "find", + "object": "coll2", + "arguments": { + "filter": { + "_id": 1 + } + }, + "expectResult": [ + { + "_id": 1, + "encrypted_int": { + "$numberInt": "123" + } + } + ] + } + ], + "outcome": [ + { + "documents": [ + { + "_id": 1, + "encrypted_int": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAAQPNXJVXMEjGZnftMuf2INKufXCtQIRHdw5wTgn6QYt3ejcoAXyiwI4XIUizkpsob494qpt2in4tWeiO7b9zkA8Q==", + "subType": "06" + } + } + } + ], + "collectionName": "default", + "databaseName": "default" + } + ], + "expectEvents": [ + { + "client": "client2", + "events": [ + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encrypted_int": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAAQPNXJVXMEjGZnftMuf2INKufXCtQIRHdw5wTgn6QYt3ejcoAXyiwI4XIUizkpsob494qpt2in4tWeiO7b9zkA8Q==", + "subType": "06" + } + } + } + ], + "ordered": true + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "default", + "filter": { + "_id": 1 + } + }, + "commandName": "find" + } + } + ] + } + ] + }, + { + "description": "type=double", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encrypted_double": { + "$numberDouble": "1.23" + } + } + }, + "object": "coll3", + "expectError": { + "errorContains": "element of type: double" + } + } + ] + }, + { + "description": "type=decimal", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encrypted_decimal": { + "$numberDecimal": "1.23" + } + } + }, + "object": "coll4", + "expectError": { + "errorContains": "element of type: decimal" + } + } + ] + }, + { + "description": "type=binData", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encrypted_binData": { + "$binary": { + "base64": "AAAA", + "subType": "00" + } + } + } + }, + "object": "coll5" + }, + { + "name": "find", + "object": "coll5", + "arguments": { + "filter": { + "_id": 1 + } + }, + "expectResult": [ + { + "_id": 1, + "encrypted_binData": { + "$binary": { + "base64": "AAAA", + "subType": "00" + } + } + } + ] + } + ], + "outcome": [ + { + "documents": [ + { + "_id": 1, + "encrypted_binData": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAAFB/KHZQHaHHo8fctcl7v6kR+sLkJoTRx2cPSSck9ya+nbGROSeFhdhDRHaCzhV78fDEqnMDSVPNi+ZkbaIh46GQ==", + "subType": "06" + } + } + } + ], + "collectionName": "default", + "databaseName": "default" + } + ], + "expectEvents": [ + { + "client": "client5", + "events": [ + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + 
"$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encrypted_binData": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAAFB/KHZQHaHHo8fctcl7v6kR+sLkJoTRx2cPSSck9ya+nbGROSeFhdhDRHaCzhV78fDEqnMDSVPNi+ZkbaIh46GQ==", + "subType": "06" + } + } + } + ], + "ordered": true + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "default", + "filter": { + "_id": 1 + } + }, + "commandName": "find" + } + } + ] + } + ] + }, + { + "description": "type=javascript", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encrypted_javascript": { + "$code": "var x = 1;" + } + } + }, + "object": "coll6" + }, + { + "name": "find", + "object": "coll6", + "arguments": { + "filter": { + "_id": 1 + } + }, + "expectResult": [ + { + "_id": 1, + "encrypted_javascript": { + "$code": "var x = 1;" + } + } + ] + } + ], + "outcome": [ + { + "documents": [ + { + "_id": 1, + "encrypted_javascript": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAANrvMgJkTKWGMc9wt3E2RBR2Hu5gL9p+vIIdHe9FcOm99t1W480/oX1Gnd87ON3B399DuFaxi/aaIiQSo7gTX6Lw==", + "subType": "06" + } + } + } + ], + "collectionName": "default", + "databaseName": "default" + } + ], + "expectEvents": [ + { + "client": "client6", + "events": [ + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encrypted_javascript": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAANrvMgJkTKWGMc9wt3E2RBR2Hu5gL9p+vIIdHe9FcOm99t1W480/oX1Gnd87ON3B399DuFaxi/aaIiQSo7gTX6Lw==", + "subType": "06" + } + } + } + ], + "ordered": true + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "default", + "filter": { + "_id": 1 + } + }, + "commandName": "find" + } + } + ] + } + ] + }, + { + "description": "type=javascriptWithScope", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encrypted_javascriptWithScope": { + "$code": "var x = 1;", + "$scope": {} + } + } + }, + "object": "coll7", + "expectError": { + "errorContains": "element of type: javascriptWithScope" + } + } + ] + }, + { + "description": "type=object", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encrypted_object": {} + } + }, + "object": "coll8", + "expectError": { + "errorContains": "element of type: object" + } + } + ] + }, + { + "description": "type=timestamp", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encrypted_timestamp": { + "$timestamp": { + "t": 123, + "i": 456 + } + } + } + }, + "object": "coll9" + }, + { + "name": "find", + "object": "coll9", + "arguments": { + "filter": { + "_id": 1 + } + }, + "expectResult": [ + { + "_id": 1, + "encrypted_timestamp": { + "$timestamp": { + "t": 123, + "i": 456 + } + } + 
} + ] + } + ], + "outcome": [ + { + "documents": [ + { + "_id": 1, + "encrypted_timestamp": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAARJHaM4Gq3MpDTdBasBsEolQaOmxJQU1wsZVaSFAOLpEh1QihDglXI95xemePFMKhg+KNpFg7lw1ChCs2Wn/c26Q==", + "subType": "06" + } + } + } + ], + "collectionName": "default", + "databaseName": "default" + } + ], + "expectEvents": [ + { + "client": "client9", + "events": [ + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encrypted_timestamp": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAARJHaM4Gq3MpDTdBasBsEolQaOmxJQU1wsZVaSFAOLpEh1QihDglXI95xemePFMKhg+KNpFg7lw1ChCs2Wn/c26Q==", + "subType": "06" + } + } + } + ], + "ordered": true + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "default", + "filter": { + "_id": 1 + } + }, + "commandName": "find" + } + } + ] + } + ] + }, + { + "description": "type=regex", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encrypted_regex": { + "$regularExpression": { + "pattern": "test", + "options": "" + } + } + } + }, + "object": "coll10" + }, + { + "name": "find", + "object": "coll10", + "arguments": { + "filter": { + "_id": 1 + } + }, + "expectResult": [ + { + "_id": 1, + "encrypted_regex": { + "$regularExpression": { + "pattern": "test", + "options": "" + } + } + } + ] + } + ], + "outcome": [ + { + "documents": [ + { + "_id": 1, + "encrypted_regex": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAALVnxM4UqGhqf5eXw6nsS08am3YJrTf1EvjKitT8tyyMAbHsICIU3GUjuC7EBofCHbusvgo7pDyaClGostFz44nA==", + "subType": "06" + } + } + } + ], + "collectionName": "default", + "databaseName": "default" + } + ], + "expectEvents": [ + { + "client": "client10", + "events": [ + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encrypted_regex": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAALVnxM4UqGhqf5eXw6nsS08am3YJrTf1EvjKitT8tyyMAbHsICIU3GUjuC7EBofCHbusvgo7pDyaClGostFz44nA==", + "subType": "06" + } + } + } + ], + "ordered": true + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "default", + "filter": { + "_id": 1 + } + }, + "commandName": "find" + } + } + ] + } + ] + }, + { + "description": "type=date", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encrypted_date": { + "$date": { + "$numberLong": "123" + } + } + } + }, + "object": "coll11" + }, + { + "name": "find", + "object": "coll11", + "arguments": { + "filter": { + "_id": 1 + } + }, + "expectResult": [ + { + "_id": 1, + "encrypted_date": { + "$date": { + "$numberLong": "123" + } + } + } + ] + } + ], + "outcome": [ + { + "documents": [ + { + "_id": 1, + 
"encrypted_date": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAAJ5sN7u6l97+DswfKTqZAijSTSOo5htinGKQKUD7pHNJYlLXGOkB4glrCu7ibu0g3344RHQ5yUp4YxMEa8GD+Snw==", + "subType": "06" + } + } + } + ], + "collectionName": "default", + "databaseName": "default" + } + ], + "expectEvents": [ + { + "client": "client11", + "events": [ + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "default", + "documents": [ + { + "_id": 1, + "encrypted_date": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAAJ5sN7u6l97+DswfKTqZAijSTSOo5htinGKQKUD7pHNJYlLXGOkB4glrCu7ibu0g3344RHQ5yUp4YxMEa8GD+Snw==", + "subType": "06" + } + } + } + ], + "ordered": true + }, + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "default", + "filter": { + "_id": 1 + } + }, + "commandName": "find" + } + } + ] + } + ] + }, + { + "description": "type=minKey", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encrypted_minKey": { + "$minKey": 1 + } + } + }, + "object": "coll12", + "expectError": { + "errorContains": "Cannot encrypt element of type: minKey" + } + } + ] + }, + { + "description": "type=maxKey", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encrypted_maxKey": { + "$maxKey": 1 + } + } + }, + "object": "coll13", + "expectError": { + "errorContains": "Cannot encrypt element of type: maxKey" + } + } + ] + }, + { + "description": "type=undefined", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encrypted_undefined": { + "$undefined": true + } + } + }, + "object": "coll14", + "expectError": { + "errorContains": "Cannot encrypt element of type: undefined" + } + } + ] + }, + { + "description": "type=array", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encrypted_array": [] + } + }, + "object": "coll15", + "expectError": { + "errorContains": "element of type: array" + } + } + ] + }, + { + "description": "type=bool", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encrypted_bool": true + } + }, + "object": "coll16", + "expectError": { + "errorContains": "element of type: bool" + } + } + ] + }, + { + "description": "type=null", + "operations": [ + { + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encrypted_null": true + } + }, + "object": "coll17", + "expectError": { + "errorContains": "Cannot encrypt element of type: null" + } + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/unsupportedCommand.json b/test/client-side-encryption/spec/unified/unsupportedCommand.json new file mode 100644 index 0000000000..a91390324a --- /dev/null +++ b/test/client-side-encryption/spec/unified/unsupportedCommand.json @@ -0,0 +1,200 @@ +{ + "description": "unsupportedCommand", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "4.1.10", + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + 
"aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [ + { + "_id": 1, + "x": 1, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + } + }, + { + "_id": 2, + "x": 2, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACDdw4KFz3ZLquhsbt7RmDjD0N67n0uSXx7IGnQNCLeIKvot6s/ouI21Eo84IOtb6lhwUNPlSEBNY0/hbszWAKJg==", + "subType": "06" + } + } + } + ], + "createOptions": { + "validator": { + "$jsonSchema": { + "properties": { + "encrypted_w_altname": { + "encrypt": { + "keyId": "/altname", + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + }, + "random": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string_equivalent": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + } + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "status": 1, + "_id": { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + }, + "masterKey": { + "provider": "aws", + "key": "arn:aws:kms:us-east-1:579766882180:key/89fcc2c4-08b0-4bd9-9f25-e30687b580d0", + "region": "us-east-1" + }, + "updateDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyMaterial": { + "$binary": { + "base64": "AQICAHhQNmWG2CzOm1dq3kWLM+iDUZhEqnhJwH9wZVpuZ94A8gEqnsxXlR51T5EbEVezUqqKAAAAwjCBvwYJKoZIhvcNAQcGoIGxMIGuAgEAMIGoBgkqhkiG9w0BBwEwHgYJYIZIAWUDBAEuMBEEDHa4jo6yp0Z18KgbUgIBEIB74sKxWtV8/YHje5lv5THTl0HIbhSwM6EqRlmBiFFatmEWaeMk4tO4xBX65eq670I5TWPSLMzpp8ncGHMmvHqRajNBnmFtbYxN3E3/WjxmdbOOe+OXpnGJPcGsftc7cB2shRfA4lICPnE26+oVNXT6p0Lo20nY5XC7jyCO", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyAltNames": [ + "altname", + "another_altname" + ] + } + ] + } + ], + "tests": [ + { + "description": "mapReduce deterministic encryption (unsupported)", + "operations": [ + { + "name": "mapReduce", + "arguments": { + "map": { + "$code": "function inc() { return emit(0, this.x + 1) }" + }, + "reduce": { + "$code": "function sum(key, values) { return values.reduce((acc, x) => acc + x); }" + }, + "out": { + "inline": 1 + } + }, + "object": "coll", + "expectError": { + "errorContains": "command not supported for auto encryption: mapreduce" + } + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/updateMany.json b/test/client-side-encryption/spec/unified/updateMany.json new file mode 100644 index 0000000000..cae4c0eaf4 
--- /dev/null +++ b/test/client-side-encryption/spec/unified/updateMany.json @@ -0,0 +1,376 @@ +{ + "description": "updateMany", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "4.1.10", + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted" + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll_unencrypted", + "database": "db_unencrypted", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + } + }, + { + "_id": 2, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACDdw4KFz3ZLquhsbt7RmDjD0N67n0uSXx7IGnQNCLeIKvot6s/ouI21Eo84IOtb6lhwUNPlSEBNY0/hbszWAKJg==", + "subType": "06" + } + } + } + ], + "createOptions": { + "validator": { + "$jsonSchema": { + "properties": { + "encrypted_w_altname": { + "encrypt": { + "keyId": "/altname", + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + }, + "random": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string_equivalent": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + } + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "status": 1, + "_id": { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + }, + "masterKey": { + "provider": "aws", + "key": "arn:aws:kms:us-east-1:579766882180:key/89fcc2c4-08b0-4bd9-9f25-e30687b580d0", + "region": "us-east-1" + }, + "updateDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyMaterial": { + "$binary": { + "base64": "AQICAHhQNmWG2CzOm1dq3kWLM+iDUZhEqnhJwH9wZVpuZ94A8gEqnsxXlR51T5EbEVezUqqKAAAAwjCBvwYJKoZIhvcNAQcGoIGxMIGuAgEAMIGoBgkqhkiG9w0BBwEwHgYJYIZIAWUDBAEuMBEEDHa4jo6yp0Z18KgbUgIBEIB74sKxWtV8/YHje5lv5THTl0HIbhSwM6EqRlmBiFFatmEWaeMk4tO4xBX65eq670I5TWPSLMzpp8ncGHMmvHqRajNBnmFtbYxN3E3/WjxmdbOOe+OXpnGJPcGsftc7cB2shRfA4lICPnE26+oVNXT6p0Lo20nY5XC7jyCO", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyAltNames": [ + "altname", + 
"another_altname" + ] + } + ] + } + ], + "tests": [ + { + "description": "updateMany with deterministic encryption", + "operations": [ + { + "name": "updateMany", + "arguments": { + "filter": { + "encrypted_string": { + "$in": [ + "string0", + "string1" + ] + } + }, + "update": { + "$set": { + "encrypted_string": "string2", + "random": "abc" + } + } + }, + "object": "coll", + "expectResult": { + "matchedCount": 2, + "modifiedCount": 2, + "upsertedCount": 0 + } + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACQ76HWOut3DZtQuV90hp1aaCpZn95vZIaWmn+wrBehcEtcFwyJlBdlyzDzZTWPZCPgiFq72Wvh6Y7VbpU9NAp3A==", + "subType": "06" + } + }, + "random": { + "$$type": "binData" + } + }, + { + "_id": 2, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACQ76HWOut3DZtQuV90hp1aaCpZn95vZIaWmn+wrBehcEtcFwyJlBdlyzDzZTWPZCPgiFq72Wvh6Y7VbpU9NAp3A==", + "subType": "06" + } + }, + "random": { + "$$type": "binData" + } + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "update": "default", + "updates": [ + { + "q": { + "encrypted_string": { + "$in": [ + { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + }, + { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACDdw4KFz3ZLquhsbt7RmDjD0N67n0uSXx7IGnQNCLeIKvot6s/ouI21Eo84IOtb6lhwUNPlSEBNY0/hbszWAKJg==", + "subType": "06" + } + } + ] + } + }, + "u": { + "$set": { + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACQ76HWOut3DZtQuV90hp1aaCpZn95vZIaWmn+wrBehcEtcFwyJlBdlyzDzZTWPZCPgiFq72Wvh6Y7VbpU9NAp3A==", + "subType": "06" + } + }, + "random": { + "$$type": "binData" + } + } + }, + "multi": true, + "upsert": false + } + ], + "ordered": true + }, + "commandName": "update" + } + } + ] + } + ] + }, + { + "description": "updateMany fails when filtering on a random field", + "operations": [ + { + "name": "updateMany", + "arguments": { + "filter": { + "random": "abc" + }, + "update": { + "$set": { + "encrypted_string": "string1" + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "Cannot query on fields encrypted with the randomized encryption" + } + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/updateOne.json b/test/client-side-encryption/spec/unified/updateOne.json new file mode 100644 index 0000000000..6c8fdcbb6e --- /dev/null +++ b/test/client-side-encryption/spec/unified/updateOne.json @@ -0,0 +1,538 @@ +{ + "description": "updateOne", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "4.1.10", + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + 
"kmsProviders": { + "aws": { + "accessKeyId": { + "$$placeholder": 1 + }, + "secretAccessKey": { + "$$placeholder": 1 + } + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + }, + { + "client": { + "id": "client_unencrypted" + } + }, + { + "database": { + "id": "db_unencrypted", + "client": "client_unencrypted", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll_unencrypted", + "database": "db_unencrypted", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + } + } + ], + "createOptions": { + "validator": { + "$jsonSchema": { + "properties": { + "encrypted_w_altname": { + "encrypt": { + "keyId": "/altname", + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + }, + "random": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string_equivalent": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + } + } + } + }, + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "status": 1, + "_id": { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + }, + "masterKey": { + "provider": "aws", + "key": "arn:aws:kms:us-east-1:579766882180:key/89fcc2c4-08b0-4bd9-9f25-e30687b580d0", + "region": "us-east-1" + }, + "updateDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyMaterial": { + "$binary": { + "base64": "AQICAHhQNmWG2CzOm1dq3kWLM+iDUZhEqnhJwH9wZVpuZ94A8gEqnsxXlR51T5EbEVezUqqKAAAAwjCBvwYJKoZIhvcNAQcGoIGxMIGuAgEAMIGoBgkqhkiG9w0BBwEwHgYJYIZIAWUDBAEuMBEEDHa4jo6yp0Z18KgbUgIBEIB74sKxWtV8/YHje5lv5THTl0HIbhSwM6EqRlmBiFFatmEWaeMk4tO4xBX65eq670I5TWPSLMzpp8ncGHMmvHqRajNBnmFtbYxN3E3/WjxmdbOOe+OXpnGJPcGsftc7cB2shRfA4lICPnE26+oVNXT6p0Lo20nY5XC7jyCO", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1552949630483" + } + }, + "keyAltNames": [ + "altname", + "another_altname" + ] + } + ] + } + ], + "tests": [ + { + "description": "updateOne with deterministic encryption", + "operations": [ + { + "name": "updateOne", + "arguments": { + "filter": { + "encrypted_string": "string0" + }, + "update": { + "$set": { + "encrypted_string": "string1", + "random": "abc" + } + } + }, + "object": "coll", + "expectResult": { + "matchedCount": 1, + "modifiedCount": 1, + "upsertedCount": 0 + } + }, + { + "name": "find", + "object": "coll_unencrypted", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": 1, + "encrypted_string": { + "$binary": { + "base64": 
"AQAAAAAAAAAAAAAAAAAAAAACDdw4KFz3ZLquhsbt7RmDjD0N67n0uSXx7IGnQNCLeIKvot6s/ouI21Eo84IOtb6lhwUNPlSEBNY0/hbszWAKJg==", + "subType": "06" + } + }, + "random": { + "$$type": "binData" + } + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "datakeys", + "filter": { + "$or": [ + { + "_id": { + "$in": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ] + } + }, + { + "keyAltNames": { + "$in": [] + } + } + ] + }, + "$db": "keyvault", + "readConcern": { + "level": "majority" + } + }, + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "update": "default", + "updates": [ + { + "q": { + "encrypted_string": { + "$eq": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + } + } + }, + "u": { + "$set": { + "encrypted_string": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACDdw4KFz3ZLquhsbt7RmDjD0N67n0uSXx7IGnQNCLeIKvot6s/ouI21Eo84IOtb6lhwUNPlSEBNY0/hbszWAKJg==", + "subType": "06" + } + }, + "random": { + "$$type": "binData" + } + } + }, + "multi": false, + "upsert": false + } + ], + "ordered": true + }, + "commandName": "update" + } + } + ] + } + ] + }, + { + "description": "updateOne fails when filtering on a random field", + "operations": [ + { + "name": "updateOne", + "arguments": { + "filter": { + "random": "abc" + }, + "update": { + "$set": { + "encrypted_string": "string1" + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "Cannot query on fields encrypted with the randomized encryption" + } + } + ] + }, + { + "description": "$unset works with an encrypted field", + "operations": [ + { + "name": "updateOne", + "arguments": { + "filter": {}, + "update": { + "$unset": { + "encrypted_string": "" + } + } + }, + "object": "coll", + "expectResult": { + "matchedCount": 1, + "modifiedCount": 1, + "upsertedCount": 0 + } + } + ], + "outcome": [ + { + "documents": [ + { + "_id": 1 + } + ], + "collectionName": "default", + "databaseName": "default" + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "update": "default", + "updates": [ + { + "q": {}, + "u": { + "$unset": { + "encrypted_string": "" + } + }, + "multi": { + "$$unsetOrMatches": false + }, + "upsert": { + "$$unsetOrMatches": false + } + } + ], + "ordered": true + }, + "commandName": "update" + } + } + ] + } + ] + }, + { + "description": "$rename works if target value has same encryption options", + "operations": [ + { + "name": "updateOne", + "arguments": { + "filter": {}, + "update": { + "$rename": { + "encrypted_string": "encrypted_string_equivalent" + } + } + }, + "object": "coll", + "expectResult": { + "matchedCount": 1, + "modifiedCount": 1, + "upsertedCount": 0 + } + } + ], + "outcome": [ + { + "documents": [ + { + "_id": 1, + "encrypted_string_equivalent": { + "$binary": { + "base64": "AQAAAAAAAAAAAAAAAAAAAAACwj+3zkv2VM+aTfk60RqhXq6a/77WlLwu/BxXFkL7EppGsju/m8f0x5kBDD3EZTtGALGXlym5jnpZAoSIkswHoA==", + "subType": "06" + } + } + } + ], + "collectionName": "default", + "databaseName": 
"default" + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "listCollections": 1, + "filter": { + "name": "default" + } + }, + "commandName": "listCollections" + } + }, + { + "commandStartedEvent": { + "command": { + "update": "default", + "updates": [ + { + "q": {}, + "u": { + "$rename": { + "encrypted_string": "encrypted_string_equivalent" + } + }, + "multi": { + "$$unsetOrMatches": false + }, + "upsert": { + "$$unsetOrMatches": false + } + } + ], + "ordered": true + }, + "commandName": "update" + } + } + ] + } + ] + }, + { + "description": "$rename fails if target value has different encryption options", + "operations": [ + { + "name": "updateOne", + "arguments": { + "filter": {}, + "update": { + "$rename": { + "encrypted_string": "random" + } + } + }, + "object": "coll", + "expectError": { + "errorContains": "$rename between two encrypted fields must have the same metadata or both be unencrypted" + } + } + ] + }, + { + "description": "an invalid update (no $ operators) is validated and errors", + "operations": [ + { + "name": "updateOne", + "arguments": { + "filter": {}, + "update": { + "encrypted_string": "random" + } + }, + "object": "coll", + "expectError": { + "errorContains": "" + } + } + ] + } + ] +} diff --git a/test/client-side-encryption/spec/unified/validatorAndPartialFieldExpression.json b/test/client-side-encryption/spec/unified/validatorAndPartialFieldExpression.json new file mode 100644 index 0000000000..c46a193273 --- /dev/null +++ b/test/client-side-encryption/spec/unified/validatorAndPartialFieldExpression.json @@ -0,0 +1,323 @@ +{ + "description": "validatorAndPartialFieldExpression", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "minServerVersion": "6.0.0", + "csfle": { + "minLibmongocryptVersion": "1.15.1" + } + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + }, + "schemaMap": { + "default.encryptedCollection": { + "properties": { + "encrypted_w_altname": { + "encrypt": { + "keyId": "/altname", + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + }, + "random": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + } + }, + "encrypted_string_equivalent": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "AAAAAAAAAAAAAAAAAAAAAA==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" + } + } + }, + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "db", + "client": "client0", + "databaseName": "default" + } + }, + { + "collection": { + "id": "coll", + "database": "db", + "collectionName": "default" + } + } + ], + "initialData": [ + { + "databaseName": "default", + "collectionName": "default", + "documents": [] + } + ], + "tests": [ + { + "description": "create with a validator on an 
unencrypted field is OK", + "operations": [ + { + "name": "dropCollection", + "object": "db", + "arguments": { + "collection": "encryptedCollection" + } + }, + { + "name": "createCollection", + "object": "db", + "arguments": { + "collection": "encryptedCollection", + "validator": { + "unencrypted_string": "foo" + } + } + }, + { + "name": "assertCollectionExists", + "object": "testRunner", + "arguments": { + "databaseName": "default", + "collectionName": "encryptedCollection" + } + } + ] + }, + { + "description": "create with a validator on an encrypted field is an error", + "operations": [ + { + "name": "dropCollection", + "object": "db", + "arguments": { + "collection": "encryptedCollection" + } + }, + { + "name": "createCollection", + "object": "db", + "arguments": { + "collection": "encryptedCollection", + "validator": { + "encrypted_string": "foo" + } + }, + "expectError": { + "errorContains": "Comparison to encrypted fields not supported" + } + } + ] + }, + { + "description": "collMod with a validator on an unencrypted field is OK", + "operations": [ + { + "name": "dropCollection", + "object": "db", + "arguments": { + "collection": "encryptedCollection" + } + }, + { + "name": "createCollection", + "object": "db", + "arguments": { + "collection": "encryptedCollection" + } + }, + { + "name": "runCommand", + "object": "db", + "arguments": { + "commandName": "collMod", + "command": { + "collMod": "encryptedCollection", + "validator": { + "unencrypted_string": "foo" + } + } + } + } + ] + }, + { + "description": "collMod with a validator on an encrypted field is an error", + "operations": [ + { + "name": "dropCollection", + "object": "db", + "arguments": { + "collection": "encryptedCollection" + } + }, + { + "name": "createCollection", + "object": "db", + "arguments": { + "collection": "encryptedCollection" + } + }, + { + "name": "runCommand", + "object": "db", + "arguments": { + "commandName": "collMod", + "command": { + "collMod": "encryptedCollection", + "validator": { + "encrypted_string": "foo" + } + } + }, + "expectError": { + "errorContains": "Comparison to encrypted fields not supported" + } + } + ] + }, + { + "description": "createIndexes with a partialFilterExpression on an unencrypted field is OK", + "operations": [ + { + "name": "dropCollection", + "object": "db", + "arguments": { + "collection": "encryptedCollection" + } + }, + { + "name": "createCollection", + "object": "db", + "arguments": { + "collection": "encryptedCollection" + } + }, + { + "name": "runCommand", + "object": "db", + "arguments": { + "commandName": "createIndexes", + "command": { + "createIndexes": "encryptedCollection", + "indexes": [ + { + "name": "name", + "key": { + "name": 1 + }, + "partialFilterExpression": { + "unencrypted_string": "foo" + } + } + ] + } + } + }, + { + "name": "assertIndexExists", + "object": "testRunner", + "arguments": { + "databaseName": "default", + "collectionName": "encryptedCollection", + "indexName": "name" + } + } + ] + }, + { + "description": "createIndexes with a partialFilterExpression on an encrypted field is an error", + "operations": [ + { + "name": "dropCollection", + "object": "db", + "arguments": { + "collection": "encryptedCollection" + } + }, + { + "name": "createCollection", + "object": "db", + "arguments": { + "collection": "encryptedCollection" + } + }, + { + "name": "runCommand", + "object": "db", + "arguments": { + "commandName": "createIndexes", + "command": { + "createIndexes": "encryptedCollection", + "indexes": [ + { + "name": "name", + "key": { + "name": 1 
+ }, + "partialFilterExpression": { + "encrypted_string": "foo" + } + } + ] + } + }, + "expectError": { + "errorContains": "Comparison to encrypted fields not supported" + } + } + ] + } + ] +} diff --git a/test/collection_management/modifyCollection-pre_and_post_images.json b/test/collection_management/modifyCollection-pre_and_post_images.json deleted file mode 100644 index 8026faeb17..0000000000 --- a/test/collection_management/modifyCollection-pre_and_post_images.json +++ /dev/null @@ -1,111 +0,0 @@ -{ - "description": "modifyCollection-pre_and_post_images", - "schemaVersion": "1.4", - "runOnRequirements": [ - { - "minServerVersion": "6.0", - "serverless": "forbid" - } - ], - "createEntities": [ - { - "client": { - "id": "client0", - "observeEvents": [ - "commandStartedEvent" - ] - } - }, - { - "database": { - "id": "database0", - "client": "client0", - "databaseName": "papi-tests" - } - }, - { - "collection": { - "id": "collection0", - "database": "database0", - "collectionName": "test" - } - } - ], - "tests": [ - { - "description": "modifyCollection to changeStreamPreAndPostImages enabled", - "operations": [ - { - "name": "dropCollection", - "object": "database0", - "arguments": { - "collection": "test" - } - }, - { - "name": "createCollection", - "object": "database0", - "arguments": { - "collection": "test", - "changeStreamPreAndPostImages": { - "enabled": false - } - } - }, - { - "name": "assertCollectionExists", - "object": "testRunner", - "arguments": { - "databaseName": "papi-tests", - "collectionName": "test" - } - }, - { - "name": "modifyCollection", - "object": "database0", - "arguments": { - "collection": "test", - "changeStreamPreAndPostImages": { - "enabled": true - } - } - } - ], - "expectEvents": [ - { - "client": "client0", - "events": [ - { - "commandStartedEvent": { - "command": { - "drop": "test" - }, - "databaseName": "papi-tests" - } - }, - { - "commandStartedEvent": { - "command": { - "create": "test", - "changeStreamPreAndPostImages": { - "enabled": false - } - } - } - }, - { - "commandStartedEvent": { - "command": { - "collMod": "test", - "changeStreamPreAndPostImages": { - "enabled": true - } - } - } - } - ] - } - ] - } - ] -} diff --git a/test/collection_management/timeseries-collection.json b/test/collection_management/timeseries-collection.json index 8525056fd1..2ee52eac41 100644 --- a/test/collection_management/timeseries-collection.json +++ b/test/collection_management/timeseries-collection.json @@ -255,7 +255,7 @@ "description": "createCollection with bucketing options", "runOnRequirements": [ { - "minServerVersion": "7.0" + "minServerVersion": "6.3" } ], "operations": [ diff --git a/test/command_logging/pre-42-server-connection-id.json b/test/command_logging/pre-42-server-connection-id.json deleted file mode 100644 index d5ebd86590..0000000000 --- a/test/command_logging/pre-42-server-connection-id.json +++ /dev/null @@ -1,119 +0,0 @@ -{ - "description": "pre-42-server-connection-id", - "schemaVersion": "1.13", - "runOnRequirements": [ - { - "maxServerVersion": "4.0.99" - } - ], - "createEntities": [ - { - "client": { - "id": "client", - "observeLogMessages": { - "command": "debug" - } - } - }, - { - "database": { - "id": "database", - "client": "client", - "databaseName": "logging-server-connection-id-tests" - } - }, - { - "collection": { - "id": "collection", - "database": "database", - "collectionName": "logging-tests-collection" - } - } - ], - "initialData": [ - { - "databaseName": "logging-server-connection-id-tests", - "collectionName": 
"logging-tests-collection", - "documents": [] - } - ], - "tests": [ - { - "description": "command log messages do not include server connection id", - "operations": [ - { - "name": "insertOne", - "object": "collection", - "arguments": { - "document": { - "x": 1 - } - } - }, - { - "name": "find", - "object": "collection", - "arguments": { - "filter": { - "$or": true - } - }, - "expectError": { - "isError": true - } - } - ], - "expectLogMessages": [ - { - "client": "client", - "messages": [ - { - "level": "debug", - "component": "command", - "data": { - "message": "Command started", - "commandName": "insert", - "serverConnectionId": { - "$$exists": false - } - } - }, - { - "level": "debug", - "component": "command", - "data": { - "message": "Command succeeded", - "commandName": "insert", - "serverConnectionId": { - "$$exists": false - } - } - }, - { - "level": "debug", - "component": "command", - "data": { - "message": "Command started", - "commandName": "find", - "serverConnectionId": { - "$$exists": false - } - } - }, - { - "level": "debug", - "component": "command", - "data": { - "message": "Command failed", - "commandName": "find", - "serverConnectionId": { - "$$exists": false - } - } - } - ] - } - ] - } - ] -} diff --git a/test/command_monitoring/pre-42-server-connection-id.json b/test/command_monitoring/pre-42-server-connection-id.json deleted file mode 100644 index 141fbe584f..0000000000 --- a/test/command_monitoring/pre-42-server-connection-id.json +++ /dev/null @@ -1,101 +0,0 @@ -{ - "description": "pre-42-server-connection-id", - "schemaVersion": "1.6", - "runOnRequirements": [ - { - "maxServerVersion": "4.0.99" - } - ], - "createEntities": [ - { - "client": { - "id": "client", - "observeEvents": [ - "commandStartedEvent", - "commandSucceededEvent", - "commandFailedEvent" - ] - } - }, - { - "database": { - "id": "database", - "client": "client", - "databaseName": "server-connection-id-tests" - } - }, - { - "collection": { - "id": "collection", - "database": "database", - "collectionName": "coll" - } - } - ], - "initialData": [ - { - "databaseName": "server-connection-id-tests", - "collectionName": "coll", - "documents": [] - } - ], - "tests": [ - { - "description": "command events do not include server connection id", - "operations": [ - { - "name": "insertOne", - "object": "collection", - "arguments": { - "document": { - "x": 1 - } - } - }, - { - "name": "find", - "object": "collection", - "arguments": { - "filter": { - "$or": true - } - }, - "expectError": { - "isError": true - } - } - ], - "expectEvents": [ - { - "client": "client", - "events": [ - { - "commandStartedEvent": { - "commandName": "insert", - "hasServerConnectionId": false - } - }, - { - "commandSucceededEvent": { - "commandName": "insert", - "hasServerConnectionId": false - } - }, - { - "commandStartedEvent": { - "commandName": "find", - "hasServerConnectionId": false - } - }, - { - "commandFailedEvent": { - "commandName": "find", - "hasServerConnectionId": false - } - } - ] - } - ] - } - ] -} diff --git a/test/command_monitoring/unacknowledged-client-bulkWrite.json b/test/command_monitoring/unacknowledged-client-bulkWrite.json index 61bb00726c..14740cea34 100644 --- a/test/command_monitoring/unacknowledged-client-bulkWrite.json +++ b/test/command_monitoring/unacknowledged-client-bulkWrite.json @@ -95,29 +95,34 @@ "ordered": false }, "expectResult": { - "insertedCount": { - "$$unsetOrMatches": 0 - }, - "upsertedCount": { - "$$unsetOrMatches": 0 - }, - "matchedCount": { - "$$unsetOrMatches": 0 - }, - 
"modifiedCount": { - "$$unsetOrMatches": 0 - }, - "deletedCount": { - "$$unsetOrMatches": 0 - }, - "insertResults": { - "$$unsetOrMatches": {} - }, - "updateResults": { - "$$unsetOrMatches": {} - }, - "deleteResults": { - "$$unsetOrMatches": {} + "$$unsetOrMatches": { + "acknowledged": { + "$$unsetOrMatches": false + }, + "insertedCount": { + "$$unsetOrMatches": 0 + }, + "upsertedCount": { + "$$unsetOrMatches": 0 + }, + "matchedCount": { + "$$unsetOrMatches": 0 + }, + "modifiedCount": { + "$$unsetOrMatches": 0 + }, + "deletedCount": { + "$$unsetOrMatches": 0 + }, + "insertResults": { + "$$unsetOrMatches": {} + }, + "updateResults": { + "$$unsetOrMatches": {} + }, + "deleteResults": { + "$$unsetOrMatches": {} + } } } }, diff --git a/test/connection_logging/connection-logging.json b/test/connection_logging/connection-logging.json index bfbdbe8639..5799e834d7 100644 --- a/test/connection_logging/connection-logging.json +++ b/test/connection_logging/connection-logging.json @@ -520,4 +520,4 @@ ] } ] -} \ No newline at end of file +} diff --git a/test/connection_monitoring/pool-checkout-returned-connection-maxConnecting.json b/test/connection_monitoring/pool-checkout-returned-connection-maxConnecting.json index 965d56f6d8..10b526e0c3 100644 --- a/test/connection_monitoring/pool-checkout-returned-connection-maxConnecting.json +++ b/test/connection_monitoring/pool-checkout-returned-connection-maxConnecting.json @@ -23,6 +23,7 @@ } }, "poolOptions": { + "maxConnecting": 2, "maxPoolSize": 10, "waitQueueTimeoutMS": 5000 }, @@ -72,9 +73,8 @@ "connection": "conn0" }, { - "name": "waitForEvent", - "event": "ConnectionCheckedOut", - "count": 4 + "name": "wait", + "ms": 100 } ], "events": [ @@ -104,14 +104,6 @@ "type": "ConnectionCheckedOut", "connectionId": 1, "address": 42 - }, - { - "type": "ConnectionCheckedOut", - "address": 42 - }, - { - "type": "ConnectionCheckedOut", - "address": 42 } ], "ignore": [ diff --git a/test/connection_string/test/valid-options.json b/test/connection_string/test/valid-options.json index 6c86172d08..fce53873a6 100644 --- a/test/connection_string/test/valid-options.json +++ b/test/connection_string/test/valid-options.json @@ -40,7 +40,7 @@ }, { "description": "Colon in a key value pair", - "uri": "mongodb://example.com/?authMechanism=MONGODB-OIDC&authMechanismProperties=TOKEN_RESOURCE:mongodb://test-cluster", + "uri": "mongodb://example.com/?authMechanism=MONGODB-OIDC&authMechanismProperties=TOKEN_RESOURCE:mongodb://test-cluster,ENVIRONMENT:azure", "valid": true, "warning": false, "hosts": [ @@ -53,7 +53,8 @@ "auth": null, "options": { "authmechanismProperties": { - "TOKEN_RESOURCE": "mongodb://test-cluster" + "TOKEN_RESOURCE": "mongodb://test-cluster", + "ENVIRONMENT": "azure" } } } diff --git a/test/connection_string/test/valid-warnings.json b/test/connection_string/test/valid-warnings.json index daf814a75f..e11757eb0e 100644 --- a/test/connection_string/test/valid-warnings.json +++ b/test/connection_string/test/valid-warnings.json @@ -96,7 +96,7 @@ }, { "description": "Comma in a key value pair causes a warning", - "uri": "mongodb://localhost?authMechanism=MONGODB-OIDC&authMechanismProperties=TOKEN_RESOURCE:mongodb://host1%2Chost2", + "uri": "mongodb://localhost?authMechanism=MONGODB-OIDC&authMechanismProperties=TOKEN_RESOURCE:mongodb://host1%2Chost2,ENVIRONMENT:azure", "valid": true, "warning": true, "hosts": [ diff --git a/test/crud/unified/bulkWrite-updateMany-pipeline.json b/test/crud/unified/bulkWrite-updateMany-pipeline.json new file mode 100644 index 
0000000000..e938ea7535 --- /dev/null +++ b/test/crud/unified/bulkWrite-updateMany-pipeline.json @@ -0,0 +1,148 @@ +{ + "description": "bulkWrite-updateMany-pipeline", + "schemaVersion": "1.0", + "runOnRequirements": [ + { + "minServerVersion": "4.1.11" + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "database0", + "client": "client0", + "databaseName": "crud-tests" + } + }, + { + "collection": { + "id": "collection0", + "database": "database0", + "collectionName": "test" + } + } + ], + "initialData": [ + { + "collectionName": "test", + "databaseName": "crud-tests", + "documents": [ + { + "_id": 1, + "x": 1, + "y": 1, + "t": { + "u": { + "v": 1 + } + } + }, + { + "_id": 2, + "x": 2, + "y": 1 + } + ] + } + ], + "tests": [ + { + "description": "UpdateMany in bulk write using pipelines", + "operations": [ + { + "object": "collection0", + "name": "bulkWrite", + "arguments": { + "requests": [ + { + "updateMany": { + "filter": {}, + "update": [ + { + "$project": { + "x": 1 + } + }, + { + "$addFields": { + "foo": 1 + } + } + ] + } + } + ] + }, + "expectResult": { + "matchedCount": 2, + "modifiedCount": 2, + "upsertedCount": 0 + } + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "update": "test", + "updates": [ + { + "q": {}, + "u": [ + { + "$project": { + "x": 1 + } + }, + { + "$addFields": { + "foo": 1 + } + } + ], + "multi": true, + "upsert": { + "$$unsetOrMatches": false + } + } + ] + }, + "commandName": "update", + "databaseName": "crud-tests" + } + } + ] + } + ], + "outcome": [ + { + "collectionName": "test", + "databaseName": "crud-tests", + "documents": [ + { + "_id": 1, + "x": 1, + "foo": 1 + }, + { + "_id": 2, + "x": 2, + "foo": 1 + } + ] + } + ] + } + ] +} diff --git a/test/crud/unified/bulkWrite-updateOne-pipeline.json b/test/crud/unified/bulkWrite-updateOne-pipeline.json new file mode 100644 index 0000000000..769bd106f8 --- /dev/null +++ b/test/crud/unified/bulkWrite-updateOne-pipeline.json @@ -0,0 +1,156 @@ +{ + "description": "bulkWrite-updateOne-pipeline", + "schemaVersion": "1.0", + "runOnRequirements": [ + { + "minServerVersion": "4.1.11" + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "database0", + "client": "client0", + "databaseName": "crud-tests" + } + }, + { + "collection": { + "id": "collection0", + "database": "database0", + "collectionName": "test" + } + } + ], + "initialData": [ + { + "collectionName": "test", + "databaseName": "crud-tests", + "documents": [ + { + "_id": 1, + "x": 1, + "y": 1, + "t": { + "u": { + "v": 1 + } + } + }, + { + "_id": 2, + "x": 2, + "y": 1 + } + ] + } + ], + "tests": [ + { + "description": "UpdateOne in bulk write using pipelines", + "operations": [ + { + "object": "collection0", + "name": "bulkWrite", + "arguments": { + "requests": [ + { + "updateOne": { + "filter": { + "_id": 1 + }, + "update": [ + { + "$replaceRoot": { + "newRoot": "$t" + } + }, + { + "$addFields": { + "foo": 1 + } + } + ] + } + } + ] + }, + "expectResult": { + "matchedCount": 1, + "modifiedCount": 1, + "upsertedCount": 0 + } + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "update": "test", + "updates": [ + { + "q": { + "_id": 1 + }, + "u": [ + { + "$replaceRoot": { + "newRoot": "$t" + } + }, + { + "$addFields": { + "foo": 1 + } 
+ } + ], + "multi": { + "$$unsetOrMatches": false + }, + "upsert": { + "$$unsetOrMatches": false + } + } + ] + }, + "commandName": "update", + "databaseName": "crud-tests" + } + } + ] + } + ], + "outcome": [ + { + "collectionName": "test", + "databaseName": "crud-tests", + "documents": [ + { + "_id": 1, + "u": { + "v": 1 + }, + "foo": 1 + }, + { + "_id": 2, + "x": 2, + "y": 1 + } + ] + } + ] + } + ] +} diff --git a/test/crud/unified/bypassDocumentValidation.json b/test/crud/unified/bypassDocumentValidation.json new file mode 100644 index 0000000000..aff2d37f81 --- /dev/null +++ b/test/crud/unified/bypassDocumentValidation.json @@ -0,0 +1,493 @@ +{ + "description": "bypassDocumentValidation", + "schemaVersion": "1.4", + "runOnRequirements": [ + { + "minServerVersion": "3.2", + "serverless": "forbid" + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "database0", + "client": "client0", + "databaseName": "crud" + } + }, + { + "collection": { + "id": "collection0", + "database": "database0", + "collectionName": "coll" + } + } + ], + "initialData": [ + { + "collectionName": "coll", + "databaseName": "crud", + "documents": [ + { + "_id": 1, + "x": 11 + }, + { + "_id": 2, + "x": 22 + }, + { + "_id": 3, + "x": 33 + } + ] + } + ], + "tests": [ + { + "description": "Aggregate with $out passes bypassDocumentValidation: false", + "operations": [ + { + "object": "collection0", + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$sort": { + "x": 1 + } + }, + { + "$match": { + "_id": { + "$gt": 1 + } + } + }, + { + "$out": "other_test_collection" + } + ], + "bypassDocumentValidation": false + } + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "aggregate": "coll", + "pipeline": [ + { + "$sort": { + "x": 1 + } + }, + { + "$match": { + "_id": { + "$gt": 1 + } + } + }, + { + "$out": "other_test_collection" + } + ], + "bypassDocumentValidation": false + }, + "commandName": "aggregate", + "databaseName": "crud" + } + } + ] + } + ] + }, + { + "description": "BulkWrite passes bypassDocumentValidation: false", + "operations": [ + { + "object": "collection0", + "name": "bulkWrite", + "arguments": { + "requests": [ + { + "insertOne": { + "document": { + "_id": 4, + "x": 44 + } + } + } + ], + "bypassDocumentValidation": false + } + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "insert": "coll", + "documents": [ + { + "_id": 4, + "x": 44 + } + ], + "bypassDocumentValidation": false + } + } + } + ] + } + ] + }, + { + "description": "FindOneAndReplace passes bypassDocumentValidation: false", + "operations": [ + { + "object": "collection0", + "name": "findOneAndReplace", + "arguments": { + "filter": { + "_id": { + "$gt": 1 + } + }, + "replacement": { + "x": 32 + }, + "bypassDocumentValidation": false + } + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "findAndModify": "coll", + "query": { + "_id": { + "$gt": 1 + } + }, + "update": { + "x": 32 + }, + "bypassDocumentValidation": false + } + } + } + ] + } + ] + }, + { + "description": "FindOneAndUpdate passes bypassDocumentValidation: false", + "operations": [ + { + "object": "collection0", + "name": "findOneAndUpdate", + "arguments": { + "filter": { + "_id": { + "$gt": 1 + } + }, + "update": { + "$inc": { + "x": 1 + } + }, + "bypassDocumentValidation": 
false + } + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "findAndModify": "coll", + "query": { + "_id": { + "$gt": 1 + } + }, + "update": { + "$inc": { + "x": 1 + } + }, + "bypassDocumentValidation": false + } + } + } + ] + } + ] + }, + { + "description": "InsertMany passes bypassDocumentValidation: false", + "operations": [ + { + "object": "collection0", + "name": "insertMany", + "arguments": { + "documents": [ + { + "_id": 4, + "x": 44 + } + ], + "bypassDocumentValidation": false + } + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "insert": "coll", + "documents": [ + { + "_id": 4, + "x": 44 + } + ], + "bypassDocumentValidation": false + } + } + } + ] + } + ] + }, + { + "description": "InsertOne passes bypassDocumentValidation: false", + "operations": [ + { + "object": "collection0", + "name": "insertOne", + "arguments": { + "document": { + "_id": 4, + "x": 44 + }, + "bypassDocumentValidation": false + } + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "insert": "coll", + "documents": [ + { + "_id": 4, + "x": 44 + } + ], + "bypassDocumentValidation": false + } + } + } + ] + } + ] + }, + { + "description": "ReplaceOne passes bypassDocumentValidation: false", + "operations": [ + { + "object": "collection0", + "name": "replaceOne", + "arguments": { + "filter": { + "_id": { + "$gt": 1 + } + }, + "replacement": { + "x": 32 + }, + "bypassDocumentValidation": false + } + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "update": "coll", + "updates": [ + { + "q": { + "_id": { + "$gt": 1 + } + }, + "u": { + "x": 32 + }, + "multi": { + "$$unsetOrMatches": false + }, + "upsert": { + "$$unsetOrMatches": false + } + } + ], + "bypassDocumentValidation": false + } + } + } + ] + } + ] + }, + { + "description": "UpdateMany passes bypassDocumentValidation: false", + "operations": [ + { + "object": "collection0", + "name": "updateMany", + "arguments": { + "filter": { + "_id": { + "$gt": 1 + } + }, + "update": { + "$inc": { + "x": 1 + } + }, + "bypassDocumentValidation": false + } + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "update": "coll", + "updates": [ + { + "q": { + "_id": { + "$gt": 1 + } + }, + "u": { + "$inc": { + "x": 1 + } + }, + "multi": true, + "upsert": { + "$$unsetOrMatches": false + } + } + ], + "bypassDocumentValidation": false + } + } + } + ] + } + ] + }, + { + "description": "UpdateOne passes bypassDocumentValidation: false", + "operations": [ + { + "object": "collection0", + "name": "updateOne", + "arguments": { + "filter": { + "_id": { + "$gt": 1 + } + }, + "update": { + "$inc": { + "x": 1 + } + }, + "bypassDocumentValidation": false + } + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "update": "coll", + "updates": [ + { + "q": { + "_id": { + "$gt": 1 + } + }, + "u": { + "$inc": { + "x": 1 + } + }, + "multi": { + "$$unsetOrMatches": false + }, + "upsert": { + "$$unsetOrMatches": false + } + } + ], + "bypassDocumentValidation": false + } + } + } + ] + } + ] + } + ] +} diff --git a/test/crud/unified/client-bulkWrite-replaceOne-sort.json b/test/crud/unified/client-bulkWrite-replaceOne-sort.json index b86bc5f942..fc66ec015d 100644 --- 
a/test/crud/unified/client-bulkWrite-replaceOne-sort.json +++ b/test/crud/unified/client-bulkWrite-replaceOne-sort.json @@ -1,5 +1,5 @@ { - "description": "client bulkWrite updateOne-sort", + "description": "client bulkWrite replaceOne-sort", "schemaVersion": "1.4", "runOnRequirements": [ { diff --git a/test/crud/unified/findOneAndUpdate-pipeline.json b/test/crud/unified/findOneAndUpdate-pipeline.json new file mode 100644 index 0000000000..81dba9ae93 --- /dev/null +++ b/test/crud/unified/findOneAndUpdate-pipeline.json @@ -0,0 +1,130 @@ +{ + "description": "findOneAndUpdate-pipeline", + "schemaVersion": "1.0", + "runOnRequirements": [ + { + "minServerVersion": "4.1.11" + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "database0", + "client": "client0", + "databaseName": "crud-tests" + } + }, + { + "collection": { + "id": "collection0", + "database": "database0", + "collectionName": "test" + } + } + ], + "initialData": [ + { + "collectionName": "test", + "databaseName": "crud-tests", + "documents": [ + { + "_id": 1, + "x": 1, + "y": 1, + "t": { + "u": { + "v": 1 + } + } + }, + { + "_id": 2, + "x": 2, + "y": 1 + } + ] + } + ], + "tests": [ + { + "description": "FindOneAndUpdate using pipelines", + "operations": [ + { + "object": "collection0", + "name": "findOneAndUpdate", + "arguments": { + "filter": { + "_id": 1 + }, + "update": [ + { + "$project": { + "x": 1 + } + }, + { + "$addFields": { + "foo": 1 + } + } + ] + } + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "findAndModify": "test", + "update": [ + { + "$project": { + "x": 1 + } + }, + { + "$addFields": { + "foo": 1 + } + } + ] + }, + "commandName": "findAndModify", + "databaseName": "crud-tests" + } + } + ] + } + ], + "outcome": [ + { + "collectionName": "test", + "databaseName": "crud-tests", + "documents": [ + { + "_id": 1, + "x": 1, + "foo": 1 + }, + { + "_id": 2, + "x": 2, + "y": 1 + } + ] + } + ] + } + ] +} diff --git a/test/crud/unified/updateMany-pipeline.json b/test/crud/unified/updateMany-pipeline.json new file mode 100644 index 0000000000..e0f6d9d4a4 --- /dev/null +++ b/test/crud/unified/updateMany-pipeline.json @@ -0,0 +1,142 @@ +{ + "description": "updateMany-pipeline", + "schemaVersion": "1.0", + "runOnRequirements": [ + { + "minServerVersion": "4.1.11" + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "database0", + "client": "client0", + "databaseName": "crud-tests" + } + }, + { + "collection": { + "id": "collection0", + "database": "database0", + "collectionName": "test" + } + } + ], + "initialData": [ + { + "collectionName": "test", + "databaseName": "crud-tests", + "documents": [ + { + "_id": 1, + "x": 1, + "y": 1, + "t": { + "u": { + "v": 1 + } + } + }, + { + "_id": 2, + "x": 2, + "y": 1 + } + ] + } + ], + "tests": [ + { + "description": "UpdateMany using pipelines", + "operations": [ + { + "object": "collection0", + "name": "updateMany", + "arguments": { + "filter": {}, + "update": [ + { + "$project": { + "x": 1 + } + }, + { + "$addFields": { + "foo": 1 + } + } + ] + }, + "expectResult": { + "matchedCount": 2, + "modifiedCount": 2, + "upsertedCount": 0 + } + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "update": "test", + "updates": [ + { + "q": {}, + "u": [ + { + 
"$project": { + "x": 1 + } + }, + { + "$addFields": { + "foo": 1 + } + } + ], + "multi": true, + "upsert": { + "$$unsetOrMatches": false + } + } + ] + }, + "commandName": "update", + "databaseName": "crud-tests" + } + } + ] + } + ], + "outcome": [ + { + "collectionName": "test", + "databaseName": "crud-tests", + "documents": [ + { + "_id": 1, + "x": 1, + "foo": 1 + }, + { + "_id": 2, + "x": 2, + "foo": 1 + } + ] + } + ] + } + ] +} diff --git a/test/crud/unified/updateOne-pipeline.json b/test/crud/unified/updateOne-pipeline.json new file mode 100644 index 0000000000..1348c6b53b --- /dev/null +++ b/test/crud/unified/updateOne-pipeline.json @@ -0,0 +1,150 @@ +{ + "description": "updateOne-pipeline", + "schemaVersion": "1.0", + "runOnRequirements": [ + { + "minServerVersion": "4.1.11" + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "database0", + "client": "client0", + "databaseName": "crud-tests" + } + }, + { + "collection": { + "id": "collection0", + "database": "database0", + "collectionName": "test" + } + } + ], + "initialData": [ + { + "collectionName": "test", + "databaseName": "crud-tests", + "documents": [ + { + "_id": 1, + "x": 1, + "y": 1, + "t": { + "u": { + "v": 1 + } + } + }, + { + "_id": 2, + "x": 2, + "y": 1 + } + ] + } + ], + "tests": [ + { + "description": "UpdateOne using pipelines", + "operations": [ + { + "object": "collection0", + "name": "updateOne", + "arguments": { + "filter": { + "_id": 1 + }, + "update": [ + { + "$replaceRoot": { + "newRoot": "$t" + } + }, + { + "$addFields": { + "foo": 1 + } + } + ] + }, + "expectResult": { + "matchedCount": 1, + "modifiedCount": 1, + "upsertedCount": 0 + } + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "update": "test", + "updates": [ + { + "q": { + "_id": 1 + }, + "u": [ + { + "$replaceRoot": { + "newRoot": "$t" + } + }, + { + "$addFields": { + "foo": 1 + } + } + ], + "multi": { + "$$unsetOrMatches": false + }, + "upsert": { + "$$unsetOrMatches": false + } + } + ] + }, + "commandName": "update", + "databaseName": "crud-tests" + } + } + ] + } + ], + "outcome": [ + { + "collectionName": "test", + "databaseName": "crud-tests", + "documents": [ + { + "_id": 1, + "u": { + "v": 1 + }, + "foo": 1 + }, + { + "_id": 2, + "x": 2, + "y": 1 + } + ] + } + ] + } + ] +} diff --git a/test/crud/unified/updateWithPipelines.json b/test/crud/unified/updateWithPipelines.json deleted file mode 100644 index 164f2f6a19..0000000000 --- a/test/crud/unified/updateWithPipelines.json +++ /dev/null @@ -1,494 +0,0 @@ -{ - "description": "updateWithPipelines", - "schemaVersion": "1.0", - "runOnRequirements": [ - { - "minServerVersion": "4.1.11" - } - ], - "createEntities": [ - { - "client": { - "id": "client0", - "observeEvents": [ - "commandStartedEvent" - ] - } - }, - { - "database": { - "id": "database0", - "client": "client0", - "databaseName": "crud-tests" - } - }, - { - "collection": { - "id": "collection0", - "database": "database0", - "collectionName": "test" - } - } - ], - "initialData": [ - { - "collectionName": "test", - "databaseName": "crud-tests", - "documents": [ - { - "_id": 1, - "x": 1, - "y": 1, - "t": { - "u": { - "v": 1 - } - } - }, - { - "_id": 2, - "x": 2, - "y": 1 - } - ] - } - ], - "tests": [ - { - "description": "UpdateOne using pipelines", - "operations": [ - { - "object": "collection0", - "name": "updateOne", - "arguments": { - "filter": { - "_id": 1 - }, - 
"update": [ - { - "$replaceRoot": { - "newRoot": "$t" - } - }, - { - "$addFields": { - "foo": 1 - } - } - ] - }, - "expectResult": { - "matchedCount": 1, - "modifiedCount": 1, - "upsertedCount": 0 - } - } - ], - "expectEvents": [ - { - "client": "client0", - "events": [ - { - "commandStartedEvent": { - "command": { - "update": "test", - "updates": [ - { - "q": { - "_id": 1 - }, - "u": [ - { - "$replaceRoot": { - "newRoot": "$t" - } - }, - { - "$addFields": { - "foo": 1 - } - } - ], - "multi": { - "$$unsetOrMatches": false - }, - "upsert": { - "$$unsetOrMatches": false - } - } - ] - }, - "commandName": "update", - "databaseName": "crud-tests" - } - } - ] - } - ], - "outcome": [ - { - "collectionName": "test", - "databaseName": "crud-tests", - "documents": [ - { - "_id": 1, - "u": { - "v": 1 - }, - "foo": 1 - }, - { - "_id": 2, - "x": 2, - "y": 1 - } - ] - } - ] - }, - { - "description": "UpdateMany using pipelines", - "operations": [ - { - "object": "collection0", - "name": "updateMany", - "arguments": { - "filter": {}, - "update": [ - { - "$project": { - "x": 1 - } - }, - { - "$addFields": { - "foo": 1 - } - } - ] - }, - "expectResult": { - "matchedCount": 2, - "modifiedCount": 2, - "upsertedCount": 0 - } - } - ], - "expectEvents": [ - { - "client": "client0", - "events": [ - { - "commandStartedEvent": { - "command": { - "update": "test", - "updates": [ - { - "q": {}, - "u": [ - { - "$project": { - "x": 1 - } - }, - { - "$addFields": { - "foo": 1 - } - } - ], - "multi": true, - "upsert": { - "$$unsetOrMatches": false - } - } - ] - }, - "commandName": "update", - "databaseName": "crud-tests" - } - } - ] - } - ], - "outcome": [ - { - "collectionName": "test", - "databaseName": "crud-tests", - "documents": [ - { - "_id": 1, - "x": 1, - "foo": 1 - }, - { - "_id": 2, - "x": 2, - "foo": 1 - } - ] - } - ] - }, - { - "description": "FindOneAndUpdate using pipelines", - "operations": [ - { - "object": "collection0", - "name": "findOneAndUpdate", - "arguments": { - "filter": { - "_id": 1 - }, - "update": [ - { - "$project": { - "x": 1 - } - }, - { - "$addFields": { - "foo": 1 - } - } - ] - } - } - ], - "expectEvents": [ - { - "client": "client0", - "events": [ - { - "commandStartedEvent": { - "command": { - "findAndModify": "test", - "update": [ - { - "$project": { - "x": 1 - } - }, - { - "$addFields": { - "foo": 1 - } - } - ] - }, - "commandName": "findAndModify", - "databaseName": "crud-tests" - } - } - ] - } - ], - "outcome": [ - { - "collectionName": "test", - "databaseName": "crud-tests", - "documents": [ - { - "_id": 1, - "x": 1, - "foo": 1 - }, - { - "_id": 2, - "x": 2, - "y": 1 - } - ] - } - ] - }, - { - "description": "UpdateOne in bulk write using pipelines", - "operations": [ - { - "object": "collection0", - "name": "bulkWrite", - "arguments": { - "requests": [ - { - "updateOne": { - "filter": { - "_id": 1 - }, - "update": [ - { - "$replaceRoot": { - "newRoot": "$t" - } - }, - { - "$addFields": { - "foo": 1 - } - } - ] - } - } - ] - }, - "expectResult": { - "matchedCount": 1, - "modifiedCount": 1, - "upsertedCount": 0 - } - } - ], - "expectEvents": [ - { - "client": "client0", - "events": [ - { - "commandStartedEvent": { - "command": { - "update": "test", - "updates": [ - { - "q": { - "_id": 1 - }, - "u": [ - { - "$replaceRoot": { - "newRoot": "$t" - } - }, - { - "$addFields": { - "foo": 1 - } - } - ], - "multi": { - "$$unsetOrMatches": false - }, - "upsert": { - "$$unsetOrMatches": false - } - } - ] - }, - "commandName": "update", - "databaseName": "crud-tests" - } - } - ] - } - ], - 
"outcome": [ - { - "collectionName": "test", - "databaseName": "crud-tests", - "documents": [ - { - "_id": 1, - "u": { - "v": 1 - }, - "foo": 1 - }, - { - "_id": 2, - "x": 2, - "y": 1 - } - ] - } - ] - }, - { - "description": "UpdateMany in bulk write using pipelines", - "operations": [ - { - "object": "collection0", - "name": "bulkWrite", - "arguments": { - "requests": [ - { - "updateMany": { - "filter": {}, - "update": [ - { - "$project": { - "x": 1 - } - }, - { - "$addFields": { - "foo": 1 - } - } - ] - } - } - ] - }, - "expectResult": { - "matchedCount": 2, - "modifiedCount": 2, - "upsertedCount": 0 - } - } - ], - "expectEvents": [ - { - "client": "client0", - "events": [ - { - "commandStartedEvent": { - "command": { - "update": "test", - "updates": [ - { - "q": {}, - "u": [ - { - "$project": { - "x": 1 - } - }, - { - "$addFields": { - "foo": 1 - } - } - ], - "multi": true, - "upsert": { - "$$unsetOrMatches": false - } - } - ] - }, - "commandName": "update", - "databaseName": "crud-tests" - } - } - ] - } - ], - "outcome": [ - { - "collectionName": "test", - "databaseName": "crud-tests", - "documents": [ - { - "_id": 1, - "x": 1, - "foo": 1 - }, - { - "_id": 2, - "x": 2, - "foo": 1 - } - ] - } - ] - } - ] -} diff --git a/test/csot/deprecated-options.json b/test/csot/deprecated-options.json index d3e4631ff4..647e1bf792 100644 --- a/test/csot/deprecated-options.json +++ b/test/csot/deprecated-options.json @@ -6750,16 +6750,23 @@ } } }, + { + "name": "createIndex", + "object": "collection", + "arguments": { + "keys": { + "x": 1 + }, + "timeoutMS": 100000, + "name": "x_1" + } + }, { "name": "dropIndex", "object": "collection", "arguments": { "timeoutMS": 100000, "name": "x_1" - }, - "expectError": { - "isClientError": false, - "isTimeoutError": false } } ] @@ -6815,16 +6822,23 @@ ] } }, + { + "name": "createIndex", + "object": "collection", + "arguments": { + "keys": { + "x": 1 + }, + "timeoutMS": 100000, + "name": "x_1" + } + }, { "name": "dropIndex", "object": "collection", "arguments": { "timeoutMS": 100000, "name": "x_1" - }, - "expectError": { - "isClientError": false, - "isTimeoutError": false } } ], @@ -6832,6 +6846,12 @@ { "client": "client", "events": [ + { + "commandStartedEvent": { + "commandName": "createIndexes", + "databaseName": "test" + } + }, { "commandStartedEvent": { "commandName": "dropIndexes", @@ -6903,6 +6923,16 @@ ] } }, + { + "name": "createIndex", + "object": "collection", + "arguments": { + "keys": { + "x": 1 + }, + "name": "x_1" + } + }, { "name": "dropIndex", "object": "collection", @@ -6910,10 +6940,6 @@ "timeoutMS": 1000, "maxTimeMS": 5000, "name": "x_1" - }, - "expectError": { - "isClientError": false, - "isTimeoutError": false } } ], @@ -6921,6 +6947,12 @@ { "client": "client", "events": [ + { + "commandStartedEvent": { + "commandName": "createIndexes", + "databaseName": "test" + } + }, { "commandStartedEvent": { "commandName": "dropIndexes", @@ -7003,6 +7035,17 @@ } } }, + { + "name": "createIndex", + "object": "collection", + "arguments": { + "keys": { + "x": 1 + }, + "name": "x_1", + "timeoutMS": 100000 + } + }, { "name": "dropIndexes", "object": "collection", diff --git a/test/csot/global-timeoutMS.json b/test/csot/global-timeoutMS.json index 740bbad2e2..f1edbe68e3 100644 --- a/test/csot/global-timeoutMS.json +++ b/test/csot/global-timeoutMS.json @@ -5621,15 +5621,21 @@ } } }, + { + "name": "createIndex", + "object": "collection", + "arguments": { + "keys": { + "x": 1 + }, + "name": "x_1" + } + }, { "name": "dropIndex", "object": "collection", 
"arguments": { "name": "x_1" - }, - "expectError": { - "isClientError": false, - "isTimeoutError": false } } ], @@ -5637,6 +5643,12 @@ { "client": "client", "events": [ + { + "commandStartedEvent": { + "commandName": "createIndexes", + "databaseName": "test" + } + }, { "commandStartedEvent": { "commandName": "dropIndexes", diff --git a/test/csot/override-operation-timeoutMS.json b/test/csot/override-operation-timeoutMS.json index 6fa0bd802a..f33f876137 100644 --- a/test/csot/override-operation-timeoutMS.json +++ b/test/csot/override-operation-timeoutMS.json @@ -3378,15 +3378,23 @@ } } }, + { + "name": "createIndex", + "object": "collection", + "arguments": { + "keys": { + "x": 1 + }, + "timeoutMS": 1000, + "name": "x_1" + } + }, { "name": "dropIndex", "object": "collection", "arguments": { "timeoutMS": 1000, "name": "x_1" - }, - "expectError": { - "isTimeoutError": false } } ], @@ -3394,6 +3402,12 @@ { "client": "client", "events": [ + { + "commandStartedEvent": { + "commandName": "createIndexes", + "databaseName": "test" + } + }, { "commandStartedEvent": { "commandName": "dropIndexes", @@ -3436,15 +3450,23 @@ } } }, + { + "name": "createIndex", + "object": "collection", + "arguments": { + "keys": { + "x": 1 + }, + "timeoutMS": 0, + "name": "x_1" + } + }, { "name": "dropIndex", "object": "collection", "arguments": { "timeoutMS": 0, "name": "x_1" - }, - "expectError": { - "isTimeoutError": false } } ], @@ -3452,6 +3474,12 @@ { "client": "client", "events": [ + { + "commandStartedEvent": { + "commandName": "createIndexes", + "databaseName": "test" + } + }, { "commandStartedEvent": { "commandName": "dropIndexes", diff --git a/test/csot/runCursorCommand.json b/test/csot/runCursorCommand.json new file mode 100644 index 0000000000..36f774fb5a --- /dev/null +++ b/test/csot/runCursorCommand.json @@ -0,0 +1,583 @@ +{ + "description": "runCursorCommand", + "schemaVersion": "1.9", + "runOnRequirements": [ + { + "minServerVersion": "4.4" + } + ], + "createEntities": [ + { + "client": { + "id": "failPointClient", + "useMultipleMongoses": false + } + }, + { + "client": { + "id": "commandClient", + "useMultipleMongoses": false, + "observeEvents": [ + "commandStartedEvent", + "commandSucceededEvent" + ] + } + }, + { + "client": { + "id": "client", + "useMultipleMongoses": false, + "observeEvents": [ + "commandStartedEvent" + ], + "ignoreCommandMonitoringEvents": [ + "killCursors" + ] + } + }, + { + "database": { + "id": "commandDb", + "client": "commandClient", + "databaseName": "commandDb" + } + }, + { + "database": { + "id": "db", + "client": "client", + "databaseName": "db" + } + }, + { + "collection": { + "id": "collection", + "database": "db", + "collectionName": "collection" + } + } + ], + "initialData": [ + { + "collectionName": "collection", + "databaseName": "db", + "documents": [ + { + "_id": 1, + "x": 11 + }, + { + "_id": 2, + "x": 22 + }, + { + "_id": 3, + "x": 33 + }, + { + "_id": 4, + "x": 44 + }, + { + "_id": 5, + "x": 55 + } + ] + } + ], + "tests": [ + { + "description": "errors if timeoutMode is set without timeoutMS", + "operations": [ + { + "name": "runCursorCommand", + "object": "db", + "arguments": { + "commandName": "find", + "command": { + "find": "collection" + }, + "timeoutMode": "cursorLifetime" + }, + "expectError": { + "isClientError": true + } + } + ] + }, + { + "description": "error if timeoutMode is cursorLifetime and cursorType is tailableAwait", + "operations": [ + { + "name": "runCursorCommand", + "object": "db", + "arguments": { + "commandName": "find", + "command": { + 
"find": "collection" + }, + "timeoutMode": "cursorLifetime", + "cursorType": "tailableAwait" + }, + "expectError": { + "isClientError": true + } + } + ] + }, + { + "description": "Non-tailable cursor lifetime remaining timeoutMS applied to getMore if timeoutMode is unset", + "runOnRequirements": [ + { + "serverless": "forbid" + } + ], + "operations": [ + { + "name": "failPoint", + "object": "testRunner", + "arguments": { + "client": "failPointClient", + "failPoint": { + "configureFailPoint": "failCommand", + "mode": { + "times": 2 + }, + "data": { + "failCommands": [ + "find", + "getMore" + ], + "blockConnection": true, + "blockTimeMS": 60 + } + } + } + }, + { + "name": "runCursorCommand", + "object": "db", + "arguments": { + "commandName": "find", + "timeoutMS": 100, + "command": { + "find": "collection", + "batchSize": 2 + } + }, + "expectError": { + "isTimeoutError": true + } + } + ], + "expectEvents": [ + { + "client": "client", + "events": [ + { + "commandStartedEvent": { + "commandName": "find", + "command": { + "find": "collection", + "maxTimeMS": { + "$$type": [ + "int", + "long" + ] + } + } + } + }, + { + "commandStartedEvent": { + "commandName": "getMore", + "command": { + "getMore": { + "$$type": [ + "int", + "long" + ] + }, + "collection": "collection", + "maxTimeMS": { + "$$exists": false + } + } + } + } + ] + } + ] + }, + { + "description": "Non-tailable cursor iteration timeoutMS is refreshed for getMore if timeoutMode is iteration - failure", + "runOnRequirements": [ + { + "serverless": "forbid" + } + ], + "operations": [ + { + "name": "failPoint", + "object": "testRunner", + "arguments": { + "client": "failPointClient", + "failPoint": { + "configureFailPoint": "failCommand", + "mode": { + "times": 1 + }, + "data": { + "failCommands": [ + "getMore" + ], + "blockConnection": true, + "blockTimeMS": 60 + } + } + } + }, + { + "name": "runCursorCommand", + "object": "db", + "arguments": { + "commandName": "find", + "command": { + "find": "collection", + "batchSize": 2 + }, + "timeoutMode": "iteration", + "timeoutMS": 100, + "batchSize": 2 + }, + "expectError": { + "isTimeoutError": true + } + } + ], + "expectEvents": [ + { + "client": "client", + "events": [ + { + "commandStartedEvent": { + "commandName": "find", + "databaseName": "db", + "command": { + "find": "collection", + "maxTimeMS": { + "$$exists": false + } + } + } + }, + { + "commandStartedEvent": { + "commandName": "getMore", + "databaseName": "db", + "command": { + "getMore": { + "$$type": [ + "int", + "long" + ] + }, + "collection": "collection", + "maxTimeMS": { + "$$exists": false + } + } + } + } + ] + } + ] + }, + { + "description": "Tailable cursor iteration timeoutMS is refreshed for getMore - failure", + "runOnRequirements": [ + { + "serverless": "forbid" + } + ], + "operations": [ + { + "name": "failPoint", + "object": "testRunner", + "arguments": { + "client": "failPointClient", + "failPoint": { + "configureFailPoint": "failCommand", + "mode": { + "times": 1 + }, + "data": { + "failCommands": [ + "getMore" + ], + "blockConnection": true, + "blockTimeMS": 60 + } + } + } + }, + { + "name": "dropCollection", + "object": "db", + "arguments": { + "collection": "cappedCollection" + } + }, + { + "name": "createCollection", + "object": "db", + "arguments": { + "collection": "cappedCollection", + "capped": true, + "size": 4096, + "max": 3 + }, + "saveResultAsEntity": "cappedCollection" + }, + { + "name": "insertMany", + "object": "cappedCollection", + "arguments": { + "documents": [ + { + "_id": 1, + "x": 11 + }, + { 
+ "_id": 2, + "x": 22 + } + ] + } + }, + { + "name": "createCommandCursor", + "object": "db", + "arguments": { + "commandName": "find", + "command": { + "find": "cappedCollection", + "batchSize": 1, + "tailable": true + }, + "timeoutMode": "iteration", + "timeoutMS": 100, + "batchSize": 1, + "cursorType": "tailable" + }, + "saveResultAsEntity": "tailableCursor" + }, + { + "name": "iterateUntilDocumentOrError", + "object": "tailableCursor" + }, + { + "name": "iterateUntilDocumentOrError", + "object": "tailableCursor", + "expectError": { + "isTimeoutError": true + } + } + ], + "expectEvents": [ + { + "client": "client", + "events": [ + { + "commandStartedEvent": { + "commandName": "drop" + } + }, + { + "commandStartedEvent": { + "commandName": "create" + } + }, + { + "commandStartedEvent": { + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "commandName": "find", + "databaseName": "db", + "command": { + "find": "cappedCollection", + "tailable": true, + "awaitData": { + "$$exists": false + }, + "maxTimeMS": { + "$$exists": false + } + } + } + }, + { + "commandStartedEvent": { + "commandName": "getMore", + "databaseName": "db", + "command": { + "getMore": { + "$$type": [ + "int", + "long" + ] + }, + "collection": "cappedCollection", + "maxTimeMS": { + "$$exists": false + } + } + } + } + ] + } + ] + }, + { + "description": "Tailable cursor awaitData iteration timeoutMS is refreshed for getMore - failure", + "runOnRequirements": [ + { + "serverless": "forbid" + } + ], + "operations": [ + { + "name": "failPoint", + "object": "testRunner", + "arguments": { + "client": "failPointClient", + "failPoint": { + "configureFailPoint": "failCommand", + "mode": { + "times": 1 + }, + "data": { + "failCommands": [ + "getMore" + ], + "blockConnection": true, + "blockTimeMS": 60 + } + } + } + }, + { + "name": "dropCollection", + "object": "db", + "arguments": { + "collection": "cappedCollection" + } + }, + { + "name": "createCollection", + "object": "db", + "arguments": { + "collection": "cappedCollection", + "capped": true, + "size": 4096, + "max": 3 + }, + "saveResultAsEntity": "cappedCollection" + }, + { + "name": "insertMany", + "object": "cappedCollection", + "arguments": { + "documents": [ + { + "foo": "bar" + }, + { + "fizz": "buzz" + } + ] + } + }, + { + "name": "createCommandCursor", + "object": "db", + "arguments": { + "command": { + "find": "cappedCollection", + "tailable": true, + "awaitData": true + }, + "cursorType": "tailableAwait", + "batchSize": 1 + }, + "saveResultAsEntity": "tailableCursor" + }, + { + "name": "iterateUntilDocumentOrError", + "object": "tailableCursor" + }, + { + "name": "iterateUntilDocumentOrError", + "object": "tailableCursor", + "expectError": { + "isTimeoutError": true + } + } + ], + "expectEvents": [ + { + "client": "client", + "events": [ + { + "commandStartedEvent": { + "commandName": "drop" + } + }, + { + "commandStartedEvent": { + "commandName": "create" + } + }, + { + "commandStartedEvent": { + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "commandName": "find", + "databaseName": "db", + "command": { + "find": "cappedCollection", + "tailable": true, + "awaitData": true, + "maxTimeMS": { + "$$exists": true + } + } + } + }, + { + "commandStartedEvent": { + "commandName": "getMore", + "databaseName": "db", + "command": { + "getMore": { + "$$type": [ + "int", + "long" + ] + }, + "collection": "cappedCollection" + } + } + } + ] + } + ] + } + ] +} diff --git a/test/csot/tailable-awaitData.json b/test/csot/tailable-awaitData.json index 
535fb69243..80e95ca906 100644 --- a/test/csot/tailable-awaitData.json +++ b/test/csot/tailable-awaitData.json @@ -3,7 +3,8 @@ "schemaVersion": "1.9", "runOnRequirements": [ { - "minServerVersion": "4.4" + "minServerVersion": "4.4", + "serverless": "forbid" } ], "createEntities": [ @@ -77,7 +78,7 @@ ] }, { - "description": "error if maxAwaitTimeMS is greater than timeoutMS", + "description": "error on find if maxAwaitTimeMS is greater than timeoutMS", "operations": [ { "name": "find", @@ -89,13 +90,50 @@ "maxAwaitTimeMS": 10 }, "expectError": { - "isClientError": true + "isClientError": true, + "isTimeoutError": false + } + } + ] + }, + { + "description": "error on aggregate if maxAwaitTimeMS is greater than timeoutMS", + "operations": [ + { + "name": "aggregate", + "object": "collection", + "arguments": { + "pipeline": [], + "timeoutMS": 5, + "maxAwaitTimeMS": 10 + }, + "expectError": { + "isClientError": true, + "isTimeoutError": false } } ] }, { - "description": "error if maxAwaitTimeMS is equal to timeoutMS", + "description": "error on watch if maxAwaitTimeMS is greater than timeoutMS", + "operations": [ + { + "name": "createChangeStream", + "object": "collection", + "arguments": { + "pipeline": [], + "timeoutMS": 5, + "maxAwaitTimeMS": 10 + }, + "expectError": { + "isClientError": true, + "isTimeoutError": false + } + } + ] + }, + { + "description": "error on find if maxAwaitTimeMS is equal to timeoutMS", "operations": [ { "name": "find", @@ -107,7 +145,44 @@ "maxAwaitTimeMS": 5 }, "expectError": { - "isClientError": true + "isClientError": true, + "isTimeoutError": false + } + } + ] + }, + { + "description": "error on aggregate if maxAwaitTimeMS is equal to timeoutMS", + "operations": [ + { + "name": "aggregate", + "object": "collection", + "arguments": { + "pipeline": [], + "timeoutMS": 5, + "maxAwaitTimeMS": 5 + }, + "expectError": { + "isClientError": true, + "isTimeoutError": false + } + } + ] + }, + { + "description": "error on watch if maxAwaitTimeMS is equal to timeoutMS", + "operations": [ + { + "name": "createChangeStream", + "object": "collection", + "arguments": { + "pipeline": [], + "timeoutMS": 5, + "maxAwaitTimeMS": 5 + }, + "expectError": { + "isClientError": true, + "isTimeoutError": false } } ] @@ -417,6 +492,141 @@ ] } ] + }, + { + "description": "apply remaining timeoutMS if less than maxAwaitTimeMS", + "operations": [ + { + "name": "failPoint", + "object": "testRunner", + "arguments": { + "client": "failPointClient", + "failPoint": { + "configureFailPoint": "failCommand", + "mode": { + "times": 1 + }, + "data": { + "failCommands": [ + "getMore" + ], + "blockConnection": true, + "blockTimeMS": 30 + } + } + } + }, + { + "name": "createFindCursor", + "object": "collection", + "arguments": { + "filter": { + "_id": 1 + }, + "cursorType": "tailableAwait", + "batchSize": 1, + "maxAwaitTimeMS": 100, + "timeoutMS": 200 + }, + "saveResultAsEntity": "tailableCursor" + }, + { + "name": "iterateOnce", + "object": "tailableCursor" + }, + { + "name": "iterateUntilDocumentOrError", + "object": "tailableCursor", + "expectError": { + "isTimeoutError": true + } + } + ], + "expectEvents": [ + { + "client": "client", + "ignoreExtraEvents": true, + "events": [ + { + "commandStartedEvent": { + "commandName": "find", + "databaseName": "test" + } + }, + { + "commandStartedEvent": { + "commandName": "getMore", + "databaseName": "test", + "command": { + "maxTimeMS": { + "$$lte": 100 + } + } + } + }, + { + "commandStartedEvent": { + "commandName": "getMore", + "databaseName": "test", + 
"command": { + "maxTimeMS": { + "$$lte": 70 + } + } + } + } + ] + } + ] + }, + { + "description": "apply maxAwaitTimeMS if less than remaining timeout", + "operations": [ + { + "name": "createFindCursor", + "object": "collection", + "arguments": { + "filter": {}, + "cursorType": "tailableAwait", + "batchSize": 1, + "maxAwaitTimeMS": 100, + "timeoutMS": 200 + }, + "saveResultAsEntity": "tailableCursor" + }, + { + "name": "iterateOnce", + "object": "tailableCursor" + }, + { + "name": "iterateOnce", + "object": "tailableCursor" + } + ], + "expectEvents": [ + { + "client": "client", + "events": [ + { + "commandStartedEvent": { + "commandName": "find", + "databaseName": "test" + } + }, + { + "commandStartedEvent": { + "commandName": "getMore", + "databaseName": "test", + "command": { + "maxTimeMS": { + "$$lte": 100 + } + } + } + } + ] + } + ] } ] } diff --git a/test/csot/waitQueueTimeout.json b/test/csot/waitQueueTimeout.json new file mode 100644 index 0000000000..138d5cc161 --- /dev/null +++ b/test/csot/waitQueueTimeout.json @@ -0,0 +1,176 @@ +{ + "description": "WaitQueueTimeoutError does not clear the pool", + "schemaVersion": "1.9", + "runOnRequirements": [ + { + "minServerVersion": "4.4", + "topologies": [ + "single", + "replicaset", + "sharded" + ] + } + ], + "createEntities": [ + { + "client": { + "id": "failPointClient", + "useMultipleMongoses": false + } + }, + { + "client": { + "id": "client", + "uriOptions": { + "maxPoolSize": 1, + "appname": "waitQueueTimeoutErrorTest" + }, + "useMultipleMongoses": false, + "observeEvents": [ + "commandStartedEvent", + "poolClearedEvent" + ] + } + }, + { + "database": { + "id": "database", + "client": "client", + "databaseName": "test" + } + } + ], + "tests": [ + { + "description": "WaitQueueTimeoutError does not clear the pool", + "operations": [ + { + "name": "failPoint", + "object": "testRunner", + "arguments": { + "client": "failPointClient", + "failPoint": { + "configureFailPoint": "failCommand", + "mode": { + "times": 1 + }, + "data": { + "failCommands": [ + "ping" + ], + "blockConnection": true, + "blockTimeMS": 500, + "appName": "waitQueueTimeoutErrorTest" + } + } + } + }, + { + "name": "createEntities", + "object": "testRunner", + "arguments": { + "entities": [ + { + "thread": { + "id": "thread0" + } + } + ] + } + }, + { + "name": "runOnThread", + "object": "testRunner", + "arguments": { + "thread": "thread0", + "operation": { + "name": "runCommand", + "object": "database", + "arguments": { + "command": { + "ping": 1 + }, + "commandName": "ping" + } + } + } + }, + { + "name": "waitForEvent", + "object": "testRunner", + "arguments": { + "client": "client", + "event": { + "commandStartedEvent": { + "commandName": "ping" + } + }, + "count": 1 + } + }, + { + "name": "runCommand", + "object": "database", + "arguments": { + "timeoutMS": 100, + "command": { + "hello": 1 + }, + "commandName": "hello" + }, + "expectError": { + "isTimeoutError": true + } + }, + { + "name": "waitForThread", + "object": "testRunner", + "arguments": { + "thread": "thread0" + } + }, + { + "name": "runCommand", + "object": "database", + "arguments": { + "command": { + "hello": 1 + }, + "commandName": "hello" + } + } + ], + "expectEvents": [ + { + "client": "client", + "eventType": "command", + "events": [ + { + "commandStartedEvent": { + "commandName": "ping", + "databaseName": "test", + "command": { + "ping": 1 + } + } + }, + { + "commandStartedEvent": { + "commandName": "hello", + "databaseName": "test", + "command": { + "hello": 1 + } + } + } + ] + }, + { + "client": 
"client", + "eventType": "cmap", + "events": [] + } + ] + } + ] +} diff --git a/test/discovery_and_monitoring/errors/pre-42-InterruptedAtShutdown.json b/test/discovery_and_monitoring/errors/pre-42-InterruptedAtShutdown.json deleted file mode 100644 index 9f6ea212e5..0000000000 --- a/test/discovery_and_monitoring/errors/pre-42-InterruptedAtShutdown.json +++ /dev/null @@ -1,70 +0,0 @@ -{ - "description": "Pre-4.2 InterruptedAtShutdown error", - "uri": "mongodb://a/?replicaSet=rs", - "phases": [ - { - "description": "Primary A is discovered", - "responses": [ - [ - "a:27017", - { - "ok": 1, - "helloOk": true, - "isWritablePrimary": true, - "hosts": [ - "a:27017" - ], - "setName": "rs", - "minWireVersion": 0, - "maxWireVersion": 7 - } - ] - ], - "outcome": { - "servers": { - "a:27017": { - "type": "RSPrimary", - "setName": "rs", - "topologyVersion": null, - "pool": { - "generation": 0 - } - } - }, - "topologyType": "ReplicaSetWithPrimary", - "logicalSessionTimeoutMinutes": null, - "setName": "rs" - } - }, - { - "description": "Pre-4.2 InterruptedAtShutdown error marks server Unknown and clears the pool", - "applicationErrors": [ - { - "address": "a:27017", - "when": "afterHandshakeCompletes", - "maxWireVersion": 7, - "type": "command", - "response": { - "ok": 0, - "errmsg": "InterruptedAtShutdown", - "code": 11600 - } - } - ], - "outcome": { - "servers": { - "a:27017": { - "type": "Unknown", - "topologyVersion": null, - "pool": { - "generation": 1 - } - } - }, - "topologyType": "ReplicaSetNoPrimary", - "logicalSessionTimeoutMinutes": null, - "setName": "rs" - } - } - ] -} diff --git a/test/discovery_and_monitoring/errors/pre-42-InterruptedDueToReplStateChange.json b/test/discovery_and_monitoring/errors/pre-42-InterruptedDueToReplStateChange.json deleted file mode 100644 index 7e5f235713..0000000000 --- a/test/discovery_and_monitoring/errors/pre-42-InterruptedDueToReplStateChange.json +++ /dev/null @@ -1,70 +0,0 @@ -{ - "description": "Pre-4.2 InterruptedDueToReplStateChange error", - "uri": "mongodb://a/?replicaSet=rs", - "phases": [ - { - "description": "Primary A is discovered", - "responses": [ - [ - "a:27017", - { - "ok": 1, - "helloOk": true, - "isWritablePrimary": true, - "hosts": [ - "a:27017" - ], - "setName": "rs", - "minWireVersion": 0, - "maxWireVersion": 7 - } - ] - ], - "outcome": { - "servers": { - "a:27017": { - "type": "RSPrimary", - "setName": "rs", - "topologyVersion": null, - "pool": { - "generation": 0 - } - } - }, - "topologyType": "ReplicaSetWithPrimary", - "logicalSessionTimeoutMinutes": null, - "setName": "rs" - } - }, - { - "description": "Pre-4.2 InterruptedDueToReplStateChange error marks server Unknown and clears the pool", - "applicationErrors": [ - { - "address": "a:27017", - "when": "afterHandshakeCompletes", - "maxWireVersion": 7, - "type": "command", - "response": { - "ok": 0, - "errmsg": "InterruptedDueToReplStateChange", - "code": 11602 - } - } - ], - "outcome": { - "servers": { - "a:27017": { - "type": "Unknown", - "topologyVersion": null, - "pool": { - "generation": 1 - } - } - }, - "topologyType": "ReplicaSetNoPrimary", - "logicalSessionTimeoutMinutes": null, - "setName": "rs" - } - } - ] -} diff --git a/test/discovery_and_monitoring/errors/pre-42-LegacyNotPrimary.json b/test/discovery_and_monitoring/errors/pre-42-LegacyNotPrimary.json deleted file mode 100644 index 1635f1a856..0000000000 --- a/test/discovery_and_monitoring/errors/pre-42-LegacyNotPrimary.json +++ /dev/null @@ -1,70 +0,0 @@ -{ - "description": "Pre-4.2 LegacyNotPrimary error", - "uri": 
"mongodb://a/?replicaSet=rs", - "phases": [ - { - "description": "Primary A is discovered", - "responses": [ - [ - "a:27017", - { - "ok": 1, - "helloOk": true, - "isWritablePrimary": true, - "hosts": [ - "a:27017" - ], - "setName": "rs", - "minWireVersion": 0, - "maxWireVersion": 7 - } - ] - ], - "outcome": { - "servers": { - "a:27017": { - "type": "RSPrimary", - "setName": "rs", - "topologyVersion": null, - "pool": { - "generation": 0 - } - } - }, - "topologyType": "ReplicaSetWithPrimary", - "logicalSessionTimeoutMinutes": null, - "setName": "rs" - } - }, - { - "description": "Pre-4.2 LegacyNotPrimary error marks server Unknown and clears the pool", - "applicationErrors": [ - { - "address": "a:27017", - "when": "afterHandshakeCompletes", - "maxWireVersion": 7, - "type": "command", - "response": { - "ok": 0, - "errmsg": "LegacyNotPrimary", - "code": 10058 - } - } - ], - "outcome": { - "servers": { - "a:27017": { - "type": "Unknown", - "topologyVersion": null, - "pool": { - "generation": 1 - } - } - }, - "topologyType": "ReplicaSetNoPrimary", - "logicalSessionTimeoutMinutes": null, - "setName": "rs" - } - } - ] -} diff --git a/test/discovery_and_monitoring/errors/pre-42-NotPrimaryNoSecondaryOk.json b/test/discovery_and_monitoring/errors/pre-42-NotPrimaryNoSecondaryOk.json deleted file mode 100644 index 0e70ede02c..0000000000 --- a/test/discovery_and_monitoring/errors/pre-42-NotPrimaryNoSecondaryOk.json +++ /dev/null @@ -1,70 +0,0 @@ -{ - "description": "Pre-4.2 NotPrimaryNoSecondaryOk error", - "uri": "mongodb://a/?replicaSet=rs", - "phases": [ - { - "description": "Primary A is discovered", - "responses": [ - [ - "a:27017", - { - "ok": 1, - "helloOk": true, - "isWritablePrimary": true, - "hosts": [ - "a:27017" - ], - "setName": "rs", - "minWireVersion": 0, - "maxWireVersion": 7 - } - ] - ], - "outcome": { - "servers": { - "a:27017": { - "type": "RSPrimary", - "setName": "rs", - "topologyVersion": null, - "pool": { - "generation": 0 - } - } - }, - "topologyType": "ReplicaSetWithPrimary", - "logicalSessionTimeoutMinutes": null, - "setName": "rs" - } - }, - { - "description": "Pre-4.2 NotPrimaryNoSecondaryOk error marks server Unknown and clears the pool", - "applicationErrors": [ - { - "address": "a:27017", - "when": "afterHandshakeCompletes", - "maxWireVersion": 7, - "type": "command", - "response": { - "ok": 0, - "errmsg": "NotPrimaryNoSecondaryOk", - "code": 13435 - } - } - ], - "outcome": { - "servers": { - "a:27017": { - "type": "Unknown", - "topologyVersion": null, - "pool": { - "generation": 1 - } - } - }, - "topologyType": "ReplicaSetNoPrimary", - "logicalSessionTimeoutMinutes": null, - "setName": "rs" - } - } - ] -} diff --git a/test/discovery_and_monitoring/errors/pre-42-NotPrimaryOrSecondary.json b/test/discovery_and_monitoring/errors/pre-42-NotPrimaryOrSecondary.json deleted file mode 100644 index 3fefb21663..0000000000 --- a/test/discovery_and_monitoring/errors/pre-42-NotPrimaryOrSecondary.json +++ /dev/null @@ -1,70 +0,0 @@ -{ - "description": "Pre-4.2 NotPrimaryOrSecondary error", - "uri": "mongodb://a/?replicaSet=rs", - "phases": [ - { - "description": "Primary A is discovered", - "responses": [ - [ - "a:27017", - { - "ok": 1, - "helloOk": true, - "isWritablePrimary": true, - "hosts": [ - "a:27017" - ], - "setName": "rs", - "minWireVersion": 0, - "maxWireVersion": 7 - } - ] - ], - "outcome": { - "servers": { - "a:27017": { - "type": "RSPrimary", - "setName": "rs", - "topologyVersion": null, - "pool": { - "generation": 0 - } - } - }, - "topologyType": "ReplicaSetWithPrimary", 
- "logicalSessionTimeoutMinutes": null, - "setName": "rs" - } - }, - { - "description": "Pre-4.2 NotPrimaryOrSecondary error marks server Unknown and clears the pool", - "applicationErrors": [ - { - "address": "a:27017", - "when": "afterHandshakeCompletes", - "maxWireVersion": 7, - "type": "command", - "response": { - "ok": 0, - "errmsg": "NotPrimaryOrSecondary", - "code": 13436 - } - } - ], - "outcome": { - "servers": { - "a:27017": { - "type": "Unknown", - "topologyVersion": null, - "pool": { - "generation": 1 - } - } - }, - "topologyType": "ReplicaSetNoPrimary", - "logicalSessionTimeoutMinutes": null, - "setName": "rs" - } - } - ] -} diff --git a/test/discovery_and_monitoring/errors/pre-42-NotWritablePrimary.json b/test/discovery_and_monitoring/errors/pre-42-NotWritablePrimary.json deleted file mode 100644 index d010da0a5b..0000000000 --- a/test/discovery_and_monitoring/errors/pre-42-NotWritablePrimary.json +++ /dev/null @@ -1,70 +0,0 @@ -{ - "description": "Pre-4.2 NotWritablePrimary error", - "uri": "mongodb://a/?replicaSet=rs", - "phases": [ - { - "description": "Primary A is discovered", - "responses": [ - [ - "a:27017", - { - "ok": 1, - "helloOk": true, - "isWritablePrimary": true, - "hosts": [ - "a:27017" - ], - "setName": "rs", - "minWireVersion": 0, - "maxWireVersion": 7 - } - ] - ], - "outcome": { - "servers": { - "a:27017": { - "type": "RSPrimary", - "setName": "rs", - "topologyVersion": null, - "pool": { - "generation": 0 - } - } - }, - "topologyType": "ReplicaSetWithPrimary", - "logicalSessionTimeoutMinutes": null, - "setName": "rs" - } - }, - { - "description": "Pre-4.2 NotWritablePrimary error marks server Unknown and clears the pool", - "applicationErrors": [ - { - "address": "a:27017", - "when": "afterHandshakeCompletes", - "maxWireVersion": 7, - "type": "command", - "response": { - "ok": 0, - "errmsg": "NotWritablePrimary", - "code": 10107 - } - } - ], - "outcome": { - "servers": { - "a:27017": { - "type": "Unknown", - "topologyVersion": null, - "pool": { - "generation": 1 - } - } - }, - "topologyType": "ReplicaSetNoPrimary", - "logicalSessionTimeoutMinutes": null, - "setName": "rs" - } - } - ] -} diff --git a/test/discovery_and_monitoring/errors/pre-42-PrimarySteppedDown.json b/test/discovery_and_monitoring/errors/pre-42-PrimarySteppedDown.json deleted file mode 100644 index 02956d201d..0000000000 --- a/test/discovery_and_monitoring/errors/pre-42-PrimarySteppedDown.json +++ /dev/null @@ -1,70 +0,0 @@ -{ - "description": "Pre-4.2 PrimarySteppedDown error", - "uri": "mongodb://a/?replicaSet=rs", - "phases": [ - { - "description": "Primary A is discovered", - "responses": [ - [ - "a:27017", - { - "ok": 1, - "helloOk": true, - "isWritablePrimary": true, - "hosts": [ - "a:27017" - ], - "setName": "rs", - "minWireVersion": 0, - "maxWireVersion": 7 - } - ] - ], - "outcome": { - "servers": { - "a:27017": { - "type": "RSPrimary", - "setName": "rs", - "topologyVersion": null, - "pool": { - "generation": 0 - } - } - }, - "topologyType": "ReplicaSetWithPrimary", - "logicalSessionTimeoutMinutes": null, - "setName": "rs" - } - }, - { - "description": "Pre-4.2 PrimarySteppedDown error marks server Unknown and clears the pool", - "applicationErrors": [ - { - "address": "a:27017", - "when": "afterHandshakeCompletes", - "maxWireVersion": 7, - "type": "command", - "response": { - "ok": 0, - "errmsg": "PrimarySteppedDown", - "code": 189 - } - } - ], - "outcome": { - "servers": { - "a:27017": { - "type": "Unknown", - "topologyVersion": null, - "pool": { - "generation": 1 - } - } - }, - 
"topologyType": "ReplicaSetNoPrimary", - "logicalSessionTimeoutMinutes": null, - "setName": "rs" - } - } - ] -} diff --git a/test/discovery_and_monitoring/errors/pre-42-ShutdownInProgress.json b/test/discovery_and_monitoring/errors/pre-42-ShutdownInProgress.json deleted file mode 100644 index fc3a5aa6fe..0000000000 --- a/test/discovery_and_monitoring/errors/pre-42-ShutdownInProgress.json +++ /dev/null @@ -1,70 +0,0 @@ -{ - "description": "Pre-4.2 ShutdownInProgress error", - "uri": "mongodb://a/?replicaSet=rs", - "phases": [ - { - "description": "Primary A is discovered", - "responses": [ - [ - "a:27017", - { - "ok": 1, - "helloOk": true, - "isWritablePrimary": true, - "hosts": [ - "a:27017" - ], - "setName": "rs", - "minWireVersion": 0, - "maxWireVersion": 7 - } - ] - ], - "outcome": { - "servers": { - "a:27017": { - "type": "RSPrimary", - "setName": "rs", - "topologyVersion": null, - "pool": { - "generation": 0 - } - } - }, - "topologyType": "ReplicaSetWithPrimary", - "logicalSessionTimeoutMinutes": null, - "setName": "rs" - } - }, - { - "description": "Pre-4.2 ShutdownInProgress error marks server Unknown and clears the pool", - "applicationErrors": [ - { - "address": "a:27017", - "when": "afterHandshakeCompletes", - "maxWireVersion": 7, - "type": "command", - "response": { - "ok": 0, - "errmsg": "ShutdownInProgress", - "code": 91 - } - } - ], - "outcome": { - "servers": { - "a:27017": { - "type": "Unknown", - "topologyVersion": null, - "pool": { - "generation": 1 - } - } - }, - "topologyType": "ReplicaSetNoPrimary", - "logicalSessionTimeoutMinutes": null, - "setName": "rs" - } - } - ] -} diff --git a/test/discovery_and_monitoring/rs/new_primary.json b/test/discovery_and_monitoring/rs/new_primary.json index 1a84c69c91..69b07516b9 100644 --- a/test/discovery_and_monitoring/rs/new_primary.json +++ b/test/discovery_and_monitoring/rs/new_primary.json @@ -58,7 +58,8 @@ "servers": { "a:27017": { "type": "Unknown", - "setName": null + "setName": null, + "error": "primary marked stale due to discovery of newer primary" }, "b:27017": { "type": "RSPrimary", diff --git a/test/discovery_and_monitoring/rs/new_primary_new_electionid.json b/test/discovery_and_monitoring/rs/new_primary_new_electionid.json index 509720d445..90ef0ce8dc 100644 --- a/test/discovery_and_monitoring/rs/new_primary_new_electionid.json +++ b/test/discovery_and_monitoring/rs/new_primary_new_electionid.json @@ -76,7 +76,8 @@ "a:27017": { "type": "Unknown", "setName": null, - "electionId": null + "electionId": null, + "error": "primary marked stale due to discovery of newer primary" }, "b:27017": { "type": "RSPrimary", @@ -123,7 +124,8 @@ "a:27017": { "type": "Unknown", "setName": null, - "electionId": null + "electionId": null, + "error": "primary marked stale due to electionId/setVersion mismatch" }, "b:27017": { "type": "RSPrimary", diff --git a/test/discovery_and_monitoring/rs/new_primary_new_setversion.json b/test/discovery_and_monitoring/rs/new_primary_new_setversion.json index 96533c61ee..9c1e2d4bdd 100644 --- a/test/discovery_and_monitoring/rs/new_primary_new_setversion.json +++ b/test/discovery_and_monitoring/rs/new_primary_new_setversion.json @@ -76,7 +76,8 @@ "a:27017": { "type": "Unknown", "setName": null, - "electionId": null + "electionId": null, + "error": "primary marked stale due to discovery of newer primary" }, "b:27017": { "type": "RSPrimary", @@ -123,7 +124,8 @@ "a:27017": { "type": "Unknown", "setName": null, - "electionId": null + "electionId": null, + "error": "primary marked stale due to 
electionId/setVersion mismatch" }, "b:27017": { "type": "RSPrimary", diff --git a/test/discovery_and_monitoring/rs/null_election_id-pre-6.0.json b/test/discovery_and_monitoring/rs/null_election_id-pre-6.0.json index 7261fbfc2a..8a77f31c50 100644 --- a/test/discovery_and_monitoring/rs/null_election_id-pre-6.0.json +++ b/test/discovery_and_monitoring/rs/null_election_id-pre-6.0.json @@ -18,7 +18,7 @@ "setVersion": 1, "setName": "rs", "minWireVersion": 0, - "maxWireVersion": 7 + "maxWireVersion": 16 } ] ], @@ -66,7 +66,7 @@ "$oid": "000000000000000000000002" }, "minWireVersion": 0, - "maxWireVersion": 7 + "maxWireVersion": 16 } ] ], @@ -116,7 +116,7 @@ "setVersion": 1, "setName": "rs", "minWireVersion": 0, - "maxWireVersion": 7 + "maxWireVersion": 16 } ] ], @@ -167,7 +167,7 @@ "$oid": "000000000000000000000001" }, "minWireVersion": 0, - "maxWireVersion": 7 + "maxWireVersion": 16 } ] ], diff --git a/test/discovery_and_monitoring/rs/primary_disconnect_electionid.json b/test/discovery_and_monitoring/rs/primary_disconnect_electionid.json index 5a91188ea8..b030bd2c53 100644 --- a/test/discovery_and_monitoring/rs/primary_disconnect_electionid.json +++ b/test/discovery_and_monitoring/rs/primary_disconnect_electionid.json @@ -48,7 +48,8 @@ "a:27017": { "type": "Unknown", "setName": null, - "electionId": null + "electionId": null, + "error": "primary marked stale due to discovery of newer primary" }, "b:27017": { "type": "RSPrimary", @@ -124,6 +125,7 @@ "a:27017": { "type": "Unknown", "setName": null, + "error": "primary marked stale due to electionId/setVersion mismatch", "electionId": null }, "b:27017": { diff --git a/test/discovery_and_monitoring/rs/primary_disconnect_setversion.json b/test/discovery_and_monitoring/rs/primary_disconnect_setversion.json index f7417ad77b..653a5f29e8 100644 --- a/test/discovery_and_monitoring/rs/primary_disconnect_setversion.json +++ b/test/discovery_and_monitoring/rs/primary_disconnect_setversion.json @@ -48,7 +48,8 @@ "a:27017": { "type": "Unknown", "setName": null, - "electionId": null + "electionId": null, + "error": "primary marked stale due to discovery of newer primary" }, "b:27017": { "type": "RSPrimary", @@ -124,6 +125,7 @@ "a:27017": { "type": "Unknown", "setName": null, + "error": "primary marked stale due to electionId/setVersion mismatch", "electionId": null }, "b:27017": { diff --git a/test/discovery_and_monitoring/rs/primary_mismatched_me_not_removed.json b/test/discovery_and_monitoring/rs/primary_mismatched_me_not_removed.json index 4c40093659..a55dcfc6d4 100644 --- a/test/discovery_and_monitoring/rs/primary_mismatched_me_not_removed.json +++ b/test/discovery_and_monitoring/rs/primary_mismatched_me_not_removed.json @@ -18,7 +18,7 @@ "primary": "localhost:27017", "me": "a:27017", "minWireVersion": 0, - "maxWireVersion": 7 + "maxWireVersion": 25 } ] ], @@ -55,7 +55,7 @@ "primary": "localhost:27017", "me": "localhost:27018", "minWireVersion": 0, - "maxWireVersion": 7 + "maxWireVersion": 25 } ] ], diff --git a/test/discovery_and_monitoring/rs/secondary_ipv6_literal.json b/test/discovery_and_monitoring/rs/secondary_ipv6_literal.json new file mode 100644 index 0000000000..c23d8dc4c9 --- /dev/null +++ b/test/discovery_and_monitoring/rs/secondary_ipv6_literal.json @@ -0,0 +1,38 @@ +{ + "description": "Secondary with IPv6 literal", + "uri": "mongodb://[::1]/?replicaSet=rs", + "phases": [ + { + "responses": [ + [ + "[::1]:27017", + { + "ok": 1, + "helloOk": true, + "isWritablePrimary": false, + "secondary": true, + "setName": "rs", + "me": "[::1]:27017", + 
"hosts": [ + "[::1]:27017" + ], + "minWireVersion": 0, + "maxWireVersion": 26 + } + ] + ], + "outcome": { + "servers": { + "[::1]:27017": { + "type": "RSSecondary", + "setName": "rs" + } + }, + "topologyType": "ReplicaSetNoPrimary", + "setName": "rs", + "logicalSessionTimeoutMinutes": null, + "compatible": true + } + } + ] +} diff --git a/test/discovery_and_monitoring/rs/setversion_greaterthan_max_without_electionid.json b/test/discovery_and_monitoring/rs/setversion_greaterthan_max_without_electionid.json index 97870d71d5..06c89609f5 100644 --- a/test/discovery_and_monitoring/rs/setversion_greaterthan_max_without_electionid.json +++ b/test/discovery_and_monitoring/rs/setversion_greaterthan_max_without_electionid.json @@ -65,7 +65,8 @@ "a:27017": { "type": "Unknown", "setName": null, - "electionId": null + "electionId": null, + "error": "primary marked stale due to discovery of newer primary" }, "b:27017": { "type": "RSPrimary", diff --git a/test/discovery_and_monitoring/rs/setversion_without_electionid-pre-6.0.json b/test/discovery_and_monitoring/rs/setversion_without_electionid-pre-6.0.json index e62c6963ed..9a1ee61399 100644 --- a/test/discovery_and_monitoring/rs/setversion_without_electionid-pre-6.0.json +++ b/test/discovery_and_monitoring/rs/setversion_without_electionid-pre-6.0.json @@ -17,7 +17,7 @@ "setName": "rs", "setVersion": 2, "minWireVersion": 0, - "maxWireVersion": 7 + "maxWireVersion": 16 } ] ], @@ -56,7 +56,7 @@ "setName": "rs", "setVersion": 1, "minWireVersion": 0, - "maxWireVersion": 7 + "maxWireVersion": 16 } ] ], @@ -65,7 +65,8 @@ "a:27017": { "type": "Unknown", "setName": null, - "electionId": null + "electionId": null, + "error": "primary marked stale due to discovery of newer primary" }, "b:27017": { "type": "RSPrimary", diff --git a/test/discovery_and_monitoring/rs/use_setversion_without_electionid-pre-6.0.json b/test/discovery_and_monitoring/rs/use_setversion_without_electionid-pre-6.0.json index 2f9b567b85..03195aacde 100644 --- a/test/discovery_and_monitoring/rs/use_setversion_without_electionid-pre-6.0.json +++ b/test/discovery_and_monitoring/rs/use_setversion_without_electionid-pre-6.0.json @@ -20,7 +20,7 @@ "$oid": "000000000000000000000001" }, "minWireVersion": 0, - "maxWireVersion": 7 + "maxWireVersion": 16 } ] ], @@ -64,7 +64,7 @@ "setName": "rs", "setVersion": 2, "minWireVersion": 0, - "maxWireVersion": 7 + "maxWireVersion": 16 } ] ], @@ -73,7 +73,8 @@ "a:27017": { "type": "Unknown", "setName": null, - "electionId": null + "electionId": null, + "error": "primary marked stale due to discovery of newer primary" }, "b:27017": { "type": "RSPrimary", @@ -108,7 +109,7 @@ "$oid": "000000000000000000000002" }, "minWireVersion": 0, - "maxWireVersion": 7 + "maxWireVersion": 16 } ] ], @@ -117,7 +118,8 @@ "a:27017": { "type": "Unknown", "setName": null, - "electionId": null + "electionId": null, + "error": "primary marked stale due to electionId/setVersion mismatch" }, "b:27017": { "type": "RSPrimary", diff --git a/test/discovery_and_monitoring/rs/use_setversion_without_electionid.json b/test/discovery_and_monitoring/rs/use_setversion_without_electionid.json index 551f3e12c2..eaf586d728 100644 --- a/test/discovery_and_monitoring/rs/use_setversion_without_electionid.json +++ b/test/discovery_and_monitoring/rs/use_setversion_without_electionid.json @@ -81,7 +81,8 @@ "b:27017": { "type": "Unknown", "setName": null, - "electionId": null + "electionId": null, + "error": "primary marked stale due to electionId/setVersion mismatch" } }, "topologyType": 
"ReplicaSetWithPrimary", @@ -128,7 +129,8 @@ "b:27017": { "type": "Unknown", "setName": null, - "electionId": null + "electionId": null, + "error": "primary marked stale due to electionId/setVersion mismatch" } }, "topologyType": "ReplicaSetWithPrimary", diff --git a/test/discovery_and_monitoring/unified/serverMonitoringMode.json b/test/discovery_and_monitoring/unified/serverMonitoringMode.json index 4b492f7d85..e44fad1bcd 100644 --- a/test/discovery_and_monitoring/unified/serverMonitoringMode.json +++ b/test/discovery_and_monitoring/unified/serverMonitoringMode.json @@ -5,8 +5,7 @@ { "topologies": [ "single", - "sharded", - "sharded-replicaset" + "sharded" ], "serverless": "forbid" } diff --git a/test/gridfs/delete.json b/test/gridfs/delete.json index 7a4ec27f88..9a9b22fc1e 100644 --- a/test/gridfs/delete.json +++ b/test/gridfs/delete.json @@ -49,10 +49,7 @@ "uploadDate": { "$date": "1970-01-01T00:00:00.000Z" }, - "md5": "d41d8cd98f00b204e9800998ecf8427e", "filename": "length-0", - "contentType": "application/octet-stream", - "aliases": [], "metadata": {} }, { @@ -64,10 +61,7 @@ "uploadDate": { "$date": "1970-01-01T00:00:00.000Z" }, - "md5": "d41d8cd98f00b204e9800998ecf8427e", "filename": "length-0-with-empty-chunk", - "contentType": "application/octet-stream", - "aliases": [], "metadata": {} }, { @@ -79,10 +73,7 @@ "uploadDate": { "$date": "1970-01-01T00:00:00.000Z" }, - "md5": "c700ed4fdb1d27055aa3faa2c2432283", "filename": "length-2", - "contentType": "application/octet-stream", - "aliases": [], "metadata": {} }, { @@ -94,10 +85,7 @@ "uploadDate": { "$date": "1970-01-01T00:00:00.000Z" }, - "md5": "dd254cdc958e53abaa67da9f797125f5", "filename": "length-8", - "contentType": "application/octet-stream", - "aliases": [], "metadata": {} } ] @@ -197,10 +185,7 @@ "uploadDate": { "$date": "1970-01-01T00:00:00.000Z" }, - "md5": "d41d8cd98f00b204e9800998ecf8427e", "filename": "length-0-with-empty-chunk", - "contentType": "application/octet-stream", - "aliases": [], "metadata": {} }, { @@ -212,10 +197,7 @@ "uploadDate": { "$date": "1970-01-01T00:00:00.000Z" }, - "md5": "c700ed4fdb1d27055aa3faa2c2432283", "filename": "length-2", - "contentType": "application/octet-stream", - "aliases": [], "metadata": {} }, { @@ -227,10 +209,7 @@ "uploadDate": { "$date": "1970-01-01T00:00:00.000Z" }, - "md5": "dd254cdc958e53abaa67da9f797125f5", "filename": "length-8", - "contentType": "application/octet-stream", - "aliases": [], "metadata": {} } ] @@ -330,10 +309,7 @@ "uploadDate": { "$date": "1970-01-01T00:00:00.000Z" }, - "md5": "d41d8cd98f00b204e9800998ecf8427e", "filename": "length-0", - "contentType": "application/octet-stream", - "aliases": [], "metadata": {} }, { @@ -345,10 +321,7 @@ "uploadDate": { "$date": "1970-01-01T00:00:00.000Z" }, - "md5": "c700ed4fdb1d27055aa3faa2c2432283", "filename": "length-2", - "contentType": "application/octet-stream", - "aliases": [], "metadata": {} }, { @@ -360,10 +333,7 @@ "uploadDate": { "$date": "1970-01-01T00:00:00.000Z" }, - "md5": "dd254cdc958e53abaa67da9f797125f5", "filename": "length-8", - "contentType": "application/octet-stream", - "aliases": [], "metadata": {} } ] @@ -448,10 +418,7 @@ "uploadDate": { "$date": "1970-01-01T00:00:00.000Z" }, - "md5": "d41d8cd98f00b204e9800998ecf8427e", "filename": "length-0", - "contentType": "application/octet-stream", - "aliases": [], "metadata": {} }, { @@ -463,10 +430,7 @@ "uploadDate": { "$date": "1970-01-01T00:00:00.000Z" }, - "md5": "d41d8cd98f00b204e9800998ecf8427e", "filename": "length-0-with-empty-chunk", - "contentType": 
"application/octet-stream", - "aliases": [], "metadata": {} }, { @@ -478,10 +442,7 @@ "uploadDate": { "$date": "1970-01-01T00:00:00.000Z" }, - "md5": "c700ed4fdb1d27055aa3faa2c2432283", "filename": "length-2", - "contentType": "application/octet-stream", - "aliases": [], "metadata": {} } ] @@ -536,7 +497,7 @@ } }, "expectError": { - "isError": true + "isClientError": true } } ], @@ -554,10 +515,7 @@ "uploadDate": { "$date": "1970-01-01T00:00:00.000Z" }, - "md5": "d41d8cd98f00b204e9800998ecf8427e", "filename": "length-0", - "contentType": "application/octet-stream", - "aliases": [], "metadata": {} }, { @@ -569,10 +527,7 @@ "uploadDate": { "$date": "1970-01-01T00:00:00.000Z" }, - "md5": "d41d8cd98f00b204e9800998ecf8427e", "filename": "length-0-with-empty-chunk", - "contentType": "application/octet-stream", - "aliases": [], "metadata": {} }, { @@ -584,10 +539,7 @@ "uploadDate": { "$date": "1970-01-01T00:00:00.000Z" }, - "md5": "c700ed4fdb1d27055aa3faa2c2432283", "filename": "length-2", - "contentType": "application/octet-stream", - "aliases": [], "metadata": {} }, { @@ -599,10 +551,7 @@ "uploadDate": { "$date": "1970-01-01T00:00:00.000Z" }, - "md5": "dd254cdc958e53abaa67da9f797125f5", "filename": "length-8", - "contentType": "application/octet-stream", - "aliases": [], "metadata": {} } ] @@ -701,7 +650,7 @@ } }, "expectError": { - "isError": true + "isClientError": true } } ], @@ -719,10 +668,7 @@ "uploadDate": { "$date": "1970-01-01T00:00:00.000Z" }, - "md5": "d41d8cd98f00b204e9800998ecf8427e", "filename": "length-0", - "contentType": "application/octet-stream", - "aliases": [], "metadata": {} }, { @@ -734,10 +680,7 @@ "uploadDate": { "$date": "1970-01-01T00:00:00.000Z" }, - "md5": "d41d8cd98f00b204e9800998ecf8427e", "filename": "length-0-with-empty-chunk", - "contentType": "application/octet-stream", - "aliases": [], "metadata": {} }, { @@ -749,10 +692,7 @@ "uploadDate": { "$date": "1970-01-01T00:00:00.000Z" }, - "md5": "c700ed4fdb1d27055aa3faa2c2432283", "filename": "length-2", - "contentType": "application/octet-stream", - "aliases": [], "metadata": {} } ] diff --git a/test/gridfs/deleteByName.json b/test/gridfs/deleteByName.json new file mode 100644 index 0000000000..884d0300ce --- /dev/null +++ b/test/gridfs/deleteByName.json @@ -0,0 +1,230 @@ +{ + "description": "gridfs-deleteByName", + "schemaVersion": "1.0", + "createEntities": [ + { + "client": { + "id": "client0" + } + }, + { + "database": { + "id": "database0", + "client": "client0", + "databaseName": "gridfs-tests" + } + }, + { + "bucket": { + "id": "bucket0", + "database": "database0" + } + }, + { + "collection": { + "id": "bucket0_files_collection", + "database": "database0", + "collectionName": "fs.files" + } + }, + { + "collection": { + "id": "bucket0_chunks_collection", + "database": "database0", + "collectionName": "fs.chunks" + } + } + ], + "initialData": [ + { + "collectionName": "fs.files", + "databaseName": "gridfs-tests", + "documents": [ + { + "_id": { + "$oid": "000000000000000000000001" + }, + "length": 0, + "chunkSize": 4, + "uploadDate": { + "$date": "1970-01-01T00:00:00.000Z" + }, + "filename": "filename", + "metadata": {} + }, + { + "_id": { + "$oid": "000000000000000000000002" + }, + "length": 0, + "chunkSize": 4, + "uploadDate": { + "$date": "1970-01-01T00:00:00.000Z" + }, + "filename": "filename", + "metadata": {} + }, + { + "_id": { + "$oid": "000000000000000000000003" + }, + "length": 2, + "chunkSize": 4, + "uploadDate": { + "$date": "1970-01-01T00:00:00.000Z" + }, + "filename": "filename", + "metadata": {} 
+ }, + { + "_id": { + "$oid": "000000000000000000000004" + }, + "length": 8, + "chunkSize": 4, + "uploadDate": { + "$date": "1970-01-01T00:00:00.000Z" + }, + "filename": "otherfilename", + "metadata": {} + } + ] + }, + { + "collectionName": "fs.chunks", + "databaseName": "gridfs-tests", + "documents": [ + { + "_id": { + "$oid": "000000000000000000000001" + }, + "files_id": { + "$oid": "000000000000000000000002" + }, + "n": 0, + "data": { + "$binary": { + "base64": "", + "subType": "00" + } + } + }, + { + "_id": { + "$oid": "000000000000000000000002" + }, + "files_id": { + "$oid": "000000000000000000000003" + }, + "n": 0, + "data": { + "$binary": { + "base64": "", + "subType": "00" + } + } + }, + { + "_id": { + "$oid": "000000000000000000000003" + }, + "files_id": { + "$oid": "000000000000000000000003" + }, + "n": 0, + "data": { + "$binary": { + "base64": "", + "subType": "00" + } + } + }, + { + "_id": { + "$oid": "000000000000000000000004" + }, + "files_id": { + "$oid": "000000000000000000000004" + }, + "n": 0, + "data": { + "$binary": { + "base64": "", + "subType": "00" + } + } + } + ] + } + ], + "tests": [ + { + "description": "delete when multiple revisions of the file exist", + "operations": [ + { + "name": "deleteByName", + "object": "bucket0", + "arguments": { + "filename": "filename" + } + } + ], + "outcome": [ + { + "collectionName": "fs.files", + "databaseName": "gridfs-tests", + "documents": [ + { + "_id": { + "$oid": "000000000000000000000004" + }, + "length": 8, + "chunkSize": 4, + "uploadDate": { + "$date": "1970-01-01T00:00:00.000Z" + }, + "filename": "otherfilename", + "metadata": {} + } + ] + }, + { + "collectionName": "fs.chunks", + "databaseName": "gridfs-tests", + "documents": [ + { + "_id": { + "$oid": "000000000000000000000004" + }, + "files_id": { + "$oid": "000000000000000000000004" + }, + "n": 0, + "data": { + "$binary": { + "base64": "", + "subType": "00" + } + } + } + ] + } + ] + }, + { + "description": "delete when file name does not exist", + "operations": [ + { + "name": "deleteByName", + "object": "bucket0", + "arguments": { + "filename": "missing-file" + }, + "expectError": { + "isClientError": true + } + } + ] + } + ] +} diff --git a/test/gridfs/download.json b/test/gridfs/download.json index 48d3246218..67658ac512 100644 --- a/test/gridfs/download.json +++ b/test/gridfs/download.json @@ -49,10 +49,7 @@ "uploadDate": { "$date": "1970-01-01T00:00:00.000Z" }, - "md5": "d41d8cd98f00b204e9800998ecf8427e", "filename": "length-0", - "contentType": "application/octet-stream", - "aliases": [], "metadata": {} }, { @@ -64,10 +61,7 @@ "uploadDate": { "$date": "1970-01-01T00:00:00.000Z" }, - "md5": "d41d8cd98f00b204e9800998ecf8427e", "filename": "length-0-with-empty-chunk", - "contentType": "application/octet-stream", - "aliases": [], "metadata": {} }, { @@ -79,10 +73,7 @@ "uploadDate": { "$date": "1970-01-01T00:00:00.000Z" }, - "md5": "c700ed4fdb1d27055aa3faa2c2432283", "filename": "length-2", - "contentType": "application/octet-stream", - "aliases": [], "metadata": {} }, { @@ -94,10 +85,7 @@ "uploadDate": { "$date": "1970-01-01T00:00:00.000Z" }, - "md5": "dd254cdc958e53abaa67da9f797125f5", "filename": "length-8", - "contentType": "application/octet-stream", - "aliases": [], "metadata": {} }, { @@ -109,10 +97,7 @@ "uploadDate": { "$date": "1970-01-01T00:00:00.000Z" }, - "md5": "57d83cd477bfb1ccd975ab33d827a92b", "filename": "length-10", - "contentType": "application/octet-stream", - "aliases": [], "metadata": {} }, { @@ -124,9 +109,6 @@ "uploadDate": { "$date": 
"1970-01-01T00:00:00.000Z" }, - "md5": "c700ed4fdb1d27055aa3faa2c2432283", - "contentType": "application/octet-stream", - "aliases": [], "metadata": {} } ] @@ -356,7 +338,7 @@ } }, "expectError": { - "isError": true + "isClientError": true } } ] @@ -388,7 +370,7 @@ } }, "expectError": { - "isError": true + "isClientError": true } } ] @@ -420,7 +402,7 @@ } }, "expectError": { - "isError": true + "isClientError": true } } ] @@ -489,7 +471,7 @@ } }, "expectError": { - "isError": true + "isClientError": true } } ] @@ -532,7 +514,7 @@ } }, "expectError": { - "isError": true + "isClientError": true } } ] diff --git a/test/gridfs/downloadByName.json b/test/gridfs/downloadByName.json index cd44663957..45abaf7b42 100644 --- a/test/gridfs/downloadByName.json +++ b/test/gridfs/downloadByName.json @@ -49,10 +49,7 @@ "uploadDate": { "$date": "1970-01-01T00:00:00.000Z" }, - "md5": "47ed733b8d10be225eceba344d533586", "filename": "abc", - "contentType": "application/octet-stream", - "aliases": [], "metadata": {} }, { @@ -64,10 +61,7 @@ "uploadDate": { "$date": "1970-01-02T00:00:00.000Z" }, - "md5": "b15835f133ff2e27c7cb28117bfae8f4", "filename": "abc", - "contentType": "application/octet-stream", - "aliases": [], "metadata": {} }, { @@ -79,10 +73,7 @@ "uploadDate": { "$date": "1970-01-03T00:00:00.000Z" }, - "md5": "eccbc87e4b5ce2fe28308fd9f2a7baf3", "filename": "abc", - "contentType": "application/octet-stream", - "aliases": [], "metadata": {} }, { @@ -94,10 +85,7 @@ "uploadDate": { "$date": "1970-01-04T00:00:00.000Z" }, - "md5": "f623e75af30e62bbd73d6df5b50bb7b5", "filename": "abc", - "contentType": "application/octet-stream", - "aliases": [], "metadata": {} }, { @@ -109,10 +97,7 @@ "uploadDate": { "$date": "1970-01-05T00:00:00.000Z" }, - "md5": "4c614360da93c0a041b22e537de151eb", "filename": "abc", - "contentType": "application/octet-stream", - "aliases": [], "metadata": {} } ] @@ -305,7 +290,7 @@ "filename": "xyz" }, "expectError": { - "isError": true + "isClientError": true } } ] @@ -321,7 +306,7 @@ "revision": 999 }, "expectError": { - "isError": true + "isClientError": true } } ] diff --git a/test/gridfs/rename.json b/test/gridfs/rename.json new file mode 100644 index 0000000000..08064d4a5c --- /dev/null +++ b/test/gridfs/rename.json @@ -0,0 +1,179 @@ +{ + "description": "gridfs-rename", + "schemaVersion": "1.0", + "createEntities": [ + { + "client": { + "id": "client0" + } + }, + { + "database": { + "id": "database0", + "client": "client0", + "databaseName": "gridfs-tests" + } + }, + { + "bucket": { + "id": "bucket0", + "database": "database0" + } + }, + { + "collection": { + "id": "bucket0_files_collection", + "database": "database0", + "collectionName": "fs.files" + } + }, + { + "collection": { + "id": "bucket0_chunks_collection", + "database": "database0", + "collectionName": "fs.chunks" + } + } + ], + "initialData": [ + { + "collectionName": "fs.files", + "databaseName": "gridfs-tests", + "documents": [ + { + "_id": { + "$oid": "000000000000000000000001" + }, + "length": 0, + "chunkSize": 4, + "uploadDate": { + "$date": "1970-01-01T00:00:00.000Z" + }, + "filename": "filename", + "metadata": {} + }, + { + "_id": { + "$oid": "000000000000000000000002" + }, + "length": 0, + "chunkSize": 4, + "uploadDate": { + "$date": "1970-01-01T00:00:00.000Z" + }, + "filename": "filename", + "metadata": {} + } + ] + }, + { + "collectionName": "fs.chunks", + "databaseName": "gridfs-tests", + "documents": [ + { + "_id": { + "$oid": "000000000000000000000001" + }, + "files_id": { + "$oid": 
"000000000000000000000002" + }, + "n": 0, + "data": { + "$binary": { + "base64": "", + "subType": "00" + } + } + } + ] + } + ], + "tests": [ + { + "description": "rename by id", + "operations": [ + { + "name": "rename", + "object": "bucket0", + "arguments": { + "id": { + "$oid": "000000000000000000000001" + }, + "newFilename": "newfilename" + } + } + ], + "outcome": [ + { + "collectionName": "fs.files", + "databaseName": "gridfs-tests", + "documents": [ + { + "_id": { + "$oid": "000000000000000000000001" + }, + "length": 0, + "chunkSize": 4, + "uploadDate": { + "$date": "1970-01-01T00:00:00.000Z" + }, + "filename": "newfilename", + "metadata": {} + }, + { + "_id": { + "$oid": "000000000000000000000002" + }, + "length": 0, + "chunkSize": 4, + "uploadDate": { + "$date": "1970-01-01T00:00:00.000Z" + }, + "filename": "filename", + "metadata": {} + } + ] + }, + { + "collectionName": "fs.chunks", + "databaseName": "gridfs-tests", + "documents": [ + { + "_id": { + "$oid": "000000000000000000000001" + }, + "files_id": { + "$oid": "000000000000000000000002" + }, + "n": 0, + "data": { + "$binary": { + "base64": "", + "subType": "00" + } + } + } + ] + } + ] + }, + { + "description": "rename when file id does not exist", + "operations": [ + { + "name": "rename", + "object": "bucket0", + "arguments": { + "id": { + "$oid": "000000000000000000000003" + }, + "newFilename": "newfilename" + }, + "expectError": { + "isClientError": true + } + } + ] + } + ] +} diff --git a/test/gridfs/renameByName.json b/test/gridfs/renameByName.json new file mode 100644 index 0000000000..26f04fb9e0 --- /dev/null +++ b/test/gridfs/renameByName.json @@ -0,0 +1,313 @@ +{ + "description": "gridfs-renameByName", + "schemaVersion": "1.0", + "createEntities": [ + { + "client": { + "id": "client0" + } + }, + { + "database": { + "id": "database0", + "client": "client0", + "databaseName": "gridfs-tests" + } + }, + { + "bucket": { + "id": "bucket0", + "database": "database0" + } + }, + { + "collection": { + "id": "bucket0_files_collection", + "database": "database0", + "collectionName": "fs.files" + } + }, + { + "collection": { + "id": "bucket0_chunks_collection", + "database": "database0", + "collectionName": "fs.chunks" + } + } + ], + "initialData": [ + { + "collectionName": "fs.files", + "databaseName": "gridfs-tests", + "documents": [ + { + "_id": { + "$oid": "000000000000000000000001" + }, + "length": 0, + "chunkSize": 4, + "uploadDate": { + "$date": "1970-01-01T00:00:00.000Z" + }, + "filename": "filename", + "metadata": {} + }, + { + "_id": { + "$oid": "000000000000000000000002" + }, + "length": 0, + "chunkSize": 4, + "uploadDate": { + "$date": "1970-01-01T00:00:00.000Z" + }, + "filename": "filename", + "metadata": {} + }, + { + "_id": { + "$oid": "000000000000000000000003" + }, + "length": 2, + "chunkSize": 4, + "uploadDate": { + "$date": "1970-01-01T00:00:00.000Z" + }, + "filename": "filename", + "metadata": {} + }, + { + "_id": { + "$oid": "000000000000000000000004" + }, + "length": 8, + "chunkSize": 4, + "uploadDate": { + "$date": "1970-01-01T00:00:00.000Z" + }, + "filename": "otherfilename", + "metadata": {} + } + ] + }, + { + "collectionName": "fs.chunks", + "databaseName": "gridfs-tests", + "documents": [ + { + "_id": { + "$oid": "000000000000000000000001" + }, + "files_id": { + "$oid": "000000000000000000000002" + }, + "n": 0, + "data": { + "$binary": { + "base64": "", + "subType": "00" + } + } + }, + { + "_id": { + "$oid": "000000000000000000000002" + }, + "files_id": { + "$oid": "000000000000000000000003" + }, + "n": 0, 
+ "data": { + "$binary": { + "base64": "", + "subType": "00" + } + } + }, + { + "_id": { + "$oid": "000000000000000000000003" + }, + "files_id": { + "$oid": "000000000000000000000004" + }, + "n": 0, + "data": { + "$binary": { + "base64": "", + "subType": "00" + } + } + }, + { + "_id": { + "$oid": "000000000000000000000004" + }, + "files_id": { + "$oid": "000000000000000000000004" + }, + "n": 1, + "data": { + "$binary": { + "base64": "", + "subType": "00" + } + } + } + ] + } + ], + "tests": [ + { + "description": "rename when multiple revisions of the file exist", + "operations": [ + { + "name": "renameByName", + "object": "bucket0", + "arguments": { + "filename": "filename", + "newFilename": "newfilename" + } + } + ], + "outcome": [ + { + "collectionName": "fs.files", + "databaseName": "gridfs-tests", + "documents": [ + { + "_id": { + "$oid": "000000000000000000000001" + }, + "length": 0, + "chunkSize": 4, + "uploadDate": { + "$date": "1970-01-01T00:00:00.000Z" + }, + "filename": "newfilename", + "metadata": {} + }, + { + "_id": { + "$oid": "000000000000000000000002" + }, + "length": 0, + "chunkSize": 4, + "uploadDate": { + "$date": "1970-01-01T00:00:00.000Z" + }, + "filename": "newfilename", + "metadata": {} + }, + { + "_id": { + "$oid": "000000000000000000000003" + }, + "length": 2, + "chunkSize": 4, + "uploadDate": { + "$date": "1970-01-01T00:00:00.000Z" + }, + "filename": "newfilename", + "metadata": {} + }, + { + "_id": { + "$oid": "000000000000000000000004" + }, + "length": 8, + "chunkSize": 4, + "uploadDate": { + "$date": "1970-01-01T00:00:00.000Z" + }, + "filename": "otherfilename", + "metadata": {} + } + ] + }, + { + "collectionName": "fs.chunks", + "databaseName": "gridfs-tests", + "documents": [ + { + "_id": { + "$oid": "000000000000000000000001" + }, + "files_id": { + "$oid": "000000000000000000000002" + }, + "n": 0, + "data": { + "$binary": { + "base64": "", + "subType": "00" + } + } + }, + { + "_id": { + "$oid": "000000000000000000000002" + }, + "files_id": { + "$oid": "000000000000000000000003" + }, + "n": 0, + "data": { + "$binary": { + "base64": "", + "subType": "00" + } + } + }, + { + "_id": { + "$oid": "000000000000000000000003" + }, + "files_id": { + "$oid": "000000000000000000000004" + }, + "n": 0, + "data": { + "$binary": { + "base64": "", + "subType": "00" + } + } + }, + { + "_id": { + "$oid": "000000000000000000000004" + }, + "files_id": { + "$oid": "000000000000000000000004" + }, + "n": 1, + "data": { + "$binary": { + "base64": "", + "subType": "00" + } + } + } + ] + } + ] + }, + { + "description": "rename when file name does not exist", + "operations": [ + { + "name": "renameByName", + "object": "bucket0", + "arguments": { + "filename": "missing-file", + "newFilename": "newfilename" + }, + "expectError": { + "isClientError": true + } + } + ] + } + ] +} diff --git a/test/gridfs/upload.json b/test/gridfs/upload.json index 97e18d2bc2..3c1644653a 100644 --- a/test/gridfs/upload.json +++ b/test/gridfs/upload.json @@ -470,75 +470,6 @@ } ] }, - { - "description": "upload when contentType is provided", - "operations": [ - { - "name": "upload", - "object": "bucket0", - "arguments": { - "filename": "filename", - "source": { - "$$hexBytes": "11" - }, - "chunkSizeBytes": 4, - "contentType": "image/jpeg" - }, - "expectResult": { - "$$type": "objectId" - }, - "saveResultAsEntity": "uploadedObjectId" - }, - { - "name": "find", - "object": "bucket0_files_collection", - "arguments": { - "filter": {} - }, - "expectResult": [ - { - "_id": { - "$$matchesEntity": "uploadedObjectId" - 
}, - "length": 1, - "chunkSize": 4, - "uploadDate": { - "$$type": "date" - }, - "md5": { - "$$unsetOrMatches": "47ed733b8d10be225eceba344d533586" - }, - "filename": "filename", - "contentType": "image/jpeg" - } - ] - }, - { - "name": "find", - "object": "bucket0_chunks_collection", - "arguments": { - "filter": {} - }, - "expectResult": [ - { - "_id": { - "$$type": "objectId" - }, - "files_id": { - "$$matchesEntity": "uploadedObjectId" - }, - "n": 0, - "data": { - "$binary": { - "base64": "EQ==", - "subType": "00" - } - } - } - ] - } - ] - }, { "description": "upload when metadata is provided", "operations": [ diff --git a/test/handshake/unified/metadata-not-propagated.json b/test/handshake/unified/metadata-not-propagated.json new file mode 100644 index 0000000000..500b579b89 --- /dev/null +++ b/test/handshake/unified/metadata-not-propagated.json @@ -0,0 +1,100 @@ +{ + "description": "client metadata is not propagated to the server", + "schemaVersion": "1.9", + "runOnRequirements": [ + { + "minServerVersion": "6.0" + } + ], + "createEntities": [ + { + "client": { + "id": "client", + "observeEvents": [ + "commandSucceededEvent", + "commandFailedEvent", + "connectionClosedEvent", + "connectionCreatedEvent" + ] + } + }, + { + "database": { + "id": "database", + "client": "client", + "databaseName": "test" + } + } + ], + "tests": [ + { + "description": "metadata append does not create new connections or close existing ones and no hello command is sent", + "operations": [ + { + "name": "runCommand", + "object": "database", + "arguments": { + "commandName": "ping", + "command": { + "ping": 1 + } + }, + "expectResult": { + "ok": 1 + } + }, + { + "name": "appendMetadata", + "object": "client", + "arguments": { + "driverInfoOptions": { + "name": "framework", + "version": "2.0", + "platform": "Framework Platform" + } + } + }, + { + "name": "runCommand", + "object": "database", + "arguments": { + "commandName": "ping", + "command": { + "ping": 1 + } + }, + "expectResult": { + "ok": 1 + } + } + ], + "expectEvents": [ + { + "client": "client", + "eventType": "cmap", + "events": [ + { + "connectionCreatedEvent": {} + } + ] + }, + { + "client": "client", + "eventType": "command", + "events": [ + { + "commandSucceededEvent": { + "commandName": "ping" + } + }, + { + "commandSucceededEvent": { + "commandName": "ping" + } + } + ] + } + ] + } + ] +} diff --git a/test/helpers.py b/test/helpers.py index 11d5ab0374..163bf01c12 100644 --- a/test/helpers.py +++ b/test/helpers.py @@ -12,144 +12,22 @@ # See the License for the specific language governing permissions and # limitations under the License. 
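
NOTE: The metadata-not-propagated handshake test above pins down that appending driver metadata after client construction neither opens nor closes connections and sends no extra hello. Client-side, the exercised API looks roughly like this (the PyMongo method name is assumed from the spec's appendMetadata operation):

    from pymongo import MongoClient
    from pymongo.driver_info import DriverInfo

    client = MongoClient()
    client.admin.command("ping")  # handshake happens here
    # Only connections created afterwards carry the merged metadata;
    # the pooled connection above is untouched, no new hello is sent.
    client.append_metadata(
        DriverInfo(name="framework", version="2.0", platform="Framework Platform")
    )
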
-"""Shared constants and helper methods for pymongo, bson, and gridfs test suites.""" +"""Shared helper methods for pymongo, bson, and gridfs test suites.""" from __future__ import annotations -import base64 -import gc -import multiprocessing -import os -import signal -import socket -import subprocess -import sys +import asyncio import threading -import time import traceback -import unittest -import warnings -from asyncio import iscoroutinefunction - -try: - import ipaddress - - HAVE_IPADDRESS = True -except ImportError: - HAVE_IPADDRESS = False from functools import wraps -from typing import Any, Callable, Dict, Generator, no_type_check -from unittest import SkipTest +from typing import Optional, no_type_check -from bson.son import SON -from pymongo import common, message +from bson import SON +from pymongo import common +from pymongo._asyncio_task import create_task from pymongo.read_preferences import ReadPreference -from pymongo.ssl_support import HAVE_SSL, _ssl # type:ignore[attr-defined] -from pymongo.uri_parser import parse_uri - -if HAVE_SSL: - import ssl _IS_SYNC = True -# Enable debug output for uncollectable objects. PyPy does not have set_debug. -if hasattr(gc, "set_debug"): - gc.set_debug( - gc.DEBUG_UNCOLLECTABLE | getattr(gc, "DEBUG_OBJECTS", 0) | getattr(gc, "DEBUG_INSTANCES", 0) - ) - -# The host and port of a single mongod or mongos, or the seed host -# for a replica set. -host = os.environ.get("DB_IP", "localhost") -port = int(os.environ.get("DB_PORT", 27017)) -IS_SRV = "mongodb+srv" in host - -db_user = os.environ.get("DB_USER", "user") -db_pwd = os.environ.get("DB_PASSWORD", "password") - -CERT_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), "certificates") -CLIENT_PEM = os.environ.get("CLIENT_PEM", os.path.join(CERT_PATH, "client.pem")) -CA_PEM = os.environ.get("CA_PEM", os.path.join(CERT_PATH, "ca.pem")) - -TLS_OPTIONS: Dict = {"tls": True} -if CLIENT_PEM: - TLS_OPTIONS["tlsCertificateKeyFile"] = CLIENT_PEM -if CA_PEM: - TLS_OPTIONS["tlsCAFile"] = CA_PEM - -COMPRESSORS = os.environ.get("COMPRESSORS") -MONGODB_API_VERSION = os.environ.get("MONGODB_API_VERSION") -TEST_LOADBALANCER = bool(os.environ.get("TEST_LOADBALANCER")) -TEST_SERVERLESS = bool(os.environ.get("TEST_SERVERLESS")) -SINGLE_MONGOS_LB_URI = os.environ.get("SINGLE_MONGOS_LB_URI") -MULTI_MONGOS_LB_URI = os.environ.get("MULTI_MONGOS_LB_URI") - -if TEST_LOADBALANCER: - res = parse_uri(SINGLE_MONGOS_LB_URI or "") - host, port = res["nodelist"][0] - db_user = res["username"] or db_user - db_pwd = res["password"] or db_pwd -elif TEST_SERVERLESS: - TEST_LOADBALANCER = True - res = parse_uri(SINGLE_MONGOS_LB_URI or "") - host, port = res["nodelist"][0] - db_user = res["username"] or db_user - db_pwd = res["password"] or db_pwd - TLS_OPTIONS = {"tls": True} - # Spec says serverless tests must be run with compression. - COMPRESSORS = COMPRESSORS or "zlib" - - -# Shared KMS data. 
-LOCAL_MASTER_KEY = base64.b64decode(
-    b"Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ"
-    b"5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk"
-)
-AWS_CREDS = {
-    "accessKeyId": os.environ.get("FLE_AWS_KEY", ""),
-    "secretAccessKey": os.environ.get("FLE_AWS_SECRET", ""),
-}
-AWS_CREDS_2 = {
-    "accessKeyId": os.environ.get("FLE_AWS_KEY2", ""),
-    "secretAccessKey": os.environ.get("FLE_AWS_SECRET2", ""),
-}
-AZURE_CREDS = {
-    "tenantId": os.environ.get("FLE_AZURE_TENANTID", ""),
-    "clientId": os.environ.get("FLE_AZURE_CLIENTID", ""),
-    "clientSecret": os.environ.get("FLE_AZURE_CLIENTSECRET", ""),
-}
-GCP_CREDS = {
-    "email": os.environ.get("FLE_GCP_EMAIL", ""),
-    "privateKey": os.environ.get("FLE_GCP_PRIVATEKEY", ""),
-}
-KMIP_CREDS = {"endpoint": os.environ.get("FLE_KMIP_ENDPOINT", "localhost:5698")}
-
-# Ensure Evergreen metadata doesn't result in truncation
-os.environ.setdefault("MONGOB_LOG_MAX_DOCUMENT_LENGTH", "2000")
-
-
-def is_server_resolvable():
-    """Returns True if 'server' is resolvable."""
-    socket_timeout = socket.getdefaulttimeout()
-    socket.setdefaulttimeout(1)
-    try:
-        try:
-            socket.gethostbyname("server")
-            return True
-        except OSError:
-            return False
-    finally:
-        socket.setdefaulttimeout(socket_timeout)
-
-
-def _create_user(authdb, user, pwd=None, roles=None, **kwargs):
-    cmd = SON([("createUser", user)])
-    # X509 doesn't use a password
-    if pwd:
-        cmd["pwd"] = pwd
-    cmd["roles"] = roles or ["root"]
-    cmd.update(**kwargs)
-    return authdb.command(cmd)
-
 
 def repl_set_step_down(client, **kwargs):
     """Run replSetStepDown, first unfreezing a secondary with replSetFreeze."""
@@ -244,128 +122,55 @@ def __del__(self):
         raise Exception(msg)
 
 
-def _all_users(db):
-    return {u["user"] for u in db.command("usersInfo").get("users", [])}
-
-
-def sanitize_cmd(cmd):
-    cp = cmd.copy()
-    cp.pop("$clusterTime", None)
-    cp.pop("$db", None)
-    cp.pop("$readPreference", None)
-    cp.pop("lsid", None)
-    if MONGODB_API_VERSION:
-        # Stable API parameters
-        cp.pop("apiVersion", None)
-    # OP_MSG encoding may move the payload type one field to the
-    # end of the command. Do the same here.
-    name = next(iter(cp))
-    try:
-        identifier = message._FIELD_MAP[name]
-        docs = cp.pop(identifier)
-        cp[identifier] = docs
-    except KeyError:
-        pass
-    return cp
-
-
-def sanitize_reply(reply):
-    cp = reply.copy()
-    cp.pop("$clusterTime", None)
-    cp.pop("operationTime", None)
-    return cp
-
-
-def print_thread_tracebacks() -> None:
-    """Print all Python thread tracebacks."""
-    for thread_id, frame in sys._current_frames().items():
-        sys.stderr.write(f"\n--- Traceback for thread {thread_id} ---\n")
-        traceback.print_stack(frame, file=sys.stderr)
-
-
-def print_thread_stacks(pid: int) -> None:
-    """Print all C-level thread stacks for a given process id."""
-    if sys.platform == "darwin":
-        cmd = ["lldb", "--attach-pid", f"{pid}", "--batch", "--one-line", '"thread backtrace all"']
-    else:
-        cmd = ["gdb", f"--pid={pid}", "--batch", '--eval-command="thread apply all bt"']
-
-    try:
-        res = subprocess.run(
-            cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, encoding="utf-8"
-        )
-    except Exception as exc:
-        sys.stderr.write(f"Could not print C-level thread stacks because {cmd[0]} failed: {exc}")
-    else:
-        sys.stderr.write(res.stdout)
-
-
 # Global knobs to speed up the test suite.
 global_knobs = client_knobs(events_queue_frequency=0.05)
 
 
-def _get_executors(topology):
-    executors = []
-    for server in topology._servers.values():
-        # Some MockMonitor do not have an _executor.
- if hasattr(server._monitor, "_executor"): - executors.append(server._monitor._executor) - if hasattr(server._monitor, "_rtt_monitor"): - executors.append(server._monitor._rtt_monitor._executor) - executors.append(topology._Topology__events_executor) - if topology._srv_monitor: - executors.append(topology._srv_monitor._executor) - - return [e for e in executors if e is not None] - - -def print_running_topology(topology): - running = [e for e in _get_executors(topology) if not e._stopped] - if running: - print( - "WARNING: found Topology with running threads:\n" - f" Threads: {running}\n" - f" Topology: {topology}\n" - f" Creation traceback:\n{topology._settings._stack}" - ) - - -def test_cases(suite): - """Iterator over all TestCases within a TestSuite.""" - for suite_or_case in suite._tests: - if isinstance(suite_or_case, unittest.TestCase): - # unittest.TestCase - yield suite_or_case - else: - # unittest.TestSuite - yield from test_cases(suite_or_case) - - -# Helper method to workaround https://bugs.python.org/issue21724 -def clear_warning_registry(): - """Clear the __warningregistry__ for all modules.""" - for _, module in list(sys.modules.items()): - if hasattr(module, "__warningregistry__"): - module.__warningregistry__ = {} # type:ignore[attr-defined] - - -class SystemCertsPatcher: - def __init__(self, ca_certs): - if ( - ssl.OPENSSL_VERSION.lower().startswith("libressl") - and sys.platform == "darwin" - and not _ssl.IS_PYOPENSSL - ): - raise SkipTest( - "LibreSSL on OSX doesn't support setting CA certificates " - "using SSL_CERT_FILE environment variable." - ) - self.original_certs = os.environ.get("SSL_CERT_FILE") - # Tell OpenSSL where CA certificates live. - os.environ["SSL_CERT_FILE"] = ca_certs +if _IS_SYNC: + PARENT = threading.Thread +else: + PARENT = object - def disable(self): - if self.original_certs is None: - os.environ.pop("SSL_CERT_FILE") - else: - os.environ["SSL_CERT_FILE"] = self.original_certs + +class ConcurrentRunner(PARENT): + def __init__(self, **kwargs): + if _IS_SYNC: + super().__init__(**kwargs) + self.name = kwargs.get("name", "ConcurrentRunner") + self.stopped = False + self.task = None + self.target = kwargs.get("target", None) + self.args = kwargs.get("args", []) + + if not _IS_SYNC: + + def start(self): + self.task = create_task(self.run(), name=self.name) + + def join(self, timeout: Optional[float] = None): # type: ignore[override] + if self.task is not None: + asyncio.wait([self.task], timeout=timeout) + + def is_alive(self): + return not self.stopped + + def run(self): + try: + self.target(*self.args) + finally: + self.stopped = True + + +class ExceptionCatchingTask(ConcurrentRunner): + """A Task that stores any exception encountered while running.""" + + def __init__(self, **kwargs): + super().__init__(**kwargs) + self.exc = None + + def run(self): + try: + super().run() + except BaseException as exc: + self.exc = exc + raise diff --git a/test/helpers_shared.py b/test/helpers_shared.py new file mode 100644 index 0000000000..49cf131808 --- /dev/null +++ b/test/helpers_shared.py @@ -0,0 +1,271 @@ +# Copyright 2019-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +import base64 +import gc +import os +import socket +import subprocess +import sys +import traceback +import unittest +from pathlib import Path + +try: + import ipaddress + + HAVE_IPADDRESS = True +except ImportError: + HAVE_IPADDRESS = False +from functools import wraps +from typing import no_type_check +from unittest import SkipTest + +from bson.son import SON +from pymongo import message +from pymongo.ssl_support import HAVE_SSL, _ssl # type:ignore[attr-defined] +from pymongo.synchronous.uri_parser import parse_uri + +if HAVE_SSL: + import ssl + + +# Enable debug output for uncollectable objects. PyPy does not have set_debug. +if hasattr(gc, "set_debug"): + gc.set_debug( + gc.DEBUG_UNCOLLECTABLE | getattr(gc, "DEBUG_OBJECTS", 0) | getattr(gc, "DEBUG_INSTANCES", 0) + ) + +# The host and port of a single mongod or mongos, or the seed host +# for a replica set. +host = os.environ.get("DB_IP", "localhost") +port = int(os.environ.get("DB_PORT", 27017)) +IS_SRV = "mongodb+srv" in host + +db_user = os.environ.get("DB_USER", "user") +db_pwd = os.environ.get("DB_PASSWORD", "password") + +HERE = Path(__file__).absolute() +CERT_PATH = str(HERE.parent / "certificates") +CLIENT_PEM = os.environ.get("CLIENT_PEM", os.path.join(CERT_PATH, "client.pem")) +CA_PEM = os.environ.get("CA_PEM", os.path.join(CERT_PATH, "ca.pem")) + +TLS_OPTIONS: dict = {"tls": True} +if CLIENT_PEM: + TLS_OPTIONS["tlsCertificateKeyFile"] = CLIENT_PEM +if CA_PEM: + TLS_OPTIONS["tlsCAFile"] = CA_PEM + +COMPRESSORS = os.environ.get("COMPRESSORS") +MONGODB_API_VERSION = os.environ.get("MONGODB_API_VERSION") +TEST_LOADBALANCER = bool(os.environ.get("TEST_LOAD_BALANCER")) +SINGLE_MONGOS_LB_URI = os.environ.get("SINGLE_MONGOS_LB_URI") +MULTI_MONGOS_LB_URI = os.environ.get("MULTI_MONGOS_LB_URI") + +if TEST_LOADBALANCER: + res = parse_uri(SINGLE_MONGOS_LB_URI or "") + host, port = res["nodelist"][0] + db_user = res["username"] or db_user + db_pwd = res["password"] or db_pwd + + +# Shared KMS data. 
+LOCAL_MASTER_KEY = base64.b64decode( + b"Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ" + b"5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" +) +AWS_CREDS = { + "accessKeyId": os.environ.get("FLE_AWS_KEY", ""), + "secretAccessKey": os.environ.get("FLE_AWS_SECRET", ""), +} +AWS_CREDS_2 = { + "accessKeyId": os.environ.get("FLE_AWS_KEY2", ""), + "secretAccessKey": os.environ.get("FLE_AWS_SECRET2", ""), +} +AZURE_CREDS = { + "tenantId": os.environ.get("FLE_AZURE_TENANTID", ""), + "clientId": os.environ.get("FLE_AZURE_CLIENTID", ""), + "clientSecret": os.environ.get("FLE_AZURE_CLIENTSECRET", ""), +} +GCP_CREDS = { + "email": os.environ.get("FLE_GCP_EMAIL", ""), + "privateKey": os.environ.get("FLE_GCP_PRIVATEKEY", ""), +} +KMIP_CREDS = {"endpoint": os.environ.get("FLE_KMIP_ENDPOINT", "localhost:5698")} +AWS_TEMP_CREDS = { + "accessKeyId": os.environ.get("CSFLE_AWS_TEMP_ACCESS_KEY_ID", ""), + "secretAccessKey": os.environ.get("CSFLE_AWS_TEMP_SECRET_ACCESS_KEY", ""), + "sessionToken": os.environ.get("CSFLE_AWS_TEMP_SESSION_TOKEN", ""), +} + +ALL_KMS_PROVIDERS = dict( + aws=AWS_CREDS, + azure=AZURE_CREDS, + gcp=GCP_CREDS, + local=dict(key=LOCAL_MASTER_KEY), + kmip=KMIP_CREDS, +) +DEFAULT_KMS_TLS = dict(kmip=dict(tlsCAFile=CA_PEM, tlsCertificateKeyFile=CLIENT_PEM)) + +# Ensure Evergreen metadata doesn't result in truncation +os.environ.setdefault("MONGOB_LOG_MAX_DOCUMENT_LENGTH", "2000") + + +def is_server_resolvable(): + """Returns True if 'server' is resolvable.""" + socket_timeout = socket.getdefaulttimeout() + socket.setdefaulttimeout(1) + try: + try: + socket.gethostbyname("server") + return True + except OSError: + return False + finally: + socket.setdefaulttimeout(socket_timeout) + + +def _create_user(authdb, user, pwd=None, roles=None, **kwargs): + cmd = SON([("createUser", user)]) + # X509 doesn't use a password + if pwd: + cmd["pwd"] = pwd + cmd["roles"] = roles or ["root"] + cmd.update(**kwargs) + return authdb.command(cmd) + + +def _all_users(db): + return {u["user"] for u in db.command("usersInfo").get("users", [])} + + +def sanitize_cmd(cmd): + cp = cmd.copy() + cp.pop("$clusterTime", None) + cp.pop("$db", None) + cp.pop("$readPreference", None) + cp.pop("lsid", None) + if MONGODB_API_VERSION: + # Stable API parameters + cp.pop("apiVersion", None) + # OP_MSG encoding may move the payload type one field to the + # end of the command. Do the same here. 
+ name = next(iter(cp)) + try: + identifier = message._FIELD_MAP[name] + docs = cp.pop(identifier) + cp[identifier] = docs + except KeyError: + pass + return cp + + +def sanitize_reply(reply): + cp = reply.copy() + cp.pop("$clusterTime", None) + cp.pop("operationTime", None) + return cp + + +def print_thread_tracebacks() -> None: + """Print all Python thread tracebacks.""" + for thread_id, frame in sys._current_frames().items(): + sys.stderr.write(f"\n--- Traceback for thread {thread_id} ---\n") + traceback.print_stack(frame, file=sys.stderr) + + +def print_thread_stacks(pid: int) -> None: + """Print all C-level thread stacks for a given process id.""" + if sys.platform == "darwin": + cmd = ["lldb", "--attach-pid", f"{pid}", "--batch", "--one-line", '"thread backtrace all"'] + else: + cmd = ["gdb", f"--pid={pid}", "--batch", '--eval-command="thread apply all bt"'] + + try: + res = subprocess.run( + cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, encoding="utf-8" + ) + except Exception as exc: + sys.stderr.write(f"Could not print C-level thread stacks because {cmd[0]} failed: {exc}") + else: + sys.stderr.write(res.stdout) + + +def _get_executors(topology): + executors = [] + for server in topology._servers.values(): + # Some MockMonitor do not have an _executor. + if hasattr(server._monitor, "_executor"): + executors.append(server._monitor._executor) + if hasattr(server._monitor, "_rtt_monitor"): + executors.append(server._monitor._rtt_monitor._executor) + executors.append(topology._Topology__events_executor) + if topology._srv_monitor: + executors.append(topology._srv_monitor._executor) + + return [e for e in executors if e is not None] + + +def print_running_topology(topology): + running = [e for e in _get_executors(topology) if not e._stopped] + if running: + print( + "WARNING: found Topology with running threads:\n" + f" Threads: {running}\n" + f" Topology: {topology}\n" + f" Creation traceback:\n{topology._settings._stack}" + ) + + +def test_cases(suite): + """Iterator over all TestCases within a TestSuite.""" + for suite_or_case in suite._tests: + if isinstance(suite_or_case, unittest.TestCase): + # unittest.TestCase + yield suite_or_case + else: + # unittest.TestSuite + yield from test_cases(suite_or_case) + + +# Helper method to workaround https://bugs.python.org/issue21724 +def clear_warning_registry(): + """Clear the __warningregistry__ for all modules.""" + for _, module in list(sys.modules.items()): + if hasattr(module, "__warningregistry__"): + module.__warningregistry__ = {} # type:ignore[attr-defined] + + +class SystemCertsPatcher: + def __init__(self, ca_certs): + if ( + ssl.OPENSSL_VERSION.lower().startswith("libressl") + and sys.platform == "darwin" + and not _ssl.IS_PYOPENSSL + ): + raise SkipTest( + "LibreSSL on OSX doesn't support setting CA certificates " + "using SSL_CERT_FILE environment variable." + ) + self.original_certs = os.environ.get("SSL_CERT_FILE") + # Tell OpenSSL where CA certificates live. + os.environ["SSL_CERT_FILE"] = ca_certs + + def disable(self): + if self.original_certs is None: + os.environ.pop("SSL_CERT_FILE") + else: + os.environ["SSL_CERT_FILE"] = self.original_certs diff --git a/test/lambda/build.sh b/test/lambda/build.sh deleted file mode 100755 index c7cc24eab2..0000000000 --- a/test/lambda/build.sh +++ /dev/null @@ -1,28 +0,0 @@ -#!/bin/bash -set -o errexit # Exit the script with error if any of the commands fail -set -o xtrace - -rm -rf mongodb/pymongo -rm -rf mongodb/gridfs -rm -rf mongodb/bson - -pushd ../.. 
-rm -f pymongo/*.so -rm -f bson/*.so -image="quay.io/pypa/manylinux2014_x86_64:latest" - -DOCKER=$(command -v docker) || true -if [ -z "$DOCKER" ]; then - PODMAN=$(command -v podman) || true - if [ -z "$PODMAN" ]; then - echo "docker or podman are required!" - exit 1 - fi - DOCKER=podman -fi - -$DOCKER run --rm -v "`pwd`:/src" $image /src/test/lambda/build_internal.sh -cp -r pymongo ./test/lambda/mongodb/pymongo -cp -r bson ./test/lambda/mongodb/bson -cp -r gridfs ./test/lambda/mongodb/gridfs -popd diff --git a/test/lambda/build_internal.sh b/test/lambda/build_internal.sh index fec488d32c..84423db4d1 100755 --- a/test/lambda/build_internal.sh +++ b/test/lambda/build_internal.sh @@ -1,5 +1,5 @@ #!/bin/bash -ex cd /src -PYTHON=/opt/python/cp39-cp39/bin/python +PYTHON=/opt/python/cp310-cp310/bin/python $PYTHON -m pip install -v -e . diff --git a/test/lambda/template.yaml b/test/lambda/template.yaml index 651ac4a8f8..11052f88dd 100644 --- a/test/lambda/template.yaml +++ b/test/lambda/template.yaml @@ -23,7 +23,7 @@ Resources: Variables: MONGODB_URI: !Ref MongoDbUri Handler: app.lambda_handler - Runtime: python3.9 + Runtime: python3.10 Architectures: - x86_64 Events: diff --git a/test/load_balancer/transactions.json b/test/load_balancer/transactions.json index 0dd04ee854..ca9c145217 100644 --- a/test/load_balancer/transactions.json +++ b/test/load_balancer/transactions.json @@ -1616,6 +1616,50 @@ ] } ] + }, + { + "description": "pinned connection is released when session ended", + "operations": [ + { + "name": "startTransaction", + "object": "session0" + }, + { + "name": "insertOne", + "object": "collection0", + "arguments": { + "document": { + "x": 1 + }, + "session": "session0" + } + }, + { + "name": "commitTransaction", + "object": "session0" + }, + { + "name": "endSession", + "object": "session0" + } + ], + "expectEvents": [ + { + "client": "client0", + "eventType": "cmap", + "events": [ + { + "connectionReadyEvent": {} + }, + { + "connectionCheckedOutEvent": {} + }, + { + "connectionCheckedInEvent": {} + } + ] + } + ] } ] } diff --git a/test/mockupdb/test_cluster_time.py b/test/mockupdb/test_cluster_time.py index ea879b7ea3..42ca916971 100644 --- a/test/mockupdb/test_cluster_time.py +++ b/test/mockupdb/test_cluster_time.py @@ -123,50 +123,11 @@ def test_monitor(self): client = self.simple_client(server.uri, heartbeatFrequencyMS=500) - request = server.receives("ismaster") - # No $clusterTime in first ismaster, only in subsequent ones - self.assertNotIn("$clusterTime", request) - request.ok(reply) - - # Next exchange: client returns first clusterTime, we send the second. - request = server.receives("ismaster") - self.assertIn("$clusterTime", request) - self.assertEqual(request["$clusterTime"]["clusterTime"], cluster_time) - cluster_time = Timestamp(cluster_time.time, cluster_time.inc + 1) - reply["$clusterTime"] = {"clusterTime": cluster_time} - request.reply(reply) - - # Third exchange: client returns second clusterTime. - request = server.receives("ismaster") - self.assertEqual(request["$clusterTime"]["clusterTime"], cluster_time) - - # Return command error with a new clusterTime. - cluster_time = Timestamp(cluster_time.time, cluster_time.inc + 1) - error = { - "ok": 0, - "code": 211, - "errmsg": "Cache Reader No keys found for HMAC ...", - "$clusterTime": {"clusterTime": cluster_time}, - } - request.reply(error) - - # PyMongo 3.11+ closes the monitoring connection on command errors. - - # Fourth exchange: the Monitor closes the connection and runs the - # handshake on a new connection. 
- request = server.receives("ismaster") - # No $clusterTime in first ismaster, only in subsequent ones - self.assertNotIn("$clusterTime", request) - - # Reply without $clusterTime. - reply.pop("$clusterTime") - request.reply(reply) - - # Fifth exchange: the Monitor attempt uses the clusterTime from - # the previous isMaster error. - request = server.receives("ismaster") - self.assertEqual(request["$clusterTime"]["clusterTime"], cluster_time) - request.reply(reply) + for _ in range(3): + request = server.receives("ismaster") + # No $clusterTime in heartbeats or handshakes. + self.assertNotIn("$clusterTime", request) + request.ok(reply) client.close() def test_collection_bulk_error(self): diff --git a/test/mockupdb/test_cursor_namespace.py b/test/mockupdb/test_cursor_namespace.py index 89b897f479..7538540bda 100644 --- a/test/mockupdb/test_cursor_namespace.py +++ b/test/mockupdb/test_cursor_namespace.py @@ -40,7 +40,7 @@ class TestCursorNamespace(PyMongoTestCase): @classmethod def setUpClass(cls): - cls.server = MockupDB(auto_ismaster={"maxWireVersion": 7}) + cls.server = MockupDB(auto_ismaster={"maxWireVersion": 8}) cls.server.run() cls.client = cls.unmanaged_simple_client(cls.server.uri) diff --git a/test/mockupdb/test_id_ordering.py b/test/mockupdb/test_id_ordering.py deleted file mode 100644 index 7e2c91d592..0000000000 --- a/test/mockupdb/test_id_ordering.py +++ /dev/null @@ -1,94 +0,0 @@ -# Copyright 2024-present MongoDB, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
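The mockupdb-based tests in this change, including the simplified monitor test above, all follow the same scripted request/reply pattern. A minimal, self-contained sketch of that pattern, with illustrative reply fields rather than the suite's exact values::

    from mockupdb import MockupDB

    from pymongo import MongoClient

    server = MockupDB()
    server.run()  # Serve scripted wire-protocol replies on an ephemeral port.
    try:
        client = MongoClient(server.uri, heartbeatFrequencyMS=500)
        # Each monitor heartbeat arrives as a request we can inspect and answer.
        request = server.receives("ismaster")
        assert "$clusterTime" not in request  # Heartbeats never gossip $clusterTime.
        request.ok(ismaster=True, minWireVersion=0, maxWireVersion=8)
        client.close()
    finally:
        server.stop()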
- -from __future__ import annotations - -from test import PyMongoTestCase - -import pytest - -from pymongo import InsertOne - -try: - from mockupdb import MockupDB, OpMsg, go, going - - _HAVE_MOCKUPDB = True -except ImportError: - _HAVE_MOCKUPDB = False - - -from bson.objectid import ObjectId - -pytestmark = pytest.mark.mockupdb - - -# https://github.com/mongodb/specifications/blob/master/source/crud/tests/README.md#16-generated-document-identifiers-are-the-first-field-in-their-document -class TestIdOrdering(PyMongoTestCase): - def test_16_generated_document_ids_are_first_field(self): - server = MockupDB() - server.autoresponds( - "hello", - isWritablePrimary=True, - msg="isdbgrid", - minWireVersion=0, - maxWireVersion=25, - helloOk=True, - serviceId=ObjectId(), - ) - server.run() - self.addCleanup(server.stop) - - # We also verify that the original document contains an _id field after each insert - document = {"x": 1} - - client = self.simple_client(server.uri, loadBalanced=True) - collection = client.db.coll - with going(collection.insert_one, document): - request = server.receives() - self.assertEqual("_id", next(iter(request["documents"][0]))) - request.reply({"ok": 1}) - self.assertIn("_id", document) - - document = {"x1": 1} - - with going(collection.bulk_write, [InsertOne(document)]): - request = server.receives() - self.assertEqual("_id", next(iter(request["documents"][0]))) - request.reply({"ok": 1}) - self.assertIn("_id", document) - - document = {"x2": 1} - with going(client.bulk_write, [InsertOne(namespace="db.coll", document=document)]): - request = server.receives() - self.assertEqual("_id", next(iter(request["ops"][0]["document"]))) - request.reply({"ok": 1}) - self.assertIn("_id", document) - - # Re-ordering user-supplied _id fields is not required by the spec, but PyMongo does it for performance reasons - with going(collection.insert_one, {"x": 1, "_id": 111}): - request = server.receives() - self.assertEqual("_id", next(iter(request["documents"][0]))) - request.reply({"ok": 1}) - - with going(collection.bulk_write, [InsertOne({"x1": 1, "_id": 1111})]): - request = server.receives() - self.assertEqual("_id", next(iter(request["documents"][0]))) - request.reply({"ok": 1}) - - with going( - client.bulk_write, [InsertOne(namespace="db.coll", document={"x2": 1, "_id": 11111})] - ): - request = server.receives() - self.assertEqual("_id", next(iter(request["ops"][0]["document"]))) - request.reply({"ok": 1}) diff --git a/test/mod_wsgi_test/test_client.py b/test/mod_wsgi_test/test_client.py index 88eeb7a57e..c122863bfa 100644 --- a/test/mod_wsgi_test/test_client.py +++ b/test/mod_wsgi_test/test_client.py @@ -24,7 +24,7 @@ from urllib.request import urlopen -def parse_args(): +def parse_args(args=None): parser = OptionParser( """usage: %prog [options] mode url [...] 
@@ -70,7 +70,7 @@ def parse_args(): ) try: - options, args = parser.parse_args() + options, args = parser.parse_args(args or sys.argv[1:]) mode, urls = args[0], args[1:] except (ValueError, IndexError): parser.print_usage() @@ -103,11 +103,11 @@ def __init__(self, options, urls, nrequests_per_thread): def run(self): for _i in range(self.nrequests_per_thread): try: - get(urls) + get(self.urls) except Exception as e: print(e) - if not options.continue_: + if not self.options.continue_: thread.interrupt_main() thread.exit() @@ -117,7 +117,7 @@ def run(self): URLGetterThread.counter += 1 counter = URLGetterThread.counter - should_print = options.verbose and not counter % 1000 + should_print = self.options.verbose and not counter % 1000 if should_print: print(counter) diff --git a/test/ocsp/test_ocsp.py b/test/ocsp/test_ocsp.py index a42b3a34ee..b20eaa35d6 100644 --- a/test/ocsp/test_ocsp.py +++ b/test/ocsp/test_ocsp.py @@ -19,6 +19,7 @@ import os import sys import unittest +from pathlib import Path import pytest @@ -38,15 +39,10 @@ FORMAT = "%(asctime)s %(levelname)s %(module)s %(message)s" logging.basicConfig(format=FORMAT, level=logging.DEBUG) -if sys.platform == "win32": - # The non-stapled OCSP endpoint check is slow on Windows. - TIMEOUT_MS = 5000 -else: - TIMEOUT_MS = 500 - def _connect(options): - uri = f"mongodb://localhost:27017/?serverSelectionTimeoutMS={TIMEOUT_MS}&tlsCAFile={CA_FILE}&{options}" + assert CA_FILE is not None + uri = f"mongodb://localhost:27017/?serverSelectionTimeoutMS=10000&tlsCAFile={Path(CA_FILE).as_posix()}&{options}" print(uri) try: client = pymongo.MongoClient(uri) diff --git a/test/performance/async_perf_test.py b/test/performance/async_perf_test.py new file mode 100644 index 0000000000..6eb31ea4fe --- /dev/null +++ b/test/performance/async_perf_test.py @@ -0,0 +1,496 @@ +# Copyright 2015-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Asynchronous Tests for the MongoDB Driver Performance Benchmarking Spec. 
+
+See https://github.com/mongodb/specifications/blob/master/source/benchmarking/benchmarking.md
+
+
+To set up the benchmarks locally::
+
+    python -m pip install simplejson
+    git clone --depth 1 https://github.com/mongodb/specifications.git
+    pushd specifications/source/benchmarking/data
+    tar xf extended_bson.tgz
+    tar xf parallel.tgz
+    tar xf single_and_multi_document.tgz
+    popd
+    export TEST_PATH="specifications/source/benchmarking/data"
+    export OUTPUT_FILE="results.json"
+
+Then to run all benchmarks quickly::
+
+    FASTBENCH=1 python test/performance/async_perf_test.py -v
+
+To run individual benchmarks quickly::
+
+    FASTBENCH=1 python test/performance/async_perf_test.py -v TestRunCommand TestFindManyAndEmptyCursor
+"""
+from __future__ import annotations
+
+import asyncio
+import os
+import sys
+import tempfile
+import time
+import warnings
+from typing import Any, List, Optional, Union
+
+import pytest
+
+try:
+    import simplejson as json
+except ImportError:
+    import json  # type: ignore[no-redef]
+
+sys.path[0:0] = [""]
+
+from test.asynchronous import AsyncPyMongoTestCase, async_client_context, unittest
+
+from bson import encode
+from gridfs import AsyncGridFSBucket
+from pymongo import (
+    DeleteOne,
+    InsertOne,
+    ReplaceOne,
+)
+
+pytestmark = pytest.mark.perf
+
+# Spec says to use at least 1 minute of cumulative execution time and up to 100 iterations or 5 minutes,
+# but that makes the benchmarks too slow. Instead, we use at least 30 seconds and at most 120 seconds.
+NUM_ITERATIONS = 100
+MIN_ITERATION_TIME = 30
+MAX_ITERATION_TIME = 120
+NUM_DOCS = 10000
+# When debugging or prototyping it's often useful to run the benchmarks locally; set FASTBENCH=1 to run quickly.
+if bool(os.getenv("FASTBENCH")):
+    NUM_ITERATIONS = 2
+    MIN_ITERATION_TIME = 1
+    MAX_ITERATION_TIME = 30
+    NUM_DOCS = 1000
+
+TEST_PATH = os.environ.get(
+    "TEST_PATH", os.path.join(os.path.dirname(os.path.realpath(__file__)), os.path.join("data"))
+)
+
+OUTPUT_FILE = os.environ.get("OUTPUT_FILE")
+
+result_data: List = []
+
+
+def tearDownModule():
+    output = json.dumps(result_data, indent=4)
+    if OUTPUT_FILE:
+        with open(OUTPUT_FILE, "w") as opf:
+            opf.write(output)
+    else:
+        print(output)
+
+
+class Timer:
+    def __enter__(self):
+        self.start = time.monotonic()
+        return self
+
+    def __exit__(self, *args):
+        self.end = time.monotonic()
+        self.interval = self.end - self.start
+
+
+async def concurrent(n_tasks, func):
+    tasks = [func() for _ in range(n_tasks)]
+    await asyncio.gather(*tasks)
+
+
+class PerformanceTest:
+    dataset: str
+    data_size: int
+    fail: Any
+    n_tasks: int = 1
+    did_init: bool = False
+
+    async def asyncSetUp(self):
+        await async_client_context.init()
+        self.setup_time = time.monotonic()
+
+    async def asyncTearDown(self):
+        duration = time.monotonic() - self.setup_time
+        # Remove "Test" so that TestFlatEncoding is reported as "FlatEncoding".
+ name = self.__class__.__name__[4:] + median = self.percentile(50) + megabytes_per_sec = (self.data_size * self.n_tasks) / median / 1000000 + print( + f"Completed {self.__class__.__name__} {megabytes_per_sec:.3f} MB/s, MEDIAN={self.percentile(50):.3f}s, " + f"total time={duration:.3f}s, iterations={len(self.results)}" + ) + result_data.append( + { + "info": { + "test_name": name, + "args": { + "tasks": self.n_tasks, + }, + }, + "metrics": [ + { + "name": "megabytes_per_sec", + "type": "MEDIAN", + "value": megabytes_per_sec, + "metadata": { + "improvement_direction": "up", + "measurement_unit": "megabytes_per_second", + }, + }, + ], + } + ) + + async def before(self): + pass + + async def do_task(self): + raise NotImplementedError + + async def after(self): + pass + + def percentile(self, percentile): + if hasattr(self, "results"): + sorted_results = sorted(self.results) + percentile_index = int(len(sorted_results) * percentile / 100) - 1 + return sorted_results[percentile_index] + else: + self.fail("Test execution failed") + return None + + async def runTest(self): + results = [] + start = time.monotonic() + i = 0 + while True: + i += 1 + await self.before() + with Timer() as timer: + if self.n_tasks == 1: + await self.do_task() + else: + await concurrent(self.n_tasks, self.do_task) + await self.after() + results.append(timer.interval) + duration = time.monotonic() - start + if duration > MIN_ITERATION_TIME and i >= NUM_ITERATIONS: + break + if i >= NUM_ITERATIONS: + break + if duration > MAX_ITERATION_TIME: + with warnings.catch_warnings(): + warnings.simplefilter("default") + warnings.warn( + f"{self.__class__.__name__} timed out after {MAX_ITERATION_TIME}s, completed {i}/{NUM_ITERATIONS} iterations." + ) + + break + + self.results = results + + +# SINGLE-DOC BENCHMARKS +class TestRunCommand(PerformanceTest, AsyncPyMongoTestCase): + data_size = len(encode({"hello": True})) * NUM_DOCS + + async def asyncSetUp(self): + await super().asyncSetUp() + self.client = async_client_context.client + await self.client.drop_database("perftest") + + async def do_task(self): + command = self.client.perftest.command + for _ in range(NUM_DOCS): + await command("hello", True) + + +class TestRunCommand8Tasks(TestRunCommand): + n_tasks = 8 + + +class TestRunCommand80Tasks(TestRunCommand): + n_tasks = 80 + + +class TestRunCommandUnlimitedTasks(TestRunCommand): + async def do_task(self): + command = self.client.perftest.command + await asyncio.gather(*[command("hello", True) for _ in range(NUM_DOCS)]) + + +class TestDocument(PerformanceTest): + async def asyncSetUp(self): + await super().asyncSetUp() + # Location of test data. 
+ with open( # noqa: ASYNC101 + os.path.join(TEST_PATH, os.path.join("single_and_multi_document", self.dataset)) + ) as data: + self.document = json.loads(data.read()) + + self.client = async_client_context.client + await self.client.drop_database("perftest") + + async def asyncTearDown(self): + await super().asyncTearDown() + await self.client.drop_database("perftest") + + async def before(self): + self.corpus = await self.client.perftest.create_collection("corpus") + + async def after(self): + await self.client.perftest.drop_collection("corpus") + + +class FindTest(TestDocument): + dataset = "tweet.json" + + async def asyncSetUp(self): + await super().asyncSetUp() + self.data_size = len(encode(self.document)) * NUM_DOCS + documents = [self.document.copy() for _ in range(NUM_DOCS)] + self.corpus = self.client.perftest.corpus + result = await self.corpus.insert_many(documents) + self.inserted_ids = result.inserted_ids + + async def before(self): + pass + + async def after(self): + pass + + +class TestFindOneByID(FindTest, AsyncPyMongoTestCase): + async def do_task(self): + find_one = self.corpus.find_one + for _id in self.inserted_ids: + await find_one({"_id": _id}) + + +class TestFindOneByID8Tasks(TestFindOneByID): + n_tasks = 8 + + +class TestFindOneByID80Tasks(TestFindOneByID): + n_tasks = 80 + + +class TestFindOneByIDUnlimitedTasks(TestFindOneByID): + async def do_task(self): + find_one = self.corpus.find_one + await asyncio.gather(*[find_one({"_id": _id}) for _id in self.inserted_ids]) + + +class SmallDocInsertTest(TestDocument): + dataset = "small_doc.json" + + async def asyncSetUp(self): + await super().asyncSetUp() + self.data_size = len(encode(self.document)) * NUM_DOCS + self.documents = [self.document.copy() for _ in range(NUM_DOCS)] + + +class SmallDocMixedTest(TestDocument): + dataset = "small_doc.json" + + async def asyncSetUp(self): + await super().asyncSetUp() + self.data_size = len(encode(self.document)) * NUM_DOCS * 2 + self.documents = [self.document.copy() for _ in range(NUM_DOCS)] + + +class TestSmallDocInsertOne(SmallDocInsertTest, AsyncPyMongoTestCase): + async def do_task(self): + insert_one = self.corpus.insert_one + for doc in self.documents: + await insert_one(doc) + + +class TestSmallDocInsertOneUnlimitedTasks(SmallDocInsertTest, AsyncPyMongoTestCase): + async def do_task(self): + insert_one = self.corpus.insert_one + await asyncio.gather(*[insert_one(doc) for doc in self.documents]) + + +class LargeDocInsertTest(TestDocument): + dataset = "large_doc.json" + + async def asyncSetUp(self): + await super().asyncSetUp() + n_docs = 10 + self.data_size = len(encode(self.document)) * n_docs + self.documents = [self.document.copy() for _ in range(n_docs)] + + +class TestLargeDocInsertOne(LargeDocInsertTest, AsyncPyMongoTestCase): + async def do_task(self): + insert_one = self.corpus.insert_one + for doc in self.documents: + await insert_one(doc) + + +class TestLargeDocInsertOneUnlimitedTasks(LargeDocInsertTest, AsyncPyMongoTestCase): + async def do_task(self): + insert_one = self.corpus.insert_one + await asyncio.gather(*[insert_one(doc) for doc in self.documents]) + + +# MULTI-DOC BENCHMARKS +class TestFindManyAndEmptyCursor(FindTest, AsyncPyMongoTestCase): + async def do_task(self): + await self.corpus.find().to_list() + + +class TestFindManyAndEmptyCursor8Tasks(TestFindManyAndEmptyCursor): + n_tasks = 8 + + +class TestFindManyAndEmptyCursor80Tasks(TestFindManyAndEmptyCursor): + n_tasks = 80 + + +class TestSmallDocBulkInsert(SmallDocInsertTest, AsyncPyMongoTestCase): 
+ async def do_task(self): + await self.corpus.insert_many(self.documents, ordered=True) + + +class TestSmallDocCollectionBulkInsert(SmallDocInsertTest, AsyncPyMongoTestCase): + async def asyncSetUp(self): + await super().asyncSetUp() + self.models = [] + for doc in self.documents: + self.models.append(InsertOne(namespace="perftest.corpus", document=doc)) + + async def do_task(self): + await self.corpus.bulk_write(self.models, ordered=True) + + +class TestSmallDocClientBulkInsert(SmallDocInsertTest, AsyncPyMongoTestCase): + @async_client_context.require_version_min(8, 0, 0, -24) + async def asyncSetUp(self): + await super().asyncSetUp() + self.models = [] + for doc in self.documents: + self.models.append(InsertOne(namespace="perftest.corpus", document=doc)) + + @async_client_context.require_version_min(8, 0, 0, -24) + async def do_task(self): + await self.client.bulk_write(self.models, ordered=True) + + +class TestSmallDocBulkMixedOps(SmallDocMixedTest, AsyncPyMongoTestCase): + async def asyncSetUp(self): + await super().asyncSetUp() + self.models: list[Union[InsertOne, ReplaceOne, DeleteOne]] = [] + for doc in self.documents: + self.models.append(InsertOne(document=doc)) + self.models.append(ReplaceOne(filter={}, replacement=doc.copy(), upsert=True)) + self.models.append(DeleteOne(filter={})) + + async def do_task(self): + await self.corpus.bulk_write(self.models, ordered=True) + + +class TestSmallDocClientBulkMixedOps(SmallDocMixedTest, AsyncPyMongoTestCase): + @async_client_context.require_version_min(8, 0, 0, -24) + async def asyncSetUp(self): + await super().asyncSetUp() + self.models: list[Union[InsertOne, ReplaceOne, DeleteOne]] = [] + for doc in self.documents: + self.models.append(InsertOne(namespace="perftest.corpus", document=doc)) + self.models.append( + ReplaceOne( + namespace="perftest.corpus", filter={}, replacement=doc.copy(), upsert=True + ) + ) + self.models.append(DeleteOne(namespace="perftest.corpus", filter={})) + + @async_client_context.require_version_min(8, 0, 0, -24) + async def do_task(self): + await self.client.bulk_write(self.models, ordered=True) + + +class TestLargeDocBulkInsert(LargeDocInsertTest, AsyncPyMongoTestCase): + async def do_task(self): + await self.corpus.insert_many(self.documents, ordered=True) + + +class TestLargeDocCollectionBulkInsert(LargeDocInsertTest, AsyncPyMongoTestCase): + async def asyncSetUp(self): + await super().asyncSetUp() + self.models = [] + for doc in self.documents: + self.models.append(InsertOne(namespace="perftest.corpus", document=doc)) + + async def do_task(self): + await self.corpus.bulk_write(self.models, ordered=True) + + +class TestLargeDocClientBulkInsert(LargeDocInsertTest, AsyncPyMongoTestCase): + @async_client_context.require_version_min(8, 0, 0, -24) + async def asyncSetUp(self): + await super().asyncSetUp() + self.models = [] + for doc in self.documents: + self.models.append(InsertOne(namespace="perftest.corpus", document=doc)) + + @async_client_context.require_version_min(8, 0, 0, -24) + async def do_task(self): + await self.client.bulk_write(self.models, ordered=True) + + +class GridFsTest(PerformanceTest): + async def asyncSetUp(self): + await super().asyncSetUp() + self.client = async_client_context.client + await self.client.drop_database("perftest") + + gridfs_path = os.path.join( + TEST_PATH, os.path.join("single_and_multi_document", "gridfs_large.bin") + ) + with open(gridfs_path, "rb") as data: # noqa: ASYNC101 + self.document = data.read() + self.data_size = len(self.document) + self.bucket = 
AsyncGridFSBucket(self.client.perftest) + + async def asyncTearDown(self): + await super().asyncTearDown() + await self.client.drop_database("perftest") + + +class TestGridFsUpload(GridFsTest, AsyncPyMongoTestCase): + async def before(self): + # Create the bucket. + await self.bucket.upload_from_stream("init", b"x") + + async def do_task(self): + await self.bucket.upload_from_stream("gridfstest", self.document) + + +class TestGridFsDownload(GridFsTest, AsyncPyMongoTestCase): + async def asyncSetUp(self): + await super().asyncSetUp() + self.uploaded_id = await self.bucket.upload_from_stream("gridfstest", self.document) + + async def do_task(self): + await (await self.bucket.open_download_stream(self.uploaded_id)).read() + + +if __name__ == "__main__": + unittest.main() diff --git a/test/performance/perf_test.py b/test/performance/perf_test.py index 6e269e25b0..5688d28d2d 100644 --- a/test/performance/perf_test.py +++ b/test/performance/perf_test.py @@ -151,7 +151,15 @@ def tearDown(self): }, }, "metrics": [ - {"name": "megabytes_per_sec", "type": "MEDIAN", "value": megabytes_per_sec}, + { + "name": "megabytes_per_sec", + "type": "MEDIAN", + "value": megabytes_per_sec, + "metadata": { + "improvement_direction": "up", + "measurement_unit": "megabytes_per_second", + }, + }, ], } ) @@ -443,6 +451,17 @@ def do_task(self): self.corpus.insert_many(self.documents, ordered=True) +class TestSmallDocCollectionBulkInsert(SmallDocInsertTest, unittest.TestCase): + def setUp(self): + super().setUp() + self.models = [] + for doc in self.documents: + self.models.append(InsertOne(namespace="perftest.corpus", document=doc)) + + def do_task(self): + self.corpus.bulk_write(self.models, ordered=True) + + class TestSmallDocClientBulkInsert(SmallDocInsertTest, unittest.TestCase): @client_context.require_version_min(8, 0, 0, -24) def setUp(self): @@ -493,6 +512,17 @@ def do_task(self): self.corpus.insert_many(self.documents, ordered=True) +class TestLargeDocCollectionBulkInsert(LargeDocInsertTest, unittest.TestCase): + def setUp(self): + super().setUp() + self.models = [] + for doc in self.documents: + self.models.append(InsertOne(namespace="perftest.corpus", document=doc)) + + def do_task(self): + self.corpus.bulk_write(self.models, ordered=True) + + class TestLargeDocClientBulkInsert(LargeDocInsertTest, unittest.TestCase): @client_context.require_version_min(8, 0, 0, -24) def setUp(self): diff --git a/test/retryable_reads/unified/estimatedDocumentCount.json b/test/retryable_reads/unified/estimatedDocumentCount.json index 75a676b9b6..2ee29f6799 100644 --- a/test/retryable_reads/unified/estimatedDocumentCount.json +++ b/test/retryable_reads/unified/estimatedDocumentCount.json @@ -195,7 +195,7 @@ "object": "collection1", "name": "estimatedDocumentCount", "expectError": { - "isError": true + "isClientError": true } } ], @@ -241,7 +241,7 @@ "object": "collection0", "name": "estimatedDocumentCount", "expectError": { - "isError": true + "isClientError": true } } ], diff --git a/test/retryable_writes/unified/aggregate-out-merge.json b/test/retryable_writes/unified/aggregate-out-merge.json index c46bf8c31f..fd25c345ac 100644 --- a/test/retryable_writes/unified/aggregate-out-merge.json +++ b/test/retryable_writes/unified/aggregate-out-merge.json @@ -1,6 +1,6 @@ { "description": "aggregate with $out/$merge does not set txnNumber", - "schemaVersion": "1.3", + "schemaVersion": "1.4", "runOnRequirements": [ { "minServerVersion": "3.6", @@ -45,6 +45,11 @@ "tests": [ { "description": "aggregate with $out does not set txnNumber", 
+ "runOnRequirements": [ + { + "serverless": "forbid" + } + ], "operations": [ { "object": "collection0", diff --git a/test/retryable_writes/unified/insertOne-serverErrors.json b/test/retryable_writes/unified/insertOne-serverErrors.json index f404adcaf4..8edafb7029 100644 --- a/test/retryable_writes/unified/insertOne-serverErrors.json +++ b/test/retryable_writes/unified/insertOne-serverErrors.json @@ -739,7 +739,7 @@ ] }, { - "description": "InsertOne fails after WriteConcernError WriteConcernFailed", + "description": "InsertOne fails after WriteConcernError WriteConcernTimeout", "operations": [ { "name": "failPoint", @@ -757,7 +757,6 @@ ], "writeConcernError": { "code": 64, - "codeName": "WriteConcernFailed", "errmsg": "waiting for replication timed out", "errInfo": { "wtimeout": true diff --git a/test/run_command/unified/runCommand.json b/test/run_command/unified/runCommand.json index 007e514bd7..fde9de92e6 100644 --- a/test/run_command/unified/runCommand.json +++ b/test/run_command/unified/runCommand.json @@ -229,7 +229,6 @@ { "topologies": [ "replicaset", - "sharded-replicaset", "load-balanced", "sharded" ] @@ -493,7 +492,7 @@ { "minServerVersion": "4.2", "topologies": [ - "sharded-replicaset", + "sharded", "load-balanced" ] } diff --git a/test/sdam_monitoring/discovered_standalone.json b/test/sdam_monitoring/discovered_standalone.json index dd8f7fc51e..097203694e 100644 --- a/test/sdam_monitoring/discovered_standalone.json +++ b/test/sdam_monitoring/discovered_standalone.json @@ -11,7 +11,7 @@ "helloOk": true, "isWritablePrimary": true, "minWireVersion": 0, - "maxWireVersion": 6 + "maxWireVersion": 21 } ] ], diff --git a/test/sdam_monitoring/replica_set_with_no_primary.json b/test/sdam_monitoring/replica_set_with_no_primary.json index 950e32efe1..41d048729d 100644 --- a/test/sdam_monitoring/replica_set_with_no_primary.json +++ b/test/sdam_monitoring/replica_set_with_no_primary.json @@ -19,7 +19,7 @@ "b:27017" ], "minWireVersion": 0, - "maxWireVersion": 6 + "maxWireVersion": 21 } ] ], diff --git a/test/sdam_monitoring/replica_set_with_primary.json b/test/sdam_monitoring/replica_set_with_primary.json index 2ad94d6e6a..3ccc127d1d 100644 --- a/test/sdam_monitoring/replica_set_with_primary.json +++ b/test/sdam_monitoring/replica_set_with_primary.json @@ -18,7 +18,7 @@ "b:27017" ], "minWireVersion": 0, - "maxWireVersion": 6 + "maxWireVersion": 21 } ] ], diff --git a/test/sdam_monitoring/replica_set_with_removal.json b/test/sdam_monitoring/replica_set_with_removal.json index ae28faa30c..dc6fbe7e7d 100644 --- a/test/sdam_monitoring/replica_set_with_removal.json +++ b/test/sdam_monitoring/replica_set_with_removal.json @@ -69,7 +69,7 @@ "a:27017" ], "minWireVersion": 0, - "maxWireVersion": 6 + "maxWireVersion": 21 } ], [ diff --git a/test/sdam_monitoring/required_replica_set.json b/test/sdam_monitoring/required_replica_set.json index 401c5d99c5..1f4e5c1d71 100644 --- a/test/sdam_monitoring/required_replica_set.json +++ b/test/sdam_monitoring/required_replica_set.json @@ -18,7 +18,7 @@ "b:27017" ], "minWireVersion": 0, - "maxWireVersion": 6 + "maxWireVersion": 21 } ] ], diff --git a/test/sdam_monitoring/standalone.json b/test/sdam_monitoring/standalone.json index 821a1525d4..f375a383ca 100644 --- a/test/sdam_monitoring/standalone.json +++ b/test/sdam_monitoring/standalone.json @@ -11,7 +11,7 @@ "helloOk": true, "isWritablePrimary": true, "minWireVersion": 0, - "maxWireVersion": 6 + "maxWireVersion": 21 } ] ], diff --git a/test/sdam_monitoring/standalone_suppress_equal_description_changes.json 
b/test/sdam_monitoring/standalone_suppress_equal_description_changes.json index 5958e2d26c..4d046ff8ed 100644 --- a/test/sdam_monitoring/standalone_suppress_equal_description_changes.json +++ b/test/sdam_monitoring/standalone_suppress_equal_description_changes.json @@ -11,7 +11,7 @@ "helloOk": true, "isWritablePrimary": true, "minWireVersion": 0, - "maxWireVersion": 6 + "maxWireVersion": 21 } ], [ @@ -21,7 +21,7 @@ "helloOk": true, "isWritablePrimary": true, "minWireVersion": 0, - "maxWireVersion": 6 + "maxWireVersion": 21 } ] ], diff --git a/test/server_selection_logging/standalone.json b/test/server_selection_logging/standalone.json index 3152d0bbf3..fa01ad9911 100644 --- a/test/server_selection_logging/standalone.json +++ b/test/server_selection_logging/standalone.json @@ -47,29 +47,9 @@ } } ], - "initialData": [ - { - "collectionName": "server-selection", - "databaseName": "logging-tests", - "documents": [ - { - "_id": 1, - "x": 11 - }, - { - "_id": 2, - "x": 22 - }, - { - "_id": 3, - "x": 33 - } - ] - } - ], "tests": [ { - "description": "A successful insert operation", + "description": "A successful operation", "operations": [ { "name": "waitForEvent", @@ -250,912 +230,6 @@ ] } ] - }, - { - "description": "A successful find operation", - "operations": [ - { - "name": "waitForEvent", - "object": "testRunner", - "arguments": { - "client": "client", - "event": { - "topologyDescriptionChangedEvent": {} - }, - "count": 2 - } - }, - { - "name": "findOne", - "object": "collection", - "arguments": { - "filter": { - "x": 1 - } - } - } - - ], - "expectLogMessages": [ - { - "client": "client", - "messages": [ - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection started", - "selector": { - "$$exists": true - }, - "operation": "find", - "topologyDescription": { - "$$exists": true - } - } - }, - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection succeeded", - "selector": { - "$$exists": true - }, - "operation": "find", - "topologyDescription": { - "$$exists": true - }, - "serverHost": { - "$$type": "string" - }, - "serverPort": { - "$$type": [ - "int", - "long" - ] - } - } - } - ] - } - ] - }, - { - "description": "A successful findAndModify operation", - "operations": [ - { - "name": "waitForEvent", - "object": "testRunner", - "arguments": { - "client": "client", - "event": { - "topologyDescriptionChangedEvent": {} - }, - "count": 2 - } - }, - { - "name": "findOneAndReplace", - "object": "collection", - "arguments": { - "filter": { - "x": 1 - }, - "replacement": { - "x": 11 - } - } - } - ], - "expectLogMessages": [ - { - "client": "client", - "messages": [ - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection started", - "selector": { - "$$exists": true - }, - "operation": "findAndModify", - "topologyDescription": { - "$$exists": true - } - } - }, - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection succeeded", - "selector": { - "$$exists": true - }, - "operation": "findAndModify", - "topologyDescription": { - "$$exists": true - }, - "serverHost": { - "$$type": "string" - }, - "serverPort": { - "$$type": [ - "int", - "long" - ] - } - } - } - ] - } - ] - }, - { - "description": "A successful find and getMore operation", - "operations": [ - { - "name": "waitForEvent", - "object": "testRunner", - "arguments": { - "client": "client", - "event": { - "topologyDescriptionChangedEvent": {} - }, - "count": 2 - } - }, 
- { - "name": "find", - "object": "collection", - "arguments": { - "batchSize": 3 - } - } - ], - "expectLogMessages": [ - { - "client": "client", - "messages": [ - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection started", - "selector": { - "$$exists": true - }, - "operation": "find", - "topologyDescription": { - "$$exists": true - } - } - }, - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection succeeded", - "selector": { - "$$exists": true - }, - "operation": "find", - "topologyDescription": { - "$$exists": true - }, - "serverHost": { - "$$type": "string" - }, - "serverPort": { - "$$type": [ - "int", - "long" - ] - } - } - }, - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection started", - "selector": { - "$$exists": true - }, - "operation": "getMore", - "topologyDescription": { - "$$exists": true - } - } - }, - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection succeeded", - "selector": { - "$$exists": true - }, - "operation": "getMore", - "topologyDescription": { - "$$exists": true - }, - "serverHost": { - "$$type": "string" - }, - "serverPort": { - "$$type": [ - "int", - "long" - ] - } - } - } - ] - } - ] - }, - { - "description": "A successful aggregate operation", - "operations": [ - { - "name": "waitForEvent", - "object": "testRunner", - "arguments": { - "client": "client", - "event": { - "topologyDescriptionChangedEvent": {} - }, - "count": 2 - } - }, - { - "name": "aggregate", - "object": "collection", - "arguments": { - "pipeline": [ - { - "$match": { - "_id": { - "$gt": 1 - } - } - } - ] - } - } - ], - "expectLogMessages": [ - { - "client": "client", - "messages": [ - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection started", - "selector": { - "$$exists": true - }, - "operation": "aggregate", - "topologyDescription": { - "$$exists": true - } - } - }, - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection succeeded", - "selector": { - "$$exists": true - }, - "operation": "aggregate", - "topologyDescription": { - "$$exists": true - }, - "serverHost": { - "$$type": "string" - }, - "serverPort": { - "$$type": [ - "int", - "long" - ] - } - } - } - ] - } - ] - }, - { - "description": "A successful count operation", - "operations": [ - { - "name": "waitForEvent", - "object": "testRunner", - "arguments": { - "client": "client", - "event": { - "topologyDescriptionChangedEvent": {} - }, - "count": 2 - } - }, - { - "name": "countDocuments", - "object": "collection", - "arguments": { - "filter": {} - } - } - ], - "expectLogMessages": [ - { - "client": "client", - "messages": [ - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection started", - "selector": { - "$$exists": true - }, - "operation": "count", - "topologyDescription": { - "$$exists": true - } - } - }, - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection succeeded", - "selector": { - "$$exists": true - }, - "operation": "count", - "topologyDescription": { - "$$exists": true - }, - "serverHost": { - "$$type": "string" - }, - "serverPort": { - "$$type": [ - "int", - "long" - ] - } - } - } - ] - } - ] - }, - { - "description": "A successful distinct operation", - "operations": [ - { - "name": "waitForEvent", - "object": "testRunner", - "arguments": { - 
"client": "client", - "event": { - "topologyDescriptionChangedEvent": {} - }, - "count": 2 - } - }, - { - "name": "distinct", - "object": "collection", - "arguments": { - "fieldName": "x", - "filter": {} - } - } - ], - "expectLogMessages": [ - { - "client": "client", - "messages": [ - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection started", - "selector": { - "$$exists": true - }, - "operation": "distinct", - "topologyDescription": { - "$$exists": true - } - } - }, - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection succeeded", - "selector": { - "$$exists": true - }, - "operation": "distinct", - "topologyDescription": { - "$$exists": true - }, - "serverHost": { - "$$type": "string" - }, - "serverPort": { - "$$type": [ - "int", - "long" - ] - } - } - } - ] - } - ] - }, - { - "description": "Successful collection management operations", - "operations": [ - { - "name": "waitForEvent", - "object": "testRunner", - "arguments": { - "client": "client", - "event": { - "topologyDescriptionChangedEvent": {} - }, - "count": 2 - } - }, - { - "name": "createCollection", - "object": "database", - "arguments": { - "collection": "foo" - } - }, - { - "name": "listCollections", - "object": "database" - }, - { - "name": "dropCollection", - "object": "database", - "arguments": { - "collection": "foo" - } - } - ], - "expectLogMessages": [ - { - "client": "client", - "messages": [ - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection started", - "selector": { - "$$exists": true - }, - "operation": "create", - "topologyDescription": { - "$$exists": true - } - } - }, - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection succeeded", - "selector": { - "$$exists": true - }, - "operation": "create", - "topologyDescription": { - "$$exists": true - }, - "serverHost": { - "$$type": "string" - }, - "serverPort": { - "$$type": [ - "int", - "long" - ] - } - } - }, - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection started", - "selector": { - "$$exists": true - }, - "operation": "listCollections", - "topologyDescription": { - "$$exists": true - } - } - }, - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection succeeded", - "selector": { - "$$exists": true - }, - "operation": "listCollections", - "topologyDescription": { - "$$exists": true - }, - "serverHost": { - "$$type": "string" - }, - "serverPort": { - "$$type": [ - "int", - "long" - ] - } - } - }, - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection started", - "selector": { - "$$exists": true - }, - "operation": "drop", - "topologyDescription": { - "$$exists": true - } - } - }, - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection succeeded", - "selector": { - "$$exists": true - }, - "operation": "drop", - "topologyDescription": { - "$$exists": true - }, - "serverHost": { - "$$type": "string" - }, - "serverPort": { - "$$type": [ - "int", - "long" - ] - } - } - } - ] - } - ] - }, - { - "description": "Successful index operations", - "operations": [ - { - "name": "waitForEvent", - "object": "testRunner", - "arguments": { - "client": "client", - "event": { - "topologyDescriptionChangedEvent": {} - }, - "count": 2 - } - }, - { - "name": "createIndex", - "object": "collection", - "arguments": { 
- "keys": { - "x": 1 - }, - "name": "x_1" - } - }, - { - "name": "listIndexes", - "object": "collection" - }, - { - "name": "dropIndex", - "object": "collection", - "arguments": { - "name": "x_1" - } - } - ], - "expectLogMessages": [ - { - "client": "client", - "messages": [ - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection started", - "selector": { - "$$exists": true - }, - "operation": "createIndexes", - "topologyDescription": { - "$$exists": true - } - } - }, - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection succeeded", - "selector": { - "$$exists": true - }, - "operation": "createIndexes", - "topologyDescription": { - "$$exists": true - }, - "serverHost": { - "$$type": "string" - }, - "serverPort": { - "$$type": [ - "int", - "long" - ] - } - } - }, - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection started", - "selector": { - "$$exists": true - }, - "operation": "listIndexes", - "topologyDescription": { - "$$exists": true - } - } - }, - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection succeeded", - "selector": { - "$$exists": true - }, - "operation": "listIndexes", - "topologyDescription": { - "$$exists": true - }, - "serverHost": { - "$$type": "string" - }, - "serverPort": { - "$$type": [ - "int", - "long" - ] - } - } - }, - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection started", - "selector": { - "$$exists": true - }, - "operation": "dropIndexes", - "topologyDescription": { - "$$exists": true - } - } - }, - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection succeeded", - "selector": { - "$$exists": true - }, - "operation": "dropIndexes", - "topologyDescription": { - "$$exists": true - }, - "serverHost": { - "$$type": "string" - }, - "serverPort": { - "$$type": [ - "int", - "long" - ] - } - } - } - ] - } - ] - }, - { - "description": "A successful update operation", - "operations": [ - { - "name": "waitForEvent", - "object": "testRunner", - "arguments": { - "client": "client", - "event": { - "topologyDescriptionChangedEvent": {} - }, - "count": 2 - } - }, - { - "name": "updateOne", - "object": "collection", - "arguments": { - "filter": { - "x": 1 - }, - "update": { - "$inc": { - "x": 1 - } - } - } - } - ], - "expectLogMessages": [ - { - "client": "client", - "messages": [ - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection started", - "selector": { - "$$exists": true - }, - "operation": "update", - "topologyDescription": { - "$$exists": true - } - } - }, - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection succeeded", - "selector": { - "$$exists": true - }, - "operation": "update", - "topologyDescription": { - "$$exists": true - }, - "serverHost": { - "$$type": "string" - }, - "serverPort": { - "$$type": [ - "int", - "long" - ] - } - } - } - ] - } - ] - }, - { - "description": "A successful delete operation", - "operations": [ - { - "name": "waitForEvent", - "object": "testRunner", - "arguments": { - "client": "client", - "event": { - "topologyDescriptionChangedEvent": {} - }, - "count": 2 - } - }, - { - "name": "deleteOne", - "object": "collection", - "arguments": { - "filter": { - "x": 1 - } - } - } - ], - "expectLogMessages": [ - { - "client": "client", - "messages": [ - { - "level": 
"debug", - "component": "serverSelection", - "data": { - "message": "Server selection started", - "selector": { - "$$exists": true - }, - "operation": "delete", - "topologyDescription": { - "$$exists": true - } - } - }, - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection succeeded", - "selector": { - "$$exists": true - }, - "operation": "delete", - "topologyDescription": { - "$$exists": true - }, - "serverHost": { - "$$type": "string" - }, - "serverPort": { - "$$type": [ - "int", - "long" - ] - } - } - } - ] - } - ] } ] } diff --git a/test/sessions/driver-sessions-dirty-session-errors.json b/test/sessions/driver-sessions-dirty-session-errors.json index 361ea83d7b..d7a1c6aba7 100644 --- a/test/sessions/driver-sessions-dirty-session-errors.json +++ b/test/sessions/driver-sessions-dirty-session-errors.json @@ -11,7 +11,7 @@ { "minServerVersion": "4.1.8", "topologies": [ - "sharded-replicaset" + "sharded" ] } ], @@ -347,7 +347,9 @@ "x": 1 } }, - "new": false, + "new": { + "$$unsetOrMatches": false + }, "lsid": { "$$sessionLsid": "session0" }, @@ -375,7 +377,9 @@ "x": 1 } }, - "new": false, + "new": { + "$$unsetOrMatches": false + }, "lsid": { "$$sessionLsid": "session0" }, @@ -627,7 +631,9 @@ "x": 1 } }, - "new": false, + "new": { + "$$unsetOrMatches": false + }, "lsid": { "$$type": "object" }, @@ -655,7 +661,9 @@ "x": 1 } }, - "new": false, + "new": { + "$$unsetOrMatches": false + }, "lsid": { "$$type": "object" }, diff --git a/test/sessions/snapshot-sessions-unsupported-ops.json b/test/sessions/snapshot-sessions-unsupported-ops.json index 1021b7f264..c41f74d337 100644 --- a/test/sessions/snapshot-sessions-unsupported-ops.json +++ b/test/sessions/snapshot-sessions-unsupported-ops.json @@ -6,7 +6,7 @@ "minServerVersion": "5.0", "topologies": [ "replicaset", - "sharded-replicaset" + "sharded" ] } ], diff --git a/test/sessions/snapshot-sessions.json b/test/sessions/snapshot-sessions.json index 75b577b039..260f8b6f48 100644 --- a/test/sessions/snapshot-sessions.json +++ b/test/sessions/snapshot-sessions.json @@ -6,7 +6,7 @@ "minServerVersion": "5.0", "topologies": [ "replicaset", - "sharded-replicaset" + "sharded" ] } ], diff --git a/test/test_auth.py b/test/test_auth.py index 345d16121b..27f6743fae 100644 --- a/test/test_auth.py +++ b/test/test_auth.py @@ -30,7 +30,7 @@ client_context, unittest, ) -from test.utils import AllowListEventListener, delay, ignore_deprecations +from test.utils_shared import AllowListEventListener, delay, ignore_deprecations import pytest diff --git a/test/auth_oidc/test_auth_oidc.py b/test/test_auth_oidc.py similarity index 94% rename from test/auth_oidc/test_auth_oidc.py rename to test/test_auth_oidc.py index 7a78f3d2f6..877a5ca981 100644 --- a/test/auth_oidc/test_auth_oidc.py +++ b/test/test_auth_oidc.py @@ -17,13 +17,13 @@ import os import sys -import threading import time import unittest import warnings from contextlib import contextmanager from pathlib import Path from test import PyMongoTestCase +from test.helpers import ConcurrentRunner from typing import Dict import pytest @@ -31,7 +31,7 @@ sys.path[0:0] = [""] from test.unified_format import generate_test_classes -from test.utils import EventListener, OvertCommandListener +from test.utils_shared import EventListener, OvertCommandListener from bson import SON from pymongo import MongoClient @@ -49,7 +49,9 @@ OIDCCallbackResult, _get_authenticator, ) -from pymongo.uri_parser import parse_uri +from pymongo.synchronous.uri_parser import parse_uri + +_IS_SYNC = True 
ROOT = Path(__file__).parent.parent.resolve() TEST_PATH = ROOT / "auth" / "unified" @@ -70,6 +72,11 @@ def setUpClass(cls): cls.uri_single = os.environ["MONGODB_URI_SINGLE"] cls.uri_multiple = os.environ.get("MONGODB_URI_MULTI") cls.uri_admin = os.environ["MONGODB_URI"] + if ENVIRON == "test": + if not TOKEN_DIR: + raise ValueError("Please set OIDC_TOKEN_DIR") + if not TOKEN_FILE: + raise ValueError("Please set OIDC_TOKEN_FILE") def setUp(self): self.request_called = 0 @@ -81,15 +88,15 @@ def get_token(self, username=None): token_file = TOKEN_FILE else: token_file = os.path.join(TOKEN_DIR, username) - with open(token_file) as fid: + with open(token_file) as fid: # noqa: ASYNC101,RUF100 return fid.read() elif ENVIRON == "azure": opts = parse_uri(self.uri_single)["options"] - token_aud = opts["authmechanismproperties"]["TOKEN_RESOURCE"] + token_aud = opts["authMechanismProperties"]["TOKEN_RESOURCE"] return _get_azure_response(token_aud, username)["access_token"] elif ENVIRON == "gcp": opts = parse_uri(self.uri_single)["options"] - token_aud = opts["authmechanismproperties"]["TOKEN_RESOURCE"] + token_aud = opts["authMechanismProperties"]["TOKEN_RESOURCE"] return _get_gcp_response(token_aud, username)["access_token"] elif ENVIRON == "k8s": return _get_k8s_token() @@ -178,7 +185,7 @@ def test_1_2_single_principal_explicit_username(self): client = self.create_client(username="test_user1") # Perform a find operation that succeeds. client.test.test.find_one() - # Close the client.. + # Close the client. client.close() def test_1_3_multiple_principal_user_1(self): @@ -237,9 +244,9 @@ def test_1_6_allowed_hosts_blocked(self): authmechanismproperties=props, connect=False, ) - # Assert that a find operation fails with a client-side error. - with self.assertRaises(ConfigurationError): - client.test.test.find_one() + # Assert that a find operation fails with a client-side error. + with self.assertRaises(ConfigurationError): + client.test.test.find_one() # Close the client. client.close() @@ -249,9 +256,11 @@ def test_1_7_allowed_hosts_in_connection_string_ignored(self): uri = "mongodb+srv://example.com?authMechanism=MONGODB-OIDC&authMechanismProperties=ALLOWED_HOSTS:%5B%22example.com%22%5D" with self.assertRaises(ConfigurationError), warnings.catch_warnings(): warnings.simplefilter("ignore") - _ = MongoClient( - uri, authmechanismproperties=dict(OIDC_HUMAN_CALLBACK=self.create_request_cb()) + c = MongoClient( + uri, + authmechanismproperties=dict(OIDC_HUMAN_CALLBACK=self.create_request_cb()), ) + c._connect() def test_1_8_machine_idp_human_callback(self): if not os.environ.get("OIDC_IS_LOCAL"): @@ -629,7 +638,7 @@ def test_reauthenticate_succeeds_bulk_read(self): ): # Perform a bulk read operation. cursor = client.test.test.find_raw_batches({}) - list(cursor) + cursor.to_list() # Assert that the request callback has been called twice. self.assertEqual(self.request_called, 2) @@ -653,7 +662,7 @@ def test_reauthenticate_succeeds_cursor(self): ): # Perform a find operation. cursor = client.test.test.find({"a": 1}) - self.assertGreaterEqual(len(list(cursor)), 1) + self.assertGreaterEqual(len(cursor.to_list()), 1) # Assert that the request callback has been called twice. self.assertEqual(self.request_called, 2) @@ -677,7 +686,7 @@ def test_reauthenticate_succeeds_get_more(self): ): # Perform a find operation. 
cursor = client.test.test.find({"a": 1}, batch_size=1) - self.assertGreaterEqual(len(list(cursor)), 1) + self.assertGreaterEqual(len(cursor.to_list()), 1) # Assert that the request callback has been called twice. self.assertEqual(self.request_called, 2) @@ -707,7 +716,7 @@ def test_reauthenticate_succeeds_get_more_exhaust(self): ): # Perform a find operation. cursor = client.test.test.find({"a": 1}, batch_size=1, cursor_type=CursorType.EXHAUST) - self.assertGreaterEqual(len(list(cursor)), 1) + self.assertGreaterEqual(len(cursor.to_list()), 1) # Assert that the request callback has been called twice. self.assertEqual(self.request_called, 2) @@ -732,7 +741,7 @@ def test_reauthenticate_succeeds_command(self): # Perform a count operation. cursor = client.test.command({"count": "test"}) - self.assertGreaterEqual(len(list(cursor)), 1) + self.assertGreaterEqual(len(cursor), 1) # Assert that the request callback has been called twice. self.assertEqual(self.request_called, 2) @@ -785,19 +794,20 @@ def test_1_2_callback_is_called_once_for_multiple_connections(self): # Create a ``MongoClient`` configured with a custom OIDC callback that # implements the provider logic. client = self.create_client() + client._connect() - # Start 10 threads and run 100 find operations in each thread that all succeed. + # Start 10 tasks and run 100 find operations that all succeed in each task. def target(): for _ in range(100): client.test.test.find_one() - threads = [] - for _ in range(10): - thread = threading.Thread(target=target) - thread.start() - threads.append(thread) - for thread in threads: - thread.join() + tasks = [] + for i in range(10): + tasks.append(ConcurrentRunner(target=target)) + for t in tasks: + t.start() + for t in tasks: + t.join() # Assert that the callback was called 1 time. self.assertEqual(self.request_called, 1) @@ -875,6 +885,7 @@ def test_2_6_ALLOWED_HOSTS_defaults_ignored(self): def test_3_1_authentication_failure_with_cached_tokens_fetch_a_new_token_and_retry(self): # Create a MongoClient and an OIDC callback that implements the provider logic. client = self.create_client() + client._connect() # Poison the cache with an invalid access token. # Set a fail point for ``find`` command. with self.fail_point( @@ -941,6 +952,7 @@ def test_4_1_reauthentication_succeeds(self): # Create a ``MongoClient`` configured with a custom OIDC callback that # implements the provider logic. client = self.create_client() + client._connect() # Set a fail point for the find command. with self.fail_point( @@ -1032,6 +1044,7 @@ def test_4_4_speculative_authentication_should_be_ignored_on_reauthentication(se # Create an OIDC configured client that can listen for `SaslStart` commands. listener = EventListener() client = self.create_client(event_listeners=[listener]) + client._connect() # Preload the *Client Cache* with a valid access token to enforce Speculative Authentication. client2 = self.create_client() @@ -1070,11 +1083,30 @@ def test_4_4_speculative_authentication_should_be_ignored_on_reauthentication(se # Assert there were `SaslStart` commands executed. assert any(event.command_name.lower() == "saslstart" for event in listener.started_events) + def test_4_5_reauthentication_succeeds_when_a_session_is_involved(self): + # Create an OIDC configured client. + client = self.create_client() + + # Set a fail point for `find` commands of the form: + with self.fail_point( + { + "mode": {"times": 1}, + "data": {"failCommands": ["find"], "errorCode": 391}, + } + ): + # Start a new session. 
+ with client.start_session() as session: + # In the started session perform a `find` operation that succeeds. + client.test.test.find_one({}, session=session) + + # Assert that the callback was called 2 times (once during the connection handshake, and again during reauthentication). + self.assertEqual(self.request_called, 2) + def test_5_1_azure_with_no_username(self): if ENVIRON != "azure": raise unittest.SkipTest("Test is only supported on Azure") opts = parse_uri(self.uri_single)["options"] - resource = opts["authmechanismproperties"]["TOKEN_RESOURCE"] + resource = opts["authMechanismProperties"]["TOKEN_RESOURCE"] props = dict(TOKEN_RESOURCE=resource, ENVIRONMENT="azure") client = self.create_client(authMechanismProperties=props) @@ -1085,7 +1117,7 @@ def test_5_2_azure_with_bad_username(self): raise unittest.SkipTest("Test is only supported on Azure") opts = parse_uri(self.uri_single)["options"] - token_aud = opts["authmechanismproperties"]["TOKEN_RESOURCE"] + token_aud = opts["authMechanismProperties"]["TOKEN_RESOURCE"] props = dict(TOKEN_RESOURCE=token_aud, ENVIRONMENT="azure") client = self.create_client(username="bad", authmechanismproperties=props) @@ -1096,6 +1128,7 @@ def test_speculative_auth_success(self): client1 = self.create_client() client1.test.test.find_one() client2 = self.create_client() + client2._connect() # Prime the cache of the second client. client2.options.pool_options._credentials.cache.data = ( diff --git a/test/test_auth_spec.py b/test/test_auth_spec.py index 3c3a1a67ae..ac6411cd89 100644 --- a/test/test_auth_spec.py +++ b/test/test_auth_spec.py @@ -22,13 +22,17 @@ import warnings from test import PyMongoTestCase +import pytest + sys.path[0:0] = [""] from test import unittest from test.unified_format import generate_test_classes from pymongo import MongoClient -from pymongo.synchronous.auth_oidc import OIDCCallback +from pymongo.auth_oidc_shared import OIDCCallback + +pytestmark = pytest.mark.auth _IS_SYNC = True diff --git a/test/test_binary.py b/test/test_binary.py index 567c5ae92f..a64aa42280 100644 --- a/test/test_binary.py +++ b/test/test_binary.py @@ -82,8 +82,8 @@ def test_binary(self): a_binary = Binary(b"hello world") self.assertTrue(a_binary.startswith(b"hello")) self.assertTrue(a_binary.endswith(b"world")) - self.assertTrue(isinstance(a_binary, Binary)) - self.assertFalse(isinstance(a_string, Binary)) + self.assertIsInstance(a_binary, Binary) + self.assertNotIsInstance(a_string, Binary) def test_exceptions(self): self.assertRaises(TypeError, Binary, None) diff --git a/test/test_bson.py b/test/test_bson.py index e550b538d3..f792db1e89 100644 --- a/test/test_bson.py +++ b/test/test_bson.py @@ -33,7 +33,7 @@ sys.path[0:0] = [""] from test import qcheck, unittest -from test.utils import ExceptionCatchingThread +from test.helpers import ExceptionCatchingTask import bson from bson import ( @@ -142,7 +142,7 @@ def helper(doc): helper({}) helper({"test": "hello"}) - self.assertTrue(isinstance(decoder(encoder({"hello": "world"}))["hello"], str)) + self.assertIsInstance(decoder(encoder({"hello": "world"}))["hello"], str) helper({"mike": -10120}) helper({"long": Int64(10)}) helper({"really big long": 2147483648}) @@ -557,8 +557,8 @@ def test_unknown_type(self): try: decode(bs) except Exception as exc: - self.assertTrue(isinstance(exc, InvalidBSON)) - self.assertTrue(part in str(exc)) + self.assertIsInstance(exc, InvalidBSON) + self.assertIn(part, str(exc)) else: self.fail("Failed to raise an exception.") @@ -722,7 +722,7 @@ def test_uuid(self): opts = 
CodecOptions(uuid_representation=UuidRepresentation.STANDARD) transformed_id = decode(encode({"id": id}, codec_options=opts), codec_options=opts)["id"] - self.assertTrue(isinstance(transformed_id, uuid.UUID)) + self.assertIsInstance(transformed_id, uuid.UUID) self.assertEqual(id, transformed_id) self.assertNotEqual(uuid.uuid4(), transformed_id) @@ -731,7 +731,7 @@ def test_uuid_legacy(self): legacy = Binary.from_uuid(id, UuidRepresentation.PYTHON_LEGACY) self.assertEqual(3, legacy.subtype) bin = decode(encode({"uuid": legacy}))["uuid"] - self.assertTrue(isinstance(bin, Binary)) + self.assertIsInstance(bin, Binary) transformed = bin.as_uuid(UuidRepresentation.PYTHON_LEGACY) self.assertEqual(id, transformed) @@ -739,7 +739,7 @@ def test_vector(self): """Tests of subtype 9""" # We start with valid cases, across the 3 dtypes implemented. # Work with a simple vector that can be interpreted as int8, float32, or ubyte - list_vector = [127, 7] + list_vector = [127, 8] # As INT8, vector has length 2 binary_vector = Binary.from_vector(list_vector, BinaryVectorDtype.INT8) vector = binary_vector.as_vector() @@ -764,18 +764,18 @@ uncompressed = "" for val in list_vector: uncompressed += format(val, "08b") - assert uncompressed[:-padding] == "0111111100000" + assert uncompressed[:-padding] == "0111111100001" # It is worthwhile explicitly showing the values encoded to BSON padded_doc = {"padded_vec": padded_vec} assert ( encode(padded_doc) - == b"\x1a\x00\x00\x00\x05padded_vec\x00\x04\x00\x00\x00\t\x10\x03\x7f\x07\x00" + == b"\x1a\x00\x00\x00\x05padded_vec\x00\x04\x00\x00\x00\t\x10\x03\x7f\x08\x00" ) # and dumped to json assert ( json_util.dumps(padded_doc) - == '{"padded_vec": {"$binary": {"base64": "EAN/Bw==", "subType": "09"}}}' + == '{"padded_vec": {"$binary": {"base64": "EAN/CA==", "subType": "09"}}}' ) # FLOAT32 is also implemented @@ -784,15 +784,19 @@ # Now some invalid cases for x in [-1, 257]: - try: + with self.assertRaises(struct.error): Binary.from_vector([x], BinaryVectorDtype.PACKED_BIT) - except Exception as exc: - self.assertTrue(isinstance(exc, struct.error)) - else: - self.fail("Failed to raise an exception.") - # Test form of Binary.from_vector(BinaryVector) + # Test one must pass zeros for all ignored bits + with self.assertRaises(ValueError): + Binary.from_vector([255], BinaryVectorDtype.PACKED_BIT, padding=7) + with self.assertWarns(DeprecationWarning): + meta = struct.pack("<sB", BinaryVectorDtype.PACKED_BIT.value, 7) + data = struct.pack("<B", 255) + Binary(meta + data, subtype=9).as_vector() @@ ... @@ def test_minkey_maxkey_comparison(self): # MinKey's <, <=, >, >=, !=, and ==. + # These tests should be kept as assertTrue as opposed to using unittest's built-in comparison assertions because + # MinKey and MaxKey define their own __ge__, __le__, and other comparison attributes, and we want to explicitly test that.
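A side note on the PACKED_BIT changes above: with `padding=3` the three low-order bits of the final byte are ignored and, after this change, must be zero. That is why the sample vector moves from `[127, 7]` (0b00000111, ignored bits set) to `[127, 8]` (0b00001000). A quick illustration of the rule as the diff describes it:

from bson.binary import Binary, BinaryVectorDtype

# Final byte 8 == 0b00001000: the 3 ignored bits are zero, so this is valid.
vec = Binary.from_vector([127, 8], BinaryVectorDtype.PACKED_BIT, padding=3)
assert vec.as_vector().data == [127, 8]

# Final byte 7 == 0b00000111 sets ignored bits; under the new validation this
# raises ValueError:
#   Binary.from_vector([127, 7], BinaryVectorDtype.PACKED_BIT, padding=3)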
self.assertTrue(MinKey() < None) self.assertTrue(MinKey() < 1) self.assertTrue(MinKey() <= 1) @@ -1075,7 +1139,7 @@ def target(i): my_int = type(f"MyInt_{i}_{j}", (int,), {}) bson.encode({"my_int": my_int()}) - threads = [ExceptionCatchingThread(target=target, args=(i,)) for i in range(3)] + threads = [ExceptionCatchingTask(target=target, args=(i,)) for i in range(3)] for t in threads: t.start() @@ -1099,7 +1163,7 @@ def __repr__(self): ): encode({"t": Wrapper(1)}) - def test_doc_in_invalid_document_error_message(self): + def test_doc_in_invalid_document_error_as_property(self): class Wrapper: def __init__(self, val): self.val = val @@ -1109,8 +1173,43 @@ def __repr__(self): self.assertEqual("1", repr(Wrapper(1))) doc = {"t": Wrapper(1)} - with self.assertRaisesRegex(InvalidDocument, f"Invalid document {doc}"): + with self.assertRaisesRegex(InvalidDocument, "Invalid document:") as cm: + encode(doc) + self.assertEqual(cm.exception.document, doc) + + def test_doc_in_invalid_document_error_as_property_mapping(self): + class MyMapping(abc.Mapping): + def keys(self): + return ["t"] + + def __getitem__(self, name): + if name == "_id": + return None + return Wrapper(name) + + def __len__(self): + return 1 + + def __iter__(self): + return iter(["t"]) + + def __eq__(self, other): + if isinstance(other, MyMapping): + return True + return False + + class Wrapper: + def __init__(self, val): + self.val = val + + def __repr__(self): + return repr(self.val) + + self.assertEqual("1", repr(Wrapper(1))) + doc = MyMapping() + with self.assertRaisesRegex(InvalidDocument, "Invalid document:") as cm: encode(doc) + self.assertEqual(cm.exception.document, doc) class TestCodecOptions(unittest.TestCase): diff --git a/test/test_bson_binary_vector.py b/test/test_bson_binary_vector.py index 00c82bbb65..2783338793 100644 --- a/test/test_bson_binary_vector.py +++ b/test/test_bson_binary_vector.py @@ -15,13 +15,11 @@ from __future__ import annotations import binascii -import codecs -import json import struct from pathlib import Path from test import unittest -from bson import decode, encode +from bson import decode, encode, json_util from bson.binary import Binary, BinaryVectorDtype _TEST_PATH = Path(__file__).parent / "bson_binary_vector" @@ -49,7 +47,7 @@ def create_test(case_spec): def run_test(self): for test_case in case_spec.get("tests", []): description = test_case["description"] - vector_exp = test_case["vector"] + vector_exp = test_case.get("vector") dtype_hex_exp = test_case["dtype_hex"] dtype_alias_exp = test_case.get("dtype_alias") padding_exp = test_case.get("padding", 0) @@ -62,9 +60,6 @@ def run_test(self): cB_exp = binascii.unhexlify(canonical_bson_exp.encode("utf8")) decoded_doc = decode(cB_exp) binary_obs = decoded_doc[test_key] - # Handle special float cases like '-inf' - if dtype_exp in [BinaryVectorDtype.FLOAT32]: - vector_exp = [float(x) for x in vector_exp] # Test round-tripping canonical bson. 
self.assertEqual(encode(decoded_doc), cB_exp, description) @@ -76,25 +71,47 @@ def run_test(self): self.assertEqual( vector_obs.dtype, BinaryVectorDtype[dtype_alias_exp], description ) - self.assertEqual(vector_obs.data, vector_exp, description) - self.assertEqual(vector_obs.padding, padding_exp, description) - + if dtype_exp in [BinaryVectorDtype.FLOAT32]: + [ + self.assertAlmostEqual(vector_obs.data[i], vector_exp[i], delta=1e-5) + for i in range(len(vector_exp)) + ] + else: + self.assertEqual(vector_obs.data, vector_exp, description) # Test Binary Vector to BSON vector_exp = Binary.from_vector(vector_exp, dtype_exp, padding_exp) cB_obs = binascii.hexlify(encode({test_key: vector_exp})).decode().upper() self.assertEqual(cB_obs, canonical_bson_exp, description) else: - with self.assertRaises((struct.error, ValueError), msg=description): - Binary.from_vector(vector_exp, dtype_exp, padding_exp) + """ + #### To prove correct in an invalid case (`valid:false`), one MUST + - (encoding case) if the vector field is present, raise an exception + when attempting to encode a document from the numeric values, dtype, and padding. + - (decoding case) if the canonical_bson field is present, raise an exception + when attempting to deserialize it into the corresponding + numeric values, as the field contains corrupted data. + """ + # Tests Binary.from_vector() + if vector_exp is not None: + with self.assertRaises((struct.error, ValueError), msg=description): + Binary.from_vector(vector_exp, dtype_exp, padding_exp) + + # Tests Binary.as_vector() + if canonical_bson_exp is not None: + with self.assertRaises((struct.error, ValueError), msg=description): + cB_exp = binascii.unhexlify(canonical_bson_exp.encode("utf8")) + decoded_doc = decode(cB_exp) + binary_obs = decoded_doc[test_key] + binary_obs.as_vector() return run_test def create_tests(): for filename in _TEST_PATH.glob("*.json"): - with codecs.open(str(filename), encoding="utf-8") as test_file: - test_method = create_test(json.load(test_file)) + with open(str(filename), encoding="utf-8") as test_file: + test_method = create_test(json_util.loads(test_file.read())) setattr(TestBSONBinaryVector, "test_" + filename.stem, test_method) diff --git a/test/test_bson_corpus.py b/test/test_bson_corpus.py index 96ef458ec5..504025e766 100644 --- a/test/test_bson_corpus.py +++ b/test/test_bson_corpus.py @@ -16,7 +16,6 @@ from __future__ import annotations import binascii -import codecs import functools import glob import json @@ -227,7 +226,7 @@ def run_test(self): def create_tests(): for filename in glob.glob(os.path.join(_TEST_PATH, "*.json")): test_suffix, _ = os.path.splitext(os.path.basename(filename)) - with codecs.open(filename, encoding="utf-8") as bson_test_file: + with open(filename, encoding="utf-8") as bson_test_file: test_method = create_test(json.load(bson_test_file)) setattr(TestBSONCorpus, "test_" + test_suffix, test_method) diff --git a/test/test_bulk.py b/test/test_bulk.py index 6d29ff510a..1de406fca5 100644 --- a/test/test_bulk.py +++ b/test/test_bulk.py @@ -24,7 +24,7 @@ sys.path[0:0] = [""] from test import IntegrationTest, client_context, remove_all_users, unittest -from test.utils import wait_until +from test.utils_shared import wait_until from bson.binary import Binary, UuidRepresentation from bson.codec_options import CodecOptions @@ -94,7 +94,7 @@ def assertEqualUpsert(self, expected, actual): self.assertEqual(expected["index"], actual["index"]) if expected["_id"] == "...": # Unspecified value.
- self.assertTrue("_id" in actual) + self.assertIn("_id", actual) else: self.assertEqual(expected["_id"], actual["_id"]) @@ -107,7 +107,7 @@ def assertEqualWriteError(self, expected, actual): self.assertEqual(expected["code"], actual["code"]) if expected["errmsg"] == "...": # Unspecified value. - self.assertTrue("errmsg" in actual) + self.assertIn("errmsg", actual) else: self.assertEqual(expected["errmsg"], actual["errmsg"]) @@ -115,7 +115,7 @@ def assertEqualWriteError(self, expected, actual): actual_op = actual["op"].copy() if expected_op.get("_id") == "...": # Unspecified _id. - self.assertTrue("_id" in actual_op) + self.assertIn("_id", actual_op) actual_op.pop("_id") expected_op.pop("_id") @@ -160,12 +160,12 @@ def _test_update_many(self, update): result = self.coll.bulk_write([UpdateMany({}, update)]) self.assertEqualResponse(expected, result.bulk_api_result) self.assertEqual(2, result.matched_count) - self.assertTrue(result.modified_count in (2, None)) + self.assertIn(result.modified_count, (2, None)) def test_update_many(self): self._test_update_many({"$set": {"foo": "bar"}}) - @client_context.require_version_min(4, 1, 11) + @client_context.require_version_min(4, 2, 0) def test_update_many_pipeline(self): self._test_update_many([{"$set": {"foo": "bar"}}]) @@ -201,12 +201,12 @@ def _test_update_one(self, update): result = self.coll.bulk_write([UpdateOne({}, update)]) self.assertEqualResponse(expected, result.bulk_api_result) self.assertEqual(1, result.matched_count) - self.assertTrue(result.modified_count in (1, None)) + self.assertIn(result.modified_count, (1, None)) def test_update_one(self): self._test_update_one({"$set": {"foo": "bar"}}) - @client_context.require_version_min(4, 1, 11) + @client_context.require_version_min(4, 2, 0) def test_update_one_pipeline(self): self._test_update_one([{"$set": {"foo": "bar"}}]) @@ -227,7 +227,7 @@ def test_replace_one(self): result = self.coll.bulk_write([ReplaceOne({}, {"foo": "bar"})]) self.assertEqualResponse(expected, result.bulk_api_result) self.assertEqual(1, result.matched_count) - self.assertTrue(result.modified_count in (1, None)) + self.assertIn(result.modified_count, (1, None)) def test_remove(self): # Test removing all documents, ordered. @@ -281,7 +281,7 @@ def test_upsert(self): self.assertEqual(1, result.upserted_count) assert result.upserted_ids is not None self.assertEqual(1, len(result.upserted_ids)) - self.assertTrue(isinstance(result.upserted_ids.get(0), ObjectId)) + self.assertIsInstance(result.upserted_ids.get(0), ObjectId) self.assertEqual(self.coll.count_documents({"foo": "bar"}), 1) @@ -959,7 +959,6 @@ def cause_wtimeout(self, requests, ordered): @client_context.require_replica_set @client_context.require_secondaries_count(1) def test_write_concern_failure_ordered(self): - self.skipTest("Skipping until PYTHON-4865 is resolved.") details = None # Ensure we don't raise on wnote. @@ -993,11 +992,11 @@ def test_write_concern_failure_ordered(self): # When talking to legacy servers there will be a # write concern error for each operation. 
- self.assertTrue(len(details["writeConcernErrors"]) > 0) + self.assertGreater(len(details["writeConcernErrors"]), 0) failed = details["writeConcernErrors"][0] self.assertEqual(64, failed["code"]) - self.assertTrue(isinstance(failed["errmsg"], str)) + self.assertIsInstance(failed["errmsg"], str) self.coll.delete_many({}) self.coll.create_index("a", unique=True) @@ -1034,9 +1033,9 @@ def test_write_concern_failure_ordered(self): details, ) - self.assertTrue(len(details["writeConcernErrors"]) > 1) + self.assertGreater(len(details["writeConcernErrors"]), 1) failed = details["writeErrors"][0] - self.assertTrue("duplicate" in failed["errmsg"]) + self.assertIn("duplicate", failed["errmsg"]) @client_context.require_version_max(7, 1) # PYTHON-4560 @client_context.require_replica_set @@ -1070,7 +1069,7 @@ def test_write_concern_failure_unordered(self): self.assertEqual(0, len(details["writeErrors"])) # When talking to legacy servers there will be a # write concern error for each operation. - self.assertTrue(len(details["writeConcernErrors"]) > 1) + self.assertGreater(len(details["writeConcernErrors"]), 1) self.coll.delete_many({}) self.coll.create_index("a", unique=True) @@ -1097,17 +1096,17 @@ def test_write_concern_failure_unordered(self): self.assertEqual(1, len(details["writeErrors"])) # When talking to legacy servers there will be a # write concern error for each operation. - self.assertTrue(len(details["writeConcernErrors"]) > 1) + self.assertGreater(len(details["writeConcernErrors"]), 1) failed = details["writeErrors"][0] self.assertEqual(2, failed["index"]) self.assertEqual(11000, failed["code"]) - self.assertTrue(isinstance(failed["errmsg"], str)) + self.assertIsInstance(failed["errmsg"], str) self.assertEqual(1, failed["op"]["a"]) failed = details["writeConcernErrors"][0] self.assertEqual(64, failed["code"]) - self.assertTrue(isinstance(failed["errmsg"], str)) + self.assertIsInstance(failed["errmsg"], str) upserts = details["upserted"] self.assertEqual(1, len(upserts)) diff --git a/test/test_change_stream.py b/test/test_change_stream.py index 4ed21f55cf..ad51f91873 100644 --- a/test/test_change_stream.py +++ b/test/test_change_stream.py @@ -36,7 +36,7 @@ unittest, ) from test.unified_format import generate_test_classes -from test.utils import ( +from test.utils_shared import ( AllowListEventListener, EventListener, OvertCommandListener, @@ -55,7 +55,6 @@ from pymongo.message import _CursorAddress from pymongo.read_concern import ReadConcern from pymongo.synchronous.command_cursor import CommandCursor -from pymongo.synchronous.helpers import next from pymongo.write_concern import WriteConcern _IS_SYNC = True @@ -263,7 +262,7 @@ def test_batch_size_is_honored(self): # $changeStream.startAtOperationTime was added in 4.0.0. @no_type_check - @client_context.require_version_min(4, 0, 0) + @client_context.require_version_min(4, 2, 0) def test_start_at_operation_time(self): optime = self.get_start_at_operation_time() @@ -406,7 +405,14 @@ def test_change_operations(self): expected_update_description = {"updatedFields": {"new": 1}, "removedFields": ["foo"]} if client_context.version.at_least(4, 5, 0): expected_update_description["truncatedArrays"] = [] - self.assertEqual(expected_update_description, change["updateDescription"]) + self.assertEqual( + expected_update_description, + { + k: v + for k, v in change["updateDescription"].items() + if k in expected_update_description + }, + ) # Replace. 
self.watched_collection().replace_one({"new": 1}, {"foo": "bar"}) change = change_stream.next() @@ -425,7 +431,7 @@ def test_change_operations(self): self._test_get_invalidate_event(change_stream) @no_type_check - @client_context.require_version_min(4, 1, 1) + @client_context.require_version_min(4, 2, 0) def test_start_after(self): resume_token = self.get_resume_token(invalidate=True) @@ -441,7 +447,7 @@ def test_start_after(self): self.assertEqual(change["fullDocument"], {"_id": 2}) @no_type_check - @client_context.require_version_min(4, 1, 1) + @client_context.require_version_min(4, 2, 0) def test_start_after_resume_process_with_changes(self): resume_token = self.get_resume_token(invalidate=True) @@ -546,27 +552,16 @@ def _test_update_resume_token(self, expected_rt_getter): ) # Prose test no. 1 - @client_context.require_version_min(4, 0, 7) + @client_context.require_version_min(4, 2, 0) def test_update_resume_token(self): self._test_update_resume_token(self._get_expected_resume_token) - # Prose test no. 1 - @client_context.require_version_max(4, 0, 7) - def test_update_resume_token_legacy(self): - self._test_update_resume_token(self._get_expected_resume_token_legacy) - # Prose test no. 2 - @client_context.require_version_min(4, 1, 8) + @client_context.require_version_min(4, 2, 0) def test_raises_error_on_missing_id_418plus(self): # Server returns an error on 4.1.8+ self._test_raises_error_on_missing_id(OperationFailure) - # Prose test no. 2 - @client_context.require_version_max(4, 1, 8) - def test_raises_error_on_missing_id_418minus(self): - # PyMongo raises an error - self._test_raises_error_on_missing_id(InvalidOperation) - # Prose test no. 3 @no_type_check def test_resume_on_error(self): @@ -625,38 +620,12 @@ def raise_error(): cursor.close = raise_error self.insert_one_and_check(change_stream, {"_id": 2}) - # Prose test no. 9 - @no_type_check - @client_context.require_version_min(4, 0, 0) - @client_context.require_version_max(4, 0, 7) - def test_start_at_operation_time_caching(self): - # Case 1: change stream not started with startAtOperationTime - client, listener = self.client_with_listener("aggregate") - with self.change_stream_with_client(client) as cs: - self.kill_change_stream_cursor(cs) - cs.try_next() - cmd = listener.started_events[-1].command - self.assertIsNotNone(cmd["pipeline"][0]["$changeStream"].get("startAtOperationTime")) - - # Case 2: change stream started with startAtOperationTime - listener.reset() - optime = self.get_start_at_operation_time() - with self.change_stream_with_client(client, start_at_operation_time=optime) as cs: - self.kill_change_stream_cursor(cs) - cs.try_next() - cmd = listener.started_events[-1].command - self.assertEqual( - cmd["pipeline"][0]["$changeStream"].get("startAtOperationTime"), - optime, - str([k.command for k in listener.started_events]), - ) - # Prose test no. 10 - SKIPPED # This test is identical to prose test no. 3. # Prose test no. 11 @no_type_check - @client_context.require_version_min(4, 0, 7) + @client_context.require_version_min(4, 2, 0) def test_resumetoken_empty_batch(self): client, listener = self._client_with_listener("getMore") with self.change_stream_with_client(client) as change_stream: @@ -668,7 +637,7 @@ def test_resumetoken_empty_batch(self): # Prose test no. 
11 @no_type_check - @client_context.require_version_min(4, 0, 7) + @client_context.require_version_min(4, 2, 0) def test_resumetoken_exhausted_batch(self): client, listener = self._client_with_listener("getMore") with self.change_stream_with_client(client) as change_stream: @@ -678,38 +647,6 @@ def test_resumetoken_exhausted_batch(self): response = listener.succeeded_events[-1].reply self.assertEqual(resume_token, response["cursor"]["postBatchResumeToken"]) - # Prose test no. 12 - @no_type_check - @client_context.require_version_max(4, 0, 7) - def test_resumetoken_empty_batch_legacy(self): - resume_point = self.get_resume_token() - - # Empty resume token when neither resumeAfter or startAfter specified. - with self.change_stream() as change_stream: - change_stream.try_next() - self.assertIsNone(change_stream.resume_token) - - # Resume token value is same as resumeAfter. - with self.change_stream(resume_after=resume_point) as change_stream: - change_stream.try_next() - resume_token = change_stream.resume_token - self.assertEqual(resume_token, resume_point) - - # Prose test no. 12 - @no_type_check - @client_context.require_version_max(4, 0, 7) - def test_resumetoken_exhausted_batch_legacy(self): - # Resume token is _id of last change. - with self.change_stream() as change_stream: - change = self._populate_and_exhaust_change_stream(change_stream) - self.assertEqual(change_stream.resume_token, change["_id"]) - resume_point = change["_id"] - - # Resume token is _id of last change even if resumeAfter is specified. - with self.change_stream(resume_after=resume_point) as change_stream: - change = self._populate_and_exhaust_change_stream(change_stream) - self.assertEqual(change_stream.resume_token, change["_id"]) - # Prose test no. 13 @no_type_check def test_resumetoken_partially_iterated_batch(self): @@ -751,13 +688,13 @@ def test_resumetoken_uniterated_nonempty_batch_resumeafter(self): # Prose test no. 14 @no_type_check @client_context.require_no_mongos - @client_context.require_version_min(4, 1, 1) + @client_context.require_version_min(4, 2, 0) def test_resumetoken_uniterated_nonempty_batch_startafter(self): self._test_resumetoken_uniterated_nonempty_batch("start_after") # Prose test no. 17 @no_type_check - @client_context.require_version_min(4, 1, 1) + @client_context.require_version_min(4, 2, 0) def test_startafter_resume_uses_startafter_after_empty_getMore(self): # Resume should use startAfter after no changes have been returned. resume_point = self.get_resume_token() @@ -775,7 +712,7 @@ def test_startafter_resume_uses_startafter_after_empty_getMore(self): # Prose test no. 18 @no_type_check - @client_context.require_version_min(4, 1, 1) + @client_context.require_version_min(4, 2, 0) def test_startafter_resume_uses_resumeafter_after_nonempty_getMore(self): # Resume should use resumeAfter after some changes have been returned. 
resume_point = self.get_resume_token() @@ -820,7 +757,7 @@ def test_split_large_change(self): class TestClusterChangeStream(TestChangeStreamBase, APITestsMixin): dbs: list - @client_context.require_version_min(4, 0, 0, -1) + @client_context.require_version_min(4, 2, 0) @client_context.require_change_streams def setUp(self) -> None: super().setUp() @@ -880,7 +817,7 @@ def test_full_pipeline(self): class TestDatabaseChangeStream(TestChangeStreamBase, APITestsMixin): - @client_context.require_version_min(4, 0, 0, -1) + @client_context.require_version_min(4, 2, 0) @client_context.require_change_streams def setUp(self) -> None: super().setUp() diff --git a/test/test_client.py b/test/test_client.py index 5ec425f312..9d201c663b 100644 --- a/test/test_client.py +++ b/test/test_client.py @@ -34,7 +34,7 @@ import time import uuid from typing import Any, Iterable, Type, no_type_check -from unittest import mock +from unittest import mock, skipIf from unittest.mock import patch import pytest @@ -61,17 +61,19 @@ from test.pymongo_mocks import MockClient from test.test_binary import BinaryData from test.utils import ( + assertRaisesExactly, + get_pool, + wait_until, +) +from test.utils_shared import ( NTHREADS, CMAPListener, FunctionCallRecorder, - assertRaisesExactly, delay, - get_pool, gevent_monkey_patched, is_greenthread_patched, lazy_client_trial, one, - wait_until, ) import bson @@ -100,6 +102,7 @@ NetworkTimeout, OperationFailure, ServerSelectionTimeoutError, + WaitQueueTimeoutError, WriteConcernError, ) from pymongo.monitoring import ServerHeartbeatListener, ServerHeartbeatStartedEvent @@ -111,7 +114,6 @@ from pymongo.synchronous.command_cursor import CommandCursor from pymongo.synchronous.cursor import Cursor, CursorType from pymongo.synchronous.database import Database -from pymongo.synchronous.helpers import next from pymongo.synchronous.mongo_client import MongoClient from pymongo.synchronous.pool import ( Connection, @@ -208,7 +210,7 @@ def make_db(base, name): self.assertRaises(InvalidName, make_db, self.client, "te/t") self.assertRaises(InvalidName, make_db, self.client, "te st") - self.assertTrue(isinstance(self.client.test, Database)) + self.assertIsInstance(self.client.test, Database) self.assertEqual(self.client.test, self.client["test"]) self.assertEqual(self.client.test, Database(self.client, "test")) @@ -222,7 +224,7 @@ def test_get_database(self): self.assertEqual(write_concern, db.write_concern) def test_getattr(self): - self.assertTrue(isinstance(self.client["_does_not_exist"], Database)) + self.assertIsInstance(self.client["_does_not_exist"], Database) with self.assertRaises(AttributeError) as context: self.client._does_not_exist @@ -234,10 +236,7 @@ def test_getattr(self): def test_iteration(self): client = self.client - if "PyPy" in sys.version and sys.version_info < (3, 8, 15): - msg = "'NoneType' object is not callable" - else: - msg = "'MongoClient' object is not iterable" + msg = "'MongoClient' object is not iterable" # Iteration fails with self.assertRaisesRegex(TypeError, msg): for _ in client: # type: ignore[misc] # error: "None" not callable [misc] @@ -505,13 +504,13 @@ def test_uri_option_precedence(self): def test_connection_timeout_ms_propagates_to_DNS_resolver(self): # Patch the resolver. 
- from pymongo.srv_resolver import _resolve + from pymongo.synchronous.srv_resolver import _resolve patched_resolver = FunctionCallRecorder(_resolve) - pymongo.srv_resolver._resolve = patched_resolver + pymongo.synchronous.srv_resolver._resolve = patched_resolver def reset_resolver(): - pymongo.srv_resolver._resolve = _resolve + pymongo.synchronous.srv_resolver._resolve = _resolve self.addCleanup(reset_resolver) @@ -600,7 +599,7 @@ def test_validate_suggestion(self): with self.assertRaisesRegex(ConfigurationError, expected): MongoClient(**{typo: "standard"}) # type: ignore[arg-type] - @patch("pymongo.srv_resolver._SrvResolver.get_hosts") + @patch("pymongo.synchronous.srv_resolver._SrvResolver.get_hosts") def test_detected_environment_logging(self, mock_get_hosts): normal_hosts = [ "normal.host.com", @@ -622,7 +621,8 @@ def test_detected_environment_logging(self, mock_get_hosts): logs = [record.getMessage() for record in cm.records if record.name == "pymongo.client"] self.assertEqual(len(logs), 7) - @patch("pymongo.srv_resolver._SrvResolver.get_hosts") + @skipIf(os.environ.get("DEBUG_LOG"), "Enabling debug logs breaks this test") + @patch("pymongo.synchronous.srv_resolver._SrvResolver.get_hosts") def test_detected_environment_warning(self, mock_get_hosts): with self._caplog.at_level(logging.WARN): normal_hosts = [ @@ -664,7 +664,7 @@ def test_max_idle_time_reaper_default(self): with server._pool.checkout() as conn: pass self.assertEqual(1, len(server._pool.conns)) - self.assertTrue(conn in server._pool.conns) + self.assertIn(conn, server._pool.conns) def test_max_idle_time_reaper_removes_stale_minPoolSize(self): with client_knobs(kill_cursor_frequency=0.1): @@ -730,7 +730,7 @@ def test_min_pool_size(self): lambda: len(server._pool.conns) == 10, "a closed socket gets replaced from the pool", ) - self.assertFalse(conn in server._pool.conns) + self.assertNotIn(conn, server._pool.conns) def test_max_idle_time_checkout(self): # Use high frequency to test _get_socket_no_auth. @@ -745,8 +745,8 @@ def test_max_idle_time_checkout(self): with server._pool.checkout() as new_con: self.assertNotEqual(conn, new_con) self.assertEqual(1, len(server._pool.conns)) - self.assertFalse(conn in server._pool.conns) - self.assertTrue(new_con in server._pool.conns) + self.assertNotIn(conn, server._pool.conns) + self.assertIn(new_con, server._pool.conns) # Test that connections are reused if maxIdleTimeMS is not set. 
client = self.rs_or_single_client() @@ -824,6 +824,58 @@ def test_init_disconnected_with_auth(self): with self.assertRaises(ConnectionFailure): c.pymongo_test.test.find_one() + @client_context.require_replica_set + @client_context.require_no_load_balancer + @client_context.require_tls + def test_init_disconnected_with_srv(self): + c = self.rs_or_single_client( + "mongodb+srv://test1.test.build.10gen.cc", connect=False, tlsInsecure=True + ) + # nodes returns an empty set if not connected + self.assertEqual(c.nodes, frozenset()) + # topology_description returns the initial seed description if not connected + topology_description = c.topology_description + self.assertEqual(topology_description.topology_type, TOPOLOGY_TYPE.Unknown) + self.assertEqual( + { + ("test1.test.build.10gen.cc", None): ServerDescription( + ("test1.test.build.10gen.cc", None) + ) + }, + topology_description.server_descriptions(), + ) + + # address causes client to block until connected + self.assertIsNotNone(c.address) + # Initial seed topology and connected topology have the same ID + self.assertEqual( + c._topology._topology_id, topology_description._topology_settings._topology_id + ) + c.close() + + c = self.rs_or_single_client( + "mongodb+srv://test1.test.build.10gen.cc", connect=False, tlsInsecure=True + ) + # primary causes client to block until connected + c.primary + self.assertIsNotNone(c._topology) + c.close() + + c = self.rs_or_single_client( + "mongodb+srv://test1.test.build.10gen.cc", connect=False, tlsInsecure=True + ) + # secondaries causes client to block until connected + c.secondaries + self.assertIsNotNone(c._topology) + c.close() + + c = self.rs_or_single_client( + "mongodb+srv://test1.test.build.10gen.cc", connect=False, tlsInsecure=True + ) + # arbiters causes client to block until connected + c.arbiters + self.assertIsNotNone(c._topology) + def test_equality(self): seed = "{}:{}".format(*list(self.client._topology_settings.seeds)[0]) c = self.rs_or_single_client(seed, connect=False) @@ -908,6 +960,15 @@ def test_repr(self): with eval(the_repr) as client_two: self.assertEqual(client_two, client) + def test_repr_srv_host(self): + client = MongoClient("mongodb+srv://test1.test.build.10gen.cc/", connect=False) + # before srv resolution + self.assertIn("host='mongodb+srv://test1.test.build.10gen.cc'", repr(client)) + client._connect() + # after srv resolution + self.assertIn("host=['localhost.test.build.10gen.cc:", repr(client)) + client.close() + def test_getters(self): wait_until(lambda: client_context.nodes == self.client.nodes, "find all nodes") @@ -916,7 +977,7 @@ def test_list_databases(self): cursor = self.client.list_databases() self.assertIsInstance(cursor, CommandCursor) helper_docs = cursor.to_list() - self.assertTrue(len(helper_docs) > 0) + self.assertGreater(len(helper_docs), 0) self.assertEqual(len(helper_docs), len(cmd_docs)) # PYTHON-3529 Some fields may change between calls, just compare names. 
for helper_doc, cmd_doc in zip(helper_docs, cmd_docs): @@ -943,8 +1004,8 @@ def test_list_database_names(self): cmd_names = [doc["name"] for doc in cmd_docs] db_names = self.client.list_database_names() - self.assertTrue("pymongo_test" in db_names) - self.assertTrue("pymongo_test_mike" in db_names) + self.assertIn("pymongo_test", db_names) + self.assertIn("pymongo_test_mike", db_names) self.assertEqual(db_names, cmd_names) def test_drop_database(self): @@ -1158,9 +1219,9 @@ def test_unix_socket(self): client = self.rs_or_single_client(uri) client.pymongo_test.test.insert_one({"dummy": "object"}) dbs = client.list_database_names() - self.assertTrue("pymongo_test" in dbs) + self.assertIn("pymongo_test", dbs) - self.assertTrue(mongodb_socket in repr(client)) + self.assertIn(mongodb_socket, repr(client)) # Confirm it fails with a missing socket. with self.assertRaises(ConnectionFailure): @@ -1175,15 +1236,15 @@ def test_document_class(self): db.test.insert_one({"x": 1}) self.assertEqual(dict, c.codec_options.document_class) - self.assertTrue(isinstance(db.test.find_one(), dict)) - self.assertFalse(isinstance(db.test.find_one(), SON)) + self.assertIsInstance(db.test.find_one(), dict) + self.assertNotIsInstance(db.test.find_one(), SON) c = self.rs_or_single_client(document_class=SON) db = c.pymongo_test self.assertEqual(SON, c.codec_options.document_class) - self.assertTrue(isinstance(db.test.find_one(), SON)) + self.assertIsInstance(db.test.find_one(), SON) def test_timeouts(self): client = self.rs_or_single_client( @@ -1224,7 +1285,6 @@ def test_socket_timeout(self): no_timeout = self.client timeout_sec = 1 timeout = self.rs_or_single_client(socketTimeoutMS=1000 * timeout_sec) - self.addCleanup(timeout.close) no_timeout.pymongo_test.drop_collection("test") no_timeout.pymongo_test.test.insert_one({"x": 1}) @@ -1273,13 +1333,21 @@ def test_server_selection_timeout(self): self.assertAlmostEqual(30, client.options.server_selection_timeout) def test_waitQueueTimeoutMS(self): - client = self.rs_or_single_client(waitQueueTimeoutMS=2000) - self.assertEqual((get_pool(client)).opts.wait_queue_timeout, 2) + listener = CMAPListener() + client = self.rs_or_single_client( + waitQueueTimeoutMS=10, maxPoolSize=1, event_listeners=[listener] + ) + pool = get_pool(client) + self.assertEqual(pool.opts.wait_queue_timeout, 0.01) + with pool.checkout(): + with self.assertRaises(WaitQueueTimeoutError): + client.test.command("ping") + self.assertFalse(listener.events_by_type(monitoring.PoolClearedEvent)) def test_socketKeepAlive(self): pool = get_pool(self.client) with pool.checkout() as conn: - keepalive = conn.conn.getsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE) + keepalive = conn.conn.sock.getsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE) self.assertTrue(keepalive) @no_type_check @@ -1321,8 +1389,8 @@ def test_ipv6(self): client.pymongo_test_bernie.test.insert_one({"dummy": "object"}) dbs = client.list_database_names() - self.assertTrue("pymongo_test" in dbs) - self.assertTrue("pymongo_test_bernie" in dbs) + self.assertIn("pymongo_test", dbs) + self.assertIn("pymongo_test_bernie", dbs) def test_contextlib(self): client = self.rs_or_single_client() @@ -1750,6 +1818,29 @@ def stall_connect(*args, **kwargs): # Each ping command should not take more than 2 seconds self.assertLess(total, 2) + def test_background_connections_log_on_error(self): + with self.assertLogs("pymongo.client", level="ERROR") as cm: + client = self.rs_or_single_client(minPoolSize=1) + # Create a single connection in the pool. 
+ client.admin.command("ping") + + # Cause new connections to fail. + pool = get_pool(client) + + def fail_connect(*args, **kwargs): + raise Exception("failed to connect") + + pool.connect = fail_connect + # Un-patch Pool.connect to break the cyclic reference. + self.addCleanup(delattr, pool, "connect") + + pool.reset_without_pause() + + wait_until( + lambda: "failed to connect" in "".join(cm.output), "start creating connections" + ) + self.assertIn("MongoClient background task encountered an error", "".join(cm.output)) + @client_context.require_replica_set def test_direct_connection(self): # direct_connection=True should result in Single topology. @@ -1784,20 +1875,20 @@ def server_description_count(): return i gc.collect() - with client_knobs(min_heartbeat_interval=0.003): + with client_knobs(min_heartbeat_interval=0.002): client = self.simple_client( - "invalid:27017", heartbeatFrequencyMS=3, serverSelectionTimeoutMS=150 + "invalid:27017", heartbeatFrequencyMS=2, serverSelectionTimeoutMS=200 ) initial_count = server_description_count() with self.assertRaises(ServerSelectionTimeoutError): client.test.test.find_one() gc.collect() final_count = server_description_count() + client.close() # If a bug like PYTHON-2433 is reintroduced then too many # ServerDescriptions will be kept alive and this test will fail: - # AssertionError: 19 != 46 within 15 delta (27 difference) - # On Python 3.11 we seem to get more of a delta. - self.assertAlmostEqual(initial_count, final_count, delta=20) + # AssertionError: 11 != 47 within 20 delta (36 difference) + self.assertAlmostEqual(initial_count, final_count, delta=30) @client_context.require_failCommand_fail_point def test_network_error_message(self): @@ -1837,32 +1928,41 @@ def test_service_name_from_kwargs(self): srvServiceName="customname", connect=False, ) + client._connect() self.assertEqual(client._topology_settings.srv_service_name, "customname") + client.close() client = MongoClient( "mongodb+srv://user:password@test22.test.build.10gen.cc" "/?srvServiceName=shouldbeoverriden", srvServiceName="customname", connect=False, ) + client._connect() self.assertEqual(client._topology_settings.srv_service_name, "customname") + client.close() client = MongoClient( "mongodb+srv://user:password@test22.test.build.10gen.cc/?srvServiceName=customname", connect=False, ) + client._connect() self.assertEqual(client._topology_settings.srv_service_name, "customname") + client.close() def test_srv_max_hosts_kwarg(self): client = self.simple_client("mongodb+srv://test1.test.build.10gen.cc/") + client._connect() self.assertGreater(len(client.topology_description.server_descriptions()), 1) client = self.simple_client("mongodb+srv://test1.test.build.10gen.cc/", srvmaxhosts=1) + client._connect() self.assertEqual(len(client.topology_description.server_descriptions()), 1) client = self.simple_client( "mongodb+srv://test1.test.build.10gen.cc/?srvMaxHosts=1", srvmaxhosts=2 ) + client._connect() self.assertEqual(len(client.topology_description.server_descriptions()), 2) @unittest.skipIf( - client_context.load_balancer or client_context.serverless, + client_context.load_balancer, "loadBalanced clients do not run SDAM", ) @unittest.skipIf(sys.platform == "win32", "Windows does not support SIGSTOP") @@ -1915,7 +2015,7 @@ def _test_handshake(self, env_vars, expected_env): def test_handshake_01_aws(self): self._test_handshake( { - "AWS_EXECUTION_ENV": "AWS_Lambda_python3.9", + "AWS_EXECUTION_ENV": "AWS_Lambda_python3.10", "AWS_REGION": "us-east-2", "AWS_LAMBDA_FUNCTION_MEMORY_SIZE": 
"1024", }, @@ -1953,7 +2053,7 @@ def test_handshake_04_vercel(self): def test_handshake_05_multiple(self): self._test_handshake( - {"AWS_EXECUTION_ENV": "AWS_Lambda_python3.9", "FUNCTIONS_WORKER_RUNTIME": "python"}, + {"AWS_EXECUTION_ENV": "AWS_Lambda_python3.10", "FUNCTIONS_WORKER_RUNTIME": "python"}, None, ) # Extra cases for other combos. @@ -1965,13 +2065,16 @@ def test_handshake_05_multiple(self): def test_handshake_06_region_too_long(self): self._test_handshake( - {"AWS_EXECUTION_ENV": "AWS_Lambda_python3.9", "AWS_REGION": "a" * 512}, + {"AWS_EXECUTION_ENV": "AWS_Lambda_python3.10", "AWS_REGION": "a" * 512}, {"name": "aws.lambda"}, ) def test_handshake_07_memory_invalid_int(self): self._test_handshake( - {"AWS_EXECUTION_ENV": "AWS_Lambda_python3.9", "AWS_LAMBDA_FUNCTION_MEMORY_SIZE": "big"}, + { + "AWS_EXECUTION_ENV": "AWS_Lambda_python3.10", + "AWS_LAMBDA_FUNCTION_MEMORY_SIZE": "big", + }, {"name": "aws.lambda"}, ) @@ -2402,7 +2505,7 @@ def test_reconnect(self): # MongoClient discovers it's alone. The first attempt raises either # ServerSelectionTimeoutError or AutoReconnect (from - # AsyncMockPool.get_socket). + # MockPool.get_socket). with self.assertRaises(AutoReconnect): c.db.collection.find_one() diff --git a/test/test_client_bulk_write.py b/test/test_client_bulk_write.py index c1cc27c28a..0cb6845099 100644 --- a/test/test_client_bulk_write.py +++ b/test/test_client_bulk_write.py @@ -18,9 +18,6 @@ import os import sys -from bson import encode -from bson.raw_bson import RawBSONDocument - sys.path[0:0] = [""] from test import ( @@ -28,7 +25,8 @@ client_context, unittest, ) -from test.utils import ( +from test.utils import flaky +from test.utils_shared import ( OvertCommandListener, ) from unittest.mock import patch @@ -50,7 +48,6 @@ class TestClientBulkWrite(IntegrationTest): @client_context.require_version_min(8, 0, 0, -24) - @client_context.require_no_serverless def test_returns_error_if_no_namespace_provided(self): models = [InsertOne(document={"a": "b"})] with self.assertRaises(InvalidOperation) as context: @@ -61,7 +58,6 @@ def test_returns_error_if_no_namespace_provided(self): ) @client_context.require_version_min(8, 0, 0, -24) - @client_context.require_no_serverless def test_handles_non_pymongo_error(self): with patch.object( _ClientBulk, "write_command", return_value={"error": TypeError("mock type error")} @@ -73,7 +69,6 @@ def test_handles_non_pymongo_error(self): self.assertFalse(hasattr(context.exception.error, "details")) @client_context.require_version_min(8, 0, 0, -24) - @client_context.require_no_serverless def test_formats_write_error_correctly(self): models = [ InsertOne(namespace="db.coll", document={"_id": 1}), @@ -87,19 +82,9 @@ def test_formats_write_error_correctly(self): self.assertEqual(write_error["idx"], 1) self.assertEqual(write_error["op"], {"insert": 0, "document": {"_id": 1}}) - @client_context.require_version_min(8, 0, 0, -24) - @client_context.require_no_serverless - def test_raw_bson_not_inflated(self): - doc = RawBSONDocument(encode({"a": "b" * 100})) - models = [ - InsertOne(namespace="db.coll", document=doc), - ] - self.client.bulk_write(models=models) - - self.assertIsNone(doc._RawBSONDocument__inflated_doc) - # https://github.com/mongodb/specifications/tree/master/source/crud/tests +# Note: tests 1 and 2 are in test_read_write_concern_spec.py class TestClientBulkWriteCRUD(IntegrationTest): def setUp(self): super().setUp() @@ -108,8 +93,7 @@ def setUp(self): self.max_message_size_bytes = client_context.max_message_size_bytes 
@client_context.require_version_min(8, 0, 0, -24) - @client_context.require_no_serverless - def test_batch_splits_if_num_operations_too_large(self): + def test_3_batch_splits_if_num_operations_too_large(self): listener = OvertCommandListener() client = self.rs_or_single_client(event_listeners=[listener]) @@ -133,8 +117,7 @@ def test_batch_splits_if_num_operations_too_large(self): self.assertEqual(first_event.operation_id, second_event.operation_id) @client_context.require_version_min(8, 0, 0, -24) - @client_context.require_no_serverless - def test_batch_splits_if_ops_payload_too_large(self): + def test_4_batch_splits_if_ops_payload_too_large(self): listener = OvertCommandListener() client = self.rs_or_single_client(event_listeners=[listener]) @@ -165,9 +148,8 @@ def test_batch_splits_if_ops_payload_too_large(self): self.assertEqual(first_event.operation_id, second_event.operation_id) @client_context.require_version_min(8, 0, 0, -24) - @client_context.require_no_serverless @client_context.require_failCommand_fail_point - def test_collects_write_concern_errors_across_batches(self): + def test_5_collects_write_concern_errors_across_batches(self): listener = OvertCommandListener() client = self.rs_or_single_client( event_listeners=[listener], @@ -208,8 +190,7 @@ def test_collects_write_concern_errors_across_batches(self): self.assertEqual(len(bulk_write_events), 2) @client_context.require_version_min(8, 0, 0, -24) - @client_context.require_no_serverless - def test_collects_write_errors_across_batches_unordered(self): + def test_6_collects_write_errors_across_batches_unordered(self): listener = OvertCommandListener() client = self.rs_or_single_client(event_listeners=[listener]) @@ -238,8 +219,7 @@ def test_collects_write_errors_across_batches_unordered(self): self.assertEqual(len(bulk_write_events), 2) @client_context.require_version_min(8, 0, 0, -24) - @client_context.require_no_serverless - def test_collects_write_errors_across_batches_ordered(self): + def test_6_collects_write_errors_across_batches_ordered(self): listener = OvertCommandListener() client = self.rs_or_single_client(event_listeners=[listener]) @@ -268,8 +248,7 @@ def test_collects_write_errors_across_batches_ordered(self): self.assertEqual(len(bulk_write_events), 1) @client_context.require_version_min(8, 0, 0, -24) - @client_context.require_no_serverless - def test_handles_cursor_requiring_getMore(self): + def test_7_handles_cursor_requiring_getMore(self): listener = OvertCommandListener() client = self.rs_or_single_client(event_listeners=[listener]) @@ -308,9 +287,8 @@ def test_handles_cursor_requiring_getMore(self): self.assertTrue(get_more_event) @client_context.require_version_min(8, 0, 0, -24) - @client_context.require_no_serverless @client_context.require_no_standalone - def test_handles_cursor_requiring_getMore_within_transaction(self): + def test_8_handles_cursor_requiring_getMore_within_transaction(self): listener = OvertCommandListener() client = self.rs_or_single_client(event_listeners=[listener]) @@ -351,9 +329,8 @@ def test_handles_cursor_requiring_getMore_within_transaction(self): self.assertTrue(get_more_event) @client_context.require_version_min(8, 0, 0, -24) - @client_context.require_no_serverless @client_context.require_failCommand_fail_point - def test_handles_getMore_error(self): + def test_9_handles_getMore_error(self): listener = OvertCommandListener() client = self.rs_or_single_client(event_listeners=[listener]) @@ -406,8 +383,7 @@ def test_handles_getMore_error(self): self.assertTrue(kill_cursors_event) 
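Several of the renumbered tests below pin down `partial_result` semantics: when a client-level bulk write fails after some batches have executed, the completed results travel on the exception; when nothing was sent at all (for example `DocumentTooLarge` raised up front), `partial_result` is None. A sketch of inspecting it (namespace and document are placeholders):

from pymongo import InsertOne, MongoClient
from pymongo.errors import ClientBulkWriteException

client = MongoClient()
models = [InsertOne(namespace="db.coll", document={"a": "b"})]
try:
    client.bulk_write(models=models)
except ClientBulkWriteException as exc:
    if exc.partial_result is not None:
        print(exc.partial_result.inserted_count)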
@client_context.require_version_min(8, 0, 0, -24) - @client_context.require_no_serverless - def test_returns_error_if_unacknowledged_too_large_insert(self): + def test_10_returns_error_if_unacknowledged_too_large_insert(self): listener = OvertCommandListener() client = self.rs_or_single_client(event_listeners=[listener]) @@ -462,8 +438,7 @@ def _setup_namespace_test_models(self): return num_models, models @client_context.require_version_min(8, 0, 0, -24) - @client_context.require_no_serverless - def test_no_batch_splits_if_new_namespace_is_not_too_large(self): + def test_11_no_batch_splits_if_new_namespace_is_not_too_large(self): listener = OvertCommandListener() client = self.rs_or_single_client(event_listeners=[listener]) @@ -493,8 +468,7 @@ def test_no_batch_splits_if_new_namespace_is_not_too_large(self): self.assertEqual(event.command["nsInfo"][0]["ns"], "db.coll") @client_context.require_version_min(8, 0, 0, -24) - @client_context.require_no_serverless - def test_batch_splits_if_new_namespace_is_too_large(self): + def test_11_batch_splits_if_new_namespace_is_too_large(self): listener = OvertCommandListener() client = self.rs_or_single_client(event_listeners=[listener]) @@ -531,27 +505,27 @@ def test_batch_splits_if_new_namespace_is_too_large(self): self.assertEqual(second_event.command["nsInfo"][0]["ns"], namespace) @client_context.require_version_min(8, 0, 0, -24) - @client_context.require_no_serverless - def test_returns_error_if_no_writes_can_be_added_to_ops(self): + def test_12_returns_error_if_no_writes_can_be_added_to_ops(self): client = self.rs_or_single_client() # Document too large. b_repeated = "b" * self.max_message_size_bytes models = [InsertOne(namespace="db.coll", document={"a": b_repeated})] - with self.assertRaises(DocumentTooLarge): + with self.assertRaises(DocumentTooLarge) as context: client.bulk_write(models=models) + self.assertIsNone(context.exception.partial_result) # Namespace too large. c_repeated = "c" * self.max_message_size_bytes namespace = f"db.{c_repeated}" models = [InsertOne(namespace=namespace, document={"a": "b"})] - with self.assertRaises(DocumentTooLarge): + with self.assertRaises(DocumentTooLarge) as context: client.bulk_write(models=models) + self.assertIsNone(context.exception.partial_result) @client_context.require_version_min(8, 0, 0, -24) - @client_context.require_no_serverless @unittest.skipUnless(_HAVE_PYMONGOCRYPT, "pymongocrypt is not installed") - def test_returns_error_if_auto_encryption_configured(self): + def test_13_returns_error_if_auto_encryption_configured(self): opts = AutoEncryptionOpts( key_vault_namespace="db.coll", kms_providers={"aws": {"accessKeyId": "foo", "secretAccessKey": "bar"}}, @@ -561,12 +535,12 @@ def test_returns_error_if_auto_encryption_configured(self): models = [InsertOne(namespace="db.coll", document={"a": "b"})] with self.assertRaises(InvalidOperation) as context: client.bulk_write(models=models) + self.assertIsNone(context.exception.partial_result) self.assertIn( "bulk_write does not currently support automatic encryption", context.exception._message ) @client_context.require_version_min(8, 0, 0, -24) - @client_context.require_no_serverless def test_upserted_result(self): client = self.rs_or_single_client() @@ -605,8 +579,9 @@ def test_upserted_result(self): self.assertEqual(result.update_results[1].did_upsert, True) self.assertEqual(result.update_results[2].did_upsert, False) + # Note: test 14 is optional and intentionally not implemented because we provide multiple APIs to specify explain. 
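Test 15, which follows, covers unacknowledged writes split across batches. For orientation, an unacknowledged client bulk write is requested with a `w=0` write concern and unordered execution, as the tests do:

from pymongo import InsertOne, MongoClient
from pymongo.write_concern import WriteConcern

client = MongoClient()
models = [InsertOne(namespace="db.coll", document={"a": "b"})]
result = client.bulk_write(
    models=models,
    ordered=False,
    write_concern=WriteConcern(w=0),
)
assert result.acknowledged is False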
+ @client_context.require_version_min(8, 0, 0, -24) - @client_context.require_no_serverless def test_15_unacknowledged_write_across_batches(self): listener = OvertCommandListener() client = self.rs_or_single_client(event_listeners=[listener]) @@ -647,21 +622,20 @@ def test_15_unacknowledged_write_across_batches(self): # https://github.com/mongodb/specifications/blob/master/source/client-side-operations-timeout/tests/README.md#11-multi-batch-bulkwrites class TestClientBulkWriteCSOT(IntegrationTest): def setUp(self): - if os.environ.get("SKIP_CSOT_TESTS", ""): - raise unittest.SkipTest("SKIP_CSOT_TESTS is set, skipping...") super().setUp() self.max_write_batch_size = client_context.max_write_batch_size self.max_bson_object_size = client_context.max_bson_size self.max_message_size_bytes = client_context.max_message_size_bytes @client_context.require_version_min(8, 0, 0, -24) - @client_context.require_no_serverless @client_context.require_failCommand_fail_point + @flaky(reason="PYTHON-5290", max_runs=3, affects_cpython_linux=True) def test_timeout_in_multi_batch_bulk_write(self): + if sys.platform != "linux" and "CI" in os.environ: + self.skipTest("PYTHON-3522 CSOT test runs too slow on Windows and MacOS") _OVERHEAD = 500 internal_client = self.rs_or_single_client(timeoutMS=None) - self.addCleanup(internal_client.close) collection = internal_client.db["coll"] self.addCleanup(collection.drop) diff --git a/test/test_client_context.py b/test/test_client_context.py index e807ac5f5f..9c1b21ee78 100644 --- a/test/test_client_context.py +++ b/test/test_client_context.py @@ -36,31 +36,15 @@ def test_must_connect(self): ), ) - def test_serverless(self): - if not os.environ.get("TEST_SERVERLESS"): - raise SkipTest("TEST_SERVERLESS is not set") - - self.assertTrue( - client_context.connected and client_context.serverless, - "client context must be connected to serverless when " - f"TEST_SERVERLESS is set. Failed attempts:\n{client_context.connection_attempt_info()}", - ) - def test_enableTestCommands_is_disabled(self): - if not os.environ.get("PYMONGO_DISABLE_TEST_COMMANDS"): - raise SkipTest("PYMONGO_DISABLE_TEST_COMMANDS is not set") + if not os.environ.get("DISABLE_TEST_COMMANDS"): + raise SkipTest("DISABLE_TEST_COMMANDS is not set") self.assertFalse( client_context.test_commands_enabled, - "enableTestCommands must be disabled when PYMONGO_DISABLE_TEST_COMMANDS is set.", + "enableTestCommands must be disabled when DISABLE_TEST_COMMANDS is set.", ) - def test_setdefaultencoding_worked(self): - if not os.environ.get("SETDEFAULTENCODING"): - raise SkipTest("SETDEFAULTENCODING is not set") - - self.assertEqual(sys.getdefaultencoding(), os.environ["SETDEFAULTENCODING"]) - def test_free_threading_is_enabled(self): if "free-threading build" not in sys.version: raise SkipTest("this test requires the Python free-threading build") diff --git a/test/test_client_metadata.py b/test/test_client_metadata.py new file mode 100644 index 0000000000..a94c5aa25e --- /dev/null +++ b/test/test_client_metadata.py @@ -0,0 +1,232 @@ +# Copyright 2013-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import annotations + +import asyncio +import os +import pathlib +import time +import unittest +from test import IntegrationTest +from test.unified_format import generate_test_classes +from test.utils_shared import CMAPListener +from typing import Any, Optional + +import pytest + +from pymongo import MongoClient +from pymongo.driver_info import DriverInfo +from pymongo.monitoring import ConnectionClosedEvent + +try: + from mockupdb import MockupDB, OpMsgReply + + _HAVE_MOCKUPDB = True +except ImportError: + _HAVE_MOCKUPDB = False + +pytestmark = pytest.mark.mockupdb + +_IS_SYNC = True + +# Location of JSON test specifications. +if _IS_SYNC: + _TEST_PATH = os.path.join(pathlib.Path(__file__).resolve().parent, "handshake", "unified") +else: + _TEST_PATH = os.path.join( + pathlib.Path(__file__).resolve().parent.parent, "handshake", "unified" + ) + +# Generate unified tests. +globals().update(generate_test_classes(_TEST_PATH, module=__name__)) + + +def _get_handshake_driver_info(request): + assert "client" in request + return request["client"] + + +class TestClientMetadataProse(IntegrationTest): + def setUp(self): + super().setUp() + self.server = MockupDB() + self.handshake_req = None + + def respond(r): + if "ismaster" in r: + # then this is a handshake request + self.handshake_req = r + return r.reply(OpMsgReply(maxWireVersion=13)) + + self.server.autoresponds(respond) + self.server.run() + self.addCleanup(self.server.stop) + + def send_ping_and_get_metadata( + self, client: MongoClient, is_handshake: bool + ) -> tuple[str, Optional[str], Optional[str], dict[str, Any]]: + # reset if handshake request + if is_handshake: + self.handshake_req: Optional[dict] = None + + client.admin.command("ping") + metadata = _get_handshake_driver_info(self.handshake_req) + driver_metadata = metadata["driver"] + name, version, platform = ( + driver_metadata["name"], + driver_metadata["version"], + metadata["platform"], + ) + return name, version, platform, metadata + + def check_metadata_added( + self, + client: MongoClient, + add_name: str, + add_version: Optional[str], + add_platform: Optional[str], + ) -> None: + # send initial metadata + name, version, platform, metadata = self.send_ping_and_get_metadata(client, True) + # wait for connection to become idle + time.sleep(0.005) + + # add new metadata + client.append_metadata(DriverInfo(add_name, add_version, add_platform)) + new_name, new_version, new_platform, new_metadata = self.send_ping_and_get_metadata( + client, True + ) + if add_name is not None and add_name.lower() in name.lower().split("|"): + self.assertEqual(name, new_name) + self.assertEqual(version, new_version) + self.assertEqual(platform, new_platform) + else: + self.assertEqual(new_name, f"{name}|{add_name}" if add_name is not None else name) + self.assertEqual( + new_version, + f"{version}|{add_version}" if add_version is not None else version, + ) + self.assertEqual( + new_platform, + f"{platform}|{add_platform}" if add_platform is not None else platform, + ) + + metadata.pop("driver") + metadata.pop("platform") + new_metadata.pop("driver") + new_metadata.pop("platform") + self.assertEqual(metadata, new_metadata) + + def test_append_metadata(self): + client = self.rs_or_single_client( + "mongodb://" + self.server.address_string, + maxIdleTimeMS=1, + driver=DriverInfo("library", "1.2", "Library Platform"), + ) + self.check_metadata_added(client, "framework", "2.0", 
"Framework Platform") + + def test_append_metadata_platform_none(self): + client = self.rs_or_single_client( + "mongodb://" + self.server.address_string, + maxIdleTimeMS=1, + driver=DriverInfo("library", "1.2", "Library Platform"), + ) + self.check_metadata_added(client, "framework", "2.0", None) + + def test_append_metadata_version_none(self): + client = self.rs_or_single_client( + "mongodb://" + self.server.address_string, + maxIdleTimeMS=1, + driver=DriverInfo("library", "1.2", "Library Platform"), + ) + self.check_metadata_added(client, "framework", None, "Framework Platform") + + def test_append_metadata_platform_version_none(self): + client = self.rs_or_single_client( + "mongodb://" + self.server.address_string, + maxIdleTimeMS=1, + driver=DriverInfo("library", "1.2", "Library Platform"), + ) + self.check_metadata_added(client, "framework", None, None) + + def test_multiple_successive_metadata_updates(self): + client = self.rs_or_single_client( + "mongodb://" + self.server.address_string, maxIdleTimeMS=1, connect=False + ) + client.append_metadata(DriverInfo("library", "1.2", "Library Platform")) + self.check_metadata_added(client, "framework", "2.0", "Framework Platform") + + def test_multiple_successive_metadata_updates_platform_none(self): + client = self.rs_or_single_client( + "mongodb://" + self.server.address_string, + maxIdleTimeMS=1, + ) + client.append_metadata(DriverInfo("library", "1.2", "Library Platform")) + self.check_metadata_added(client, "framework", "2.0", None) + + def test_multiple_successive_metadata_updates_version_none(self): + client = self.rs_or_single_client( + "mongodb://" + self.server.address_string, + maxIdleTimeMS=1, + ) + client.append_metadata(DriverInfo("library", "1.2", "Library Platform")) + self.check_metadata_added(client, "framework", None, "Framework Platform") + + def test_multiple_successive_metadata_updates_platform_version_none(self): + client = self.rs_or_single_client( + "mongodb://" + self.server.address_string, + maxIdleTimeMS=1, + ) + client.append_metadata(DriverInfo("library", "1.2", "Library Platform")) + self.check_metadata_added(client, "framework", None, None) + + def test_doesnt_update_established_connections(self): + listener = CMAPListener() + client = self.rs_or_single_client( + "mongodb://" + self.server.address_string, + maxIdleTimeMS=1, + driver=DriverInfo("library", "1.2", "Library Platform"), + event_listeners=[listener], + ) + + # send initial metadata + name, version, platform, metadata = self.send_ping_and_get_metadata(client, True) + self.assertIsNotNone(name) + self.assertIsNotNone(version) + self.assertIsNotNone(platform) + + # add data + add_name, add_version, add_platform = "framework", "2.0", "Framework Platform" + client.append_metadata(DriverInfo(add_name, add_version, add_platform)) + # check new data isn't sent + self.handshake_req: Optional[dict] = None + client.admin.command("ping") + self.assertIsNone(self.handshake_req) + self.assertEqual(listener.event_count(ConnectionClosedEvent), 0) + + def test_duplicate_driver_name_no_op(self): + client = self.rs_or_single_client( + "mongodb://" + self.server.address_string, + maxIdleTimeMS=1, + ) + client.append_metadata(DriverInfo("library", "1.2", "Library Platform")) + self.check_metadata_added(client, "framework", None, None) + # wait for connection to become idle + time.sleep(0.005) + # add same metadata again + self.check_metadata_added(client, "Framework", None, None) + + +if __name__ == "__main__": + unittest.main() diff --git a/test/test_code.py 
b/test/test_code.py index c564e3e04e..23f0af5cef 100644 --- a/test/test_code.py +++ b/test/test_code.py @@ -46,8 +46,8 @@ def test_code(self): a_code = Code("hello world") self.assertTrue(a_code.startswith("hello")) self.assertTrue(a_code.endswith("world")) - self.assertTrue(isinstance(a_code, Code)) - self.assertFalse(isinstance(a_string, Code)) + self.assertIsInstance(a_code, Code) + self.assertNotIsInstance(a_string, Code) self.assertIsNone(a_code.scope) with_scope = Code("hello world", {"my_var": 5}) self.assertEqual({"my_var": 5}, with_scope.scope) diff --git a/test/test_collation.py b/test/test_collation.py index 06436f0638..903f24a228 100644 --- a/test/test_collation.py +++ b/test/test_collation.py @@ -18,7 +18,7 @@ import functools import warnings from test import IntegrationTest, client_context, unittest -from test.utils import EventListener, OvertCommandListener +from test.utils_shared import EventListener, OvertCommandListener from typing import Any from pymongo.collation import ( @@ -37,7 +37,6 @@ UpdateMany, UpdateOne, ) -from pymongo.synchronous.helpers import next from pymongo.write_concern import WriteConcern _IS_SYNC = True diff --git a/test/test_collection.py b/test/test_collection.py index af524bba47..18be309f22 100644 --- a/test/test_collection.py +++ b/test/test_collection.py @@ -21,6 +21,7 @@ import sys from codecs import utf_8_decode from collections import defaultdict +from test.utils import get_pool, is_mongos from typing import Any, Iterable, no_type_check from pymongo.synchronous.database import Database @@ -33,14 +34,13 @@ client_context, unittest, ) -from test.utils import ( +from test.utils_shared import ( IMPOSSIBLE_WRITE_CONCERN, EventListener, OvertCommandListener, - get_pool, - is_mongos, wait_until, ) +from test.version import Version from bson import encode from bson.codec_options import CodecOptions @@ -74,7 +74,6 @@ ) from pymongo.synchronous.collection import Collection, ReturnDocument from pymongo.synchronous.command_cursor import CommandCursor -from pymongo.synchronous.helpers import next from pymongo.synchronous.mongo_client import MongoClient from pymongo.write_concern import WriteConcern @@ -112,7 +111,7 @@ def make_col(base, name): def test_getattr(self): coll = self.db.test - self.assertTrue(isinstance(coll["_does_not_exist"], Collection)) + self.assertIsInstance(coll["_does_not_exist"], Collection) with self.assertRaises(AttributeError) as context: coll._does_not_exist @@ -133,13 +132,7 @@ def test_getattr(self): def test_iteration(self): coll = self.db.coll - if "PyPy" in sys.version and sys.version_info < (3, 8, 15): - msg = "'NoneType' object is not callable" - else: - if _IS_SYNC: - msg = "'Collection' object is not iterable" - else: - msg = "'Collection' object is not iterable" + msg = "'Collection' object is not iterable" # Iteration fails with self.assertRaisesRegex(TypeError, msg): for _ in coll: # type: ignore[misc] # error: "None" not callable [misc] @@ -183,7 +176,7 @@ def write_concern_collection(self): yield self.db.test def test_equality(self): - self.assertTrue(isinstance(self.db.test, Collection)) + self.assertIsInstance(self.db.test, Collection) self.assertEqual(self.db.test, self.db["test"]) self.assertEqual(self.db.test, Collection(self.db, "test")) self.assertEqual(self.db.test.mike, self.db["test.mike"]) @@ -219,7 +212,7 @@ def lambda_test_2(): def test_drop_nonexistent_collection(self): self.db.drop_collection("test") - self.assertFalse("test" in self.db.list_collection_names()) + self.assertNotIn("test", 
self.db.list_collection_names()) # No exception self.db.drop_collection("test") @@ -255,7 +248,7 @@ def test_create_indexes(self): db.test.drop_indexes() self.assertEqual(len(db.test.index_information()), 1) db.test.create_indexes([IndexModel("hello")]) - self.assertTrue("hello_1" in db.test.index_information()) + self.assertIn("hello_1", db.test.index_information()) db.test.drop_indexes() self.assertEqual(len(db.test.index_information()), 1) @@ -264,7 +257,7 @@ def test_create_indexes(self): ) info = db.test.index_information() for name in names: - self.assertTrue(name in info) + self.assertIn(name, info) db.test.drop() db.test.insert_one({"a": 1}) @@ -316,16 +309,16 @@ def test_create_index(self): db.test.drop_indexes() self.assertEqual(len(db.test.index_information()), 1) db.test.create_index("hello") - self.assertTrue("hello_1" in db.test.index_information()) + self.assertIn("hello_1", db.test.index_information()) db.test.drop_indexes() self.assertEqual(len(db.test.index_information()), 1) db.test.create_index([("hello", DESCENDING), ("world", ASCENDING)]) - self.assertTrue("hello_-1_world_1" in db.test.index_information()) + self.assertIn("hello_-1_world_1", db.test.index_information()) db.test.drop_indexes() db.test.create_index([("hello", DESCENDING), ("world", ASCENDING)], name=None) - self.assertTrue("hello_-1_world_1" in db.test.index_information()) + self.assertIn("hello_-1_world_1", db.test.index_information()) db.test.drop() db.test.insert_one({"a": 1}) @@ -351,10 +344,13 @@ def test_drop_index(self): db.test.drop_index(name) # Drop it again. - with self.assertRaises(OperationFailure): + if client_context.version < Version(8, 3, -1): + with self.assertRaises(OperationFailure): + db.test.drop_index(name) + else: db.test.drop_index(name) self.assertEqual(len(db.test.index_information()), 2) - self.assertTrue("hello_1" in db.test.index_information()) + self.assertIn("hello_1", db.test.index_information()) db.test.drop_indexes() db.test.create_index("hello") @@ -364,7 +360,7 @@ def test_drop_index(self): self.assertEqual(name, "goodbye_1") db.test.drop_index([("goodbye", ASCENDING)]) self.assertEqual(len(db.test.index_information()), 2) - self.assertTrue("hello_1" in db.test.index_information()) + self.assertIn("hello_1", db.test.index_information()) with self.write_concern_collection() as coll: coll.drop_index("hello_1") @@ -396,7 +392,7 @@ def map_indexes(indexes): indexes = (db.test.list_indexes()).to_list() self.assertEqual(len(indexes), 1) - self.assertTrue("_id_" in map_indexes(indexes)) + self.assertIn("_id_", map_indexes(indexes)) db.test.create_index("hello") indexes = (db.test.list_indexes()).to_list() @@ -425,7 +421,7 @@ def test_index_info(self): db.test.drop() db.test.insert_one({}) # create collection self.assertEqual(len(db.test.index_information()), 1) - self.assertTrue("_id_" in db.test.index_information()) + self.assertIn("_id_", db.test.index_information()) db.test.create_index("hello") self.assertEqual(len(db.test.index_information()), 2) @@ -485,7 +481,7 @@ def test_index_text(self): db.test.drop_indexes() self.assertEqual("t_text", db.test.create_index([("t", TEXT)])) index_info = (db.test.index_information())["t_text"] - self.assertTrue("weights" in index_info) + self.assertIn("weights", index_info) db.test.insert_many( [{"t": "spam eggs and spam"}, {"t": "spam"}, {"t": "egg sausage and bacon"}] @@ -497,7 +493,7 @@ def test_index_text(self): # Sort by 'score' field. 
cursor.sort([("score", {"$meta": "textScore"})]) results = cursor.to_list() - self.assertTrue(results[0]["score"] >= results[1]["score"]) + self.assertGreaterEqual(results[0]["score"], results[1]["score"]) db.test.drop_indexes() @@ -546,7 +542,7 @@ def test_index_background(self): db.test.create_index([("keya", ASCENDING)]) db.test.create_index([("keyb", ASCENDING)], background=False) db.test.create_index([("keyc", ASCENDING)], background=True) - self.assertFalse("background" in (db.test.index_information())["keya_1"]) + self.assertNotIn("background", (db.test.index_information())["keya_1"]) self.assertFalse((db.test.index_information())["keyb_1"]["background"]) self.assertTrue((db.test.index_information())["keyc_1"]["background"]) @@ -697,7 +693,7 @@ def test_field_selection(self): doc = next(db.test.find({}, {"_id": False})) l = list(doc) - self.assertFalse("_id" in l) + self.assertNotIn("_id", l) def test_options(self): db = self.db @@ -713,8 +709,8 @@ def test_insert_one(self): document: dict[str, Any] = {"_id": 1000} result = db.test.insert_one(document) - self.assertTrue(isinstance(result, InsertOneResult)) - self.assertTrue(isinstance(result.inserted_id, int)) + self.assertIsInstance(result, InsertOneResult) + self.assertIsInstance(result.inserted_id, int) self.assertEqual(document["_id"], result.inserted_id) self.assertTrue(result.acknowledged) self.assertIsNotNone(db.test.find_one({"_id": document["_id"]})) @@ -722,8 +718,8 @@ def test_insert_one(self): document = {"foo": "bar"} result = db.test.insert_one(document) - self.assertTrue(isinstance(result, InsertOneResult)) - self.assertTrue(isinstance(result.inserted_id, ObjectId)) + self.assertIsInstance(result, InsertOneResult) + self.assertIsInstance(result.inserted_id, ObjectId) self.assertEqual(document["_id"], result.inserted_id) self.assertTrue(result.acknowledged) self.assertIsNotNone(db.test.find_one({"_id": document["_id"]})) @@ -731,8 +727,8 @@ def test_insert_one(self): db = db.client.get_database(db.name, write_concern=WriteConcern(w=0)) result = db.test.insert_one(document) - self.assertTrue(isinstance(result, InsertOneResult)) - self.assertTrue(isinstance(result.inserted_id, ObjectId)) + self.assertIsInstance(result, InsertOneResult) + self.assertIsInstance(result.inserted_id, ObjectId) self.assertEqual(document["_id"], result.inserted_id) self.assertFalse(result.acknowledged) # The insert failed duplicate key... 
@@ -744,7 +740,7 @@ def async_lambda(): document = RawBSONDocument(encode({"_id": ObjectId(), "foo": "bar"})) result = db.test.insert_one(document) - self.assertTrue(isinstance(result, InsertOneResult)) + self.assertIsInstance(result, InsertOneResult) self.assertEqual(result.inserted_id, None) def test_insert_many(self): @@ -753,38 +749,38 @@ def test_insert_many(self): docs: list = [{} for _ in range(5)] result = db.test.insert_many(docs) - self.assertTrue(isinstance(result, InsertManyResult)) - self.assertTrue(isinstance(result.inserted_ids, list)) + self.assertIsInstance(result, InsertManyResult) + self.assertIsInstance(result.inserted_ids, list) self.assertEqual(5, len(result.inserted_ids)) for doc in docs: _id = doc["_id"] - self.assertTrue(isinstance(_id, ObjectId)) - self.assertTrue(_id in result.inserted_ids) + self.assertIsInstance(_id, ObjectId) + self.assertIn(_id, result.inserted_ids) self.assertEqual(1, db.test.count_documents({"_id": _id})) self.assertTrue(result.acknowledged) docs = [{"_id": i} for i in range(5)] result = db.test.insert_many(docs) - self.assertTrue(isinstance(result, InsertManyResult)) - self.assertTrue(isinstance(result.inserted_ids, list)) + self.assertIsInstance(result, InsertManyResult) + self.assertIsInstance(result.inserted_ids, list) self.assertEqual(5, len(result.inserted_ids)) for doc in docs: _id = doc["_id"] - self.assertTrue(isinstance(_id, int)) - self.assertTrue(_id in result.inserted_ids) + self.assertIsInstance(_id, int) + self.assertIn(_id, result.inserted_ids) self.assertEqual(1, db.test.count_documents({"_id": _id})) self.assertTrue(result.acknowledged) docs = [RawBSONDocument(encode({"_id": i + 5})) for i in range(5)] result = db.test.insert_many(docs) - self.assertTrue(isinstance(result, InsertManyResult)) - self.assertTrue(isinstance(result.inserted_ids, list)) + self.assertIsInstance(result, InsertManyResult) + self.assertIsInstance(result.inserted_ids, list) self.assertEqual([], result.inserted_ids) db = db.client.get_database(db.name, write_concern=WriteConcern(w=0)) docs: list = [{} for _ in range(5)] result = db.test.insert_many(docs) - self.assertTrue(isinstance(result, InsertManyResult)) + self.assertIsInstance(result, InsertManyResult) self.assertFalse(result.acknowledged) self.assertEqual(20, db.test.count_documents({})) @@ -825,20 +821,20 @@ def test_delete_one(self): self.db.test.insert_one({"z": 1}) result = self.db.test.delete_one({"x": 1}) - self.assertTrue(isinstance(result, DeleteResult)) + self.assertIsInstance(result, DeleteResult) self.assertEqual(1, result.deleted_count) self.assertTrue(result.acknowledged) self.assertEqual(2, self.db.test.count_documents({})) result = self.db.test.delete_one({"y": 1}) - self.assertTrue(isinstance(result, DeleteResult)) + self.assertIsInstance(result, DeleteResult) self.assertEqual(1, result.deleted_count) self.assertTrue(result.acknowledged) self.assertEqual(1, self.db.test.count_documents({})) db = self.db.client.get_database(self.db.name, write_concern=WriteConcern(w=0)) result = db.test.delete_one({"z": 1}) - self.assertTrue(isinstance(result, DeleteResult)) + self.assertIsInstance(result, DeleteResult) self.assertRaises(InvalidOperation, lambda: result.deleted_count) self.assertFalse(result.acknowledged) @@ -856,14 +852,14 @@ def test_delete_many(self): self.db.test.insert_one({"y": 1}) result = self.db.test.delete_many({"x": 1}) - self.assertTrue(isinstance(result, DeleteResult)) + self.assertIsInstance(result, DeleteResult) self.assertEqual(2, result.deleted_count) 
self.assertTrue(result.acknowledged) self.assertEqual(0, self.db.test.count_documents({"x": 1})) db = self.db.client.get_database(self.db.name, write_concern=WriteConcern(w=0)) result = db.test.delete_many({"y": 1}) - self.assertTrue(isinstance(result, DeleteResult)) + self.assertIsInstance(result, DeleteResult) self.assertRaises(InvalidOperation, lambda: result.deleted_count) self.assertFalse(result.acknowledged) @@ -915,10 +911,10 @@ def test_insert_bypass_document_validation(self): with self.assertRaises(OperationFailure): db.test.insert_one({"_id": 1, "x": 100}) result = db.test.insert_one({"_id": 1, "x": 100}, bypass_document_validation=True) - self.assertTrue(isinstance(result, InsertOneResult)) + self.assertIsInstance(result, InsertOneResult) self.assertEqual(1, result.inserted_id) result = db.test.insert_one({"_id": 2, "a": 0}) - self.assertTrue(isinstance(result, InsertOneResult)) + self.assertIsInstance(result, InsertOneResult) self.assertEqual(2, result.inserted_id) db_w0.test.insert_one({"y": 1}, bypass_document_validation=True) @@ -933,22 +929,22 @@ def async_lambda(): with self.assertRaises(OperationFailure): db.test.insert_many(docs) result = db.test.insert_many(docs, bypass_document_validation=True) - self.assertTrue(isinstance(result, InsertManyResult)) + self.assertIsInstance(result, InsertManyResult) self.assertTrue(97, len(result.inserted_ids)) for doc in docs: _id = doc["_id"] - self.assertTrue(isinstance(_id, int)) - self.assertTrue(_id in result.inserted_ids) + self.assertIsInstance(_id, int) + self.assertIn(_id, result.inserted_ids) self.assertEqual(1, db.test.count_documents({"x": doc["x"]})) self.assertTrue(result.acknowledged) docs = [{"_id": i, "a": 200 - i} for i in range(100, 200)] result = db.test.insert_many(docs) - self.assertTrue(isinstance(result, InsertManyResult)) + self.assertIsInstance(result, InsertManyResult) self.assertTrue(97, len(result.inserted_ids)) for doc in docs: _id = doc["_id"] - self.assertTrue(isinstance(_id, int)) - self.assertTrue(_id in result.inserted_ids) + self.assertIsInstance(_id, int) + self.assertIn(_id, result.inserted_ids) self.assertEqual(1, db.test.count_documents({"a": doc["a"]})) self.assertTrue(result.acknowledged) @@ -1124,23 +1120,23 @@ def test_find_w_fields(self): db.test.insert_one({"x": 1, "mike": "awesome", "extra thing": "abcdefghijklmnopqrstuvwxyz"}) self.assertEqual(1, db.test.count_documents({})) doc = next(db.test.find({})) - self.assertTrue("x" in doc) + self.assertIn("x", doc) doc = next(db.test.find({})) - self.assertTrue("mike" in doc) + self.assertIn("mike", doc) doc = next(db.test.find({})) - self.assertTrue("extra thing" in doc) + self.assertIn("extra thing", doc) doc = next(db.test.find({}, ["x", "mike"])) - self.assertTrue("x" in doc) + self.assertIn("x", doc) doc = next(db.test.find({}, ["x", "mike"])) - self.assertTrue("mike" in doc) + self.assertIn("mike", doc) doc = next(db.test.find({}, ["x", "mike"])) - self.assertFalse("extra thing" in doc) + self.assertNotIn("extra thing", doc) doc = next(db.test.find({}, ["mike"])) - self.assertFalse("x" in doc) + self.assertNotIn("x", doc) doc = next(db.test.find({}, ["mike"])) - self.assertTrue("mike" in doc) + self.assertIn("mike", doc) doc = next(db.test.find({}, ["mike"])) - self.assertFalse("extra thing" in doc) + self.assertNotIn("extra thing", doc) @no_type_check def test_fields_specifier_as_dict(self): @@ -1151,8 +1147,8 @@ def test_fields_specifier_as_dict(self): self.assertEqual([1, 2, 3], (db.test.find_one())["x"]) self.assertEqual([2, 3], 
(db.test.find_one(projection={"x": {"$slice": -2}}))["x"]) - self.assertTrue("x" not in db.test.find_one(projection={"x": 0})) - self.assertTrue("mike" in db.test.find_one(projection={"x": 0})) + self.assertNotIn("x", db.test.find_one(projection={"x": 0})) + self.assertIn("mike", db.test.find_one(projection={"x": 0})) def test_find_w_regex(self): db = self.db @@ -1175,7 +1171,7 @@ def test_id_can_be_anything(self): db.test.delete_many({}) auto_id = {"hello": "world"} db.test.insert_one(auto_id) - self.assertTrue(isinstance(auto_id["_id"], ObjectId)) + self.assertIsInstance(auto_id["_id"], ObjectId) numeric = {"_id": 240, "hello": "world"} db.test.insert_one(numeric) @@ -1187,7 +1183,7 @@ def test_id_can_be_anything(self): for x in db.test.find(): self.assertEqual(x["hello"], "world") - self.assertTrue("_id" in x) + self.assertIn("_id", x) def test_unique_index(self): db = self.db @@ -1307,10 +1303,10 @@ def test_error_code(self): try: self.db.test.update_many({}, {"$thismodifierdoesntexist": 1}) except OperationFailure as exc: - self.assertTrue(exc.code in (9, 10147, 16840, 17009)) + self.assertIn(exc.code, (9, 10147, 16840, 17009)) # Just check that we set the error document. Fields # vary by MongoDB version. - self.assertTrue(exc.details is not None) + self.assertIsNotNone(exc.details) else: self.fail("OperationFailure was not raised") @@ -1339,9 +1335,9 @@ def test_replace_one(self): id1 = (db.test.insert_one({"x": 1})).inserted_id result = db.test.replace_one({"x": 1}, {"y": 1}) - self.assertTrue(isinstance(result, UpdateResult)) + self.assertIsInstance(result, UpdateResult) self.assertEqual(1, result.matched_count) - self.assertTrue(result.modified_count in (None, 1)) + self.assertIn(result.modified_count, (None, 1)) self.assertIsNone(result.upserted_id) self.assertTrue(result.acknowledged) self.assertEqual(1, db.test.count_documents({"y": 1})) @@ -1350,9 +1346,9 @@ def test_replace_one(self): replacement = RawBSONDocument(encode({"_id": id1, "z": 1})) result = db.test.replace_one({"y": 1}, replacement, True) - self.assertTrue(isinstance(result, UpdateResult)) + self.assertIsInstance(result, UpdateResult) self.assertEqual(1, result.matched_count) - self.assertTrue(result.modified_count in (None, 1)) + self.assertIn(result.modified_count, (None, 1)) self.assertIsNone(result.upserted_id) self.assertTrue(result.acknowledged) self.assertEqual(1, db.test.count_documents({"z": 1})) @@ -1360,16 +1356,16 @@ def test_replace_one(self): self.assertEqual((db.test.find_one(id1))["z"], 1) # type: ignore result = db.test.replace_one({"x": 2}, {"y": 2}, True) - self.assertTrue(isinstance(result, UpdateResult)) + self.assertIsInstance(result, UpdateResult) self.assertEqual(0, result.matched_count) - self.assertTrue(result.modified_count in (None, 0)) - self.assertTrue(isinstance(result.upserted_id, ObjectId)) + self.assertIn(result.modified_count, (None, 0)) + self.assertIsInstance(result.upserted_id, ObjectId) self.assertTrue(result.acknowledged) self.assertEqual(1, db.test.count_documents({"y": 2})) db = db.client.get_database(db.name, write_concern=WriteConcern(w=0)) result = db.test.replace_one({"x": 0}, {"y": 0}) - self.assertTrue(isinstance(result, UpdateResult)) + self.assertIsInstance(result, UpdateResult) self.assertRaises(InvalidOperation, lambda: result.matched_count) self.assertRaises(InvalidOperation, lambda: result.modified_count) self.assertRaises(InvalidOperation, lambda: result.upserted_id) @@ -1384,33 +1380,33 @@ def test_update_one(self): id1 = (db.test.insert_one({"x": 
5})).inserted_id result = db.test.update_one({}, {"$inc": {"x": 1}}) - self.assertTrue(isinstance(result, UpdateResult)) + self.assertIsInstance(result, UpdateResult) self.assertEqual(1, result.matched_count) - self.assertTrue(result.modified_count in (None, 1)) + self.assertIn(result.modified_count, (None, 1)) self.assertIsNone(result.upserted_id) self.assertTrue(result.acknowledged) self.assertEqual((db.test.find_one(id1))["x"], 6) # type: ignore id2 = (db.test.insert_one({"x": 1})).inserted_id result = db.test.update_one({"x": 6}, {"$inc": {"x": 1}}) - self.assertTrue(isinstance(result, UpdateResult)) + self.assertIsInstance(result, UpdateResult) self.assertEqual(1, result.matched_count) - self.assertTrue(result.modified_count in (None, 1)) + self.assertIn(result.modified_count, (None, 1)) self.assertIsNone(result.upserted_id) self.assertTrue(result.acknowledged) self.assertEqual((db.test.find_one(id1))["x"], 7) # type: ignore self.assertEqual((db.test.find_one(id2))["x"], 1) # type: ignore result = db.test.update_one({"x": 2}, {"$set": {"y": 1}}, True) - self.assertTrue(isinstance(result, UpdateResult)) + self.assertIsInstance(result, UpdateResult) self.assertEqual(0, result.matched_count) - self.assertTrue(result.modified_count in (None, 0)) - self.assertTrue(isinstance(result.upserted_id, ObjectId)) + self.assertIn(result.modified_count, (None, 0)) + self.assertIsInstance(result.upserted_id, ObjectId) self.assertTrue(result.acknowledged) db = db.client.get_database(db.name, write_concern=WriteConcern(w=0)) result = db.test.update_one({"x": 0}, {"$inc": {"x": 1}}) - self.assertTrue(isinstance(result, UpdateResult)) + self.assertIsInstance(result, UpdateResult) self.assertRaises(InvalidOperation, lambda: result.matched_count) self.assertRaises(InvalidOperation, lambda: result.modified_count) self.assertRaises(InvalidOperation, lambda: result.upserted_id) @@ -1441,31 +1437,31 @@ def test_update_many(self): db.test.insert_one({"x": 4, "y": 4}) result = db.test.update_many({"x": 4}, {"$set": {"y": 5}}) - self.assertTrue(isinstance(result, UpdateResult)) + self.assertIsInstance(result, UpdateResult) self.assertEqual(2, result.matched_count) - self.assertTrue(result.modified_count in (None, 2)) + self.assertIn(result.modified_count, (None, 2)) self.assertIsNone(result.upserted_id) self.assertTrue(result.acknowledged) self.assertEqual(3, db.test.count_documents({"y": 5})) result = db.test.update_many({"x": 5}, {"$set": {"y": 6}}) - self.assertTrue(isinstance(result, UpdateResult)) + self.assertIsInstance(result, UpdateResult) self.assertEqual(1, result.matched_count) - self.assertTrue(result.modified_count in (None, 1)) + self.assertIn(result.modified_count, (None, 1)) self.assertIsNone(result.upserted_id) self.assertTrue(result.acknowledged) self.assertEqual(1, db.test.count_documents({"y": 6})) result = db.test.update_many({"x": 2}, {"$set": {"y": 1}}, True) - self.assertTrue(isinstance(result, UpdateResult)) + self.assertIsInstance(result, UpdateResult) self.assertEqual(0, result.matched_count) - self.assertTrue(result.modified_count in (None, 0)) - self.assertTrue(isinstance(result.upserted_id, ObjectId)) + self.assertIn(result.modified_count, (None, 0)) + self.assertIsInstance(result.upserted_id, ObjectId) self.assertTrue(result.acknowledged) db = db.client.get_database(db.name, write_concern=WriteConcern(w=0)) result = db.test.update_many({"x": 0}, {"$inc": {"x": 1}}) - self.assertTrue(isinstance(result, UpdateResult)) + self.assertIsInstance(result, UpdateResult) 
self.assertRaises(InvalidOperation, lambda: result.matched_count) self.assertRaises(InvalidOperation, lambda: result.modified_count) self.assertRaises(InvalidOperation, lambda: result.upserted_id) @@ -1545,7 +1541,7 @@ def test_aggregate(self): pipeline = {"$project": {"_id": False, "foo": True}} result = db.test.aggregate([pipeline]) - self.assertTrue(isinstance(result, CommandCursor)) + self.assertIsInstance(result, CommandCursor) self.assertEqual([{"foo": [1, 2]}], result.to_list()) # Test write concern. @@ -1563,7 +1559,7 @@ def test_aggregate_raw_bson(self): pipeline = {"$project": {"_id": False, "foo": True}} coll = db.get_collection("test", codec_options=CodecOptions(document_class=RawBSONDocument)) result = coll.aggregate([pipeline]) - self.assertTrue(isinstance(result, CommandCursor)) + self.assertIsInstance(result, CommandCursor) first_result = next(result) self.assertIsInstance(first_result, RawBSONDocument) self.assertEqual([1, 2], list(first_result["foo"])) @@ -1572,7 +1568,7 @@ def test_aggregation_cursor_validation(self): db = self.db projection = {"$project": {"_id": "$_id"}} cursor = db.test.aggregate([projection], cursor={}) - self.assertTrue(isinstance(cursor, CommandCursor)) + self.assertIsInstance(cursor, CommandCursor) def test_aggregation_cursor(self): db = self.db @@ -1714,21 +1710,21 @@ def test_find_one(self): self.assertEqual(db.test.find_one({}), db.test.find_one()) self.assertEqual(db.test.find_one({"hello": "world"}), db.test.find_one()) - self.assertTrue("hello" in db.test.find_one(projection=["hello"])) - self.assertTrue("hello" not in db.test.find_one(projection=["foo"])) + self.assertIn("hello", db.test.find_one(projection=["hello"])) + self.assertNotIn("hello", db.test.find_one(projection=["foo"])) - self.assertTrue("hello" in db.test.find_one(projection=("hello",))) - self.assertTrue("hello" not in db.test.find_one(projection=("foo",))) + self.assertIn("hello", db.test.find_one(projection=("hello",))) + self.assertNotIn("hello", db.test.find_one(projection=("foo",))) - self.assertTrue("hello" in db.test.find_one(projection={"hello"})) - self.assertTrue("hello" not in db.test.find_one(projection={"foo"})) + self.assertIn("hello", db.test.find_one(projection={"hello"})) + self.assertNotIn("hello", db.test.find_one(projection={"foo"})) - self.assertTrue("hello" in db.test.find_one(projection=frozenset(["hello"]))) - self.assertTrue("hello" not in db.test.find_one(projection=frozenset(["foo"]))) + self.assertIn("hello", db.test.find_one(projection=frozenset(["hello"]))) + self.assertNotIn("hello", db.test.find_one(projection=frozenset(["foo"]))) self.assertEqual(["_id"], list(db.test.find_one(projection={"_id": True}))) - self.assertTrue("hello" in list(db.test.find_one(projection={}))) - self.assertTrue("hello" in list(db.test.find_one(projection=[]))) + self.assertIn("hello", list(db.test.find_one(projection={}))) + self.assertIn("hello", list(db.test.find_one(projection=[]))) self.assertEqual(None, db.test.find_one({"hello": "foo"})) self.assertEqual(None, db.test.find_one(ObjectId())) @@ -2193,9 +2189,9 @@ def test_find_regex(self): c.drop() c.insert_one({"r": re.compile(".*")}) - self.assertTrue(isinstance((c.find_one())["r"], Regex)) # type: ignore + self.assertIsInstance((c.find_one())["r"], Regex) # type: ignore for doc in c.find(): - self.assertTrue(isinstance(doc["r"], Regex)) + self.assertIsInstance(doc["r"], Regex) def test_find_command_generation(self): cmd = _gen_find_command( diff --git a/test/test_comment.py b/test/test_comment.py index 
9f9bf98640..bcab0061fa 100644 --- a/test/test_comment.py +++ b/test/test_comment.py @@ -20,9 +20,9 @@ import sys sys.path[0:0] = [""] -from asyncio import iscoroutinefunction +from inspect import iscoroutinefunction from test import IntegrationTest, client_context, unittest -from test.utils import OvertCommandListener +from test.utils_shared import OvertCommandListener from bson.dbref import DBRef from pymongo.operations import IndexModel diff --git a/test/test_connection_monitoring.py b/test/test_connection_monitoring.py index 05411d17ba..1405824453 100644 --- a/test/test_connection_monitoring.py +++ b/test/test_connection_monitoring.py @@ -15,20 +15,20 @@ """Execute Transactions Spec tests.""" from __future__ import annotations +import asyncio import os import sys import time +from pathlib import Path +from test.utils import get_pool, get_pools sys.path[0:0] = [""] -from test import IntegrationTest, client_knobs, unittest +from test import IntegrationTest, client_context, client_knobs, unittest from test.pymongo_mocks import DummyMonitor -from test.utils import ( +from test.utils_shared import ( CMAPListener, camel_to_snake, - client_context, - get_pool, - get_pools, wait_until, ) from test.utils_spec_runner import SpecRunnerThread, SpecTestCreator @@ -60,6 +60,8 @@ from pymongo.synchronous.pool import PoolState, _PoolClosedError from pymongo.topology_description import updated_topology_description +_IS_SYNC = True + OBJECT_TYPES = { # Event types. "ConnectionCheckedIn": ConnectionCheckedInEvent, @@ -81,7 +83,10 @@ class TestCMAP(IntegrationTest): # Location of JSON test specifications. - TEST_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), "connection_monitoring") + if _IS_SYNC: + TEST_PATH = os.path.join(Path(__file__).resolve().parent, "connection_monitoring") + else: + TEST_PATH = os.path.join(Path(__file__).resolve().parent.parent, "connection_monitoring") # Test operations: @@ -204,15 +209,10 @@ def check_error(self, actual, expected): self.check_object(actual, expected) self.assertIn(message, str(actual)) - def _set_fail_point(self, client, command_args): - cmd = SON([("configureFailPoint", "failCommand")]) - cmd.update(command_args) - client.admin.command(cmd) - def set_fail_point(self, command_args): if not client_context.supports_failCommand_fail_point: self.skipTest("failCommand fail point must be supported") - self._set_fail_point(self.client, command_args) + self.configure_fail_point(self.client, command_args) def run_scenario(self, scenario_def, test): """Run a CMAP spec test.""" @@ -258,7 +258,6 @@ def run_scenario(self, scenario_def, test): client._topology.open() else: client._get_topology() - self.addCleanup(client.close) self.pool = list(client._topology._servers.values())[0].pool # Map of target names to Thread objects. @@ -315,13 +314,11 @@ def cleanup(): # def test_1_client_connection_pool_options(self): client = self.rs_or_single_client(**self.POOL_OPTIONS) - self.addCleanup(client.close) - pool_opts = get_pool(client).opts + pool_opts = (get_pool(client)).opts self.assertEqual(pool_opts.non_default_options, self.POOL_OPTIONS) def test_2_all_client_pools_have_same_options(self): client = self.rs_or_single_client(**self.POOL_OPTIONS) - self.addCleanup(client.close) client.admin.command("ping") # Discover at least one secondary. 
if client_context.has_secondaries: @@ -337,14 +334,12 @@ def test_3_uri_connection_pool_options(self): opts = "&".join([f"{k}={v}" for k, v in self.POOL_OPTIONS.items()]) uri = f"mongodb://{client_context.pair}/?{opts}" client = self.rs_or_single_client(uri) - self.addCleanup(client.close) - pool_opts = get_pool(client).opts + pool_opts = (get_pool(client)).opts self.assertEqual(pool_opts.non_default_options, self.POOL_OPTIONS) def test_4_subscribe_to_events(self): listener = CMAPListener() client = self.single_client(event_listeners=[listener]) - self.addCleanup(client.close) self.assertEqual(listener.event_count(PoolCreatedEvent), 1) # Creates a new connection. @@ -368,7 +363,6 @@ def test_4_subscribe_to_events(self): def test_5_check_out_fails_connection_error(self): listener = CMAPListener() client = self.single_client(event_listeners=[listener]) - self.addCleanup(client.close) pool = get_pool(client) def mock_connect(*args, **kwargs): @@ -397,7 +391,6 @@ def test_5_check_out_fails_auth_error(self): client = self.single_client_noauth( username="notauser", password="fail", event_listeners=[listener] ) - self.addCleanup(client.close) # Attempt to create a new connection. with self.assertRaisesRegex(OperationFailure, "failed"): diff --git a/test/test_connections_survive_primary_stepdown_spec.py b/test/test_connections_survive_primary_stepdown_spec.py index 1fb08cbed5..8e9a3b8e62 100644 --- a/test/test_connections_survive_primary_stepdown_spec.py +++ b/test/test_connections_survive_primary_stepdown_spec.py @@ -16,19 +16,18 @@ from __future__ import annotations import sys +from test.utils import ensure_all_connected sys.path[0:0] = [""] from test import ( IntegrationTest, client_context, - reset_client_context, unittest, ) from test.helpers import repl_set_step_down -from test.utils import ( +from test.utils_shared import ( CMAPListener, - ensure_all_connected, ) from bson import SON @@ -123,18 +122,12 @@ def run_scenario(self, error_code, retry, pool_status_checker): def test_not_primary_keep_connection_pool(self): self.run_scenario(10107, True, self.verify_pool_not_cleared) - @client_context.require_version_min(4, 0, 0) - @client_context.require_version_max(4, 1, 0, -1) - @client_context.require_test_commands - def test_not_primary_reset_connection_pool(self): - self.run_scenario(10107, False, self.verify_pool_cleared) - - @client_context.require_version_min(4, 0, 0) + @client_context.require_version_min(4, 2, 0) @client_context.require_test_commands def test_shutdown_in_progress(self): self.run_scenario(91, False, self.verify_pool_cleared) - @client_context.require_version_min(4, 0, 0) + @client_context.require_version_min(4, 2, 0) @client_context.require_test_commands def test_interrupted_at_shutdown(self): self.run_scenario(11600, False, self.verify_pool_cleared) diff --git a/test/test_crud_unified.py b/test/test_crud_unified.py index 26f34cba88..1b1abf3600 100644 --- a/test/test_crud_unified.py +++ b/test/test_crud_unified.py @@ -33,7 +33,7 @@ _TEST_PATH = os.path.join(pathlib.Path(__file__).resolve().parent.parent, "crud", "unified") # Generate unified tests. 
-globals().update(generate_test_classes(_TEST_PATH, module=__name__, RUN_ON_SERVERLESS=True)) +globals().update(generate_test_classes(_TEST_PATH, module=__name__)) if __name__ == "__main__": unittest.main() diff --git a/test/test_csot.py b/test/test_csot.py index c075a07d5a..981af1ed03 100644 --- a/test/test_csot.py +++ b/test/test_csot.py @@ -17,30 +17,35 @@ import os import sys +from pathlib import Path sys.path[0:0] = [""] from test import IntegrationTest, client_context, unittest from test.unified_format import generate_test_classes +from test.utils import flaky import pymongo from pymongo import _csot from pymongo.errors import PyMongoError +_IS_SYNC = True + # Location of JSON test specifications. -TEST_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), "csot") +if _IS_SYNC: + TEST_PATH = os.path.join(Path(__file__).resolve().parent, "csot") +else: + TEST_PATH = os.path.join(Path(__file__).resolve().parent.parent, "csot") # Generate unified tests. globals().update(generate_test_classes(TEST_PATH, module=__name__)) class TestCSOT(IntegrationTest): - RUN_ON_SERVERLESS = True RUN_ON_LOAD_BALANCER = True + @flaky(reason="PYTHON-3522") def test_timeout_nested(self): - if os.environ.get("SKIP_CSOT_TESTS", ""): - raise unittest.SkipTest("SKIP_CSOT_TESTS is set, skipping...") coll = self.db.coll self.assertEqual(_csot.get_timeout(), None) self.assertEqual(_csot.get_deadline(), float("inf")) @@ -77,9 +82,8 @@ def test_timeout_nested(self): self.assertEqual(_csot.get_rtt(), 0.0) @client_context.require_change_streams + @flaky(reason="PYTHON-3522") def test_change_stream_can_resume_after_timeouts(self): - if os.environ.get("SKIP_CSOT_TESTS", ""): - raise unittest.SkipTest("SKIP_CSOT_TESTS is set, skipping...") coll = self.db.test coll.insert_one({}) with coll.watch() as stream: diff --git a/test/test_cursor.py b/test/test_cursor.py index 84e431f8cb..219ca396c9 100644 --- a/test/test_cursor.py +++ b/test/test_cursor.py @@ -31,7 +31,8 @@ sys.path[0:0] = [""] from test import IntegrationTest, client_context, unittest -from test.utils import ( +from test.utils import flaky +from test.utils_shared import ( AllowListEventListener, EventListener, OvertCommandListener, @@ -42,6 +43,7 @@ from bson import decode_all from bson.code import Code +from bson.raw_bson import RawBSONDocument from pymongo import ASCENDING, DESCENDING from pymongo.collation import Collation from pymongo.errors import ExecutionTimeout, InvalidOperation, OperationFailure, PyMongoError @@ -49,7 +51,6 @@ from pymongo.read_concern import ReadConcern from pymongo.read_preferences import ReadPreference from pymongo.synchronous.cursor import Cursor, CursorType -from pymongo.synchronous.helpers import next from pymongo.write_concern import WriteConcern _IS_SYNC = True @@ -174,8 +175,8 @@ def test_max_time_ms(self): cursor = coll.find().max_time_ms(999) c2 = cursor.clone() self.assertEqual(999, c2._max_time_ms) - self.assertTrue("$maxTimeMS" in cursor._query_spec()) - self.assertTrue("$maxTimeMS" in c2._query_spec()) + self.assertIn("$maxTimeMS", cursor._query_spec()) + self.assertIn("$maxTimeMS", c2._query_spec()) self.assertTrue(coll.find_one(max_time_ms=1000)) @@ -196,6 +197,21 @@ def test_max_time_ms(self): finally: client.admin.command("configureFailPoint", "maxTimeAlwaysTimeOut", mode="off") + def test_maxtime_ms_message(self): + db = self.db + db.t.insert_one({"x": 1}) + with self.assertRaises(Exception) as error: + db.t.find_one({"$where": delay(2)}, max_time_ms=1) + + self.assertIn("(configured timeouts: 
connectTimeoutMS: 20000.0ms", str(error.exception)) + + client = self.rs_client(document_class=RawBSONDocument) + client.db.t.insert_one({"x": 1}) + with self.assertRaises(Exception) as error: + client.db.t.find_one({"$where": delay(2)}, max_time_ms=1) + + self.assertIn("(configured timeouts: connectTimeoutMS: 20000.0ms", str(error.exception)) + def test_max_await_time_ms(self): db = self.db db.pymongo_test.drop() @@ -236,19 +252,19 @@ def test_max_await_time_ms(self): # Tailable_defaults. coll.find(cursor_type=CursorType.TAILABLE_AWAIT).to_list() # find - self.assertFalse("maxTimeMS" in listener.started_events[0].command) + self.assertNotIn("maxTimeMS", listener.started_events[0].command) # getMore - self.assertFalse("maxTimeMS" in listener.started_events[1].command) + self.assertNotIn("maxTimeMS", listener.started_events[1].command) listener.reset() # Tailable_with max_await_time_ms set. coll.find(cursor_type=CursorType.TAILABLE_AWAIT).max_await_time_ms(99).to_list() # find self.assertEqual("find", listener.started_events[0].command_name) - self.assertFalse("maxTimeMS" in listener.started_events[0].command) + self.assertNotIn("maxTimeMS", listener.started_events[0].command) # getMore self.assertEqual("getMore", listener.started_events[1].command_name) - self.assertTrue("maxTimeMS" in listener.started_events[1].command) + self.assertIn("maxTimeMS", listener.started_events[1].command) self.assertEqual(99, listener.started_events[1].command["maxTimeMS"]) listener.reset() @@ -259,11 +275,11 @@ def test_max_await_time_ms(self): coll.find(cursor_type=CursorType.TAILABLE_AWAIT).max_time_ms(99).to_list() # find self.assertEqual("find", listener.started_events[0].command_name) - self.assertTrue("maxTimeMS" in listener.started_events[0].command) + self.assertIn("maxTimeMS", listener.started_events[0].command) self.assertEqual(99, listener.started_events[0].command["maxTimeMS"]) # getMore self.assertEqual("getMore", listener.started_events[1].command_name) - self.assertFalse("maxTimeMS" in listener.started_events[1].command) + self.assertNotIn("maxTimeMS", listener.started_events[1].command) listener.reset() # Tailable_with both max_time_ms and max_await_time_ms @@ -275,11 +291,11 @@ def test_max_await_time_ms(self): ) # find self.assertEqual("find", listener.started_events[0].command_name) - self.assertTrue("maxTimeMS" in listener.started_events[0].command) + self.assertIn("maxTimeMS", listener.started_events[0].command) self.assertEqual(99, listener.started_events[0].command["maxTimeMS"]) # getMore self.assertEqual("getMore", listener.started_events[1].command_name) - self.assertTrue("maxTimeMS" in listener.started_events[1].command) + self.assertIn("maxTimeMS", listener.started_events[1].command) self.assertEqual(99, listener.started_events[1].command["maxTimeMS"]) listener.reset() @@ -287,31 +303,31 @@ def test_max_await_time_ms(self): coll.find(batch_size=1).max_await_time_ms(99).to_list() # find self.assertEqual("find", listener.started_events[0].command_name) - self.assertFalse("maxTimeMS" in listener.started_events[0].command) + self.assertNotIn("maxTimeMS", listener.started_events[0].command) # getMore self.assertEqual("getMore", listener.started_events[1].command_name) - self.assertFalse("maxTimeMS" in listener.started_events[1].command) + self.assertNotIn("maxTimeMS", listener.started_events[1].command) listener.reset() # Non tailable_await with max_time_ms coll.find(batch_size=1).max_time_ms(99).to_list() # find self.assertEqual("find", listener.started_events[0].command_name) - 
self.assertTrue("maxTimeMS" in listener.started_events[0].command) + self.assertIn("maxTimeMS", listener.started_events[0].command) self.assertEqual(99, listener.started_events[0].command["maxTimeMS"]) # getMore self.assertEqual("getMore", listener.started_events[1].command_name) - self.assertFalse("maxTimeMS" in listener.started_events[1].command) + self.assertNotIn("maxTimeMS", listener.started_events[1].command) # Non tailable_await with both max_time_ms and max_await_time_ms coll.find(batch_size=1).max_time_ms(99).max_await_time_ms(88).to_list() # find self.assertEqual("find", listener.started_events[0].command_name) - self.assertTrue("maxTimeMS" in listener.started_events[0].command) + self.assertIn("maxTimeMS", listener.started_events[0].command) self.assertEqual(99, listener.started_events[0].command["maxTimeMS"]) # getMore self.assertEqual("getMore", listener.started_events[1].command_name) - self.assertFalse("maxTimeMS" in listener.started_events[1].command) + self.assertNotIn("maxTimeMS", listener.started_events[1].command) @client_context.require_test_commands @client_context.require_no_mongos @@ -353,6 +369,29 @@ def test_explain_with_read_concern(self): self.assertEqual(len(started), 1) self.assertNotIn("readConcern", started[0].command) + # https://github.com/mongodb/specifications/blob/master/source/crud/tests/README.md#14-explain-helpers-allow-users-to-specify-maxtimems + def test_explain_csot(self): + # Create a MongoClient with command monitoring enabled (referred to as client). + listener = AllowListEventListener("explain") + client = self.rs_or_single_client(event_listeners=[listener]) + + # Create a collection, referred to as collection, with the namespace explain-test.collection. + # Workaround for SERVER-108463 + names = client["explain-test"].list_collection_names() + if "collection" not in names: + collection = client["explain-test"].create_collection("collection") + else: + collection = client["explain-test"]["collection"] + + # Run an explained find on collection. The find will have the query predicate { name: 'john doe' }. Specify a maxTimeMS value of 2000ms for the explain. + with pymongo.timeout(2.0): + self.assertTrue(collection.find({"name": "john doe"}).explain()) + + # Obtain the command started event for the explain. Confirm that the top-level explain command should has a maxTimeMS value of 2000. 
+ started = listener.started_events + self.assertEqual(len(started), 1) + assert 1500 < started[0].command["maxTimeMS"] <= 2000 + def test_hint(self): db = self.db with self.assertRaises(TypeError): @@ -924,16 +963,19 @@ def test_clone(self): # Shallow copies can so can mutate cursor2 = copy.copy(cursor) cursor2._projection["cursor2"] = False - self.assertTrue(cursor._projection and "cursor2" in cursor._projection) + self.assertIsNotNone(cursor._projection) + self.assertIn("cursor2", cursor._projection.keys()) # Deepcopies and shouldn't mutate cursor3 = copy.deepcopy(cursor) cursor3._projection["cursor3"] = False - self.assertFalse(cursor._projection and "cursor3" in cursor._projection) + self.assertIsNotNone(cursor._projection) + self.assertNotIn("cursor3", cursor._projection.keys()) cursor4 = cursor.clone() cursor4._projection["cursor4"] = False - self.assertFalse(cursor._projection and "cursor4" in cursor._projection) + self.assertIsNotNone(cursor._projection) + self.assertNotIn("cursor4", cursor._projection.keys()) # Test memo when deepcopying queries query = {"hello": "world"} @@ -950,7 +992,7 @@ def test_clone(self): cursor = self.db.test.find().hint([("z", 1), ("a", 1)]) cursor2 = copy.deepcopy(cursor) # Internal types are now dict rather than SON by default - self.assertTrue(isinstance(cursor2._hint, dict)) + self.assertIsInstance(cursor2._hint, dict) self.assertEqual(cursor._hint, cursor2._hint) @client_context.require_sync @@ -1178,15 +1220,6 @@ def test_distinct(self): self.assertEqual(["b", "c"], distinct) - @client_context.require_version_max(4, 1, 0, -1) - def test_max_scan(self): - self.db.drop_collection("test") - self.db.test.insert_many([{} for _ in range(100)]) - - self.assertEqual(100, len(self.db.test.find().to_list())) - self.assertEqual(50, len(self.db.test.find().max_scan(50).to_list())) - self.assertEqual(50, len(self.db.test.find().max_scan(90).max_scan(50).to_list())) - def test_with_statement(self): self.db.drop_collection("test") self.db.test.insert_many([{} for _ in range(100)]) @@ -1403,12 +1436,11 @@ def test_to_list_length(self): docs = c.to_list(3) self.assertEqual(len(docs), 2) + @flaky(reason="PYTHON-3522") def test_to_list_csot_applied(self): - if os.environ.get("SKIP_CSOT_TESTS", ""): - raise unittest.SkipTest("SKIP_CSOT_TESTS is set, skipping...") client = self.single_client(timeoutMS=500, w=1) coll = client.pymongo.test - # Initialize the client with a larger timeout to help make test less flakey + # Initialize the client with a larger timeout to help make test less flaky with pymongo.timeout(10): coll.insert_many([{} for _ in range(5)]) cursor = coll.find({"$where": delay(1)}) @@ -1446,12 +1478,11 @@ def test_command_cursor_to_list_length(self): self.assertEqual(len(result.to_list(1)), 1) @client_context.require_failCommand_blockConnection + @flaky(reason="PYTHON-3522") def test_command_cursor_to_list_csot_applied(self): - if os.environ.get("SKIP_CSOT_TESTS", ""): - raise unittest.SkipTest("SKIP_CSOT_TESTS is set, skipping...") client = self.single_client(timeoutMS=500, w=1) coll = client.pymongo.test - # Initialize the client with a larger timeout to help make test less flakey + # Initialize the client with a larger timeout to help make test less flaky with pymongo.timeout(10): coll.insert_many([{} for _ in range(5)]) fail_command = { @@ -1588,7 +1619,6 @@ def test_get_item(self): def test_collation(self): next(self.db.test.find_raw_batches(collation=Collation("en_US"))) - @client_context.require_no_mmap # MMAPv1 does not support read concern def 
test_read_concern(self): self.db.get_collection("test", write_concern=WriteConcern(w="majority")).insert_one({}) c = self.db.get_collection("test", read_concern=ReadConcern("majority")) @@ -1801,6 +1831,7 @@ def test_monitoring(self): @client_context.require_version_min(5, 0, -1) @client_context.require_no_mongos + @client_context.require_sync def test_exhaust_cursor_db_set(self): listener = OvertCommandListener() client = self.rs_or_single_client(event_listeners=[listener]) @@ -1810,7 +1841,7 @@ def test_exhaust_cursor_db_set(self): listener.reset() - result = c.find({}, cursor_type=pymongo.CursorType.EXHAUST, batch_size=1).to_list() + result = list(c.find({}, cursor_type=pymongo.CursorType.EXHAUST, batch_size=1)) self.assertEqual(len(result), 3) diff --git a/test/test_custom_types.py b/test/test_custom_types.py index 6771ea25f9..02f3127165 100644 --- a/test/test_custom_types.py +++ b/test/test_custom_types.py @@ -23,10 +23,12 @@ from random import random from typing import Any, Tuple, Type, no_type_check +from bson.decimal128 import DecimalDecoder, DecimalEncoder +from gridfs.synchronous.grid_file import GridIn, GridOut + sys.path[0:0] = [""] -from test import client_context, unittest -from test.test_client import IntegrationTest +from test import IntegrationTest, client_context, unittest from bson import ( _BUILT_IN_TYPES, @@ -50,35 +52,14 @@ from bson.errors import InvalidDocument from bson.int64 import Int64 from bson.raw_bson import RawBSONDocument -from gridfs import GridIn, GridOut from pymongo.errors import DuplicateKeyError from pymongo.message import _CursorAddress from pymongo.synchronous.collection import ReturnDocument +_IS_SYNC = True -class DecimalEncoder(TypeEncoder): - @property - def python_type(self): - return Decimal - - def transform_python(self, value): - return Decimal128(value) - - -class DecimalDecoder(TypeDecoder): - @property - def bson_type(self): - return Decimal128 - - def transform_bson(self, value): - return value.to_decimal() - - -class DecimalCodec(DecimalDecoder, DecimalEncoder): - pass - -DECIMAL_CODECOPTS = CodecOptions(type_registry=TypeRegistry([DecimalCodec()])) +DECIMAL_CODECOPTS = CodecOptions(type_registry=TypeRegistry([DecimalEncoder(), DecimalDecoder()])) class UndecipherableInt64Type: @@ -576,6 +557,15 @@ def test_initialize_fail(self): with self.assertRaisesRegex(TypeError, err_msg): TypeRegistry(fallback_encoder="hello") # type: ignore[arg-type] + def test_type_registry_codecs(self): + codec_instances = [codec() for codec in self.codecs] + type_registry = TypeRegistry(codec_instances) + self.assertEqual(type_registry.codecs, codec_instances) + + def test_type_registry_fallback(self): + type_registry = TypeRegistry(fallback_encoder=self.fallback_encoder) + self.assertEqual(type_registry.fallback_encoder, self.fallback_encoder) + def test_type_registry_repr(self): codec_instances = [codec() for codec in self.codecs] type_registry = TypeRegistry(codec_instances) @@ -707,7 +697,7 @@ def test_aggregate_w_custom_type_decoder(self): ] result = test.aggregate(pipeline) - res = list(result)[0] + res = (result.to_list())[0] self.assertEqual(res["_id"], "complete") self.assertIsInstance(res["total_qty"], UndecipherableInt64Type) self.assertEqual(res["total_qty"].value, 20) @@ -774,13 +764,14 @@ def test_grid_out_custom_opts(self): one.close() two = GridOut(db.fs, 5) + two.open() self.assertEqual("my_file", two.name) self.assertEqual("my_file", two.filename) self.assertEqual(5, two._id) self.assertEqual(11, two.length) self.assertEqual(1000, 
two.chunk_size) - self.assertTrue(isinstance(two.upload_date, datetime.datetime)) + self.assertIsInstance(two.upload_date, datetime.datetime) self.assertEqual({"foo": "red", "bar": "blue"}, two.metadata) self.assertEqual(3, two.bar) @@ -936,7 +927,7 @@ def create_targets(self, *args, **kwargs): class TestDatabaseChangeStreamsWCustomTypes(IntegrationTest, ChangeStreamsWCustomTypesTestMixin): - @client_context.require_version_min(4, 0, 0) + @client_context.require_version_min(4, 2, 0) @client_context.require_change_streams def setUp(self): super().setUp() @@ -954,7 +945,7 @@ def create_targets(self, *args, **kwargs): class TestClusterChangeStreamsWCustomTypes(IntegrationTest, ChangeStreamsWCustomTypesTestMixin): - @client_context.require_version_min(4, 0, 0) + @client_context.require_version_min(4, 2, 0) @client_context.require_change_streams def setUp(self): super().setUp() @@ -970,7 +961,6 @@ def create_targets(self, *args, **kwargs): kwargs["type_registry"] = codec_options.type_registry kwargs["document_class"] = codec_options.document_class self.watched_target = self.rs_client(*args, **kwargs) - self.addCleanup(self.watched_target.close) self.input_target = self.watched_target[self.db.name].test # Insert a record to ensure db, coll are created. self.input_target.insert_one({"data": "dummy"}) diff --git a/test/test_data_lake.py b/test/test_data_lake.py deleted file mode 100644 index a374db550e..0000000000 --- a/test/test_data_lake.py +++ /dev/null @@ -1,109 +0,0 @@ -# Copyright 2020-present MongoDB, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Test Atlas Data Lake.""" -from __future__ import annotations - -import os -import sys -from pathlib import Path - -import pytest - -sys.path[0:0] = [""] - -from test import IntegrationTest, client_context, unittest -from test.unified_format import generate_test_classes -from test.utils import ( - OvertCommandListener, -) - -pytestmark = pytest.mark.data_lake - - -# Location of JSON test specifications. -_TEST_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), "data_lake") - - -class TestDataLakeMustConnect(unittest.TestCase): - def test_connected_to_data_lake(self): - data_lake = os.environ.get("TEST_DATA_LAKE") - if not data_lake: - self.skipTest("TEST_DATA_LAKE is not set") - - self.assertTrue( - client_context.is_data_lake and client_context.connected, - "client context must be connected to data lake when DATA_LAKE is set. Failed attempts:\n{}".format( - client_context.connection_attempt_info() - ), - ) - - -class TestDataLakeProse(IntegrationTest): - # Default test database and collection names. 
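The prose test being removed here correlated the cursor id and namespace from the find reply with the subsequent killCursors command. For reference, the core of that correlation in standalone form (assumes a running server and a collection holding more than two documents, so the cursor stays open):

```python
from pymongo import MongoClient, monitoring


class CursorListener(monitoring.CommandListener):
    """Collect events so the find reply and killCursors can be matched."""

    def __init__(self):
        self.started_events = []
        self.succeeded_events = []

    def started(self, event):
        self.started_events.append(event)

    def succeeded(self, event):
        self.succeeded_events.append(event)

    def failed(self, event):
        pass


listener = CursorListener()
client = MongoClient(event_listeners=[listener])
cursor = client.test.driverdata.find({}, batch_size=2)
next(cursor)

# The find reply carries the server-side cursor id and namespace...
reply = listener.succeeded_events[-1].reply
cursor_id = reply["cursor"]["id"]
cursor_ns = reply["cursor"]["ns"]

# ...and closing the cursor must echo both back in killCursors.
cursor.close()
kill_cmd = listener.started_events[-1].command
assert cursor_id in kill_cmd["cursors"]
assert cursor_ns == ".".join([kill_cmd["$db"], kill_cmd["killCursors"]])
```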
- TEST_DB = "test" - TEST_COLLECTION = "driverdata" - - @classmethod - @client_context.require_data_lake - def setUpClass(cls): - super().setUpClass() - - # Test killCursors - def test_1(self): - listener = OvertCommandListener() - client = self.rs_or_single_client(event_listeners=[listener]) - cursor = client[self.TEST_DB][self.TEST_COLLECTION].find({}, batch_size=2) - next(cursor) - - # find command assertions - find_cmd = listener.succeeded_events[-1] - self.assertEqual(find_cmd.command_name, "find") - cursor_id = find_cmd.reply["cursor"]["id"] - cursor_ns = find_cmd.reply["cursor"]["ns"] - - # killCursors command assertions - cursor.close() - started = listener.started_events[-1] - self.assertEqual(started.command_name, "killCursors") - succeeded = listener.succeeded_events[-1] - self.assertEqual(succeeded.command_name, "killCursors") - - self.assertIn(cursor_id, started.command["cursors"]) - target_ns = ".".join([started.command["$db"], started.command["killCursors"]]) - self.assertEqual(cursor_ns, target_ns) - - self.assertIn(cursor_id, succeeded.reply["cursorsKilled"]) - - # Test no auth - def test_2(self): - client = self.rs_client_noauth() - client.admin.command("ping") - - # Test with auth - def test_3(self): - for mechanism in ["SCRAM-SHA-1", "SCRAM-SHA-256"]: - client = self.rs_or_single_client(authMechanism=mechanism) - client[self.TEST_DB][self.TEST_COLLECTION].find_one() - - -# Location of JSON test specifications. -TEST_PATH = Path(__file__).parent / "data_lake/unified" - -# Generate unified tests. -globals().update(generate_test_classes(TEST_PATH, module=__name__)) - - -if __name__ == "__main__": - unittest.main() diff --git a/test/test_database.py b/test/test_database.py index 5e854c941d..ebbf6e55c6 100644 --- a/test/test_database.py +++ b/test/test_database.py @@ -25,7 +25,7 @@ from test import IntegrationTest, client_context, unittest from test.test_custom_types import DECIMAL_CODECOPTS -from test.utils import ( +from test.utils_shared import ( IMPOSSIBLE_WRITE_CONCERN, OvertCommandListener, wait_until, @@ -51,7 +51,6 @@ from pymongo.synchronous import auth from pymongo.synchronous.collection import Collection from pymongo.synchronous.database import Database -from pymongo.synchronous.helpers import next from pymongo.synchronous.mongo_client import MongoClient from pymongo.write_concern import WriteConcern @@ -90,7 +89,7 @@ def test_get_collection(self): def test_getattr(self): db = self.client.pymongo_test - self.assertTrue(isinstance(db["_does_not_exist"], Collection)) + self.assertIsInstance(db["_does_not_exist"], Collection) with self.assertRaises(AttributeError) as context: db._does_not_exist @@ -102,10 +101,7 @@ def test_getattr(self): def test_iteration(self): db = self.client.pymongo_test - if "PyPy" in sys.version and sys.version_info < (3, 8, 15): - msg = "'NoneType' object is not callable" - else: - msg = "'Database' object is not iterable" + msg = "'Database' object is not iterable" # Iteration fails with self.assertRaisesRegex(TypeError, msg): for _ in db: # type: ignore[misc] # error: "None" not callable [misc] @@ -165,13 +161,13 @@ def test_create_collection(self): db.create_collection("coll..ection") # type: ignore[arg-type] test = db.create_collection("test") - self.assertTrue("test" in db.list_collection_names()) + self.assertIn("test", db.list_collection_names()) test.insert_one({"hello": "world"}) self.assertEqual((db.test.find_one())["hello"], "world") db.drop_collection("test.foo") db.create_collection("test.foo") - self.assertTrue("test.foo" in 
db.list_collection_names()) + self.assertIn("test.foo", db.list_collection_names()) with self.assertRaises(CollectionInvalid): db.create_collection("test.foo") @@ -181,17 +177,17 @@ def test_list_collection_names(self): db.test.mike.insert_one({"dummy": "object"}) colls = db.list_collection_names() - self.assertTrue("test" in colls) - self.assertTrue("test.mike" in colls) + self.assertIn("test", colls) + self.assertIn("test.mike", colls) for coll in colls: - self.assertTrue("$" not in coll) + self.assertNotIn("$", coll) db.systemcoll.test.insert_one({}) no_system_collections = db.list_collection_names( filter={"name": {"$regex": r"^(?!system\.)"}} ) for coll in no_system_collections: - self.assertTrue(not coll.startswith("system.")) + self.assertFalse(coll.startswith("system.")) self.assertIn("systemcoll.test", no_system_collections) # Force more than one batch. @@ -241,7 +237,7 @@ def test_check_exists(self): listener.reset() db.drop_collection("unique") db.create_collection("unique", check_exists=False) - self.assertTrue(len(listener.started_events) > 0) + self.assertGreater(len(listener.started_events), 0) self.assertNotIn("listCollections", listener.started_command_names()) def test_list_collections(self): @@ -254,12 +250,12 @@ def test_list_collections(self): colls = [result["name"] for result in results] # All the collections present. - self.assertTrue("test" in colls) - self.assertTrue("test.mike" in colls) + self.assertIn("test", colls) + self.assertIn("test.mike", colls) # No collection containing a '$'. for coll in colls: - self.assertTrue("$" not in coll) + self.assertNotIn("$", coll) # Duplicate check. coll_cnt: dict = {} @@ -267,19 +263,13 @@ def test_list_collections(self): try: # Found duplicate. coll_cnt[coll] += 1 - self.assertTrue(False) + self.fail("Found duplicate") except KeyError: coll_cnt[coll] = 1 coll_cnt: dict = {} - # Checking if is there any collection which don't exists. - if ( - len(set(colls) - {"test", "test.mike"}) == 0 - or len(set(colls) - {"test", "test.mike", "system.indexes"}) == 0 - ): - self.assertTrue(True) - else: - self.assertTrue(False) + # Check if there are any collections which don't exist. + self.assertLessEqual(set(colls), {"test", "test.mike", "system.indexes"}) colls = (db.list_collections(filter={"name": {"$regex": "^test$"}})).to_list() self.assertEqual(1, len(colls)) @@ -294,12 +284,12 @@ def test_list_collections(self): colls = [result["name"] for result in results] # Checking only capped collections are present - self.assertTrue("test" in colls) - self.assertFalse("test.mike" in colls) + self.assertIn("test", colls) + self.assertNotIn("test.mike", colls) # No collection containing a '$'. for coll in colls: - self.assertTrue("$" not in coll) + self.assertNotIn("$", coll) # Duplicate check. coll_cnt = {} @@ -307,16 +297,13 @@ def test_list_collections(self): try: # Found duplicate. coll_cnt[coll] += 1 - self.assertTrue(False) + self.fail("Found duplicate") except KeyError: coll_cnt[coll] = 1 coll_cnt = {} - # Checking if is there any collection which don't exists. - if len(set(colls) - {"test"}) == 0 or len(set(colls) - {"test", "system.indexes"}) == 0: - self.assertTrue(True) - else: - self.assertTrue(False) + # Check if there are any collections which don't exist. 
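The subset assertion introduced here relies on the fact that unittest's assertLessEqual(a, b) simply asserts a <= b, and on sets <= is the subset operator, which replaces the old branching on set differences. A quick standalone check of the semantics:

```python
# assertLessEqual on sets checks the subset relation.
colls = {"test", "system.indexes"}
assert colls <= {"test", "system.indexes"}  # subset holds: passes
assert not {"test", "bogus"} <= {"test", "system.indexes"}  # stray name fails
```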
+ self.assertLessEqual(set(colls), {"test", "system.indexes"}) self.client.drop_database("pymongo_test") @@ -339,24 +326,24 @@ def test_drop_collection(self): db.drop_collection(None) # type: ignore[arg-type] db.test.insert_one({"dummy": "object"}) - self.assertTrue("test" in db.list_collection_names()) + self.assertIn("test", db.list_collection_names()) db.drop_collection("test") - self.assertFalse("test" in db.list_collection_names()) + self.assertNotIn("test", db.list_collection_names()) db.test.insert_one({"dummy": "object"}) - self.assertTrue("test" in db.list_collection_names()) + self.assertIn("test", db.list_collection_names()) db.drop_collection("test") - self.assertFalse("test" in db.list_collection_names()) + self.assertNotIn("test", db.list_collection_names()) db.test.insert_one({"dummy": "object"}) - self.assertTrue("test" in db.list_collection_names()) + self.assertIn("test", db.list_collection_names()) db.drop_collection(db.test) - self.assertFalse("test" in db.list_collection_names()) + self.assertNotIn("test", db.list_collection_names()) db.test.insert_one({"dummy": "object"}) - self.assertTrue("test" in db.list_collection_names()) + self.assertIn("test", db.list_collection_names()) db.test.drop() - self.assertFalse("test" in db.list_collection_names()) + self.assertNotIn("test", db.list_collection_names()) db.test.drop() db.drop_collection(db.test.doesnotexist) @@ -426,7 +413,22 @@ def test_command_with_regex(self): result = db.command("aggregate", "test", pipeline=[], cursor={}) for doc in result["cursor"]["firstBatch"]: - self.assertTrue(isinstance(doc["r"], Regex)) + self.assertIsInstance(doc["r"], Regex) + + def test_command_bulkWrite(self): + # Ensure bulk write commands can be run directly via db.command(). + if client_context.version.at_least(8, 0): + self.client.admin.command( + { + "bulkWrite": 1, + "nsInfo": [{"ns": self.db.test.full_name}], + "ops": [{"insert": 0, "document": {}}], + } + ) + self.db.command({"insert": "test", "documents": [{}]}) + self.db.command({"update": "test", "updates": [{"q": {}, "u": {"$set": {"x": 1}}}]}) + self.db.command({"delete": "test", "deletes": [{"q": {}, "limit": 1}]}) + self.db.test.drop() def test_cursor_command(self): db = self.client.pymongo_test @@ -455,7 +457,7 @@ def test_password_digest(self): with self.assertRaises(TypeError): auth._password_digest(None) # type: ignore[arg-type, call-arg] - self.assertTrue(isinstance(auth._password_digest("mike", "password"), str)) + self.assertIsInstance(auth._password_digest("mike", "password"), str) self.assertEqual( auth._password_digest("mike", "password"), "cd7e45b3b2767dc2fa9b6b548457ed00" ) @@ -526,7 +528,7 @@ def test_insert_find_one(self): a_doc = SON({"hello": "world"}) a_key = (db.test.insert_one(a_doc)).inserted_id - self.assertTrue(isinstance(a_doc["_id"], ObjectId)) + self.assertIsInstance(a_doc["_id"], ObjectId) self.assertEqual(a_doc["_id"], a_key) self.assertEqual(a_doc, db.test.find_one({"_id": a_doc["_id"]})) self.assertEqual(a_doc, db.test.find_one(a_key)) diff --git a/test/test_default_exports.py b/test/test_default_exports.py index d9301d2223..adc3882a36 100644 --- a/test/test_default_exports.py +++ b/test/test_default_exports.py @@ -209,6 +209,19 @@ def test_pymongo_imports(self): ) from pymongo.write_concern import WriteConcern, validate_boolean + def test_pymongo_submodule_attributes(self): + import pymongo + + self.assertTrue(hasattr(pymongo, "uri_parser")) + self.assertTrue(pymongo.uri_parser) + self.assertTrue(pymongo.uri_parser.parse_uri) + 
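These attribute checks pass as long as pymongo still exposes the legacy submodules at the package top level. One plausible mechanism for that after an internal reorganization (an illustrative assumption, not necessarily PyMongo's actual implementation) is a PEP 562 module-level __getattr__:

```python
# Hypothetical pymongo/__init__.py excerpt: lazily resolve legacy
# submodule attributes on first access (PEP 562).
import importlib

_LAZY_SUBMODULES = {"uri_parser", "change_stream", "collection", "cursor"}


def __getattr__(name):
    if name in _LAZY_SUBMODULES:
        # Assumed mapping: legacy names now live under pymongo.synchronous.
        module = importlib.import_module(f"pymongo.synchronous.{name}")
        globals()[name] = module  # cache so later lookups skip __getattr__
        return module
    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
```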
self.assertTrue(pymongo.change_stream) + self.assertTrue(pymongo.client_session) + self.assertTrue(pymongo.collection) + self.assertTrue(pymongo.cursor) + self.assertTrue(pymongo.command_cursor) + self.assertTrue(pymongo.database) + def test_gridfs_imports(self): import gridfs from gridfs.errors import CorruptGridFile, FileExists, GridFSError, NoFile diff --git a/test/test_discovery_and_monitoring.py b/test/test_discovery_and_monitoring.py index ce7a52f1a0..67a82996bd 100644 --- a/test/test_discovery_and_monitoring.py +++ b/test/test_discovery_and_monitoring.py @@ -15,30 +15,49 @@ """Test the topology module.""" from __future__ import annotations +import asyncio import os import socketserver import sys import threading +import time +from asyncio import StreamReader, StreamWriter +from pathlib import Path +from test.helpers import ConcurrentRunner +from test.utils import flaky + +from pymongo.operations import _Op +from pymongo.server_selectors import writable_server_selector +from pymongo.synchronous.pool import Connection sys.path[0:0] = [""] -from test import IntegrationTest, PyMongoTestCase, unittest +from test import ( + IntegrationTest, + PyMongoTestCase, + UnitTest, + client_context, + unittest, +) from test.pymongo_mocks import DummyMonitor from test.unified_format import generate_test_classes from test.utils import ( + get_pool, +) +from test.utils_shared import ( CMAPListener, HeartbeatEventListener, HeartbeatEventsListListener, assertion_context, - client_context, - get_pool, + barrier_wait, + create_barrier, server_name_to_type, wait_until, ) from unittest.mock import patch from bson import Timestamp, json_util -from pymongo import MongoClient, common, monitoring +from pymongo import common, monitoring from pymongo.errors import ( AutoReconnect, ConfigurationError, @@ -52,11 +71,19 @@ from pymongo.server_description import SERVER_TYPE, ServerDescription from pymongo.synchronous.settings import TopologySettings from pymongo.synchronous.topology import Topology, _ErrorContext +from pymongo.synchronous.uri_parser import parse_uri from pymongo.topology_description import TOPOLOGY_TYPE -from pymongo.uri_parser import parse_uri + +_IS_SYNC = True # Location of JSON test specifications. 
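Among the new imports, ConcurrentRunner abstracts over threads and asyncio tasks so the same test body serves both the sync suite and its auto-generated async twin. A plausible thread-backed shape, assuming it mirrors threading.Thread's API (the real helper lives in test/helpers.py and may differ):

```python
import threading


class ConcurrentRunner:
    """Minimal thread-backed stand-in exposing the start()/join() API the
    tests use; the async variant would wrap an asyncio.Task instead."""

    def __init__(self, target, args=()):
        self._thread = threading.Thread(target=target, args=args, daemon=True)

    def start(self):
        self._thread.start()

    def join(self, timeout=None):
        self._thread.join(timeout)
```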
-SDAM_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), "discovery_and_monitoring") +if _IS_SYNC: + SDAM_PATH = os.path.join(Path(__file__).resolve().parent, "discovery_and_monitoring") +else: + SDAM_PATH = os.path.join( + Path(__file__).resolve().parent.parent, + "discovery_and_monitoring", + ) def create_mock_topology(uri, monitor_class=DummyMonitor): @@ -64,8 +91,8 @@ def create_mock_topology(uri, monitor_class=DummyMonitor): replica_set_name = None direct_connection = None load_balanced = None - if "replicaset" in parsed_uri["options"]: - replica_set_name = parsed_uri["options"]["replicaset"] + if "replicaSet" in parsed_uri["options"]: + replica_set_name = parsed_uri["options"]["replicaSet"] if "directConnection" in parsed_uri["options"]: direct_connection = parsed_uri["options"]["directConnection"] if "loadBalanced" in parsed_uri["options"]: @@ -128,7 +155,7 @@ def get_type(topology, hostname): return description.server_type -class TestAllScenarios(unittest.TestCase): +class TestAllScenarios(UnitTest): pass @@ -166,6 +193,9 @@ def check_outcome(self, topology, outcome): server_type_name(expected_server_type), server_type_name(actual_server_description.server_type), ) + expected_error = expected_server.get("error") + if expected_error: + self.assertIn(expected_error, str(actual_server_description.error)) self.assertEqual(expected_server.get("setName"), actual_server_description.replica_set_name) @@ -240,11 +270,11 @@ def create_tests(): create_tests() -class TestClusterTimeComparison(unittest.TestCase): +class TestClusterTimeComparison(PyMongoTestCase): def test_cluster_time_comparison(self): t = create_mock_topology("mongodb://host") - def send_cluster_time(time, inc, should_update): + def send_cluster_time(time, inc): old = t.max_cluster_time() new = {"clusterTime": Timestamp(time, inc)} got_hello( @@ -259,34 +289,33 @@ def send_cluster_time(time, inc, should_update): ) actual = t.max_cluster_time() - if should_update: - self.assertEqual(actual, new) - else: - self.assertEqual(actual, old) + # We never update $clusterTime from monitoring connections. + self.assertEqual(actual, old) - send_cluster_time(0, 1, True) - send_cluster_time(2, 2, True) - send_cluster_time(2, 1, False) - send_cluster_time(1, 3, False) - send_cluster_time(2, 3, True) + send_cluster_time(0, 1) + send_cluster_time(2, 2) + send_cluster_time(2, 1) + send_cluster_time(1, 3) + send_cluster_time(2, 3) class TestIgnoreStaleErrors(IntegrationTest): def test_ignore_stale_connection_errors(self): - N_THREADS = 5 - barrier = threading.Barrier(N_THREADS, timeout=30) - client = self.rs_or_single_client(minPoolSize=N_THREADS) - self.addCleanup(client.close) + if not _IS_SYNC and sys.version_info < (3, 11): + self.skipTest("Test requires asyncio.Barrier (added in Python 3.11)") + N_TASKS = 5 + barrier = create_barrier(N_TASKS) + client = self.rs_or_single_client(minPoolSize=N_TASKS) # Wait for initial discovery. client.admin.command("ping") pool = get_pool(client) starting_generation = pool.gen.get_overall() - wait_until(lambda: len(pool.conns) == N_THREADS, "created conns") + wait_until(lambda: len(pool.conns) == N_TASKS, "created conns") def mock_command(*args, **kwargs): - # Synchronize all threads to ensure they use the same generation. - barrier.wait() + # Synchronize all tasks to ensure they use the same generation. 
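The create_barrier/barrier_wait pair used below papers over the difference between threading.Barrier and asyncio.Barrier, which is also why the test skips on async runs before Python 3.11. A sketch of the sync side, with the async handling noted as an assumption (the real shims live in test/utils_shared.py):

```python
import threading


def create_barrier(n_tasks):
    # Sync path: threading.Barrier. The async path would return
    # asyncio.Barrier(n_tasks), which only exists on Python 3.11+.
    return threading.Barrier(n_tasks)


def barrier_wait(barrier, timeout=None):
    # threading.Barrier.wait accepts a timeout directly; an asyncio
    # barrier would need asyncio.wait_for(barrier.wait(), timeout).
    barrier.wait(timeout=timeout)
```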
+ barrier_wait(barrier, timeout=30)
raise AutoReconnect("mock Connection.command error")
for conn in pool.conns:
@@ -298,12 +327,12 @@ def insert_command(i):
except AutoReconnect:
pass
- threads = []
- for i in range(N_THREADS):
- threads.append(threading.Thread(target=insert_command, args=(i,)))
- for t in threads:
+ tasks = []
+ for i in range(N_TASKS):
+ tasks.append(ConcurrentRunner(target=insert_command, args=(i,)))
+ for t in tasks:
t.start()
- for t in threads:
+ for t in tasks:
t.join()
# Expect a single pool reset for the network error
@@ -322,10 +351,9 @@ class TestPoolManagement(IntegrationTest):
def test_pool_unpause(self):
# This test implements the prose test "Connection Pool Management"
listener = CMAPHeartbeatListener()
- client = self.single_client(
+ _ = self.single_client(
appName="SDAMPoolManagementTest", heartbeatFrequencyMS=500, event_listeners=[listener]
)
- self.addCleanup(client.close)
# Assert that ConnectionPoolReadyEvent occurs after the first
# ServerHeartbeatSucceededEvent.
listener.wait_for_event(monitoring.PoolReadyEvent, 1)
@@ -348,16 +376,81 @@ def test_pool_unpause(self):
listener.wait_for_event(monitoring.ServerHeartbeatSucceededEvent, 1)
listener.wait_for_event(monitoring.PoolReadyEvent, 1)
+ @client_context.require_failCommand_appName
+ @client_context.require_test_commands
+ @client_context.require_async
+ @flaky(reason="PYTHON-5428")
+ def test_connection_close_does_not_block_other_operations(self):
+ listener = CMAPHeartbeatListener()
+ client = self.single_client(
+ appName="SDAMConnectionCloseTest",
+ event_listeners=[listener],
+ heartbeatFrequencyMS=500,
+ minPoolSize=10,
+ )
+ server = (client._get_topology()).select_server(writable_server_selector, _Op.TEST)
+ wait_until(
+ lambda: len(server._pool.conns) == 10,
+ "pool initialized with 10 connections",
+ )
+
+ client.db.test.insert_one({"x": 1})
+ close_delay = 0.1
+ latencies = []
+ should_exit = []
+
+ def run_task():
+ while True:
+ start_time = time.monotonic()
+ client.db.test.find_one({})
+ elapsed = time.monotonic() - start_time
+ latencies.append(elapsed)
+ if should_exit:
+ break
+ time.sleep(0.001)
+
+ task = ConcurrentRunner(target=run_task)
+ task.start()
+ original_close = Connection.close_conn
+ try:
+ # Artificially delay the close operation to simulate a slow close
+ def mock_close(self, reason):
+ time.sleep(close_delay)
+ original_close(self, reason)
+
+ Connection.close_conn = mock_close
+
+ fail_hello = {
+ "mode": {"times": 4},
+ "data": {
+ "failCommands": [HelloCompat.LEGACY_CMD, "hello"],
+ "errorCode": 91,
+ "appName": "SDAMConnectionCloseTest",
+ },
+ }
+ with self.fail_point(fail_hello):
+ # Wait for server heartbeat to fail
+ listener.wait_for_event(monitoring.ServerHeartbeatFailedEvent, 1)
+ # Wait until all idle connections are closed to simulate real-world conditions
+ listener.wait_for_event(monitoring.ConnectionClosedEvent, 10)
+ # Wait for one more find to complete after the pool has been reset, then shut down the task
+ n = len(latencies)
+ wait_until(lambda: len(latencies) >= n + 1, "run one more find")
+ should_exit.append(True)
+ task.join()
+ # No operation latency should significantly exceed close_delay
+ self.assertLessEqual(max(latencies), close_delay * 5.0)
+ finally:
+ Connection.close_conn = original_close
+
+
class TestServerMonitoringMode(IntegrationTest):
- @client_context.require_no_serverless
@client_context.require_no_load_balancer
def setUp(self):
super().setUp()
def test_rtt_connection_is_enabled_stream(self):
client =
self.rs_or_single_client(serverMonitoringMode="stream") - self.addCleanup(client.close) client.admin.command("ping") def predicate(): @@ -366,23 +459,31 @@ def predicate(): if not monitor._stream: return False if client_context.version >= (4, 4): - if monitor._rtt_monitor._executor._thread is None: - return False + if _IS_SYNC: + if monitor._rtt_monitor._executor._thread is None: + return False + else: + if monitor._rtt_monitor._executor._task is None: + return False else: - if monitor._rtt_monitor._executor._thread is not None: - return False + if _IS_SYNC: + if monitor._rtt_monitor._executor._thread is not None: + return False + else: + if monitor._rtt_monitor._executor._task is not None: + return False return True wait_until(predicate, "find all RTT monitors") def test_rtt_connection_is_disabled_poll(self): client = self.rs_or_single_client(serverMonitoringMode="poll") - self.addCleanup(client.close) + self.assert_rtt_connection_is_disabled(client) def test_rtt_connection_is_disabled_auto(self): envs = [ - {"AWS_EXECUTION_ENV": "AWS_Lambda_python3.9"}, + {"AWS_EXECUTION_ENV": "AWS_Lambda_python3.10"}, {"FUNCTIONS_WORKER_RUNTIME": "python"}, {"K_SERVICE": "gcpservicename"}, {"FUNCTION_NAME": "gcpfunctionname"}, @@ -391,7 +492,6 @@ def test_rtt_connection_is_disabled_auto(self): for env in envs: with patch.dict("os.environ", env): client = self.rs_or_single_client(serverMonitoringMode="auto") - self.addCleanup(client.close) self.assert_rtt_connection_is_disabled(client) def assert_rtt_connection_is_disabled(self, client): @@ -399,7 +499,10 @@ def assert_rtt_connection_is_disabled(self, client): for _, server in client._topology._servers.items(): monitor = server._monitor self.assertFalse(monitor._stream) - self.assertIsNone(monitor._rtt_monitor._executor._thread) + if _IS_SYNC: + self.assertIsNone(monitor._rtt_monitor._executor._thread) + else: + self.assertIsNone(monitor._rtt_monitor._executor._task) class MockTCPHandler(socketserver.BaseRequestHandler): @@ -422,16 +525,46 @@ class TestHeartbeatStartOrdering(PyMongoTestCase): def test_heartbeat_start_ordering(self): events = [] listener = HeartbeatEventsListListener(events) - server = TCPServer(("localhost", 9999), MockTCPHandler) - server.events = events - server_thread = threading.Thread(target=server.handle_request_and_shutdown) - server_thread.start() - _c = self.simple_client( - "mongodb://localhost:9999", serverSelectionTimeoutMS=500, event_listeners=(listener,) - ) - server_thread.join() - listener.wait_for_event(ServerHeartbeatStartedEvent, 1) - listener.wait_for_event(ServerHeartbeatFailedEvent, 1) + + if _IS_SYNC: + server = TCPServer(("localhost", 9999), MockTCPHandler) + server.events = events + server_thread = ConcurrentRunner(target=server.handle_request_and_shutdown) + server_thread.start() + _c = self.simple_client( + "mongodb://localhost:9999", + serverSelectionTimeoutMS=500, + event_listeners=(listener,), + ) + server_thread.join() + listener.wait_for_event(ServerHeartbeatStartedEvent, 1) + listener.wait_for_event(ServerHeartbeatFailedEvent, 1) + + else: + + def handle_client(reader: StreamReader, writer: StreamWriter): + events.append("client connected") + if (reader.read(1024)).strip(): + events.append("client hello received") + writer.close() + writer.wait_closed() + + server = asyncio.start_server(handle_client, "localhost", 9999) + server.events = events + server.start_serving() + _c = self.simple_client( + "mongodb://localhost:9999", + serverSelectionTimeoutMS=500, + event_listeners=(listener,), + ) + 
_c._connect() + + listener.wait_for_event(ServerHeartbeatStartedEvent, 1) + listener.wait_for_event(ServerHeartbeatFailedEvent, 1) + + server.close() + server.wait_closed() + _c.close() self.assertEqual( events, diff --git a/test/test_dns.py b/test/test_dns.py index f2185efb1b..8f88562e3f 100644 --- a/test/test_dns.py +++ b/test/test_dns.py @@ -18,22 +18,37 @@ import glob import json import os +import pathlib import sys sys.path[0:0] = [""] -from test import IntegrationTest, PyMongoTestCase, client_context, unittest -from test.utils import wait_until +from test import ( + IntegrationTest, + PyMongoTestCase, + client_context, + unittest, +) +from test.utils_shared import wait_until +from unittest.mock import MagicMock, patch from pymongo.common import validate_read_preference_tags from pymongo.errors import ConfigurationError -from pymongo.uri_parser import parse_uri, split_hosts +from pymongo.synchronous.uri_parser import parse_uri +from pymongo.uri_parser_shared import split_hosts + +_IS_SYNC = True class TestDNSRepl(PyMongoTestCase): - TEST_PATH = os.path.join( - os.path.dirname(os.path.realpath(__file__)), "srv_seedlist", "replica-set" - ) + if _IS_SYNC: + TEST_PATH = os.path.join( + pathlib.Path(__file__).resolve().parent, "srv_seedlist", "replica-set" + ) + else: + TEST_PATH = os.path.join( + pathlib.Path(__file__).resolve().parent.parent, "srv_seedlist", "replica-set" + ) load_balanced = False @client_context.require_replica_set @@ -42,9 +57,14 @@ def setUp(self): class TestDNSLoadBalanced(PyMongoTestCase): - TEST_PATH = os.path.join( - os.path.dirname(os.path.realpath(__file__)), "srv_seedlist", "load-balanced" - ) + if _IS_SYNC: + TEST_PATH = os.path.join( + pathlib.Path(__file__).resolve().parent, "srv_seedlist", "load-balanced" + ) + else: + TEST_PATH = os.path.join( + pathlib.Path(__file__).resolve().parent.parent, "srv_seedlist", "load-balanced" + ) load_balanced = True @client_context.require_load_balancer @@ -53,7 +73,12 @@ def setUp(self): class TestDNSSharded(PyMongoTestCase): - TEST_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), "srv_seedlist", "sharded") + if _IS_SYNC: + TEST_PATH = os.path.join(pathlib.Path(__file__).resolve().parent, "srv_seedlist", "sharded") + else: + TEST_PATH = os.path.join( + pathlib.Path(__file__).resolve().parent.parent, "srv_seedlist", "sharded" + ) load_balanced = False @client_context.require_mongos @@ -119,7 +144,9 @@ def run_test(self): # tests. copts["tlsAllowInvalidHostnames"] = True - client = PyMongoTestCase.unmanaged_simple_client(uri, **copts) + client = self.simple_client(uri, **copts) + if client._options.connect: + client._connect() if num_seeds is not None: self.assertEqual(len(client._topology_settings.seeds), num_seeds) if hosts is not None: @@ -132,7 +159,6 @@ def run_test(self): client.admin.command("ping") # XXX: we should block until SRV poller runs at least once # and re-run these assertions. 
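The TestInitialDnsSeedlistDiscovery class added below drives parse_uri against a patched resolver rather than live DNS. The essential pattern, reduced to a standalone form (the URI and SRV target are sample values taken from the prose tests):

```python
from unittest.mock import MagicMock, patch

from pymongo.errors import ConfigurationError
from pymongo.synchronous.uri_parser import parse_uri

with patch("dns.resolver.resolve") as mock_resolver:
    mock_srv = MagicMock()
    # Return an SRV target outside the queried domain...
    mock_srv.target.to_text.return_value = "blogs.evil.com"
    mock_resolver.return_value = [mock_srv]
    try:
        parse_uri("mongodb+srv://blogs.mongodb.com")
    except ConfigurationError as exc:
        # ...which the seedlist validator must reject.
        assert "Invalid SRV host" in str(exc)
    else:
        raise AssertionError("expected ConfigurationError")
```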
- client.close() else: try: parse_uri(uri) @@ -159,38 +185,122 @@ def create_tests(cls): class TestParsingErrors(PyMongoTestCase): def test_invalid_host(self): - self.assertRaisesRegex( - ConfigurationError, - "Invalid URI host: mongodb is not", - self.simple_client, - "mongodb+srv://mongodb", - ) - self.assertRaisesRegex( - ConfigurationError, - "Invalid URI host: mongodb.com is not", - self.simple_client, - "mongodb+srv://mongodb.com", - ) - self.assertRaisesRegex( - ConfigurationError, - "Invalid URI host: an IP address is not", - self.simple_client, - "mongodb+srv://127.0.0.1", - ) - self.assertRaisesRegex( - ConfigurationError, - "Invalid URI host: an IP address is not", - self.simple_client, - "mongodb+srv://[::1]", - ) + with self.assertRaisesRegex(ConfigurationError, "Invalid URI host: an IP address is not"): + client = self.simple_client("mongodb+srv://127.0.0.1") + client._connect() + with self.assertRaisesRegex(ConfigurationError, "Invalid URI host: an IP address is not"): + client = self.simple_client("mongodb+srv://[::1]") + client._connect() class TestCaseInsensitive(IntegrationTest): def test_connect_case_insensitive(self): client = self.simple_client("mongodb+srv://TEST1.TEST.BUILD.10GEN.cc/") - self.addCleanup(client.close) + client._connect() self.assertGreater(len(client.topology_description.server_descriptions()), 1) +class TestInitialDnsSeedlistDiscovery(PyMongoTestCase): + """ + Initial DNS Seedlist Discovery prose tests + https://github.com/mongodb/specifications/blob/0a7a8b5/source/initial-dns-seedlist-discovery/tests/README.md#prose-tests + """ + + def run_initial_dns_seedlist_discovery_prose_tests(self, test_cases): + for case in test_cases: + with patch("dns.resolver.resolve") as mock_resolver: + + def mock_resolve(query, record_type, *args, **kwargs): + mock_srv = MagicMock() + mock_srv.target.to_text.return_value = case["mock_target"] + return [mock_srv] + + mock_resolver.side_effect = mock_resolve + domain = case["query"].split("._tcp.")[1] + connection_string = f"mongodb+srv://{domain}" + if "expected_error" not in case: + parse_uri(connection_string) + else: + try: + parse_uri(connection_string) + except ConfigurationError as e: + self.assertIn(case["expected_error"], str(e)) + else: + self.fail(f"ConfigurationError was not raised for query: {case['query']}") + + def test_1_allow_srv_hosts_with_fewer_than_three_dot_separated_parts(self): + with patch("dns.resolver.resolve"): + parse_uri("mongodb+srv://localhost/") + parse_uri("mongodb+srv://mongo.local/") + + def test_2_throw_when_return_address_does_not_end_with_srv_domain(self): + test_cases = [ + { + "query": "_mongodb._tcp.localhost", + "mock_target": "localhost.mongodb", + "expected_error": "Invalid SRV host", + }, + { + "query": "_mongodb._tcp.blogs.mongodb.com", + "mock_target": "blogs.evil.com", + "expected_error": "Invalid SRV host", + }, + { + "query": "_mongodb._tcp.blogs.mongo.local", + "mock_target": "test_1.evil.com", + "expected_error": "Invalid SRV host", + }, + ] + self.run_initial_dns_seedlist_discovery_prose_tests(test_cases) + + def test_3_throw_when_return_address_is_identical_to_srv_hostname(self): + test_cases = [ + { + "query": "_mongodb._tcp.localhost", + "mock_target": "localhost", + "expected_error": "Invalid SRV host", + }, + { + "query": "_mongodb._tcp.mongo.local", + "mock_target": "mongo.local", + "expected_error": "Invalid SRV host", + }, + ] + self.run_initial_dns_seedlist_discovery_prose_tests(test_cases) + + def 
test_4_throw_when_return_address_does_not_contain_dot_separating_shared_part_of_domain( + self + ): + test_cases = [ + { + "query": "_mongodb._tcp.localhost", + "mock_target": "test_1.cluster_1localhost", + "expected_error": "Invalid SRV host", + }, + { + "query": "_mongodb._tcp.mongo.local", + "mock_target": "test_1.my_hostmongo.local", + "expected_error": "Invalid SRV host", + }, + { + "query": "_mongodb._tcp.blogs.mongodb.com", + "mock_target": "cluster.testmongodb.com", + "expected_error": "Invalid SRV host", + }, + ] + self.run_initial_dns_seedlist_discovery_prose_tests(test_cases) + + def test_5_when_srv_hostname_has_two_dot_separated_parts_it_is_valid_for_the_returned_hostname_to_be_identical( + self + ): + test_cases = [ + { + "query": "_mongodb._tcp.blogs.mongodb.com", + "mock_target": "blogs.mongodb.com", + }, + ] + self.run_initial_dns_seedlist_discovery_prose_tests(test_cases) + + if __name__ == "__main__": unittest.main() diff --git a/test/test_encryption.py b/test/test_encryption.py index daa5fd5d4c..04e61b7bad 100644 --- a/test/test_encryption.py +++ b/test/test_encryption.py @@ -32,6 +32,7 @@ import warnings from test import IntegrationTest, PyMongoTestCase, client_context from test.test_bulk import BulkTestBase +from test.utils import flaky from test.utils_spec_runner import SpecRunner, SpecTestCreator from threading import Thread from typing import Any, Dict, Mapping, Optional @@ -40,7 +41,7 @@ from pymongo.daemon import _spawn_daemon from pymongo.synchronous.collection import Collection -from pymongo.synchronous.helpers import next +from pymongo.uri_parser_shared import _parse_kms_tls_options try: from pymongo.pyopenssl_context import IS_PYOPENSSL @@ -52,18 +53,21 @@ from test import ( unittest, ) -from test.helpers import ( +from test.helpers_shared import ( + ALL_KMS_PROVIDERS, AWS_CREDS, + AWS_TEMP_CREDS, AZURE_CREDS, CA_PEM, CLIENT_PEM, + DEFAULT_KMS_TLS, GCP_CREDS, KMIP_CREDS, LOCAL_MASTER_KEY, ) from test.test_bulk import BulkTestBase from test.unified_format import generate_test_classes -from test.utils import ( +from test.utils_shared import ( AllowListEventListener, OvertCommandListener, TopologyEventListener, @@ -73,7 +77,7 @@ ) from test.utils_spec_runner import SpecRunner -from bson import DatetimeMS, Decimal128, encode, json_util +from bson import BSON, DatetimeMS, Decimal128, encode, json_util from bson.binary import UUID_SUBTYPE, Binary, UuidRepresentation from bson.codec_options import CodecOptions from bson.errors import BSONError @@ -81,7 +85,7 @@ from bson.son import SON from pymongo import ReadPreference from pymongo.cursor_shared import CursorType -from pymongo.encryption_options import _HAVE_PYMONGOCRYPT, AutoEncryptionOpts, RangeOpts +from pymongo.encryption_options import _HAVE_PYMONGOCRYPT, AutoEncryptionOpts, RangeOpts, TextOpts from pymongo.errors import ( AutoReconnect, BulkWriteError, @@ -91,11 +95,11 @@ EncryptionError, InvalidOperation, OperationFailure, + PyMongoError, ServerSelectionTimeoutError, WriteError, ) from pymongo.operations import InsertOne, ReplaceOne, UpdateOne -from pymongo.ssl_support import get_ssl_context from pymongo.synchronous import encryption from pymongo.synchronous.encryption import Algorithm, ClientEncryption, QueryType from pymongo.synchronous.mongo_client import MongoClient @@ -141,7 +145,7 @@ def test_init(self): self.assertEqual(opts._mongocryptd_bypass_spawn, False) self.assertEqual(opts._mongocryptd_spawn_path, "mongocryptd") self.assertEqual(opts._mongocryptd_spawn_args, ["--idleShutdownTimeoutSecs=60"]) - 
self.assertEqual(opts._kms_ssl_contexts, {}) + self.assertEqual(opts._kms_tls_options, None) @unittest.skipUnless(_HAVE_PYMONGOCRYPT, "pymongocrypt is not installed") def test_init_spawn_args(self): @@ -167,36 +171,45 @@ def test_init_spawn_args(self): @unittest.skipUnless(_HAVE_PYMONGOCRYPT, "pymongocrypt is not installed") def test_init_kms_tls_options(self): # Error cases: + opts = AutoEncryptionOpts({}, "k.d", kms_tls_options={"kmip": 1}) with self.assertRaisesRegex(TypeError, r'kms_tls_options\["kmip"\] must be a dict'): - AutoEncryptionOpts({}, "k.d", kms_tls_options={"kmip": 1}) + MongoClient(auto_encryption_opts=opts) + tls_opts: Any for tls_opts in [ {"kmip": {"tls": True, "tlsInsecure": True}}, {"kmip": {"tls": True, "tlsAllowInvalidCertificates": True}}, {"kmip": {"tls": True, "tlsAllowInvalidHostnames": True}}, ]: + opts = AutoEncryptionOpts({}, "k.d", kms_tls_options=tls_opts) with self.assertRaisesRegex(ConfigurationError, "Insecure TLS options prohibited"): - opts = AutoEncryptionOpts({}, "k.d", kms_tls_options=tls_opts) + MongoClient(auto_encryption_opts=opts) + opts = AutoEncryptionOpts( + {}, "k.d", kms_tls_options={"kmip": {"tlsCAFile": "does-not-exist"}} + ) with self.assertRaises(FileNotFoundError): - AutoEncryptionOpts({}, "k.d", kms_tls_options={"kmip": {"tlsCAFile": "does-not-exist"}}) + MongoClient(auto_encryption_opts=opts) # Success cases: tls_opts: Any for tls_opts in [None, {}]: opts = AutoEncryptionOpts({}, "k.d", kms_tls_options=tls_opts) - self.assertEqual(opts._kms_ssl_contexts, {}) + kms_tls_contexts = _parse_kms_tls_options(opts._kms_tls_options, _IS_SYNC) + self.assertEqual(kms_tls_contexts, {}) opts = AutoEncryptionOpts({}, "k.d", kms_tls_options={"kmip": {"tls": True}, "aws": {}}) - ctx = opts._kms_ssl_contexts["kmip"] + _kms_ssl_contexts = _parse_kms_tls_options(opts._kms_tls_options, _IS_SYNC) + ctx = _kms_ssl_contexts["kmip"] self.assertEqual(ctx.check_hostname, True) self.assertEqual(ctx.verify_mode, ssl.CERT_REQUIRED) - ctx = opts._kms_ssl_contexts["aws"] + ctx = _kms_ssl_contexts["aws"] self.assertEqual(ctx.check_hostname, True) self.assertEqual(ctx.verify_mode, ssl.CERT_REQUIRED) opts = AutoEncryptionOpts( {}, "k.d", - kms_tls_options={"kmip": {"tlsCAFile": CA_PEM, "tlsCertificateKeyFile": CLIENT_PEM}}, + kms_tls_options=DEFAULT_KMS_TLS, ) - ctx = opts._kms_ssl_contexts["kmip"] + _kms_ssl_contexts = _parse_kms_tls_options(opts._kms_tls_options, _IS_SYNC) + ctx = _kms_ssl_contexts["kmip"] self.assertEqual(ctx.check_hostname, True) self.assertEqual(ctx.verify_mode, ssl.CERT_REQUIRED) @@ -385,7 +398,7 @@ def test_use_after_close(self): ) @unittest.skipIf( is_greenthread_patched(), - "gevent and eventlet do not support POSIX-style forking.", + "gevent does not support POSIX-style forking.", ) @client_context.require_sync def test_fork(self): @@ -441,20 +454,6 @@ class TestClientMaxWireVersion(IntegrationTest): def setUp(self): super().setUp() - @client_context.require_version_max(4, 0, 99) - def test_raise_max_wire_version_error(self): - opts = AutoEncryptionOpts(KMS_PROVIDERS, "keyvault.datakeys") - client = self.rs_or_single_client(auto_encryption_opts=opts) - msg = "Auto-encryption requires a minimum MongoDB version of 4.2" - with self.assertRaisesRegex(ConfigurationError, msg): - client.test.test.insert_one({}) - with self.assertRaisesRegex(ConfigurationError, msg): - client.admin.command("ping") - with self.assertRaisesRegex(ConfigurationError, msg): - client.test.test.find_one({}) - with self.assertRaisesRegex(ConfigurationError, msg): - 
client.test.test.bulk_write([InsertOne({})]) - def test_raise_unsupported_error(self): opts = AutoEncryptionOpts(KMS_PROVIDERS, "keyvault.datakeys") client = self.rs_or_single_client(auto_encryption_opts=opts) @@ -617,17 +616,10 @@ def test_with_statement(self): # Spec tests -AWS_TEMP_CREDS = { - "accessKeyId": os.environ.get("CSFLE_AWS_TEMP_ACCESS_KEY_ID", ""), - "secretAccessKey": os.environ.get("CSFLE_AWS_TEMP_SECRET_ACCESS_KEY", ""), - "sessionToken": os.environ.get("CSFLE_AWS_TEMP_SESSION_TOKEN", ""), -} - AWS_TEMP_NO_SESSION_CREDS = { "accessKeyId": os.environ.get("CSFLE_AWS_TEMP_ACCESS_KEY_ID", ""), "secretAccessKey": os.environ.get("CSFLE_AWS_TEMP_SECRET_ACCESS_KEY", ""), } -KMS_TLS_OPTS = {"kmip": {"tlsCAFile": CA_PEM, "tlsCertificateKeyFile": CLIENT_PEM}} class TestSpec(SpecRunner): @@ -664,7 +656,7 @@ def parse_auto_encrypt_opts(self, opts): self.skipTest("GCP environment credentials are not set") if "kmip" in kms_providers: kms_providers["kmip"] = KMIP_CREDS - opts["kms_tls_options"] = KMS_TLS_OPTS + opts["kms_tls_options"] = DEFAULT_KMS_TLS if "key_vault_namespace" not in opts: opts["key_vault_namespace"] = "keyvault.datakeys" if "extra_options" in opts: @@ -752,20 +744,11 @@ def run_scenario(self): if _HAVE_PYMONGOCRYPT: globals().update( generate_test_classes( - os.path.join(SPEC_PATH, "unified"), - module=__name__, + os.path.join(SPEC_PATH, "unified"), module=__name__, expected_failures=["mapReduce .*"] ) ) # Prose Tests -ALL_KMS_PROVIDERS = { - "aws": AWS_CREDS, - "azure": AZURE_CREDS, - "gcp": GCP_CREDS, - "kmip": KMIP_CREDS, - "local": {"key": LOCAL_MASTER_KEY}, -} - LOCAL_KEY_ID = Binary(base64.b64decode(b"LOCALAAAAAAAAAAAAAAAAA=="), UUID_SUBTYPE) AWS_KEY_ID = Binary(base64.b64decode(b"AWSAAAAAAAAAAAAAAAAAAA=="), UUID_SUBTYPE) AZURE_KEY_ID = Binary(base64.b64decode(b"AZUREAAAAAAAAAAAAAAAAA=="), UUID_SUBTYPE) @@ -852,13 +835,17 @@ def setUp(self): self.KMS_PROVIDERS, "keyvault.datakeys", schema_map=schemas, - kms_tls_options=KMS_TLS_OPTS, + kms_tls_options=DEFAULT_KMS_TLS, ) self.client_encrypted = self.rs_or_single_client( auto_encryption_opts=opts, uuidRepresentation="standard" ) self.client_encryption = self.create_client_encryption( - self.KMS_PROVIDERS, "keyvault.datakeys", self.client, OPTS, kms_tls_options=KMS_TLS_OPTS + self.KMS_PROVIDERS, + "keyvault.datakeys", + self.client, + OPTS, + kms_tls_options=DEFAULT_KMS_TLS, ) self.listener.reset() @@ -1065,7 +1052,7 @@ def _test_corpus(self, opts): "keyvault.datakeys", client_context.client, OPTS, - kms_tls_options=KMS_TLS_OPTS, + kms_tls_options=DEFAULT_KMS_TLS, ) corpus = self.fix_up_curpus(json_data("corpus", "corpus.json")) @@ -1157,7 +1144,7 @@ def _test_corpus(self, opts): def test_corpus(self): opts = AutoEncryptionOpts( - self.kms_providers(), "keyvault.datakeys", kms_tls_options=KMS_TLS_OPTS + self.kms_providers(), "keyvault.datakeys", kms_tls_options=DEFAULT_KMS_TLS ) self._test_corpus(opts) @@ -1168,7 +1155,7 @@ def test_corpus_local_schema(self): self.kms_providers(), "keyvault.datakeys", schema_map=schemas, - kms_tls_options=KMS_TLS_OPTS, + kms_tls_options=DEFAULT_KMS_TLS, ) self._test_corpus(opts) @@ -1275,7 +1262,7 @@ def test_06_insert_fails_over_16MiB(self): with self.assertRaises(BulkWriteError) as ctx: self.coll_encrypted.bulk_write([InsertOne(doc)]) err = ctx.exception.details["writeErrors"][0] - self.assertEqual(2, err["code"]) + self.assertIn(err["code"], [2, 10334]) self.assertIn("object to insert too large", err["errmsg"]) @@ -1299,19 +1286,19 @@ def setUp(self): 
key_vault_namespace="keyvault.datakeys", key_vault_client=client_context.client, codec_options=OPTS, - kms_tls_options=KMS_TLS_OPTS, + kms_tls_options=DEFAULT_KMS_TLS, ) kms_providers_invalid = copy.deepcopy(kms_providers) kms_providers_invalid["azure"]["identityPlatformEndpoint"] = "doesnotexist.invalid:443" kms_providers_invalid["gcp"]["endpoint"] = "doesnotexist.invalid:443" - kms_providers_invalid["kmip"]["endpoint"] = "doesnotexist.local:5698" + kms_providers_invalid["kmip"]["endpoint"] = "doesnotexist.invalid:5698" self.client_encryption_invalid = self.create_client_encryption( kms_providers=kms_providers_invalid, key_vault_namespace="keyvault.datakeys", key_vault_client=client_context.client, codec_options=OPTS, - kms_tls_options=KMS_TLS_OPTS, + kms_tls_options=DEFAULT_KMS_TLS, ) self._kmip_host_error = None self._invalid_host_error = None @@ -1361,15 +1348,10 @@ def test_03_aws_region_key_endpoint_port(self): }, ) - @unittest.skipUnless(any(AWS_CREDS.values()), "AWS environment credentials are not set") - def test_04_aws_endpoint_invalid_port(self): - master_key = { - "region": "us-east-1", - "key": ("arn:aws:kms:us-east-1:579766882180:key/89fcc2c4-08b0-4bd9-9f25-e30687b580d0"), - "endpoint": "kms.us-east-1.amazonaws.com:12345", - } - with self.assertRaisesRegex(EncryptionError, "kms.us-east-1.amazonaws.com:12345"): - self.client_encryption.create_data_key("aws", master_key=master_key) + def test_04_kmip_endpoint_invalid_port(self): + master_key = {"keyId": "1", "endpoint": "localhost:12345"} + with self.assertRaisesRegex(EncryptionError, "localhost:12345"): + self.client_encryption.create_data_key("kmip", master_key=master_key) @unittest.skipUnless(any(AWS_CREDS.values()), "AWS environment credentials are not set") def test_05_aws_endpoint_wrong_region(self): @@ -1475,7 +1457,7 @@ def test_11_kmip_master_key_endpoint(self): self.assertEqual("test", self.client_encryption_invalid.decrypt(encrypted)) def test_12_kmip_master_key_invalid_endpoint(self): - key = {"keyId": "1", "endpoint": "doesnotexist.local:5698"} + key = {"keyId": "1", "endpoint": "doesnotexist.invalid:5698"} with self.assertRaisesRegex(EncryptionError, self.kmip_host_error): self.client_encryption.create_data_key("kmip", key) @@ -2155,12 +2137,13 @@ def test_01_aws(self): # 127.0.0.1:9001: ('Certificate does not contain any `subjectAltName`s.',) key["endpoint"] = "127.0.0.1:9001" with self.assertRaisesRegex( - EncryptionError, "IP address mismatch|wronghost|IPAddressMismatch|Certificate" + EncryptionError, + "IP address mismatch|wronghost|IPAddressMismatch|Certificate|SSL handshake failed", ): self.client_encryption_invalid_hostname.create_data_key("aws", key) def test_02_azure(self): - key = {"keyVaultEndpoint": "doesnotexist.local", "keyName": "foo"} + key = {"keyVaultEndpoint": "doesnotexist.invalid", "keyName": "foo"} # Missing client cert error. with self.assertRaisesRegex(EncryptionError, self.cert_error): self.client_encryption_no_client_cert.create_data_key("azure", key) @@ -2172,7 +2155,8 @@ def test_02_azure(self): self.client_encryption_expired.create_data_key("azure", key) # Invalid cert hostname error. 
with self.assertRaisesRegex( - EncryptionError, "IP address mismatch|wronghost|IPAddressMismatch|Certificate" + EncryptionError, + "IP address mismatch|wronghost|IPAddressMismatch|Certificate|SSL handshake failed", ): self.client_encryption_invalid_hostname.create_data_key("azure", key) @@ -2189,7 +2173,8 @@ def test_03_gcp(self): self.client_encryption_expired.create_data_key("gcp", key) # Invalid cert hostname error. with self.assertRaisesRegex( - EncryptionError, "IP address mismatch|wronghost|IPAddressMismatch|Certificate" + EncryptionError, + "IP address mismatch|wronghost|IPAddressMismatch|Certificate|SSL handshake failed", ): self.client_encryption_invalid_hostname.create_data_key("gcp", key) @@ -2203,7 +2188,8 @@ def test_04_kmip(self): self.client_encryption_expired.create_data_key("kmip") # Invalid cert hostname error. with self.assertRaisesRegex( - EncryptionError, "IP address mismatch|wronghost|IPAddressMismatch|Certificate" + EncryptionError, + "IP address mismatch|wronghost|IPAddressMismatch|Certificate|SSL handshake failed", ): self.client_encryption_invalid_hostname.create_data_key("kmip") @@ -2213,7 +2199,7 @@ def test_05_tlsDisableOCSPEndpointCheck_is_permitted(self): encryption = self.create_client_encryption( providers, "keyvault.datakeys", self.client, OPTS, kms_tls_options=options ) - ctx = encryption._io_callbacks.opts._kms_ssl_contexts["aws"] + ctx = encryption._io_callbacks._kms_ssl_contexts["aws"] if not hasattr(ctx, "check_ocsp_endpoint"): raise self.skipTest("OCSP not enabled") self.assertFalse(ctx.check_ocsp_endpoint) @@ -2232,7 +2218,7 @@ def test_06_named_kms_providers_apply_tls_options_aws(self): self.client_encryption_with_names.create_data_key("aws:with_tls", key) def test_06_named_kms_providers_apply_tls_options_azure(self): - key = {"keyVaultEndpoint": "doesnotexist.local", "keyName": "foo"} + key = {"keyVaultEndpoint": "doesnotexist.invalid", "keyName": "foo"} # Missing client cert error. 
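Named KMS providers such as "azure:no_client_cert" let a single ClientEncryption carry several credential and TLS combinations for the same provider type, with kms_tls_options keyed by the same names. A schematic setup, with credentials and paths as placeholders (not the test suite's actual fixtures):

```python
from bson.codec_options import CodecOptions
from pymongo import MongoClient
from pymongo.encryption import ClientEncryption

key_vault_client = MongoClient()  # assumes a reachable test deployment
kms_providers = {
    "azure:no_client_cert": {"tenantId": "...", "clientId": "...", "clientSecret": "..."},
    "azure:with_tls": {"tenantId": "...", "clientId": "...", "clientSecret": "..."},
}
client_encryption = ClientEncryption(
    kms_providers,
    "keyvault.datakeys",
    key_vault_client,
    CodecOptions(),
    kms_tls_options={
        # Only the ":with_tls" variant presents a client certificate.
        "azure:with_tls": {"tlsCertificateKeyFile": "/path/client.pem"},
    },
)
```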
with self.assertRaisesRegex(EncryptionError, self.cert_error): self.client_encryption_with_names.create_data_key("azure:no_client_cert", key) @@ -2400,6 +2386,310 @@ def test_05_roundtrip_encrypted_unindexed(self): self.assertEqual(decrypted, val) +# https://github.com/mongodb/specifications/blob/527e22d5090ec48bf1e144c45fc831de0f1935f6/source/client-side-encryption/tests/README.md#25-test-lookup +class TestLookupProse(EncryptionIntegrationTest): + @client_context.require_no_standalone + @client_context.require_version_min(7, 0, -1) + def setUp(self): + super().setUp() + encrypted_client = self.rs_or_single_client( + auto_encryption_opts=AutoEncryptionOpts( + key_vault_namespace="db.keyvault", + kms_providers={"local": {"key": LOCAL_MASTER_KEY}}, + ) + ) + encrypted_client.drop_database("db") + + key_doc = json_data("etc", "data", "lookup", "key-doc.json") + create_key_vault(encrypted_client.db.keyvault, key_doc) + self.addCleanup(client_context.client.drop_database, "db") + + encrypted_client.db.create_collection( + "csfle", + validator={"$jsonSchema": json_data("etc", "data", "lookup", "schema-csfle.json")}, + ) + encrypted_client.db.create_collection( + "csfle2", + validator={"$jsonSchema": json_data("etc", "data", "lookup", "schema-csfle2.json")}, + ) + encrypted_client.db.create_collection( + "qe", encryptedFields=json_data("etc", "data", "lookup", "schema-qe.json") + ) + encrypted_client.db.create_collection( + "qe2", encryptedFields=json_data("etc", "data", "lookup", "schema-qe2.json") + ) + encrypted_client.db.create_collection("no_schema") + encrypted_client.db.create_collection("no_schema2") + + unencrypted_client = self.rs_or_single_client() + + encrypted_client.db.csfle.insert_one({"csfle": "csfle"}) + doc = unencrypted_client.db.csfle.find_one() + self.assertIsInstance(doc["csfle"], Binary) + encrypted_client.db.csfle2.insert_one({"csfle2": "csfle2"}) + doc = unencrypted_client.db.csfle2.find_one() + self.assertIsInstance(doc["csfle2"], Binary) + encrypted_client.db.qe.insert_one({"qe": "qe"}) + doc = unencrypted_client.db.qe.find_one() + self.assertIsInstance(doc["qe"], Binary) + encrypted_client.db.qe2.insert_one({"qe2": "qe2"}) + doc = unencrypted_client.db.qe2.find_one() + self.assertIsInstance(doc["qe2"], Binary) + encrypted_client.db.no_schema.insert_one({"no_schema": "no_schema"}) + encrypted_client.db.no_schema2.insert_one({"no_schema2": "no_schema2"}) + + encrypted_client.close() + unencrypted_client.close() + + @client_context.require_version_min(8, 1, -1) + def test_1_csfle_joins_no_schema(self): + encrypted_client = self.rs_or_single_client( + auto_encryption_opts=AutoEncryptionOpts( + key_vault_namespace="db.keyvault", + kms_providers={"local": {"key": LOCAL_MASTER_KEY}}, + ) + ) + doc = next( + encrypted_client.db.csfle.aggregate( + [ + {"$match": {"csfle": "csfle"}}, + { + "$lookup": { + "from": "no_schema", + "as": "matched", + "pipeline": [ + {"$match": {"no_schema": "no_schema"}}, + {"$project": {"_id": 0}}, + ], + } + }, + {"$project": {"_id": 0}}, + ] + ) + ) + self.assertEqual(doc, {"csfle": "csfle", "matched": [{"no_schema": "no_schema"}]}) + + @client_context.require_version_min(8, 1, -1) + def test_2_qe_joins_no_schema(self): + encrypted_client = self.rs_or_single_client( + auto_encryption_opts=AutoEncryptionOpts( + key_vault_namespace="db.keyvault", + kms_providers={"local": {"key": LOCAL_MASTER_KEY}}, + ) + ) + doc = next( + encrypted_client.db.qe.aggregate( + [ + {"$match": {"qe": "qe"}}, + { + "$lookup": { + "from": "no_schema", + 
"as": "matched", + "pipeline": [ + {"$match": {"no_schema": "no_schema"}}, + {"$project": {"_id": 0, "__safeContent__": 0}}, + ], + } + }, + {"$project": {"_id": 0, "__safeContent__": 0}}, + ] + ) + ) + self.assertEqual(doc, {"qe": "qe", "matched": [{"no_schema": "no_schema"}]}) + + @client_context.require_version_min(8, 1, -1) + def test_3_no_schema_joins_csfle(self): + encrypted_client = self.rs_or_single_client( + auto_encryption_opts=AutoEncryptionOpts( + key_vault_namespace="db.keyvault", + kms_providers={"local": {"key": LOCAL_MASTER_KEY}}, + ) + ) + doc = next( + encrypted_client.db.no_schema.aggregate( + [ + {"$match": {"no_schema": "no_schema"}}, + { + "$lookup": { + "from": "csfle", + "as": "matched", + "pipeline": [{"$match": {"csfle": "csfle"}}, {"$project": {"_id": 0}}], + } + }, + {"$project": {"_id": 0}}, + ] + ) + ) + self.assertEqual(doc, {"no_schema": "no_schema", "matched": [{"csfle": "csfle"}]}) + + @client_context.require_version_min(8, 1, -1) + def test_4_no_schema_joins_qe(self): + encrypted_client = self.rs_or_single_client( + auto_encryption_opts=AutoEncryptionOpts( + key_vault_namespace="db.keyvault", + kms_providers={"local": {"key": LOCAL_MASTER_KEY}}, + ) + ) + doc = next( + encrypted_client.db.no_schema.aggregate( + [ + {"$match": {"no_schema": "no_schema"}}, + { + "$lookup": { + "from": "qe", + "as": "matched", + "pipeline": [ + {"$match": {"qe": "qe"}}, + {"$project": {"_id": 0, "__safeContent__": 0}}, + ], + } + }, + {"$project": {"_id": 0}}, + ] + ) + ) + self.assertEqual(doc, {"no_schema": "no_schema", "matched": [{"qe": "qe"}]}) + + @client_context.require_version_min(8, 1, -1) + def test_5_csfle_joins_csfle2(self): + encrypted_client = self.rs_or_single_client( + auto_encryption_opts=AutoEncryptionOpts( + key_vault_namespace="db.keyvault", + kms_providers={"local": {"key": LOCAL_MASTER_KEY}}, + ) + ) + doc = next( + encrypted_client.db.csfle.aggregate( + [ + {"$match": {"csfle": "csfle"}}, + { + "$lookup": { + "from": "csfle2", + "as": "matched", + "pipeline": [ + {"$match": {"csfle2": "csfle2"}}, + {"$project": {"_id": 0}}, + ], + } + }, + {"$project": {"_id": 0}}, + ] + ) + ) + self.assertEqual(doc, {"csfle": "csfle", "matched": [{"csfle2": "csfle2"}]}) + + @client_context.require_version_min(8, 1, -1) + def test_6_qe_joins_qe2(self): + encrypted_client = self.rs_or_single_client( + auto_encryption_opts=AutoEncryptionOpts( + key_vault_namespace="db.keyvault", + kms_providers={"local": {"key": LOCAL_MASTER_KEY}}, + ) + ) + doc = next( + encrypted_client.db.qe.aggregate( + [ + {"$match": {"qe": "qe"}}, + { + "$lookup": { + "from": "qe2", + "as": "matched", + "pipeline": [ + {"$match": {"qe2": "qe2"}}, + {"$project": {"_id": 0, "__safeContent__": 0}}, + ], + } + }, + {"$project": {"_id": 0, "__safeContent__": 0}}, + ] + ) + ) + self.assertEqual(doc, {"qe": "qe", "matched": [{"qe2": "qe2"}]}) + + @client_context.require_version_min(8, 1, -1) + def test_7_no_schema_joins_no_schema2(self): + encrypted_client = self.rs_or_single_client( + auto_encryption_opts=AutoEncryptionOpts( + key_vault_namespace="db.keyvault", + kms_providers={"local": {"key": LOCAL_MASTER_KEY}}, + ) + ) + doc = next( + encrypted_client.db.no_schema.aggregate( + [ + {"$match": {"no_schema": "no_schema"}}, + { + "$lookup": { + "from": "no_schema2", + "as": "matched", + "pipeline": [ + {"$match": {"no_schema2": "no_schema2"}}, + {"$project": {"_id": 0}}, + ], + } + }, + {"$project": {"_id": 0}}, + ] + ) + ) + self.assertEqual(doc, {"no_schema": "no_schema", "matched": [{"no_schema2": 
"no_schema2"}]}) + + @client_context.require_version_min(8, 1, -1) + def test_8_csfle_joins_qe(self): + encrypted_client = self.rs_or_single_client( + auto_encryption_opts=AutoEncryptionOpts( + key_vault_namespace="db.keyvault", + kms_providers={"local": {"key": LOCAL_MASTER_KEY}}, + ) + ) + with self.assertRaises(PyMongoError) as exc: + _ = next( + encrypted_client.db.csfle.aggregate( + [ + {"$match": {"csfle": "qe"}}, + { + "$lookup": { + "from": "qe", + "as": "matched", + "pipeline": [{"$match": {"qe": "qe"}}, {"$project": {"_id": 0}}], + } + }, + {"$project": {"_id": 0}}, + ] + ) + ) + self.assertIn("not supported", str(exc)) + + @client_context.require_version_max(8, 1, -1) + def test_9_error(self): + encrypted_client = self.rs_or_single_client( + auto_encryption_opts=AutoEncryptionOpts( + key_vault_namespace="db.keyvault", + kms_providers={"local": {"key": LOCAL_MASTER_KEY}}, + ) + ) + with self.assertRaises(PyMongoError) as exc: + _ = next( + encrypted_client.db.csfle.aggregate( + [ + {"$match": {"csfle": "csfle"}}, + { + "$lookup": { + "from": "no_schema", + "as": "matched", + "pipeline": [ + {"$match": {"no_schema": "no_schema"}}, + {"$project": {"_id": 0}}, + ], + } + }, + {"$project": {"_id": 0}}, + ] + ) + ) + self.assertIn("Upgrade", str(exc)) + + # https://github.com/mongodb/specifications/blob/072601/source/client-side-encryption/tests/README.md#rewrap class TestRewrapWithSeparateClientEncryption(EncryptionIntegrationTest): MASTER_KEYS: Mapping[str, Mapping[str, Any]] = { @@ -2436,7 +2726,7 @@ def run_test(self, src_provider, dst_provider): key_vault_client=self.client, key_vault_namespace="keyvault.datakeys", kms_providers=ALL_KMS_PROVIDERS, - kms_tls_options=KMS_TLS_OPTS, + kms_tls_options=DEFAULT_KMS_TLS, codec_options=OPTS, ) @@ -2456,7 +2746,7 @@ def run_test(self, src_provider, dst_provider): key_vault_client=client2, key_vault_namespace="keyvault.datakeys", kms_providers=ALL_KMS_PROVIDERS, - kms_tls_options=KMS_TLS_OPTS, + kms_tls_options=DEFAULT_KMS_TLS, codec_options=OPTS, ) @@ -2861,15 +3151,10 @@ def setUp(self): def http_post(self, path, data=None): # Note, the connection to the mock server needs to be closed after # each request because the server is single threaded. - ctx: ssl.SSLContext = get_ssl_context( - CLIENT_PEM, # certfile - None, # passphrase - CA_PEM, # ca_certs - None, # crlfile - False, # allow_invalid_certificates - False, # allow_invalid_hostnames - False, # disable_ocsp_endpoint_check - ) + ctx = ssl.create_default_context(cafile=CA_PEM) + ctx.load_cert_chain(CLIENT_PEM) + ctx.check_hostname = False + ctx.verify_mode = ssl.CERT_NONE conn = http.client.HTTPSConnection("127.0.0.1:9003", context=ctx) try: if data is not None: @@ -2930,6 +3215,7 @@ def test_kms_retry(self): class TestAutomaticDecryptionKeys(EncryptionIntegrationTest): @client_context.require_no_standalone @client_context.require_version_min(7, 0, -1) + @flaky(reason="PYTHON-4982") def setUp(self): super().setUp() self.key1_document = json_data("etc", "data", "keys", "key1-document.json") @@ -2966,9 +3252,10 @@ def test_02_no_fields(self): ) def test_03_invalid_keyid(self): + # checkAuthForCreateCollection can be removed when SERVER-102101 is fixed. 
with self.assertRaisesRegex( EncryptedCollectionError, - "create.encryptedFields.fields.keyId' is the wrong type 'bool', expected type 'binData", + "(create|checkAuthForCreateCollection).encryptedFields.fields.keyId' is the wrong type 'bool', expected type 'binData", ): self.client_encryption.create_encrypted_collection( database=self.db, @@ -3137,6 +3424,262 @@ def test_collection_name_collision(self): self.assertIsInstance(exc.exception.encrypted_fields["fields"][0]["keyId"], Binary) +# https://github.com/mongodb/specifications/blob/master/source/client-side-encryption/tests/README.md#27-text-explicit-encryption +class TestExplicitTextEncryptionProse(EncryptionIntegrationTest): + @client_context.require_no_standalone + @client_context.require_version_min(8, 2, -1) + @client_context.require_libmongocrypt_min(1, 15, 1) + @client_context.require_pymongocrypt_min(1, 16, 0) + def setUp(self): + super().setUp() + # Load the file key1-document.json as key1Document. + self.key1_document = json_data("etc", "data", "keys", "key1-document.json") + # Read the "_id" field of key1Document as key1ID. + self.key1_id = self.key1_document["_id"] + # Drop and create the collection keyvault.datakeys. + # Insert key1Document in keyvault.datakeys with majority write concern. + self.key_vault = create_key_vault(self.client.keyvault.datakeys, self.key1_document) + self.addCleanup(self.key_vault.drop) + # Create a ClientEncryption object named clientEncryption with these options. + self.kms_providers = {"local": {"key": LOCAL_MASTER_KEY}} + self.client_encryption = self.create_client_encryption( + self.kms_providers, + self.key_vault.full_name, + self.client, + OPTS, + ) + # Create a MongoClient named encryptedClient with these AutoEncryptionOpts. + opts = AutoEncryptionOpts( + self.kms_providers, + "keyvault.datakeys", + bypass_query_analysis=True, + ) + self.client_encrypted = self.rs_or_single_client(auto_encryption_opts=opts) + + # Using QE CreateCollection() and Collection.Drop(), drop and create the following collections with majority write concern: + # db.prefix-suffix using the encryptedFields option set to the contents of encryptedFields-prefix-suffix.json. + db = self.client_encrypted.db + db.drop_collection("prefix-suffix") + encrypted_fields = json_data("etc", "data", "encryptedFields-prefix-suffix.json") + self.client_encryption.create_encrypted_collection( + db, "prefix-suffix", kms_provider="local", encrypted_fields=encrypted_fields + ) + # db.substring using the encryptedFields option set to the contents of encryptedFields-substring.json. + db.drop_collection("substring") + encrypted_fields = json_data("etc", "data", "encryptedFields-substring.json") + self.client_encryption.create_encrypted_collection( + db, "substring", kms_provider="local", encrypted_fields=encrypted_fields + ) + + # Use clientEncryption to encrypt the string "foobarbaz" with the following EncryptOpts. + text_opts = TextOpts( + case_sensitive=True, + diacritic_sensitive=True, + prefix=dict(strMaxQueryLength=10, strMinQueryLength=2), + suffix=dict(strMaxQueryLength=10, strMinQueryLength=2), + ) + encrypted_value = self.client_encryption.encrypt( + "foobarbaz", + key_id=self.key1_id, + algorithm=Algorithm.TEXTPREVIEW, + contention_factor=0, + text_opts=text_opts, + ) + # Use encryptedClient to insert the following document into db.prefix-suffix with majority write concern. 
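+        # The stored value is the TEXTPREVIEW ciphertext produced above; the plaintext
+        # "foobarbaz" never leaves the client.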
+ coll = self.client_encrypted.db["prefix-suffix"].with_options( + write_concern=WriteConcern(w="majority") + ) + coll.insert_one({"_id": 0, "encryptedText": encrypted_value}) + + # Use clientEncryption to encrypt the string "foobarbaz" with the following EncryptOpts. + text_opts = TextOpts( + case_sensitive=True, + diacritic_sensitive=True, + substring=dict(strMaxLength=10, strMaxQueryLength=10, strMinQueryLength=2), + ) + encrypted_value = self.client_encryption.encrypt( + "foobarbaz", + key_id=self.key1_id, + algorithm=Algorithm.TEXTPREVIEW, + contention_factor=0, + text_opts=text_opts, + ) + # Use encryptedClient to insert the following document into db.substring with majority write concern. + coll = self.client_encrypted.db["substring"].with_options( + write_concern=WriteConcern(w="majority") + ) + coll.insert_one({"_id": 0, "encryptedText": encrypted_value}) + + def test_01_can_find_a_document_by_prefix(self): + # Use clientEncryption.encrypt() to encrypt the string "foo" with the following EncryptOpts. + text_opts = TextOpts( + case_sensitive=True, + diacritic_sensitive=True, + prefix=dict(strMaxQueryLength=10, strMinQueryLength=2), + ) + encrypted_value = self.client_encryption.encrypt( + "foo", + key_id=self.key1_id, + algorithm=Algorithm.TEXTPREVIEW, + query_type=QueryType.PREFIXPREVIEW, + contention_factor=0, + text_opts=text_opts, + ) + # Use encryptedClient to run a "find" operation on the db.prefix-suffix collection with the following filter. + value = self.client_encrypted.db["prefix-suffix"].find_one( + {"$expr": {"$encStrStartsWith": {"input": "$encryptedText", "prefix": encrypted_value}}} + ) + # Assert the following document is returned. + expected = {"_id": 0, "encryptedText": "foobarbaz"} + value.pop("__safeContent__", None) + self.assertEqual(value, expected) + + def test_02_can_find_a_document_by_suffix(self): + # Use clientEncryption.encrypt() to encrypt the string "baz" with the following EncryptOpts: + text_opts = TextOpts( + case_sensitive=True, + diacritic_sensitive=True, + suffix=dict(strMaxQueryLength=10, strMinQueryLength=2), + ) + encrypted_value = self.client_encryption.encrypt( + "baz", + key_id=self.key1_id, + algorithm=Algorithm.TEXTPREVIEW, + query_type=QueryType.SUFFIXPREVIEW, + contention_factor=0, + text_opts=text_opts, + ) + # Use encryptedClient to run a "find" operation on the db.prefix-suffix collection with the following filter: + value = self.client_encrypted.db["prefix-suffix"].find_one( + {"$expr": {"$encStrEndsWith": {"input": "$encryptedText", "suffix": encrypted_value}}} + ) + # Assert the following document is returned. + expected = {"_id": 0, "encryptedText": "foobarbaz"} + value.pop("__safeContent__", None) + self.assertEqual(value, expected) + + def test_03_no_document_found_by_prefix(self): + # Use clientEncryption.encrypt() to encrypt the string "baz" with the following EncryptOpts: + text_opts = TextOpts( + case_sensitive=True, + diacritic_sensitive=True, + prefix=dict(strMaxQueryLength=10, strMinQueryLength=2), + ) + encrypted_value = self.client_encryption.encrypt( + "baz", + key_id=self.key1_id, + algorithm=Algorithm.TEXTPREVIEW, + query_type=QueryType.PREFIXPREVIEW, + contention_factor=0, + text_opts=text_opts, + ) + # Use encryptedClient to run a "find" operation on the db.prefix-suffix collection with the following filter: + value = self.client_encrypted.db["prefix-suffix"].find_one( + {"$expr": {"$encStrStartsWith": {"input": "$encryptedText", "prefix": encrypted_value}}} + ) + # Assert that no documents are returned. 
+ self.assertIsNone(value) + + def test_04_no_document_found_by_suffix(self): + # Use clientEncryption.encrypt() to encrypt the string "foo" with the following EncryptOpts: + text_opts = TextOpts( + case_sensitive=True, + diacritic_sensitive=True, + suffix=dict(strMaxQueryLength=10, strMinQueryLength=2), + ) + encrypted_value = self.client_encryption.encrypt( + "foo", + key_id=self.key1_id, + algorithm=Algorithm.TEXTPREVIEW, + query_type=QueryType.SUFFIXPREVIEW, + contention_factor=0, + text_opts=text_opts, + ) + # Use encryptedClient to run a "find" operation on the db.prefix-suffix collection with the following filter: + value = self.client_encrypted.db["prefix-suffix"].find_one( + {"$expr": {"$encStrEndsWith": {"input": "$encryptedText", "suffix": encrypted_value}}} + ) + # Assert that no documents are returned. + self.assertIsNone(value) + + def test_05_can_find_a_document_by_substring(self): + # Use clientEncryption.encrypt() to encrypt the string "bar" with the following EncryptOpts: + text_opts = TextOpts( + case_sensitive=True, + diacritic_sensitive=True, + substring=dict(strMaxLength=10, strMaxQueryLength=10, strMinQueryLength=2), + ) + encrypted_value = self.client_encryption.encrypt( + "bar", + key_id=self.key1_id, + algorithm=Algorithm.TEXTPREVIEW, + query_type=QueryType.SUBSTRINGPREVIEW, + contention_factor=0, + text_opts=text_opts, + ) + # Use encryptedClient to run a "find" operation on the db.substring collection with the following filter: + value = self.client_encrypted.db["substring"].find_one( + { + "$expr": { + "$encStrContains": {"input": "$encryptedText", "substring": encrypted_value} + } + } + ) + # Assert the following document is returned: + expected = {"_id": 0, "encryptedText": "foobarbaz"} + value.pop("__safeContent__", None) + self.assertEqual(value, expected) + + def test_06_no_document_found_by_substring(self): + # Use clientEncryption.encrypt() to encrypt the string "qux" with the following EncryptOpts: + text_opts = TextOpts( + case_sensitive=True, + diacritic_sensitive=True, + substring=dict(strMaxLength=10, strMaxQueryLength=10, strMinQueryLength=2), + ) + encrypted_value = self.client_encryption.encrypt( + "qux", + key_id=self.key1_id, + algorithm=Algorithm.TEXTPREVIEW, + query_type=QueryType.SUBSTRINGPREVIEW, + contention_factor=0, + text_opts=text_opts, + ) + # Use encryptedClient to run a "find" operation on the db.substring collection with the following filter: + value = self.client_encrypted.db["substring"].find_one( + { + "$expr": { + "$encStrContains": {"input": "$encryptedText", "substring": encrypted_value} + } + } + ) + # Assert that no documents are returned. + self.assertIsNone(value) + + def test_07_contentionFactor_is_required(self): + from pymongocrypt.errors import MongoCryptError + + # Use clientEncryption.encrypt() to encrypt the string "foo" with the following EncryptOpts: + text_opts = TextOpts( + case_sensitive=True, + diacritic_sensitive=True, + prefix=dict(strMaxQueryLength=10, strMinQueryLength=2), + ) + with self.assertRaises(EncryptionError) as ctx: + self.client_encryption.encrypt( + "foo", + key_id=self.key1_id, + algorithm=Algorithm.TEXTPREVIEW, + query_type=QueryType.PREFIXPREVIEW, + text_opts=text_opts, + ) + # Expect an error from libmongocrypt with a message containing the string: "contention factor is required for textPreview algorithm". 
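+        # PyMongo wraps the libmongocrypt failure in EncryptionError and exposes the
+        # original MongoCryptError as .cause.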
+ self.assertIsInstance(ctx.exception.cause, MongoCryptError) + self.assertEqual( + str(ctx.exception), "contention factor is required for textPreview algorithm" + ) + + def start_mongocryptd(port) -> None: args = ["mongocryptd", f"--port={port}", "--idleShutdownTimeoutSecs=60"] _spawn_daemon(args) @@ -3171,6 +3714,8 @@ def test_implicit_session_ignored_when_unsupported(self): self.assertNotIn("lsid", self.listener.started_events[1].command) + self.mongocryptd_client.close() + def test_explicit_session_errors_when_unsupported(self): self.listener.reset() with self.mongocryptd_client.start_session() as s: @@ -3183,6 +3728,8 @@ def test_explicit_session_errors_when_unsupported(self): ): self.mongocryptd_client.db.test.insert_one({"x": 1}, session=s) + self.mongocryptd_client.close() + if __name__ == "__main__": unittest.main() diff --git a/test/test_errors.py b/test/test_errors.py index 2cee7c15d8..d6db6a4ec1 100644 --- a/test/test_errors.py +++ b/test/test_errors.py @@ -47,15 +47,9 @@ def test_operation_failure(self): self.assertIn("full error", traceback.format_exc()) def _test_unicode_strs(self, exc): - if sys.implementation.name == "pypy" and sys.implementation.version < (7, 3, 7): - # PyPy used to display unicode in repr differently. - self.assertEqual( - "unicode \U0001f40d, full error: {'errmsg': 'unicode \\U0001f40d'}", str(exc) - ) - else: - self.assertEqual( - "unicode \U0001f40d, full error: {'errmsg': 'unicode \U0001f40d'}", str(exc) - ) + self.assertEqual( + "unicode \U0001f40d, full error: {'errmsg': 'unicode \U0001f40d'}", str(exc) + ) try: raise exc except Exception: diff --git a/test/test_examples.py b/test/test_examples.py index 7f98226e7a..266e32e8d4 100644 --- a/test/test_examples.py +++ b/test/test_examples.py @@ -15,14 +15,18 @@ """MongoDB documentation examples in Python.""" from __future__ import annotations +import asyncio import datetime +import functools import sys import threading +import time +from test.helpers import ConcurrentRunner sys.path[0:0] = [""] from test import IntegrationTest, client_context, unittest -from test.utils import wait_until +from test.utils_shared import wait_until import pymongo from pymongo.errors import ConnectionFailure, OperationFailure @@ -31,6 +35,8 @@ from pymongo.server_api import ServerApi from pymongo.write_concern import WriteConcern +_IS_SYNC = True + class TestSampleShellCommands(IntegrationTest): def setUp(self): @@ -62,7 +68,7 @@ def test_first_three_examples(self): cursor = db.inventory.find({"item": "canvas"}) # End Example 2 - self.assertEqual(len(list(cursor)), 1) + self.assertEqual(len(cursor.to_list()), 1) # Start Example 3 db.inventory.insert_many( @@ -137,31 +143,31 @@ def test_query_top_level_fields(self): cursor = db.inventory.find({}) # End Example 7 - self.assertEqual(len(list(cursor)), 5) + self.assertEqual(len(cursor.to_list()), 5) # Start Example 9 cursor = db.inventory.find({"status": "D"}) # End Example 9 - self.assertEqual(len(list(cursor)), 2) + self.assertEqual(len(cursor.to_list()), 2) # Start Example 10 cursor = db.inventory.find({"status": {"$in": ["A", "D"]}}) # End Example 10 - self.assertEqual(len(list(cursor)), 5) + self.assertEqual(len(cursor.to_list()), 5) # Start Example 11 cursor = db.inventory.find({"status": "A", "qty": {"$lt": 30}}) # End Example 11 - self.assertEqual(len(list(cursor)), 1) + self.assertEqual(len(cursor.to_list()), 1) # Start Example 12 cursor = db.inventory.find({"$or": [{"status": "A"}, {"qty": {"$lt": 30}}]}) # End Example 12 - self.assertEqual(len(list(cursor)), 3) + 
self.assertEqual(len(cursor.to_list()), 3) # Start Example 13 cursor = db.inventory.find( @@ -169,46 +175,42 @@ def test_query_top_level_fields(self): ) # End Example 13 - self.assertEqual(len(list(cursor)), 2) + self.assertEqual(len(cursor.to_list()), 2) def test_query_embedded_documents(self): db = self.db # Start Example 14 - # Subdocument key order matters in a few of these examples so we have - # to use bson.son.SON instead of a Python dict. - from bson.son import SON - db.inventory.insert_many( [ { "item": "journal", "qty": 25, - "size": SON([("h", 14), ("w", 21), ("uom", "cm")]), + "size": {"h": 14, "w": 21, "uom": "cm"}, "status": "A", }, { "item": "notebook", "qty": 50, - "size": SON([("h", 8.5), ("w", 11), ("uom", "in")]), + "size": {"h": 8.5, "w": 11, "uom": "in"}, "status": "A", }, { "item": "paper", "qty": 100, - "size": SON([("h", 8.5), ("w", 11), ("uom", "in")]), + "size": {"h": 8.5, "w": 11, "uom": "in"}, "status": "D", }, { "item": "planner", "qty": 75, - "size": SON([("h", 22.85), ("w", 30), ("uom", "cm")]), + "size": {"h": 22.85, "w": 30, "uom": "cm"}, "status": "D", }, { "item": "postcard", "qty": 45, - "size": SON([("h", 10), ("w", 15.25), ("uom", "cm")]), + "size": {"h": 10, "w": 15.25, "uom": "cm"}, "status": "A", }, ] @@ -216,34 +218,34 @@ def test_query_embedded_documents(self): # End Example 14 # Start Example 15 - cursor = db.inventory.find({"size": SON([("h", 14), ("w", 21), ("uom", "cm")])}) + cursor = db.inventory.find({"size": {"h": 14, "w": 21, "uom": "cm"}}) # End Example 15 - self.assertEqual(len(list(cursor)), 1) + self.assertEqual(len(cursor.to_list()), 1) # Start Example 16 - cursor = db.inventory.find({"size": SON([("w", 21), ("h", 14), ("uom", "cm")])}) + cursor = db.inventory.find({"size": {"w": 21, "h": 14, "uom": "cm"}}) # End Example 16 - self.assertEqual(len(list(cursor)), 0) + self.assertEqual(len(cursor.to_list()), 0) # Start Example 17 cursor = db.inventory.find({"size.uom": "in"}) # End Example 17 - self.assertEqual(len(list(cursor)), 2) + self.assertEqual(len(cursor.to_list()), 2) # Start Example 18 cursor = db.inventory.find({"size.h": {"$lt": 15}}) # End Example 18 - self.assertEqual(len(list(cursor)), 4) + self.assertEqual(len(cursor.to_list()), 4) # Start Example 19 cursor = db.inventory.find({"size.h": {"$lt": 15}, "size.uom": "in", "status": "D"}) # End Example 19 - self.assertEqual(len(list(cursor)), 1) + self.assertEqual(len(cursor.to_list()), 1) def test_query_arrays(self): db = self.db @@ -269,87 +271,83 @@ def test_query_arrays(self): cursor = db.inventory.find({"tags": ["red", "blank"]}) # End Example 21 - self.assertEqual(len(list(cursor)), 1) + self.assertEqual(len(cursor.to_list()), 1) # Start Example 22 cursor = db.inventory.find({"tags": {"$all": ["red", "blank"]}}) # End Example 22 - self.assertEqual(len(list(cursor)), 4) + self.assertEqual(len(cursor.to_list()), 4) # Start Example 23 cursor = db.inventory.find({"tags": "red"}) # End Example 23 - self.assertEqual(len(list(cursor)), 4) + self.assertEqual(len(cursor.to_list()), 4) # Start Example 24 cursor = db.inventory.find({"dim_cm": {"$gt": 25}}) # End Example 24 - self.assertEqual(len(list(cursor)), 1) + self.assertEqual(len(cursor.to_list()), 1) # Start Example 25 cursor = db.inventory.find({"dim_cm": {"$gt": 15, "$lt": 20}}) # End Example 25 - self.assertEqual(len(list(cursor)), 4) + self.assertEqual(len(cursor.to_list()), 4) # Start Example 26 cursor = db.inventory.find({"dim_cm": {"$elemMatch": {"$gt": 22, "$lt": 30}}}) # End Example 26 - 
self.assertEqual(len(list(cursor)), 1) + self.assertEqual(len(cursor.to_list()), 1) # Start Example 27 cursor = db.inventory.find({"dim_cm.1": {"$gt": 25}}) # End Example 27 - self.assertEqual(len(list(cursor)), 1) + self.assertEqual(len(cursor.to_list()), 1) # Start Example 28 cursor = db.inventory.find({"tags": {"$size": 3}}) # End Example 28 - self.assertEqual(len(list(cursor)), 1) + self.assertEqual(len(cursor.to_list()), 1) def test_query_array_of_documents(self): db = self.db # Start Example 29 - # Subdocument key order matters in a few of these examples so we have - # to use bson.son.SON instead of a Python dict. - from bson.son import SON - db.inventory.insert_many( [ { "item": "journal", "instock": [ - SON([("warehouse", "A"), ("qty", 5)]), - SON([("warehouse", "C"), ("qty", 15)]), + {"warehouse": "A", "qty": 5}, + {"warehouse": "C", "qty": 15}, ], }, - {"item": "notebook", "instock": [SON([("warehouse", "C"), ("qty", 5)])]}, + {"item": "notebook", "instock": [{"warehouse": "C", "qty": 5}]}, { "item": "paper", "instock": [ - SON([("warehouse", "A"), ("qty", 60)]), - SON([("warehouse", "B"), ("qty", 15)]), + {"warehouse": "A", "qty": 60}, + {"warehouse": "B", "qty": 15}, ], }, { "item": "planner", "instock": [ - SON([("warehouse", "A"), ("qty", 40)]), - SON([("warehouse", "B"), ("qty", 5)]), + {"warehouse": "A", "qty": 40}, + {"warehouse": "B", "qty": 5}, ], }, { "item": "postcard", "instock": [ - SON([("warehouse", "B"), ("qty", 15)]), - SON([("warehouse", "C"), ("qty", 35)]), + {"warehouse": "B", "qty": 15}, + {"warehouse": "C", "qty": 35}, ], }, ] @@ -357,52 +355,52 @@ def test_query_array_of_documents(self): # End Example 29 # Start Example 30 - cursor = db.inventory.find({"instock": SON([("warehouse", "A"), ("qty", 5)])}) + cursor = db.inventory.find({"instock": {"warehouse": "A", "qty": 5}}) # End Example 30 - self.assertEqual(len(list(cursor)), 1) + self.assertEqual(len(cursor.to_list()), 1) # Start Example 31 - cursor = db.inventory.find({"instock": SON([("qty", 5), ("warehouse", "A")])}) + cursor = db.inventory.find({"instock": {"qty": 5, "warehouse": "A"}}) # End Example 31 - self.assertEqual(len(list(cursor)), 0) + self.assertEqual(len(cursor.to_list()), 0) # Start Example 32 cursor = db.inventory.find({"instock.0.qty": {"$lte": 20}}) # End Example 32 - self.assertEqual(len(list(cursor)), 3) + self.assertEqual(len(cursor.to_list()), 3) # Start Example 33 cursor = db.inventory.find({"instock.qty": {"$lte": 20}}) # End Example 33 - self.assertEqual(len(list(cursor)), 5) + self.assertEqual(len(cursor.to_list()), 5) # Start Example 34 cursor = db.inventory.find({"instock": {"$elemMatch": {"qty": 5, "warehouse": "A"}}}) # End Example 34 - self.assertEqual(len(list(cursor)), 1) + self.assertEqual(len(cursor.to_list()), 1) # Start Example 35 cursor = db.inventory.find({"instock": {"$elemMatch": {"qty": {"$gt": 10, "$lte": 20}}}}) # End Example 35 - self.assertEqual(len(list(cursor)), 3) + self.assertEqual(len(cursor.to_list()), 3) # Start Example 36 cursor = db.inventory.find({"instock.qty": {"$gt": 10, "$lte": 20}}) # End Example 36 - self.assertEqual(len(list(cursor)), 4) + self.assertEqual(len(cursor.to_list()), 4) # Start Example 37 cursor = db.inventory.find({"instock.qty": 5, "instock.warehouse": "A"}) # End Example 37 - self.assertEqual(len(list(cursor)), 2) + self.assertEqual(len(cursor.to_list()), 2) def test_query_null(self): db = self.db @@ -415,19 +413,19 @@ def test_query_null(self): cursor = db.inventory.find({"item": None}) # End Example 39 - 
self.assertEqual(len(list(cursor)), 2) + self.assertEqual(len(cursor.to_list()), 2) # Start Example 40 cursor = db.inventory.find({"item": {"$type": 10}}) # End Example 40 - self.assertEqual(len(list(cursor)), 1) + self.assertEqual(len(cursor.to_list()), 1) # Start Example 41 cursor = db.inventory.find({"item": {"$exists": False}}) # End Example 41 - self.assertEqual(len(list(cursor)), 1) + self.assertEqual(len(cursor.to_list()), 1) def test_projection(self): db = self.db @@ -473,84 +471,84 @@ def test_projection(self): cursor = db.inventory.find({"status": "A"}) # End Example 43 - self.assertEqual(len(list(cursor)), 3) + self.assertEqual(len(cursor.to_list()), 3) # Start Example 44 cursor = db.inventory.find({"status": "A"}, {"item": 1, "status": 1}) # End Example 44 for doc in cursor: - self.assertTrue("_id" in doc) - self.assertTrue("item" in doc) - self.assertTrue("status" in doc) - self.assertFalse("size" in doc) - self.assertFalse("instock" in doc) + self.assertIn("_id", doc) + self.assertIn("item", doc) + self.assertIn("status", doc) + self.assertNotIn("size", doc) + self.assertNotIn("instock", doc) # Start Example 45 cursor = db.inventory.find({"status": "A"}, {"item": 1, "status": 1, "_id": 0}) # End Example 45 for doc in cursor: - self.assertFalse("_id" in doc) - self.assertTrue("item" in doc) - self.assertTrue("status" in doc) - self.assertFalse("size" in doc) - self.assertFalse("instock" in doc) + self.assertNotIn("_id", doc) + self.assertIn("item", doc) + self.assertIn("status", doc) + self.assertNotIn("size", doc) + self.assertNotIn("instock", doc) # Start Example 46 cursor = db.inventory.find({"status": "A"}, {"status": 0, "instock": 0}) # End Example 46 for doc in cursor: - self.assertTrue("_id" in doc) - self.assertTrue("item" in doc) - self.assertFalse("status" in doc) - self.assertTrue("size" in doc) - self.assertFalse("instock" in doc) + self.assertIn("_id", doc) + self.assertIn("item", doc) + self.assertNotIn("status", doc) + self.assertIn("size", doc) + self.assertNotIn("instock", doc) # Start Example 47 cursor = db.inventory.find({"status": "A"}, {"item": 1, "status": 1, "size.uom": 1}) # End Example 47 for doc in cursor: - self.assertTrue("_id" in doc) - self.assertTrue("item" in doc) - self.assertTrue("status" in doc) - self.assertTrue("size" in doc) - self.assertFalse("instock" in doc) + self.assertIn("_id", doc) + self.assertIn("item", doc) + self.assertIn("status", doc) + self.assertIn("size", doc) + self.assertNotIn("instock", doc) size = doc["size"] - self.assertTrue("uom" in size) - self.assertFalse("h" in size) - self.assertFalse("w" in size) + self.assertIn("uom", size) + self.assertNotIn("h", size) + self.assertNotIn("w", size) # Start Example 48 cursor = db.inventory.find({"status": "A"}, {"size.uom": 0}) # End Example 48 for doc in cursor: - self.assertTrue("_id" in doc) - self.assertTrue("item" in doc) - self.assertTrue("status" in doc) - self.assertTrue("size" in doc) - self.assertTrue("instock" in doc) + self.assertIn("_id", doc) + self.assertIn("item", doc) + self.assertIn("status", doc) + self.assertIn("size", doc) + self.assertIn("instock", doc) size = doc["size"] - self.assertFalse("uom" in size) - self.assertTrue("h" in size) - self.assertTrue("w" in size) + self.assertNotIn("uom", size) + self.assertIn("h", size) + self.assertIn("w", size) # Start Example 49 cursor = db.inventory.find({"status": "A"}, {"item": 1, "status": 1, "instock.qty": 1}) # End Example 49 for doc in cursor: - self.assertTrue("_id" in doc) - self.assertTrue("item" in doc) - 
self.assertTrue("status" in doc) - self.assertFalse("size" in doc) - self.assertTrue("instock" in doc) + self.assertIn("_id", doc) + self.assertIn("item", doc) + self.assertIn("status", doc) + self.assertNotIn("size", doc) + self.assertIn("instock", doc) for subdoc in doc["instock"]: - self.assertFalse("warehouse" in subdoc) - self.assertTrue("qty" in subdoc) + self.assertNotIn("warehouse", subdoc) + self.assertIn("qty", subdoc) # Start Example 50 cursor = db.inventory.find( @@ -559,11 +557,11 @@ def test_projection(self): # End Example 50 for doc in cursor: - self.assertTrue("_id" in doc) - self.assertTrue("item" in doc) - self.assertTrue("status" in doc) - self.assertFalse("size" in doc) - self.assertTrue("instock" in doc) + self.assertIn("_id", doc) + self.assertIn("item", doc) + self.assertIn("status", doc) + self.assertNotIn("size", doc) + self.assertIn("instock", doc) self.assertEqual(len(doc["instock"]), 1) def test_update_and_replace(self): @@ -646,7 +644,7 @@ def test_update_and_replace(self): for doc in db.inventory.find({"item": "paper"}): self.assertEqual(doc["size"]["uom"], "cm") self.assertEqual(doc["status"], "P") - self.assertTrue("lastModified" in doc) + self.assertIn("lastModified", doc) # Start Example 53 db.inventory.update_many( @@ -658,7 +656,7 @@ def test_update_and_replace(self): for doc in db.inventory.find({"qty": {"$lt": 50}}): self.assertEqual(doc["size"]["uom"], "in") self.assertEqual(doc["status"], "P") - self.assertTrue("lastModified" in doc) + self.assertIn("lastModified", doc) # Start Example 54 db.inventory.replace_one( @@ -672,8 +670,8 @@ def test_update_and_replace(self): for doc in db.inventory.find({"item": "paper"}, {"_id": 0}): self.assertEqual(len(doc.keys()), 2) - self.assertTrue("item" in doc) - self.assertTrue("instock" in doc) + self.assertIn("item", doc) + self.assertIn("instock", doc) self.assertEqual(len(doc["instock"]), 2) def test_delete(self): @@ -746,8 +744,9 @@ def insert_docs(): while not done: db.inventory.insert_one({"username": "alice"}) db.inventory.delete_one({"username": "alice"}) + time.sleep(0.005) - t = threading.Thread(target=insert_docs) + t = ConcurrentRunner(target=insert_docs) t.start() try: @@ -1153,19 +1152,13 @@ def callback(session): # Step 2: Start a client session. with client.start_session() as session: # Step 3: Use with_transaction to start a transaction, execute the callback, and commit (or abort on error). 
- session.with_transaction( - callback, - read_concern=ReadConcern("local"), - write_concern=wc_majority, - read_preference=ReadPreference.PRIMARY, - ) + session.with_transaction(callback) # End Transactions withTxn API Example 1 class TestCausalConsistencyExamples(IntegrationTest): @client_context.require_secondaries_count(1) - @client_context.require_no_mmap def test_causal_consistency(self): # Causal consistency examples client = self.client @@ -1347,20 +1340,37 @@ def test_snapshot_query(self): db.drop_collection("dogs") db.cats.insert_one({"name": "Whiskers", "color": "white", "age": 10, "adoptable": True}) db.dogs.insert_one({"name": "Pebbles", "color": "Brown", "age": 10, "adoptable": True}) - wait_until(lambda: self.check_for_snapshot(db.cats), "success") - wait_until(lambda: self.check_for_snapshot(db.dogs), "success") + + def predicate_one(): + return self.check_for_snapshot(db.cats) + + def predicate_two(): + return self.check_for_snapshot(db.dogs) + + wait_until(predicate_two, "success") + wait_until(predicate_one, "success") # Start Snapshot Query Example 1 db = client.pets with client.start_session(snapshot=True) as s: - adoptablePetsCount = db.cats.aggregate( - [{"$match": {"adoptable": True}}, {"$count": "adoptableCatsCount"}], session=s - ).next()["adoptableCatsCount"] - - adoptablePetsCount += db.dogs.aggregate( - [{"$match": {"adoptable": True}}, {"$count": "adoptableDogsCount"}], session=s - ).next()["adoptableDogsCount"] + adoptablePetsCount = ( + ( + db.cats.aggregate( + [{"$match": {"adoptable": True}}, {"$count": "adoptableCatsCount"}], + session=s, + ) + ).next() + )["adoptableCatsCount"] + + adoptablePetsCount += ( + ( + db.dogs.aggregate( + [{"$match": {"adoptable": True}}, {"$count": "adoptableDogsCount"}], + session=s, + ) + ).next() + )["adoptableDogsCount"] print(adoptablePetsCount) @@ -1371,33 +1381,41 @@ def test_snapshot_query(self): saleDate = datetime.datetime.now() db.sales.insert_one({"shoeType": "boot", "price": 30, "saleDate": saleDate}) - wait_until(lambda: self.check_for_snapshot(db.sales), "success") + + def predicate_three(): + return self.check_for_snapshot(db.sales) + + wait_until(predicate_three, "success") # Start Snapshot Query Example 2 db = client.retail with client.start_session(snapshot=True) as s: - db.sales.aggregate( - [ - { - "$match": { - "$expr": { - "$gt": [ - "$saleDate", - { - "$dateSubtract": { - "startDate": "$$NOW", - "unit": "day", - "amount": 1, - } - }, - ] - } - } - }, - {"$count": "totalDailySales"}, - ], - session=s, - ).next()["totalDailySales"] + _ = ( + ( + db.sales.aggregate( + [ + { + "$match": { + "$expr": { + "$gt": [ + "$saleDate", + { + "$dateSubtract": { + "startDate": "$$NOW", + "unit": "day", + "amount": 1, + } + }, + ] + } + } + }, + {"$count": "totalDailySales"}, + ], + session=s, + ) + ).next() + )["totalDailySales"] # End Snapshot Query Example 2 diff --git a/test/test_fork.py b/test/test_fork.py index 1a89159435..dad947d8c5 100644 --- a/test/test_fork.py +++ b/test/test_fork.py @@ -24,7 +24,7 @@ sys.path[0:0] = [""] from test import IntegrationTest -from test.utils import is_greenthread_patched +from test.utils_shared import is_greenthread_patched from bson.objectid import ObjectId @@ -34,7 +34,7 @@ ) @unittest.skipIf( is_greenthread_patched(), - "gevent and eventlet do not support POSIX-style forking.", + "gevent does not support POSIX-style forking.", ) class TestFork(IntegrationTest): def test_lock_client(self): diff --git a/test/test_grid_file.py b/test/test_grid_file.py index 6534bc11bf..c7ccda44a4 
100644 --- a/test/test_grid_file.py +++ b/test/test_grid_file.py @@ -33,7 +33,7 @@ sys.path[0:0] = [""] -from test.utils import OvertCommandListener +from test.utils_shared import OvertCommandListener from bson.objectid import ObjectId from gridfs.errors import NoFile @@ -49,7 +49,6 @@ from pymongo import MongoClient from pymongo.errors import ConfigurationError, ServerSelectionTimeoutError from pymongo.message import _CursorAddress -from pymongo.synchronous.helpers import iter, next _IS_SYNC = True @@ -150,7 +149,7 @@ def test_grid_in_default_opts(self): a = GridIn(self.db.fs) - self.assertTrue(isinstance(a._id, ObjectId)) + self.assertIsInstance(a._id, ObjectId) self.assertRaises(AttributeError, setattr, a, "_id", 5) self.assertEqual(None, a.filename) @@ -195,7 +194,7 @@ def test_grid_in_default_opts(self): self.assertEqual(42, a.forty_two) - self.assertTrue(isinstance(a._id, ObjectId)) + self.assertIsInstance(a._id, ObjectId) self.assertRaises(AttributeError, setattr, a, "_id", 5) self.assertEqual("my_file", a.filename) @@ -209,7 +208,7 @@ def test_grid_in_default_opts(self): self.assertEqual(255 * 1024, a.chunk_size) self.assertRaises(AttributeError, setattr, a, "chunk_size", 5) - self.assertTrue(isinstance(a.upload_date, datetime.datetime)) + self.assertIsInstance(a.upload_date, datetime.datetime) self.assertRaises(AttributeError, setattr, a, "upload_date", 5) self.assertEqual(["foo"], a.aliases) @@ -248,7 +247,7 @@ def test_grid_out_default_opts(self): self.assertEqual(None, b.name) self.assertEqual(None, b.filename) self.assertEqual(255 * 1024, b.chunk_size) - self.assertTrue(isinstance(b.upload_date, datetime.datetime)) + self.assertIsInstance(b.upload_date, datetime.datetime) self.assertEqual(None, b.aliases) self.assertEqual(None, b.metadata) self.assertEqual(None, b.md5) @@ -309,7 +308,7 @@ def test_grid_out_custom_opts(self): self.assertEqual(11, two.length) self.assertEqual("text/html", two.content_type) self.assertEqual(1000, two.chunk_size) - self.assertTrue(isinstance(two.upload_date, datetime.datetime)) + self.assertIsInstance(two.upload_date, datetime.datetime) self.assertEqual(["foo"], two.aliases) self.assertEqual({"foo": 1, "bar": 2}, two.metadata) self.assertEqual(3, two.bar) diff --git a/test/test_gridfs.py b/test/test_gridfs.py index ab8950250b..8bda041447 100644 --- a/test/test_gridfs.py +++ b/test/test_gridfs.py @@ -16,17 +16,20 @@ """Tests for the gridfs package.""" from __future__ import annotations +import asyncio import datetime import sys import threading import time from io import BytesIO +from test.helpers import ConcurrentRunner from unittest.mock import patch sys.path[0:0] = [""] from test import IntegrationTest, client_context, unittest -from test.utils import joinall, one +from test.utils import joinall +from test.utils_shared import one import gridfs from bson.binary import Binary @@ -41,10 +44,12 @@ from pymongo.synchronous.database import Database from pymongo.synchronous.mongo_client import MongoClient +_IS_SYNC = True -class JustWrite(threading.Thread): + +class JustWrite(ConcurrentRunner): def __init__(self, fs, n): - threading.Thread.__init__(self) + super().__init__() self.fs = fs self.n = n self.daemon = True @@ -56,9 +61,9 @@ def run(self): file.close() -class JustRead(threading.Thread): +class JustRead(ConcurrentRunner): def __init__(self, fs, n, results): - threading.Thread.__init__(self) + super().__init__() self.fs = fs self.n = n self.results = results @@ -98,19 +103,21 @@ def setUp(self): def test_basic(self): oid = self.fs.put(b"hello 
world") - self.assertEqual(b"hello world", self.fs.get(oid).read()) + self.assertEqual(b"hello world", (self.fs.get(oid)).read()) self.assertEqual(1, self.db.fs.files.count_documents({})) self.assertEqual(1, self.db.fs.chunks.count_documents({})) self.fs.delete(oid) - self.assertRaises(NoFile, self.fs.get, oid) + with self.assertRaises(NoFile): + self.fs.get(oid) self.assertEqual(0, self.db.fs.files.count_documents({})) self.assertEqual(0, self.db.fs.chunks.count_documents({})) - self.assertRaises(NoFile, self.fs.get, "foo") + with self.assertRaises(NoFile): + self.fs.get("foo") oid = self.fs.put(b"hello world", _id="foo") self.assertEqual("foo", oid) - self.assertEqual(b"hello world", self.fs.get("foo").read()) + self.assertEqual(b"hello world", (self.fs.get("foo")).read()) def test_multi_chunk_delete(self): self.db.fs.drop() @@ -142,7 +149,7 @@ def test_list(self): def test_empty_file(self): oid = self.fs.put(b"") - self.assertEqual(b"", self.fs.get(oid).read()) + self.assertEqual(b"", (self.fs.get(oid)).read()) self.assertEqual(1, self.db.fs.files.count_documents({})) self.assertEqual(0, self.db.fs.chunks.count_documents({})) @@ -150,7 +157,7 @@ def test_empty_file(self): assert raw is not None self.assertEqual(0, raw["length"]) self.assertEqual(oid, raw["_id"]) - self.assertTrue(isinstance(raw["uploadDate"], datetime.datetime)) + self.assertIsInstance(raw["uploadDate"], datetime.datetime) self.assertEqual(255 * 1024, raw["chunkSize"]) self.assertNotIn("md5", raw) @@ -159,10 +166,12 @@ def test_corrupt_chunk(self): self.db.fs.chunks.update_one({"files_id": files_id}, {"$set": {"data": Binary(b"foo", 0)}}) try: out = self.fs.get(files_id) - self.assertRaises(CorruptGridFile, out.read) + with self.assertRaises(CorruptGridFile): + out.read() out = self.fs.get(files_id) - self.assertRaises(CorruptGridFile, out.readline) + with self.assertRaises(CorruptGridFile): + out.readline() finally: self.fs.delete(files_id) @@ -177,31 +186,33 @@ def test_put_ensures_index(self): self.assertTrue( any( info.get("key") == [("files_id", 1), ("n", 1)] - for info in chunks.index_information().values() + for info in (chunks.index_information()).values() ) ) self.assertTrue( any( info.get("key") == [("filename", 1), ("uploadDate", 1)] - for info in files.index_information().values() + for info in (files.index_information()).values() ) ) def test_alt_collection(self): oid = self.alt.put(b"hello world") - self.assertEqual(b"hello world", self.alt.get(oid).read()) + self.assertEqual(b"hello world", (self.alt.get(oid)).read()) self.assertEqual(1, self.db.alt.files.count_documents({})) self.assertEqual(1, self.db.alt.chunks.count_documents({})) self.alt.delete(oid) - self.assertRaises(NoFile, self.alt.get, oid) + with self.assertRaises(NoFile): + self.alt.get(oid) self.assertEqual(0, self.db.alt.files.count_documents({})) self.assertEqual(0, self.db.alt.chunks.count_documents({})) - self.assertRaises(NoFile, self.alt.get, "foo") + with self.assertRaises(NoFile): + self.alt.get("foo") oid = self.alt.put(b"hello world", _id="foo") self.assertEqual("foo", oid) - self.assertEqual(b"hello world", self.alt.get("foo").read()) + self.assertEqual(b"hello world", (self.alt.get("foo")).read()) self.alt.put(b"", filename="mike") self.alt.put(b"foo", filename="test") @@ -212,23 +223,23 @@ def test_alt_collection(self): def test_threaded_reads(self): self.fs.put(b"hello", _id="test") - threads = [] + tasks = [] results: list = [] for i in range(10): - threads.append(JustRead(self.fs, 10, results)) - threads[i].start() + 
tasks.append(JustRead(self.fs, 10, results)) + tasks[i].start() - joinall(threads) + joinall(tasks) self.assertEqual(100 * [b"hello"], results) def test_threaded_writes(self): - threads = [] + tasks = [] for i in range(10): - threads.append(JustWrite(self.fs, 10)) - threads[i].start() + tasks.append(JustWrite(self.fs, 10)) + tasks[i].start() - joinall(threads) + joinall(tasks) f = self.fs.get_last_version("test") self.assertEqual(f.read(), b"hello") @@ -246,34 +257,37 @@ def test_get_last_version(self): two = two._id three = self.fs.put(b"baz", filename="test") - self.assertEqual(b"baz", self.fs.get_last_version("test").read()) + self.assertEqual(b"baz", (self.fs.get_last_version("test")).read()) self.fs.delete(three) - self.assertEqual(b"bar", self.fs.get_last_version("test").read()) + self.assertEqual(b"bar", (self.fs.get_last_version("test")).read()) self.fs.delete(two) - self.assertEqual(b"foo", self.fs.get_last_version("test").read()) + self.assertEqual(b"foo", (self.fs.get_last_version("test")).read()) self.fs.delete(one) - self.assertRaises(NoFile, self.fs.get_last_version, "test") + with self.assertRaises(NoFile): + self.fs.get_last_version("test") def test_get_last_version_with_metadata(self): one = self.fs.put(b"foo", filename="test", author="author") time.sleep(0.01) two = self.fs.put(b"bar", filename="test", author="author") - self.assertEqual(b"bar", self.fs.get_last_version(author="author").read()) + self.assertEqual(b"bar", (self.fs.get_last_version(author="author")).read()) self.fs.delete(two) - self.assertEqual(b"foo", self.fs.get_last_version(author="author").read()) + self.assertEqual(b"foo", (self.fs.get_last_version(author="author")).read()) self.fs.delete(one) one = self.fs.put(b"foo", filename="test", author="author1") time.sleep(0.01) two = self.fs.put(b"bar", filename="test", author="author2") - self.assertEqual(b"foo", self.fs.get_last_version(author="author1").read()) - self.assertEqual(b"bar", self.fs.get_last_version(author="author2").read()) - self.assertEqual(b"bar", self.fs.get_last_version(filename="test").read()) + self.assertEqual(b"foo", (self.fs.get_last_version(author="author1")).read()) + self.assertEqual(b"bar", (self.fs.get_last_version(author="author2")).read()) + self.assertEqual(b"bar", (self.fs.get_last_version(filename="test")).read()) - self.assertRaises(NoFile, self.fs.get_last_version, author="author3") - self.assertRaises(NoFile, self.fs.get_last_version, filename="nottest", author="author1") + with self.assertRaises(NoFile): + self.fs.get_last_version(author="author3") + with self.assertRaises(NoFile): + self.fs.get_last_version(filename="nottest", author="author1") self.fs.delete(one) self.fs.delete(two) @@ -286,16 +300,18 @@ def test_get_version(self): self.fs.put(b"baz", filename="test") time.sleep(0.01) - self.assertEqual(b"foo", self.fs.get_version("test", 0).read()) - self.assertEqual(b"bar", self.fs.get_version("test", 1).read()) - self.assertEqual(b"baz", self.fs.get_version("test", 2).read()) + self.assertEqual(b"foo", (self.fs.get_version("test", 0)).read()) + self.assertEqual(b"bar", (self.fs.get_version("test", 1)).read()) + self.assertEqual(b"baz", (self.fs.get_version("test", 2)).read()) - self.assertEqual(b"baz", self.fs.get_version("test", -1).read()) - self.assertEqual(b"bar", self.fs.get_version("test", -2).read()) - self.assertEqual(b"foo", self.fs.get_version("test", -3).read()) + self.assertEqual(b"baz", (self.fs.get_version("test", -1)).read()) + self.assertEqual(b"bar", (self.fs.get_version("test", -2)).read()) + 
self.assertEqual(b"foo", (self.fs.get_version("test", -3)).read()) - self.assertRaises(NoFile, self.fs.get_version, "test", 3) - self.assertRaises(NoFile, self.fs.get_version, "test", -4) + with self.assertRaises(NoFile): + self.fs.get_version("test", 3) + with self.assertRaises(NoFile): + self.fs.get_version("test", -4) def test_get_version_with_metadata(self): one = self.fs.put(b"foo", filename="test", author="author1") @@ -305,25 +321,32 @@ def test_get_version_with_metadata(self): three = self.fs.put(b"baz", filename="test", author="author2") self.assertEqual( - b"foo", self.fs.get_version(filename="test", author="author1", version=-2).read() + b"foo", + (self.fs.get_version(filename="test", author="author1", version=-2)).read(), ) self.assertEqual( - b"bar", self.fs.get_version(filename="test", author="author1", version=-1).read() + b"bar", + (self.fs.get_version(filename="test", author="author1", version=-1)).read(), ) self.assertEqual( - b"foo", self.fs.get_version(filename="test", author="author1", version=0).read() + b"foo", + (self.fs.get_version(filename="test", author="author1", version=0)).read(), ) self.assertEqual( - b"bar", self.fs.get_version(filename="test", author="author1", version=1).read() + b"bar", + (self.fs.get_version(filename="test", author="author1", version=1)).read(), ) self.assertEqual( - b"baz", self.fs.get_version(filename="test", author="author2", version=0).read() + b"baz", + (self.fs.get_version(filename="test", author="author2", version=0)).read(), ) - self.assertEqual(b"baz", self.fs.get_version(filename="test", version=-1).read()) - self.assertEqual(b"baz", self.fs.get_version(filename="test", version=2).read()) + self.assertEqual(b"baz", (self.fs.get_version(filename="test", version=-1)).read()) + self.assertEqual(b"baz", (self.fs.get_version(filename="test", version=2)).read()) - self.assertRaises(NoFile, self.fs.get_version, filename="test", author="author3") - self.assertRaises(NoFile, self.fs.get_version, filename="test", author="author1", version=2) + with self.assertRaises(NoFile): + self.fs.get_version(filename="test", author="author3") + with self.assertRaises(NoFile): + self.fs.get_version(filename="test", author="author1", version=2) self.fs.delete(one) self.fs.delete(two) @@ -332,11 +355,12 @@ def test_get_version_with_metadata(self): def test_put_filelike(self): oid = self.fs.put(BytesIO(b"hello world"), chunk_size=1) self.assertEqual(11, self.db.fs.chunks.count_documents({})) - self.assertEqual(b"hello world", self.fs.get(oid).read()) + self.assertEqual(b"hello world", (self.fs.get(oid)).read()) def test_file_exists(self): oid = self.fs.put(b"hello") - self.assertRaises(FileExists, self.fs.put, b"world", _id=oid) + with self.assertRaises(FileExists): + self.fs.put(b"world", _id=oid) one = self.fs.new_file(_id=123) one.write(b"some content") @@ -345,15 +369,17 @@ def test_file_exists(self): # Attempt to upload a file with more chunks to the same _id. with patch("gridfs.synchronous.grid_file._UPLOAD_BUFFER_SIZE", DEFAULT_CHUNK_SIZE): two = self.fs.new_file(_id=123) - self.assertRaises(FileExists, two.write, b"x" * DEFAULT_CHUNK_SIZE * 3) + with self.assertRaises(FileExists): + two.write(b"x" * DEFAULT_CHUNK_SIZE * 3) # Original file is still readable (no extra chunks were uploaded). 
- self.assertEqual(self.fs.get(123).read(), b"some content") + self.assertEqual((self.fs.get(123)).read(), b"some content") two = self.fs.new_file(_id=123) two.write(b"some content") - self.assertRaises(FileExists, two.close) + with self.assertRaises(FileExists): + two.close() # Original file is still readable. - self.assertEqual(self.fs.get(123).read(), b"some content") + self.assertEqual((self.fs.get(123)).read(), b"some content") def test_exists(self): oid = self.fs.put(b"hello") @@ -381,15 +407,16 @@ def test_exists(self): self.assertFalse(self.fs.exists({"foo": {"$gt": 12}})) def test_put_unicode(self): - self.assertRaises(TypeError, self.fs.put, "hello") + with self.assertRaises(TypeError): + self.fs.put("hello") oid = self.fs.put("hello", encoding="utf-8") - self.assertEqual(b"hello", self.fs.get(oid).read()) - self.assertEqual("utf-8", self.fs.get(oid).encoding) + self.assertEqual(b"hello", (self.fs.get(oid)).read()) + self.assertEqual("utf-8", (self.fs.get(oid)).encoding) oid = self.fs.put("aé", encoding="iso-8859-1") - self.assertEqual("aé".encode("iso-8859-1"), self.fs.get(oid).read()) - self.assertEqual("iso-8859-1", self.fs.get(oid).encoding) + self.assertEqual("aé".encode("iso-8859-1"), (self.fs.get(oid)).read()) + self.assertEqual("iso-8859-1", (self.fs.get(oid)).encoding) def test_missing_length_iter(self): # Test fix that guards against PHP-237 @@ -411,11 +438,13 @@ def test_gridfs_lazy_connect(self): client = self.single_client("badhost", connect=False, serverSelectionTimeoutMS=10) db = client.db gfs = gridfs.GridFS(db) - self.assertRaises(ServerSelectionTimeoutError, gfs.list) + with self.assertRaises(ServerSelectionTimeoutError): + gfs.list() fs = gridfs.GridFS(db) f = fs.new_file() - self.assertRaises(ServerSelectionTimeoutError, f.close) + with self.assertRaises(ServerSelectionTimeoutError): + f.close() def test_gridfs_find(self): self.fs.put(b"test2", filename="two") @@ -429,14 +458,15 @@ def test_gridfs_find(self): self.assertEqual(3, files.count_documents({"filename": "two"})) self.assertEqual(4, files.count_documents({})) cursor = self.fs.find(no_cursor_timeout=False).sort("uploadDate", -1).skip(1).limit(2) - gout = next(cursor) + gout = cursor.next() self.assertEqual(b"test1", gout.read()) cursor.rewind() - gout = next(cursor) + gout = cursor.next() self.assertEqual(b"test1", gout.read()) - gout = next(cursor) + gout = cursor.next() self.assertEqual(b"test2+", gout.read()) - self.assertRaises(StopIteration, cursor.__next__) + with self.assertRaises(StopIteration): + cursor.__next__() cursor.rewind() items = cursor.to_list() self.assertEqual(len(items), 2) @@ -484,12 +514,12 @@ def test_grid_in_non_int_chunksize(self): self.fs.put(data, filename="f") self.db.fs.files.update_one({"filename": "f"}, {"$set": {"chunkSize": 100.0}}) - self.assertEqual(data, self.fs.get_version("f").read()) + self.assertEqual(data, (self.fs.get_version("f")).read()) def test_unacknowledged(self): # w=0 is prohibited. 
with self.assertRaises(ConfigurationError): - gridfs.GridFS(self.rs_or_single_client(w=0).pymongo_test) + gridfs.GridFS((self.rs_or_single_client(w=0)).pymongo_test) def test_md5(self): gin = self.fs.new_file() @@ -524,7 +554,7 @@ def test_gridfs_replica_set(self): self.assertEqual(gin._coll.read_preference, ReadPreference.PRIMARY) oid = fs.put(b"foo") - content = fs.get(oid).read() + content = (fs.get(oid)).read() self.assertEqual(b"foo", content) def test_gridfs_secondary(self): @@ -538,7 +568,8 @@ def test_gridfs_secondary(self): fs = gridfs.GridFS(secondary_connection.gfsreplica, "gfssecondarytest") # This won't detect secondary, raises error - self.assertRaises(NotPrimaryError, fs.put, b"foo") + with self.assertRaises(NotPrimaryError): + fs.put(b"foo") def test_gridfs_secondary_lazy(self): # Should detect it's connected to secondary and not attempt to @@ -552,8 +583,10 @@ def test_gridfs_secondary_lazy(self): fs = gridfs.GridFS(client.gfsreplica, "gfssecondarylazytest") # Connects, doesn't create index. - self.assertRaises(NoFile, fs.get_last_version) - self.assertRaises(NotPrimaryError, fs.put, "data", encoding="utf-8") + with self.assertRaises(NoFile): + fs.get_last_version() + with self.assertRaises(NotPrimaryError): + fs.put("data", encoding="utf-8") if __name__ == "__main__": diff --git a/test/test_gridfs_bucket.py b/test/test_gridfs_bucket.py index 0af4dce811..9dbb082ee9 100644 --- a/test/test_gridfs_bucket.py +++ b/test/test_gridfs_bucket.py @@ -16,18 +16,21 @@ """Tests for the gridfs package.""" from __future__ import annotations +import asyncio import datetime import itertools import sys import threading import time from io import BytesIO +from test.helpers import ConcurrentRunner from unittest.mock import patch sys.path[0:0] = [""] from test import IntegrationTest, client_context, unittest -from test.utils import joinall, one +from test.utils import joinall +from test.utils_shared import one import gridfs from bson.binary import Binary @@ -44,10 +47,12 @@ from pymongo.read_preferences import ReadPreference from pymongo.synchronous.mongo_client import MongoClient +_IS_SYNC = True -class JustWrite(threading.Thread): + +class JustWrite(ConcurrentRunner): def __init__(self, gfs, num): - threading.Thread.__init__(self) + super().__init__() self.gfs = gfs self.num = num self.daemon = True @@ -59,9 +64,9 @@ def run(self): file.close() -class JustRead(threading.Thread): +class JustRead(ConcurrentRunner): def __init__(self, gfs, num, results): - threading.Thread.__init__(self) + super().__init__() self.gfs = gfs self.num = num self.results = results @@ -89,12 +94,13 @@ def setUp(self): def test_basic(self): oid = self.fs.upload_from_stream("test_filename", b"hello world") - self.assertEqual(b"hello world", self.fs.open_download_stream(oid).read()) + self.assertEqual(b"hello world", (self.fs.open_download_stream(oid)).read()) self.assertEqual(1, self.db.fs.files.count_documents({})) self.assertEqual(1, self.db.fs.chunks.count_documents({})) self.fs.delete(oid) - self.assertRaises(NoFile, self.fs.open_download_stream, oid) + with self.assertRaises(NoFile): + self.fs.open_download_stream(oid) self.assertEqual(0, self.db.fs.files.count_documents({})) self.assertEqual(0, self.db.fs.chunks.count_documents({})) @@ -109,9 +115,20 @@ def test_multi_chunk_delete(self): self.assertEqual(0, self.db.fs.files.count_documents({})) self.assertEqual(0, self.db.fs.chunks.count_documents({})) + def test_delete_by_name(self): + self.assertEqual(0, self.db.fs.files.count_documents({})) + 
self.assertEqual(0, self.db.fs.chunks.count_documents({})) + gfs = gridfs.GridFSBucket(self.db) + gfs.upload_from_stream("test_filename", b"hello", chunk_size_bytes=1) + self.assertEqual(1, self.db.fs.files.count_documents({})) + self.assertEqual(5, self.db.fs.chunks.count_documents({})) + gfs.delete_by_name("test_filename") + self.assertEqual(0, self.db.fs.files.count_documents({})) + self.assertEqual(0, self.db.fs.chunks.count_documents({})) + def test_empty_file(self): oid = self.fs.upload_from_stream("test_filename", b"") - self.assertEqual(b"", self.fs.open_download_stream(oid).read()) + self.assertEqual(b"", (self.fs.open_download_stream(oid)).read()) self.assertEqual(1, self.db.fs.files.count_documents({})) self.assertEqual(0, self.db.fs.chunks.count_documents({})) @@ -119,7 +136,7 @@ def test_empty_file(self): assert raw is not None self.assertEqual(0, raw["length"]) self.assertEqual(oid, raw["_id"]) - self.assertTrue(isinstance(raw["uploadDate"], datetime.datetime)) + self.assertIsInstance(raw["uploadDate"], datetime.datetime) self.assertEqual(255 * 1024, raw["chunkSize"]) self.assertNotIn("md5", raw) @@ -128,10 +145,12 @@ def test_corrupt_chunk(self): self.db.fs.chunks.update_one({"files_id": files_id}, {"$set": {"data": Binary(b"foo", 0)}}) try: out = self.fs.open_download_stream(files_id) - self.assertRaises(CorruptGridFile, out.read) + with self.assertRaises(CorruptGridFile): + out.read() out = self.fs.open_download_stream(files_id) - self.assertRaises(CorruptGridFile, out.readline) + with self.assertRaises(CorruptGridFile): + out.readline() finally: self.fs.delete(files_id) @@ -143,17 +162,16 @@ def test_upload_ensures_index(self): files.drop() self.fs.upload_from_stream("filename", b"junk") - self.assertTrue( - any( - info.get("key") == [("files_id", 1), ("n", 1)] - for info in chunks.index_information().values() - ) + self.assertIn( + [("files_id", 1), ("n", 1)], + [info.get("key") for info in (chunks.index_information()).values()], + "Missing required index on chunks collection: {files_id: 1, n: 1}", ) - self.assertTrue( - any( - info.get("key") == [("filename", 1), ("uploadDate", 1)] - for info in files.index_information().values() - ) + + self.assertIn( + [("filename", 1), ("uploadDate", 1)], + [info.get("key") for info in (files.index_information()).values()], + "Missing required index on files collection: {filename: 1, uploadDate: 1}", ) def test_ensure_index_shell_compat(self): @@ -171,28 +189,29 @@ def test_ensure_index_shell_compat(self): # No error. 
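+        # The shell-created index uses float key values; it must be recognized as
+        # equivalent so that no duplicate index is built.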
self.fs.upload_from_stream("filename", b"data") - self.assertTrue( - any( - info.get("key") == [("filename", 1), ("uploadDate", 1)] - for info in files.index_information().values() - ) + self.assertIn( + [("filename", 1), ("uploadDate", 1)], + [info.get("key") for info in (files.index_information()).values()], + "Missing required index on files collection: {filename: 1, uploadDate: 1}", ) files.drop() def test_alt_collection(self): oid = self.alt.upload_from_stream("test_filename", b"hello world") - self.assertEqual(b"hello world", self.alt.open_download_stream(oid).read()) + self.assertEqual(b"hello world", (self.alt.open_download_stream(oid)).read()) self.assertEqual(1, self.db.alt.files.count_documents({})) self.assertEqual(1, self.db.alt.chunks.count_documents({})) self.alt.delete(oid) - self.assertRaises(NoFile, self.alt.open_download_stream, oid) + with self.assertRaises(NoFile): + self.alt.open_download_stream(oid) self.assertEqual(0, self.db.alt.files.count_documents({})) self.assertEqual(0, self.db.alt.chunks.count_documents({})) - self.assertRaises(NoFile, self.alt.open_download_stream, "foo") + with self.assertRaises(NoFile): + self.alt.open_download_stream("foo") self.alt.upload_from_stream("foo", b"hello world") - self.assertEqual(b"hello world", self.alt.open_download_stream_by_name("foo").read()) + self.assertEqual(b"hello world", (self.alt.open_download_stream_by_name("foo")).read()) self.alt.upload_from_stream("mike", b"") self.alt.upload_from_stream("test", b"foo") @@ -200,7 +219,7 @@ def test_alt_collection(self): self.assertEqual( {"mike", "test", "hello world", "foo"}, - {k["filename"] for k in list(self.db.alt.files.find())}, + {k["filename"] for k in self.db.alt.files.find().to_list()}, ) def test_threaded_reads(self): @@ -240,13 +259,14 @@ def test_get_last_version(self): two = two._id three = self.fs.upload_from_stream("test", b"baz") - self.assertEqual(b"baz", self.fs.open_download_stream_by_name("test").read()) + self.assertEqual(b"baz", (self.fs.open_download_stream_by_name("test")).read()) self.fs.delete(three) - self.assertEqual(b"bar", self.fs.open_download_stream_by_name("test").read()) + self.assertEqual(b"bar", (self.fs.open_download_stream_by_name("test")).read()) self.fs.delete(two) - self.assertEqual(b"foo", self.fs.open_download_stream_by_name("test").read()) + self.assertEqual(b"foo", (self.fs.open_download_stream_by_name("test")).read()) self.fs.delete(one) - self.assertRaises(NoFile, self.fs.open_download_stream_by_name, "test") + with self.assertRaises(NoFile): + self.fs.open_download_stream_by_name("test") def test_get_version(self): self.fs.upload_from_stream("test", b"foo") @@ -256,28 +276,30 @@ def test_get_version(self): self.fs.upload_from_stream("test", b"baz") time.sleep(0.01) - self.assertEqual(b"foo", self.fs.open_download_stream_by_name("test", revision=0).read()) - self.assertEqual(b"bar", self.fs.open_download_stream_by_name("test", revision=1).read()) - self.assertEqual(b"baz", self.fs.open_download_stream_by_name("test", revision=2).read()) + self.assertEqual(b"foo", (self.fs.open_download_stream_by_name("test", revision=0)).read()) + self.assertEqual(b"bar", (self.fs.open_download_stream_by_name("test", revision=1)).read()) + self.assertEqual(b"baz", (self.fs.open_download_stream_by_name("test", revision=2)).read()) - self.assertEqual(b"baz", self.fs.open_download_stream_by_name("test", revision=-1).read()) - self.assertEqual(b"bar", self.fs.open_download_stream_by_name("test", revision=-2).read()) - self.assertEqual(b"foo", 
self.fs.open_download_stream_by_name("test", revision=-3).read()) + self.assertEqual(b"baz", (self.fs.open_download_stream_by_name("test", revision=-1)).read()) + self.assertEqual(b"bar", (self.fs.open_download_stream_by_name("test", revision=-2)).read()) + self.assertEqual(b"foo", (self.fs.open_download_stream_by_name("test", revision=-3)).read()) - self.assertRaises(NoFile, self.fs.open_download_stream_by_name, "test", revision=3) - self.assertRaises(NoFile, self.fs.open_download_stream_by_name, "test", revision=-4) + with self.assertRaises(NoFile): + self.fs.open_download_stream_by_name("test", revision=3) + with self.assertRaises(NoFile): + self.fs.open_download_stream_by_name("test", revision=-4) def test_upload_from_stream(self): oid = self.fs.upload_from_stream("test_file", BytesIO(b"hello world"), chunk_size_bytes=1) self.assertEqual(11, self.db.fs.chunks.count_documents({})) - self.assertEqual(b"hello world", self.fs.open_download_stream(oid).read()) + self.assertEqual(b"hello world", (self.fs.open_download_stream(oid)).read()) def test_upload_from_stream_with_id(self): oid = ObjectId() self.fs.upload_from_stream_with_id( oid, "test_file_custom_id", BytesIO(b"custom id"), chunk_size_bytes=1 ) - self.assertEqual(b"custom id", self.fs.open_download_stream(oid).read()) + self.assertEqual(b"custom id", (self.fs.open_download_stream(oid)).read()) @patch("gridfs.synchronous.grid_file._UPLOAD_BUFFER_CHUNKS", 3) @client_context.require_failCommand_fail_point @@ -316,14 +338,14 @@ def test_open_upload_stream(self): gin = self.fs.open_upload_stream("from_stream") gin.write(b"from stream") gin.close() - self.assertEqual(b"from stream", self.fs.open_download_stream(gin._id).read()) + self.assertEqual(b"from stream", (self.fs.open_download_stream(gin._id)).read()) def test_open_upload_stream_with_id(self): oid = ObjectId() gin = self.fs.open_upload_stream_with_id(oid, "from_stream_custom_id") gin.write(b"from stream with custom id") gin.close() - self.assertEqual(b"from stream with custom id", self.fs.open_download_stream(oid).read()) + self.assertEqual(b"from stream with custom id", (self.fs.open_download_stream(oid)).read()) def test_missing_length_iter(self): # Test fix that guards against PHP-237 @@ -345,12 +367,12 @@ def test_gridfs_lazy_connect(self): client = self.single_client("badhost", connect=False, serverSelectionTimeoutMS=0) cdb = client.db gfs = gridfs.GridFSBucket(cdb) - self.assertRaises(ServerSelectionTimeoutError, gfs.delete, 0) + with self.assertRaises(ServerSelectionTimeoutError): + gfs.delete(0) gfs = gridfs.GridFSBucket(cdb) - self.assertRaises( - ServerSelectionTimeoutError, gfs.upload_from_stream, "test", b"" - ) # Still no connection. + with self.assertRaises(ServerSelectionTimeoutError): + gfs.upload_from_stream("test", b"") # Still no connection. 
def test_gridfs_find(self): self.fs.upload_from_stream("two", b"test2") @@ -366,14 +388,15 @@ def test_gridfs_find(self): cursor = self.fs.find( {}, no_cursor_timeout=False, sort=[("uploadDate", -1)], skip=1, limit=2 ) - gout = next(cursor) + gout = cursor.next() self.assertEqual(b"test1", gout.read()) cursor.rewind() - gout = next(cursor) + gout = cursor.next() self.assertEqual(b"test1", gout.read()) - gout = next(cursor) + gout = cursor.next() self.assertEqual(b"test2+", gout.read()) - self.assertRaises(StopIteration, cursor.__next__) + with self.assertRaises(StopIteration): + cursor.next() cursor.close() self.assertRaises(TypeError, self.fs.find, {}, {"_id": True}) @@ -383,20 +406,30 @@ def test_grid_in_non_int_chunksize(self): self.fs.upload_from_stream("f", data) self.db.fs.files.update_one({"filename": "f"}, {"$set": {"chunkSize": 100.0}}) - self.assertEqual(data, self.fs.open_download_stream_by_name("f").read()) + self.assertEqual(data, (self.fs.open_download_stream_by_name("f")).read()) def test_unacknowledged(self): # w=0 is prohibited. with self.assertRaises(ConfigurationError): - gridfs.GridFSBucket(self.rs_or_single_client(w=0).pymongo_test) + gridfs.GridFSBucket((self.rs_or_single_client(w=0)).pymongo_test) def test_rename(self): _id = self.fs.upload_from_stream("first_name", b"testing") - self.assertEqual(b"testing", self.fs.open_download_stream_by_name("first_name").read()) + self.assertEqual(b"testing", (self.fs.open_download_stream_by_name("first_name")).read()) self.fs.rename(_id, "second_name") - self.assertRaises(NoFile, self.fs.open_download_stream_by_name, "first_name") - self.assertEqual(b"testing", self.fs.open_download_stream_by_name("second_name").read()) + with self.assertRaises(NoFile): + self.fs.open_download_stream_by_name("first_name") + self.assertEqual(b"testing", (self.fs.open_download_stream_by_name("second_name")).read()) + + def test_rename_by_name(self): + _id = self.fs.upload_from_stream("first_name", b"testing") + self.assertEqual(b"testing", (self.fs.open_download_stream_by_name("first_name")).read()) + + self.fs.rename_by_name("first_name", "second_name") + with self.assertRaises(NoFile): + self.fs.open_download_stream_by_name("first_name") + self.assertEqual(b"testing", (self.fs.open_download_stream_by_name("second_name")).read()) @patch("gridfs.synchronous.grid_file._UPLOAD_BUFFER_SIZE", 5) def test_abort(self): @@ -407,7 +440,8 @@ def test_abort(self): self.assertEqual(3, self.db.fs.chunks.count_documents({"files_id": gin._id})) gin.abort() self.assertTrue(gin.closed) - self.assertRaises(ValueError, gin.write, b"test4") + with self.assertRaises(ValueError): + gin.write(b"test4") self.assertEqual(0, self.db.fs.chunks.count_documents({"files_id": gin._id})) def test_download_to_stream(self): @@ -490,7 +524,7 @@ def test_gridfs_replica_set(self): gfs = gridfs.GridFSBucket(rsc.gfsbucketreplica, "gfsbucketreplicatest") oid = gfs.upload_from_stream("test_filename", b"foo") - content = gfs.open_download_stream(oid).read() + content = (gfs.open_download_stream(oid)).read() self.assertEqual(b"foo", content) def test_gridfs_secondary(self): @@ -504,7 +538,8 @@ def test_gridfs_secondary(self): gfs = gridfs.GridFSBucket(secondary_connection.gfsbucketreplica, "gfsbucketsecondarytest") # This won't detect secondary, raises error - self.assertRaises(NotPrimaryError, gfs.upload_from_stream, "test_filename", b"foo") + with self.assertRaises(NotPrimaryError): + gfs.upload_from_stream("test_filename", b"foo") def test_gridfs_secondary_lazy(self): # Should detect 
it's connected to secondary and not attempt to @@ -518,8 +553,10 @@ def test_gridfs_secondary_lazy(self): gfs = gridfs.GridFSBucket(client.gfsbucketreplica, "gfsbucketsecondarylazytest") # Connects, doesn't create index. - self.assertRaises(NoFile, gfs.open_download_stream_by_name, "test_filename") - self.assertRaises(NotPrimaryError, gfs.upload_from_stream, "test_filename", b"data") + with self.assertRaises(NoFile): + gfs.open_download_stream_by_name("test_filename") + with self.assertRaises(NotPrimaryError): + gfs.upload_from_stream("test_filename", b"data") if __name__ == "__main__": diff --git a/test/test_gridfs_spec.py b/test/test_gridfs_spec.py index 6840b6ae0c..e84e19725e 100644 --- a/test/test_gridfs_spec.py +++ b/test/test_gridfs_spec.py @@ -17,14 +17,20 @@ import os import sys +from pathlib import Path sys.path[0:0] = [""] from test import unittest from test.unified_format import generate_test_classes +_IS_SYNC = True + # Location of JSON test specifications. -TEST_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), "gridfs") +if _IS_SYNC: + TEST_PATH = os.path.join(Path(__file__).resolve().parent, "gridfs") +else: + TEST_PATH = os.path.join(Path(__file__).resolve().parent.parent, "gridfs") # Generate unified tests. globals().update(generate_test_classes(TEST_PATH, module=__name__)) diff --git a/test/test_heartbeat_monitoring.py b/test/test_heartbeat_monitoring.py index 5e203a33b3..7864caf6e1 100644 --- a/test/test_heartbeat_monitoring.py +++ b/test/test_heartbeat_monitoring.py @@ -16,16 +16,19 @@ from __future__ import annotations import sys +from test.utils import MockPool sys.path[0:0] = [""] from test import IntegrationTest, client_knobs, unittest -from test.utils import HeartbeatEventListener, MockPool, wait_until +from test.utils_shared import HeartbeatEventListener, wait_until from pymongo.errors import ConnectionFailure from pymongo.hello import Hello, HelloCompat from pymongo.synchronous.monitor import Monitor +_IS_SYNC = True + class TestHeartbeatMonitoring(IntegrationTest): def create_mock_monitor(self, responses, uri, expected_results): @@ -40,8 +43,12 @@ def _check_with_socket(self, *args, **kwargs): raise responses[1] return Hello(responses[1]), 99 - m = self.single_client( - h=uri, event_listeners=(listener,), _monitor_class=MockMonitor, _pool_class=MockPool + _ = self.single_client( + h=uri, + event_listeners=(listener,), + _monitor_class=MockMonitor, + _pool_class=MockPool, + connect=True, ) expected_len = len(expected_results) @@ -50,20 +57,16 @@ def _check_with_socket(self, *args, **kwargs): # of this test. wait_until(lambda: len(listener.events) >= expected_len, "publish all events") - try: - # zip gives us len(expected_results) pairs. - for expected, actual in zip(expected_results, listener.events): - self.assertEqual(expected, actual.__class__.__name__) - self.assertEqual(actual.connection_id, responses[0]) - if expected != "ServerHeartbeatStartedEvent": - if isinstance(actual.reply, Hello): - self.assertEqual(actual.duration, 99) - self.assertEqual(actual.reply._doc, responses[1]) - else: - self.assertEqual(actual.reply, responses[1]) - - finally: - m.close() + # zip gives us len(expected_results) pairs. 
+        for expected, actual in zip(expected_results, listener.events):
+            self.assertEqual(expected, actual.__class__.__name__)
+            self.assertEqual(actual.connection_id, responses[0])
+            if expected != "ServerHeartbeatStartedEvent":
+                if isinstance(actual.reply, Hello):
+                    self.assertEqual(actual.duration, 99)
+                    self.assertEqual(actual.reply._doc, responses[1])
+                else:
+                    self.assertEqual(actual.reply, responses[1])

     def test_standalone(self):
         responses = (
diff --git a/test/test_index_management.py b/test/test_index_management.py
index 6ca726e2e0..dea8c0e2be 100644
--- a/test/test_index_management.py
+++ b/test/test_index_management.py
@@ -15,7 +15,9 @@
 """Run the index management spec tests."""
 from __future__ import annotations

+import asyncio
 import os
+import pathlib
 import sys
 import time
 import uuid
@@ -27,24 +29,28 @@
 from test import IntegrationTest, PyMongoTestCase, unittest
 from test.unified_format import generate_test_classes
-from test.utils import AllowListEventListener, EventListener, OvertCommandListener
+from test.utils_shared import AllowListEventListener, OvertCommandListener

 from pymongo.errors import OperationFailure
 from pymongo.operations import SearchIndexModel
 from pymongo.read_concern import ReadConcern
 from pymongo.write_concern import WriteConcern

-pytestmark = pytest.mark.index_management
+_IS_SYNC = True

-_TEST_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), "index_management")
+pytestmark = pytest.mark.search_index
+
+# Location of JSON test specifications.
+if _IS_SYNC:
+    _TEST_PATH = os.path.join(pathlib.Path(__file__).resolve().parent, "index_management")
+else:
+    _TEST_PATH = os.path.join(pathlib.Path(__file__).resolve().parent.parent, "index_management")

 _NAME = "test-search-index"


 class TestCreateSearchIndex(IntegrationTest):
     def test_inputs(self):
-        if not os.environ.get("TEST_INDEX_MANAGEMENT"):
-            raise unittest.SkipTest("Skipping index management tests")
         listener = AllowListEventListener("createSearchIndexes")
         client = self.simple_client(event_listeners=[listener])
         coll = client.test.test
@@ -82,23 +88,23 @@ class SearchIndexIntegrationBase(PyMongoTestCase):
     @classmethod
     def setUpClass(cls) -> None:
-        super().setUpClass()
-        if not os.environ.get("TEST_INDEX_MANAGEMENT"):
-            raise unittest.SkipTest("Skipping index management tests")
-        url = os.environ.get("MONGODB_URI")
-        username = os.environ["DB_USER"]
-        password = os.environ["DB_PASSWORD"]
-        cls.listener = listener = OvertCommandListener()
-        cls.client = cls.unmanaged_simple_client(
-            url, username=username, password=password, event_listeners=[listener]
+        cls.url = os.environ.get("MONGODB_URI")
+        cls.username = os.environ["DB_USER"]
+        cls.password = os.environ["DB_PASSWORD"]
+        cls.listener = OvertCommandListener()
+
+    def setUp(self) -> None:
+        self.client = self.simple_client(
+            self.url,
+            username=self.username,
+            password=self.password,
+            event_listeners=[self.listener],
         )
-        cls.client.drop_database(_NAME)
-        cls.db = cls.client[cls.db_name]
+        self.client.drop_database(_NAME)
+        self.db = self.client[self.db_name]

-    @classmethod
-    def tearDownClass(cls):
-        cls.client.drop_database(_NAME)
-        cls.client.close()
+    def tearDown(self):
+        self.client.drop_database(_NAME)

     def wait_for_ready(self, coll, name=_NAME, predicate=None):
         """Wait for a search index to be ready."""
@@ -107,10 +113,9 @@ def wait_for_ready(self, coll, name=_NAME, predicate=None):
             predicate = lambda index: index.get("queryable") is True

         while True:
-            indices = list(coll.list_search_indexes(name))
+            indices = (coll.list_search_indexes(name)).to_list()
             if len(indices) and predicate(indices[0]):
                 return indices[0]
-                break

             time.sleep(5)
@@ -133,7 +138,7 @@ def test_comment_field(self):
         # Get the index definition.
         self.listener.reset()
-        coll0.list_search_indexes(name=implicit_search_resp, comment="foo").next()
+        (coll0.list_search_indexes(name=implicit_search_resp, comment="foo")).next()
         event = self.listener.events[0]
         self.assertEqual(event.command["comment"], "foo")
@@ -183,7 +188,7 @@ def test_case_2(self):
         )

         # Assert that the command returns an array containing the new indexes' names: ``["test-search-index-1", "test-search-index-2"]``.
-        indices = list(coll0.list_search_indexes())
+        indices = (coll0.list_search_indexes()).to_list()
         names = [i["name"] for i in indices]
         self.assertIn(name1, names)
         self.assertIn(name2, names)
@@ -223,7 +228,7 @@ def test_case_3(self):
         # Run ``coll0.listSearchIndexes()`` repeatedly every 5 seconds until ``listSearchIndexes`` returns an empty array.
         t0 = time.time()
         while True:
-            indices = list(coll0.list_search_indexes())
+            indices = (coll0.list_search_indexes()).to_list()
             if indices:
                 break
             if (time.time() - t0) / 60 > 5:
@@ -259,7 +264,7 @@ def test_case_4(self):
         self.wait_for_ready(coll0, predicate=predicate)

         # Assert that an index is present with the name ``test-search-index`` and the definition has a property ``latestDefinition`` whose value is ``{ 'mappings': { 'dynamic': true } }``.
-        index = list(coll0.list_search_indexes(_NAME))[0]
+        index = ((coll0.list_search_indexes(_NAME)).to_list())[0]
         self.assertIn("latestDefinition", index)
         self.assertEqual(index["latestDefinition"], model2["definition"])
@@ -324,7 +329,7 @@ def test_case_7(self):
         )

         # Get the index definition.
-        resp = coll0.list_search_indexes(name=implicit_search_resp).next()
+        resp = (coll0.list_search_indexes(name=implicit_search_resp)).next()

         # Assert that the index model contains the correct index type: ``"search"``.
         self.assertEqual(resp["type"], "search")
@@ -335,7 +340,7 @@ def test_case_7(self):
         )

         # Get the index definition.
-        resp = coll0.list_search_indexes(name=explicit_search_resp).next()
+        resp = (coll0.list_search_indexes(name=explicit_search_resp)).next()

         # Assert that the index model contains the correct index type: ``"search"``.
         self.assertEqual(resp["type"], "search")
@@ -350,7 +355,7 @@
         )

         # Get the index definition.
-        resp = coll0.list_search_indexes(name=explicit_vector_resp).next()
+        resp = (coll0.list_search_indexes(name=explicit_vector_resp)).next()

         # Assert that the index model contains the correct index type: ``"vectorSearch"``.
self.assertEqual(resp["type"], "vectorSearch") diff --git a/test/test_json_util.py b/test/test_json_util.py index 821ca76da0..cf2c0efb93 100644 --- a/test/test_json_util.py +++ b/test/test_json_util.py @@ -21,13 +21,13 @@ import sys import uuid from collections import OrderedDict -from typing import Any, List, MutableMapping, Tuple, Type +from typing import Any, Tuple, Type from bson.codec_options import CodecOptions, DatetimeConversion sys.path[0:0] = [""] -from test import IntegrationTest, unittest +from test import unittest from bson import EPOCH_AWARE, EPOCH_NAIVE, SON, DatetimeMS, json_util from bson.binary import ( @@ -341,7 +341,7 @@ def test_regex_object_hook(self): pat = "a*b" json_re = '{"$regex": "%s", "$options": "u"}' % pat loaded = json_util.object_hook(json.loads(json_re)) - self.assertTrue(isinstance(loaded, Regex)) + self.assertIsInstance(loaded, Regex) self.assertEqual(pat, loaded.pattern) self.assertEqual(re.U, loaded.flags) @@ -636,24 +636,5 @@ class MyBinary(Binary): self.assertEqual(json_util.dumps(MyBinary(b"bin", USER_DEFINED_SUBTYPE)), expected_json) -class TestJsonUtilRoundtrip(IntegrationTest): - def test_cursor(self): - db = self.db - - db.drop_collection("test") - docs: List[MutableMapping[str, Any]] = [ - {"foo": [1, 2]}, - {"bar": {"hello": "world"}}, - {"code": Code("function x() { return 1; }")}, - {"bin": Binary(b"\x00\x01\x02\x03\x04", USER_DEFINED_SUBTYPE)}, - {"dbref": {"_ref": DBRef("simple", ObjectId("509b8db456c02c5ab7e63c34"))}}, - ] - - db.test.insert_many(docs) - reloaded_docs = json_util.loads(json_util.dumps(db.test.find())) - for doc in docs: - self.assertTrue(doc in reloaded_docs) - - if __name__ == "__main__": unittest.main() diff --git a/test/test_json_util_integration.py b/test/test_json_util_integration.py new file mode 100644 index 0000000000..4ef5f10fe2 --- /dev/null +++ b/test/test_json_util_integration.py @@ -0,0 +1,28 @@ +from __future__ import annotations + +from test import IntegrationTest +from typing import Any, List, MutableMapping + +from bson import Binary, Code, DBRef, ObjectId, json_util +from bson.binary import USER_DEFINED_SUBTYPE + +_IS_SYNC = True + + +class TestJsonUtilRoundtrip(IntegrationTest): + def test_cursor(self): + db = self.db + + db.drop_collection("test") + docs: List[MutableMapping[str, Any]] = [ + {"foo": [1, 2]}, + {"bar": {"hello": "world"}}, + {"code": Code("function x() { return 1; }")}, + {"bin": Binary(b"\x00\x01\x02\x03\x04", USER_DEFINED_SUBTYPE)}, + {"dbref": {"_ref": DBRef("simple", ObjectId("509b8db456c02c5ab7e63c34"))}}, + ] + + db.test.insert_many(docs) + reloaded_docs = json_util.loads(json_util.dumps((db.test.find()).to_list())) + for doc in docs: + self.assertIn(doc, reloaded_docs) diff --git a/test/test_load_balancer.py b/test/test_load_balancer.py index 23bea4d984..472ef51da3 100644 --- a/test/test_load_balancer.py +++ b/test/test_load_balancer.py @@ -15,10 +15,15 @@ """Test the Load Balancer unified spec tests.""" from __future__ import annotations +import asyncio import gc import os +import pathlib import sys import threading +from asyncio import Event +from test.helpers import ConcurrentRunner, ExceptionCatchingTask +from test.utils import get_pool import pytest @@ -26,20 +31,27 @@ from test import IntegrationTest, client_context, unittest from test.unified_format import generate_test_classes -from test.utils import ExceptionCatchingThread, get_pool, wait_until +from test.utils_shared import ( + create_event, + wait_until, +) + +_IS_SYNC = True pytestmark = pytest.mark.load_balancer # 
Location of JSON test specifications. -TEST_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), "load_balancer") +if _IS_SYNC: + _TEST_PATH = os.path.join(pathlib.Path(__file__).resolve().parent, "load_balancer") +else: + _TEST_PATH = os.path.join(pathlib.Path(__file__).resolve().parent.parent, "load_balancer") # Generate unified tests. -globals().update(generate_test_classes(TEST_PATH, module=__name__)) +globals().update(generate_test_classes(_TEST_PATH, module=__name__)) class TestLB(IntegrationTest): RUN_ON_LOAD_BALANCER = True - RUN_ON_SERVERLESS = True def test_connections_are_only_returned_once(self): if "PyPy" in sys.version: @@ -49,13 +61,12 @@ def test_connections_are_only_returned_once(self): n_conns = len(pool.conns) self.db.test.find_one({}) self.assertEqual(len(pool.conns), n_conns) - list(self.db.test.aggregate([{"$limit": 1}])) + (self.db.test.aggregate([{"$limit": 1}])).to_list() self.assertEqual(len(pool.conns), n_conns) @client_context.require_load_balancer def test_unpin_committed_transaction(self): client = self.rs_client() - self.addCleanup(client.close) pool = get_pool(client) coll = client[self.db.name].test with client.start_session() as session: @@ -86,7 +97,6 @@ def create_resource(coll): def _test_no_gc_deadlock(self, create_resource): client = self.rs_client() - self.addCleanup(client.close) pool = get_pool(client) coll = client[self.db.name].test coll.insert_many([{} for _ in range(10)]) @@ -104,19 +114,19 @@ def _test_no_gc_deadlock(self, create_resource): if client_context.load_balancer: self.assertEqual(pool.active_sockets, 1) # Pinned. - thread = PoolLocker(pool) - thread.start() - self.assertTrue(thread.locked.wait(5), "timed out") + task = PoolLocker(pool) + task.start() + self.assertTrue(task.wait(task.locked, 5), "timed out") # Garbage collect the resource while the pool is locked to ensure we # don't deadlock. del resource # On PyPy it can take a few rounds to collect the cursor. for _ in range(3): gc.collect() - thread.unlock.set() - thread.join(5) - self.assertFalse(thread.is_alive()) - self.assertIsNone(thread.exc) + task.unlock.set() + task.join(5) + self.assertFalse(task.is_alive()) + self.assertIsNone(task.exc) wait_until(lambda: pool.active_sockets == 0, "return socket") # Run another operation to ensure the socket still works. @@ -125,53 +135,60 @@ def _test_no_gc_deadlock(self, create_resource): @client_context.require_transactions def test_session_gc(self): client = self.rs_client() - self.addCleanup(client.close) pool = get_pool(client) session = client.start_session() session.start_transaction() client.test_session_gc.test.find_one({}, session=session) - # Cleanup the transaction left open on the server unless we're - # testing serverless which does not support killSessions. - if not client_context.serverless: - self.addCleanup(self.client.admin.command, "killSessions", [session.session_id]) + # Cleanup the transaction left open on the server + self.addCleanup(self.client.admin.command, "killSessions", [session.session_id]) if client_context.load_balancer: self.assertEqual(pool.active_sockets, 1) # Pinned. - thread = PoolLocker(pool) - thread.start() - self.assertTrue(thread.locked.wait(5), "timed out") + task = PoolLocker(pool) + task.start() + self.assertTrue(task.wait(task.locked, 5), "timed out") # Garbage collect the session while the pool is locked to ensure we # don't deadlock. del session # On PyPy it can take a few rounds to collect the session. 
for _ in range(3): gc.collect() - thread.unlock.set() - thread.join(5) - self.assertFalse(thread.is_alive()) - self.assertIsNone(thread.exc) + task.unlock.set() + task.join(5) + self.assertFalse(task.is_alive()) + self.assertIsNone(task.exc) wait_until(lambda: pool.active_sockets == 0, "return socket") # Run another operation to ensure the socket still works. client[self.db.name].test.delete_many({}) -class PoolLocker(ExceptionCatchingThread): +class PoolLocker(ExceptionCatchingTask): def __init__(self, pool): super().__init__(target=self.lock_pool) self.pool = pool self.daemon = True - self.locked = threading.Event() - self.unlock = threading.Event() + self.locked = create_event() + self.unlock = create_event() def lock_pool(self): with self.pool.lock: self.locked.set() # Wait for the unlock flag. - unlock_pool = self.unlock.wait(10) + unlock_pool = self.wait(self.unlock, 10) if not unlock_pool: raise Exception("timed out waiting for unlock signal: deadlock?") + def wait(self, event: Event, timeout: int): + if _IS_SYNC: + return event.wait(timeout) # type: ignore[call-arg] + else: + try: + asyncio.wait_for(event.wait(), timeout=timeout) + except asyncio.TimeoutError: + return False + return True + if __name__ == "__main__": unittest.main() diff --git a/test/test_logger.py b/test/test_logger.py index b3c8e6d176..a7d97927fa 100644 --- a/test/test_logger.py +++ b/test/test_logger.py @@ -14,7 +14,7 @@ from __future__ import annotations import os -from test import IntegrationTest, unittest +from test import IntegrationTest, client_context, unittest from unittest.mock import patch from bson import json_util @@ -96,6 +96,49 @@ def test_logging_without_listeners(self): c.db.test.insert_one({"x": "1"}) self.assertGreater(len(cm.records), 0) + @client_context.require_failCommand_fail_point + def test_logging_retry_read_attempts(self): + self.db.test.insert_one({"x": "1"}) + + with self.fail_point( + { + "mode": {"times": 1}, + "data": { + "failCommands": ["find"], + "errorCode": 10107, + "errorLabels": ["RetryableWriteError"], + }, + } + ): + with self.assertLogs("pymongo.command", level="DEBUG") as cm: + self.db.test.find_one({"x": "1"}) + + retry_messages = [ + r.getMessage() for r in cm.records if "Retrying read attempt" in r.getMessage() + ] + self.assertEqual(len(retry_messages), 1) + + @client_context.require_failCommand_fail_point + @client_context.require_retryable_writes + def test_logging_retry_write_attempts(self): + with self.fail_point( + { + "mode": {"times": 1}, + "data": { + "errorCode": 10107, + "errorLabels": ["RetryableWriteError"], + "failCommands": ["insert"], + }, + } + ): + with self.assertLogs("pymongo.command", level="DEBUG") as cm: + self.db.test.insert_one({"x": "1"}) + + retry_messages = [ + r.getMessage() for r in cm.records if "Retrying write attempt" in r.getMessage() + ] + self.assertEqual(len(retry_messages), 1) + if __name__ == "__main__": unittest.main() diff --git a/test/test_max_staleness.py b/test/test_max_staleness.py index 32d09ada9a..56e047fd4b 100644 --- a/test/test_max_staleness.py +++ b/test/test_max_staleness.py @@ -15,10 +15,12 @@ """Test maxStalenessSeconds support.""" from __future__ import annotations +import asyncio import os import sys import time import warnings +from pathlib import Path from pymongo import MongoClient from pymongo.operations import _Op @@ -31,11 +33,16 @@ from pymongo.errors import ConfigurationError from pymongo.server_selectors import writable_server_selector +_IS_SYNC = True + # Location of JSON test specifications. 
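The retry-logging tests added to test_logger.py above drive a single retry through the server's failCommand fail point and then count the "Retrying read attempt" / "Retrying write attempt" log lines. A sketch of the fail point mechanics on their own, assuming a standalone mongod started with --setParameter enableTestCommands=1 (database and collection names are placeholders):

from pymongo import MongoClient

client = MongoClient()  # assumed local server with test commands enabled

# Fail the next "find" exactly once with code 10107 (NotWritablePrimary),
# an error the retryable-reads machinery retries transparently.
client.admin.command(
    "configureFailPoint",
    "failCommand",
    mode={"times": 1},
    data={"failCommands": ["find"], "errorCode": 10107},
)
try:
    client.test.coll.find_one({})  # first attempt fails, the retry succeeds
finally:
    # Always disable the fail point so later operations are unaffected.
    client.admin.command("configureFailPoint", "failCommand", mode="off")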
-_TEST_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), "max_staleness") +if _IS_SYNC: + TEST_PATH = os.path.join(Path(__file__).resolve().parent, "max_staleness") +else: + TEST_PATH = os.path.join(Path(__file__).resolve().parent.parent, "max_staleness") -class TestAllScenarios(create_selection_tests(_TEST_PATH)): # type: ignore +class TestAllScenarios(create_selection_tests(TEST_PATH)): # type: ignore pass diff --git a/test/test_mongos_load_balancing.py b/test/test_mongos_load_balancing.py index 7bc8225465..8c31854343 100644 --- a/test/test_mongos_load_balancing.py +++ b/test/test_mongos_load_balancing.py @@ -15,8 +15,10 @@ """Test MongoClient's mongos load balancing using a mock.""" from __future__ import annotations +import asyncio import sys import threading +from test.helpers import ConcurrentRunner from pymongo.operations import _Op @@ -24,20 +26,16 @@ from test import MockClientTest, client_context, connected, unittest from test.pymongo_mocks import MockClient -from test.utils import wait_until +from test.utils_shared import wait_until from pymongo.errors import AutoReconnect, InvalidOperation from pymongo.server_selectors import writable_server_selector from pymongo.topology_description import TOPOLOGY_TYPE +_IS_SYNC = True -@client_context.require_connection -@client_context.require_no_load_balancer -def setUpModule(): - pass - -class SimpleOp(threading.Thread): +class SimpleOp(ConcurrentRunner): def __init__(self, client): super().__init__() self.client = client @@ -48,15 +46,15 @@ def run(self): self.passed = True # No exception raised. -def do_simple_op(client, nthreads): - threads = [SimpleOp(client) for _ in range(nthreads)] - for t in threads: +def do_simple_op(client, ntasks): + tasks = [SimpleOp(client) for _ in range(ntasks)] + for t in tasks: t.start() - for t in threads: + for t in tasks: t.join() - for t in threads: + for t in tasks: assert t.passed @@ -68,6 +66,11 @@ def writable_addresses(topology): class TestMongosLoadBalancing(MockClientTest): + @client_context.require_connection + @client_context.require_no_load_balancer + def setUp(self): + super().setUp() + def mock_client(self, **kwargs): mock_client = MockClient( standalones=[], @@ -98,7 +101,7 @@ def test_lazy_connect(self): wait_until(lambda: len(client.nodes) == 3, "connect to all mongoses") def test_failover(self): - nthreads = 10 + ntasks = 10 client = connected(self.mock_client(localThresholdMS=0.001)) wait_until(lambda: len(client.nodes) == 3, "connect to all mongoses") @@ -118,14 +121,14 @@ def f(): passed.append(True) - threads = [threading.Thread(target=f) for _ in range(nthreads)] - for t in threads: + tasks = [ConcurrentRunner(target=f) for _ in range(ntasks)] + for t in tasks: t.start() - for t in threads: + for t in tasks: t.join() - self.assertEqual(nthreads, len(passed)) + self.assertEqual(ntasks, len(passed)) # Down host removed from list. self.assertEqual(2, len(client.nodes)) @@ -183,8 +186,11 @@ def test_load_balancing(self): client.mock_rtts["a:1"] = 0.045 # Discover only b is within latency window. 
+ def predicate(): + return {("b", 2)} == writable_addresses(topology) + wait_until( - lambda: {("b", 2)} == writable_addresses(topology), + predicate, 'discover server "a" is too far', ) diff --git a/test/test_monitor.py b/test/test_monitor.py index a704f3d8cb..c10662c893 100644 --- a/test/test_monitor.py +++ b/test/test_monitor.py @@ -15,6 +15,7 @@ """Test the monitor module.""" from __future__ import annotations +import asyncio import gc import subprocess import sys @@ -23,14 +24,16 @@ sys.path[0:0] = [""] -from test import IntegrationTest, connected, unittest +from test import IntegrationTest, client_context, connected, unittest from test.utils import ( - ServerAndTopologyEventListener, wait_until, ) +from test.utils_shared import ServerAndTopologyEventListener, gevent_monkey_patched from pymongo.periodic_executor import _EXECUTORS +_IS_SYNC = True + def unregistered(ref): gc.collect() @@ -54,9 +57,13 @@ def create_client(self): connected(client) return client + @unittest.skipIf("PyPy" in sys.version, "PYTHON-5283 fails often on PyPy") + @unittest.skipIf( + gevent_monkey_patched(), "PYTHON-5516 Resources are not cleared when using gevent" + ) def test_cleanup_executors_on_client_del(self): - with warnings.catch_warnings(): - warnings.simplefilter("ignore") + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("always") client = self.create_client() executors = get_executors(client) self.assertEqual(len(executors), 4) @@ -70,6 +77,19 @@ def test_cleanup_executors_on_client_del(self): for ref, name in executor_refs: wait_until(partial(unregistered, ref), f"unregister executor: {name}", timeout=5) + def resource_warning_caught(): + gc.collect() + for warning in w: + if ( + issubclass(warning.category, ResourceWarning) + and "Call MongoClient.close() to safely shut down your client and free up resources." 
+ in str(warning.message) + ): + return True + return False + + wait_until(resource_warning_caught, "catch resource warning") + def test_cleanup_executors_on_client_close(self): client = self.create_client() executors = get_executors(client) @@ -80,10 +100,15 @@ def test_cleanup_executors_on_client_close(self): for executor in executors: wait_until(lambda: executor._stopped, f"closed executor: {executor._name}", timeout=5) + @client_context.require_sync def test_no_thread_start_runtime_err_on_shutdown(self): """Test we silence noisy runtime errors fired when the MongoClient spawns a new thread on process shutdown.""" - command = [sys.executable, "-c", "from pymongo import MongoClient; c = MongoClient()"] + command = [ + sys.executable, + "-c", + "from pymongo import MongoClient; c = MongoClient()", + ] completed_process: subprocess.CompletedProcess = subprocess.run( command, capture_output=True ) diff --git a/test/test_monitoring.py b/test/test_monitoring.py index 670558c0a0..f5a18af9ed 100644 --- a/test/test_monitoring.py +++ b/test/test_monitoring.py @@ -29,7 +29,7 @@ sanitize_cmd, unittest, ) -from test.utils import ( +from test.utils_shared import ( EventListener, OvertCommandListener, wait_until, @@ -42,7 +42,6 @@ from pymongo.errors import AutoReconnect, NotPrimaryError, OperationFailure from pymongo.read_preferences import ReadPreference from pymongo.synchronous.command_cursor import CommandCursor -from pymongo.synchronous.helpers import next from pymongo.write_concern import WriteConcern _IS_SYNC = True @@ -66,26 +65,26 @@ def test_started_simple(self): started = self.listener.started_events[0] succeeded = self.listener.succeeded_events[0] self.assertEqual(0, len(self.listener.failed_events)) - self.assertTrue(isinstance(succeeded, monitoring.CommandSucceededEvent)) - self.assertTrue(isinstance(started, monitoring.CommandStartedEvent)) + self.assertIsInstance(succeeded, monitoring.CommandSucceededEvent) + self.assertIsInstance(started, monitoring.CommandStartedEvent) self.assertEqualCommand(SON([("ping", 1)]), started.command) self.assertEqual("ping", started.command_name) self.assertEqual(self.client.address, started.connection_id) self.assertEqual("pymongo_test", started.database_name) - self.assertTrue(isinstance(started.request_id, int)) + self.assertIsInstance(started.request_id, int) def test_succeeded_simple(self): self.client.pymongo_test.command("ping") started = self.listener.started_events[0] succeeded = self.listener.succeeded_events[0] self.assertEqual(0, len(self.listener.failed_events)) - self.assertTrue(isinstance(started, monitoring.CommandStartedEvent)) - self.assertTrue(isinstance(succeeded, monitoring.CommandSucceededEvent)) + self.assertIsInstance(started, monitoring.CommandStartedEvent) + self.assertIsInstance(succeeded, monitoring.CommandSucceededEvent) self.assertEqual("ping", succeeded.command_name) self.assertEqual(self.client.address, succeeded.connection_id) self.assertEqual(1, succeeded.reply.get("ok")) - self.assertTrue(isinstance(succeeded.request_id, int)) - self.assertTrue(isinstance(succeeded.duration_micros, int)) + self.assertIsInstance(succeeded.request_id, int) + self.assertIsInstance(succeeded.duration_micros, int) def test_failed_simple(self): try: @@ -95,21 +94,21 @@ def test_failed_simple(self): started = self.listener.started_events[0] failed = self.listener.failed_events[0] self.assertEqual(0, len(self.listener.succeeded_events)) - self.assertTrue(isinstance(started, monitoring.CommandStartedEvent)) - self.assertTrue(isinstance(failed, 
monitoring.CommandFailedEvent)) + self.assertIsInstance(started, monitoring.CommandStartedEvent) + self.assertIsInstance(failed, monitoring.CommandFailedEvent) self.assertEqual("oops!", failed.command_name) self.assertEqual(self.client.address, failed.connection_id) self.assertEqual(0, failed.failure.get("ok")) - self.assertTrue(isinstance(failed.request_id, int)) - self.assertTrue(isinstance(failed.duration_micros, int)) + self.assertIsInstance(failed.request_id, int) + self.assertIsInstance(failed.duration_micros, int) def test_find_one(self): self.client.pymongo_test.test.find_one() started = self.listener.started_events[0] succeeded = self.listener.succeeded_events[0] self.assertEqual(0, len(self.listener.failed_events)) - self.assertTrue(isinstance(succeeded, monitoring.CommandSucceededEvent)) - self.assertTrue(isinstance(started, monitoring.CommandStartedEvent)) + self.assertIsInstance(succeeded, monitoring.CommandSucceededEvent) + self.assertIsInstance(started, monitoring.CommandStartedEvent) self.assertEqualCommand( SON([("find", "test"), ("filter", {}), ("limit", 1), ("singleBatch", True)]), started.command, @@ -117,7 +116,7 @@ def test_find_one(self): self.assertEqual("find", started.command_name) self.assertEqual(self.client.address, started.connection_id) self.assertEqual("pymongo_test", started.database_name) - self.assertTrue(isinstance(started.request_id, int)) + self.assertIsInstance(started.request_id, int) def test_find_and_get_more(self): self.client.pymongo_test.test.drop() @@ -130,7 +129,7 @@ def test_find_and_get_more(self): started = self.listener.started_events[0] succeeded = self.listener.succeeded_events[0] self.assertEqual(0, len(self.listener.failed_events)) - self.assertTrue(isinstance(started, monitoring.CommandStartedEvent)) + self.assertIsInstance(started, monitoring.CommandStartedEvent) self.assertEqualCommand( SON( [("find", "test"), ("filter", {}), ("projection", {"_id": False}), ("batchSize", 4)] @@ -140,11 +139,11 @@ def test_find_and_get_more(self): self.assertEqual("find", started.command_name) self.assertEqual(self.client.address, started.connection_id) self.assertEqual("pymongo_test", started.database_name) - self.assertTrue(isinstance(started.request_id, int)) - self.assertTrue(isinstance(succeeded, monitoring.CommandSucceededEvent)) - self.assertTrue(isinstance(succeeded.duration_micros, int)) + self.assertIsInstance(started.request_id, int) + self.assertIsInstance(succeeded, monitoring.CommandSucceededEvent) + self.assertIsInstance(succeeded.duration_micros, int) self.assertEqual("find", succeeded.command_name) - self.assertTrue(isinstance(succeeded.request_id, int)) + self.assertIsInstance(succeeded.request_id, int) self.assertEqual(cursor.address, succeeded.connection_id) csr = succeeded.reply["cursor"] self.assertEqual(csr["id"], cursor_id) @@ -159,7 +158,7 @@ def test_find_and_get_more(self): started = self.listener.started_events[0] succeeded = self.listener.succeeded_events[0] self.assertEqual(0, len(self.listener.failed_events)) - self.assertTrue(isinstance(started, monitoring.CommandStartedEvent)) + self.assertIsInstance(started, monitoring.CommandStartedEvent) self.assertEqualCommand( SON([("getMore", cursor_id), ("collection", "test"), ("batchSize", 4)]), started.command, @@ -167,11 +166,11 @@ def test_find_and_get_more(self): self.assertEqual("getMore", started.command_name) self.assertEqual(self.client.address, started.connection_id) self.assertEqual("pymongo_test", started.database_name) - 
self.assertTrue(isinstance(started.request_id, int)) - self.assertTrue(isinstance(succeeded, monitoring.CommandSucceededEvent)) - self.assertTrue(isinstance(succeeded.duration_micros, int)) + self.assertIsInstance(started.request_id, int) + self.assertIsInstance(succeeded, monitoring.CommandSucceededEvent) + self.assertIsInstance(succeeded.duration_micros, int) self.assertEqual("getMore", succeeded.command_name) - self.assertTrue(isinstance(succeeded.request_id, int)) + self.assertIsInstance(succeeded.request_id, int) self.assertEqual(cursor.address, succeeded.connection_id) csr = succeeded.reply["cursor"] self.assertEqual(csr["id"], cursor_id) @@ -194,16 +193,16 @@ def test_find_with_explain(self): started = self.listener.started_events[0] succeeded = self.listener.succeeded_events[0] self.assertEqual(0, len(self.listener.failed_events)) - self.assertTrue(isinstance(started, monitoring.CommandStartedEvent)) + self.assertIsInstance(started, monitoring.CommandStartedEvent) self.assertEqualCommand(cmd, started.command) self.assertEqual("explain", started.command_name) self.assertEqual(self.client.address, started.connection_id) self.assertEqual("pymongo_test", started.database_name) - self.assertTrue(isinstance(started.request_id, int)) - self.assertTrue(isinstance(succeeded, monitoring.CommandSucceededEvent)) - self.assertTrue(isinstance(succeeded.duration_micros, int)) + self.assertIsInstance(started.request_id, int) + self.assertIsInstance(succeeded, monitoring.CommandSucceededEvent) + self.assertIsInstance(succeeded.duration_micros, int) self.assertEqual("explain", succeeded.command_name) - self.assertTrue(isinstance(succeeded.request_id, int)) + self.assertIsInstance(succeeded.request_id, int) self.assertEqual(self.client.address, succeeded.connection_id) self.assertEqual(res, succeeded.reply) @@ -225,16 +224,16 @@ def _test_find_options(self, query, expected_cmd): started = self.listener.started_events[0] succeeded = self.listener.succeeded_events[0] self.assertEqual(0, len(self.listener.failed_events)) - self.assertTrue(isinstance(started, monitoring.CommandStartedEvent)) + self.assertIsInstance(started, monitoring.CommandStartedEvent) self.assertEqualCommand(expected_cmd, started.command) self.assertEqual("find", started.command_name) self.assertEqual(self.client.address, started.connection_id) self.assertEqual("pymongo_test", started.database_name) - self.assertTrue(isinstance(started.request_id, int)) - self.assertTrue(isinstance(succeeded, monitoring.CommandSucceededEvent)) - self.assertTrue(isinstance(succeeded.duration_micros, int)) + self.assertIsInstance(started.request_id, int) + self.assertIsInstance(succeeded, monitoring.CommandSucceededEvent) + self.assertIsInstance(succeeded.duration_micros, int) self.assertEqual("find", succeeded.command_name) - self.assertTrue(isinstance(succeeded.request_id, int)) + self.assertIsInstance(succeeded.request_id, int) self.assertEqual(self.client.address, succeeded.connection_id) finally: # Exhaust the cursor to avoid kill cursors. 
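The monitoring assertions above all inspect fields of CommandStartedEvent, CommandSucceededEvent, and CommandFailedEvent (command_name, request_id, connection_id, database_name, duration_micros, reply, failure). For reference, a minimal listener built on the public monitoring API, assuming a reachable local server:

from pymongo import MongoClient, monitoring


class CommandLogger(monitoring.CommandListener):
    def started(self, event):
        print(f"{event.command_name} started, request_id={event.request_id}")

    def succeeded(self, event):
        print(f"{event.command_name} took {event.duration_micros} us")

    def failed(self, event):
        print(f"{event.command_name} failed: {event.failure}")


# Listeners are registered per client and fire for every command it runs.
client = MongoClient(event_listeners=[CommandLogger()])
client.admin.command("ping")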
@@ -306,7 +305,7 @@ def test_command_and_get_more(self): started = self.listener.started_events[0] succeeded = self.listener.succeeded_events[0] self.assertEqual(0, len(self.listener.failed_events)) - self.assertTrue(isinstance(started, monitoring.CommandStartedEvent)) + self.assertIsInstance(started, monitoring.CommandStartedEvent) self.assertEqualCommand( SON( [ @@ -320,11 +319,11 @@ def test_command_and_get_more(self): self.assertEqual("aggregate", started.command_name) self.assertEqual(self.client.address, started.connection_id) self.assertEqual("pymongo_test", started.database_name) - self.assertTrue(isinstance(started.request_id, int)) - self.assertTrue(isinstance(succeeded, monitoring.CommandSucceededEvent)) - self.assertTrue(isinstance(succeeded.duration_micros, int)) + self.assertIsInstance(started.request_id, int) + self.assertIsInstance(succeeded, monitoring.CommandSucceededEvent) + self.assertIsInstance(succeeded.duration_micros, int) self.assertEqual("aggregate", succeeded.command_name) - self.assertTrue(isinstance(succeeded.request_id, int)) + self.assertIsInstance(succeeded.request_id, int) self.assertEqual(cursor.address, succeeded.connection_id) expected_cursor = { "id": cursor_id, @@ -339,7 +338,7 @@ def test_command_and_get_more(self): started = self.listener.started_events[0] succeeded = self.listener.succeeded_events[0] self.assertEqual(0, len(self.listener.failed_events)) - self.assertTrue(isinstance(started, monitoring.CommandStartedEvent)) + self.assertIsInstance(started, monitoring.CommandStartedEvent) self.assertEqualCommand( SON([("getMore", cursor_id), ("collection", "test"), ("batchSize", 4)]), started.command, @@ -347,11 +346,11 @@ def test_command_and_get_more(self): self.assertEqual("getMore", started.command_name) self.assertEqual(self.client.address, started.connection_id) self.assertEqual("pymongo_test", started.database_name) - self.assertTrue(isinstance(started.request_id, int)) - self.assertTrue(isinstance(succeeded, monitoring.CommandSucceededEvent)) - self.assertTrue(isinstance(succeeded.duration_micros, int)) + self.assertIsInstance(started.request_id, int) + self.assertIsInstance(succeeded, monitoring.CommandSucceededEvent) + self.assertIsInstance(succeeded.duration_micros, int) self.assertEqual("getMore", succeeded.command_name) - self.assertTrue(isinstance(succeeded.request_id, int)) + self.assertIsInstance(succeeded.request_id, int) self.assertEqual(cursor.address, succeeded.connection_id) expected_result = { "cursor": { @@ -379,18 +378,18 @@ def test_get_more_failure(self): started = self.listener.started_events[0] self.assertEqual(0, len(self.listener.succeeded_events)) failed = self.listener.failed_events[0] - self.assertTrue(isinstance(started, monitoring.CommandStartedEvent)) + self.assertIsInstance(started, monitoring.CommandStartedEvent) self.assertEqualCommand( SON([("getMore", cursor_id), ("collection", "test")]), started.command ) self.assertEqual("getMore", started.command_name) self.assertEqual(self.client.address, started.connection_id) self.assertEqual("pymongo_test", started.database_name) - self.assertTrue(isinstance(started.request_id, int)) - self.assertTrue(isinstance(failed, monitoring.CommandFailedEvent)) - self.assertTrue(isinstance(failed.duration_micros, int)) + self.assertIsInstance(started.request_id, int) + self.assertIsInstance(failed, monitoring.CommandFailedEvent) + self.assertIsInstance(failed.duration_micros, int) self.assertEqual("getMore", failed.command_name) - self.assertTrue(isinstance(failed.request_id, 
int)) + self.assertIsInstance(failed.request_id, int) self.assertEqual(cursor.address, failed.connection_id) self.assertEqual(0, failed.failure.get("ok")) @@ -410,13 +409,13 @@ def test_not_primary_error(self): started = self.listener.started_events[0] failed = self.listener.failed_events[0] self.assertEqual(0, len(self.listener.succeeded_events)) - self.assertTrue(isinstance(started, monitoring.CommandStartedEvent)) - self.assertTrue(isinstance(failed, monitoring.CommandFailedEvent)) + self.assertIsInstance(started, monitoring.CommandStartedEvent) + self.assertIsInstance(failed, monitoring.CommandFailedEvent) self.assertEqual("findAndModify", failed.command_name) self.assertEqual(address, failed.connection_id) self.assertEqual(0, failed.failure.get("ok")) - self.assertTrue(isinstance(failed.request_id, int)) - self.assertTrue(isinstance(failed.duration_micros, int)) + self.assertIsInstance(failed.request_id, int) + self.assertIsInstance(failed.duration_micros, int) self.assertEqual(error, failed.failure) @client_context.require_no_mongos @@ -432,7 +431,7 @@ def test_exhaust(self): started = self.listener.started_events[0] succeeded = self.listener.succeeded_events[0] self.assertEqual(0, len(self.listener.failed_events)) - self.assertTrue(isinstance(started, monitoring.CommandStartedEvent)) + self.assertIsInstance(started, monitoring.CommandStartedEvent) self.assertEqualCommand( SON( [("find", "test"), ("filter", {}), ("projection", {"_id": False}), ("batchSize", 5)] @@ -442,11 +441,11 @@ def test_exhaust(self): self.assertEqual("find", started.command_name) self.assertEqual(cursor.address, started.connection_id) self.assertEqual("pymongo_test", started.database_name) - self.assertTrue(isinstance(started.request_id, int)) - self.assertTrue(isinstance(succeeded, monitoring.CommandSucceededEvent)) - self.assertTrue(isinstance(succeeded.duration_micros, int)) + self.assertIsInstance(started.request_id, int) + self.assertIsInstance(succeeded, monitoring.CommandSucceededEvent) + self.assertIsInstance(succeeded.duration_micros, int) self.assertEqual("find", succeeded.command_name) - self.assertTrue(isinstance(succeeded.request_id, int)) + self.assertIsInstance(succeeded.request_id, int) self.assertEqual(cursor.address, succeeded.connection_id) expected_result = { "cursor": { @@ -462,7 +461,7 @@ def test_exhaust(self): tuple(cursor.to_list()) self.assertEqual(0, len(self.listener.failed_events)) for event in self.listener.started_events: - self.assertTrue(isinstance(event, monitoring.CommandStartedEvent)) + self.assertIsInstance(event, monitoring.CommandStartedEvent) self.assertEqualCommand( SON([("getMore", cursor_id), ("collection", "test"), ("batchSize", 5)]), event.command, @@ -470,12 +469,12 @@ def test_exhaust(self): self.assertEqual("getMore", event.command_name) self.assertEqual(cursor.address, event.connection_id) self.assertEqual("pymongo_test", event.database_name) - self.assertTrue(isinstance(event.request_id, int)) + self.assertIsInstance(event.request_id, int) for event in self.listener.succeeded_events: - self.assertTrue(isinstance(event, monitoring.CommandSucceededEvent)) - self.assertTrue(isinstance(event.duration_micros, int)) + self.assertIsInstance(event, monitoring.CommandSucceededEvent) + self.assertIsInstance(event.duration_micros, int) self.assertEqual("getMore", event.command_name) - self.assertTrue(isinstance(event.request_id, int)) + self.assertIsInstance(event.request_id, int) self.assertEqual(cursor.address, event.connection_id) # Last getMore receives a response with 
cursor id 0. self.assertEqual(0, self.listener.succeeded_events[-1].reply["cursor"]["id"]) @@ -493,7 +492,7 @@ def test_kill_cursors(self): started = self.listener.started_events[0] succeeded = self.listener.succeeded_events[0] self.assertEqual(0, len(self.listener.failed_events)) - self.assertTrue(isinstance(started, monitoring.CommandStartedEvent)) + self.assertIsInstance(started, monitoring.CommandStartedEvent) # There could be more than one cursor_id here depending on # when the thread last ran. self.assertIn(cursor_id, started.command["cursors"]) @@ -501,18 +500,17 @@ def test_kill_cursors(self): self.assertIs(type(started.connection_id), tuple) self.assertEqual(cursor.address, started.connection_id) self.assertEqual("pymongo_test", started.database_name) - self.assertTrue(isinstance(started.request_id, int)) - self.assertTrue(isinstance(succeeded, monitoring.CommandSucceededEvent)) - self.assertTrue(isinstance(succeeded.duration_micros, int)) + self.assertIsInstance(started.request_id, int) + self.assertIsInstance(succeeded, monitoring.CommandSucceededEvent) + self.assertIsInstance(succeeded.duration_micros, int) self.assertEqual("killCursors", succeeded.command_name) - self.assertTrue(isinstance(succeeded.request_id, int)) + self.assertIsInstance(succeeded.request_id, int) self.assertIs(type(succeeded.connection_id), tuple) self.assertEqual(cursor.address, succeeded.connection_id) # There could be more than one cursor_id here depending on # when the thread last ran. - self.assertTrue( - cursor_id in succeeded.reply["cursorsUnknown"] - or cursor_id in succeeded.reply["cursorsKilled"] + self.assertIn( + cursor_id, succeeded.reply["cursorsUnknown"] + succeeded.reply["cursorsKilled"] ) def test_non_bulk_writes(self): @@ -1064,7 +1062,7 @@ def test_write_errors(self): self.assertEqual(2, len(errors)) fields = {"index", "code", "errmsg"} for error in errors: - self.assertTrue(fields.issubset(set(error))) + self.assertLessEqual(fields, set(error)) def test_first_batch_helper(self): # Regardless of server version and use of helpers._first_batch @@ -1086,8 +1084,8 @@ def test_first_batch_helper(self): self.assertEqual(started.command_name, succeeded.command_name) self.assertEqual(started.request_id, succeeded.request_id) self.assertEqual(started.connection_id, succeeded.connection_id) - self.assertTrue("cursor" in succeeded.reply) - self.assertTrue("ok" in succeeded.reply) + self.assertIn("cursor", succeeded.reply) + self.assertIn("ok", succeeded.reply) self.listener.reset() @@ -1155,13 +1153,13 @@ def test_simple(self): started = self.listener.started_events[0] succeeded = self.listener.succeeded_events[0] self.assertEqual(0, len(self.listener.failed_events)) - self.assertTrue(isinstance(succeeded, monitoring.CommandSucceededEvent)) - self.assertTrue(isinstance(started, monitoring.CommandStartedEvent)) + self.assertIsInstance(succeeded, monitoring.CommandSucceededEvent) + self.assertIsInstance(started, monitoring.CommandStartedEvent) self.assertEqualCommand(SON([("ping", 1)]), started.command) self.assertEqual("ping", started.command_name) self.assertEqual(self.client.address, started.connection_id) self.assertEqual("pymongo_test", started.database_name) - self.assertTrue(isinstance(started.request_id, int)) + self.assertIsInstance(started.request_id, int) class TestEventClasses(unittest.TestCase): diff --git a/test/test_objectid.py b/test/test_objectid.py index 26670832f6..dbc61951d1 100644 --- a/test/test_objectid.py +++ b/test/test_objectid.py @@ -23,7 +23,7 @@ sys.path[0:0] = [""] from 
test import SkipTest, unittest -from test.utils import oid_generated_on_process +from test.utils_shared import oid_generated_on_process from bson.errors import InvalidId from bson.objectid import _MAX_COUNTER_VALUE, ObjectId @@ -92,7 +92,7 @@ def test_generation_time(self): self.assertEqual(utc, d2.tzinfo) d2 = d2.replace(tzinfo=None) - self.assertTrue(d2 - d1 < datetime.timedelta(seconds=2)) + self.assertLess(d2 - d1, datetime.timedelta(seconds=2)) def test_from_datetime(self): d = datetime.datetime.now(tz=datetime.timezone.utc).replace(tzinfo=None) diff --git a/test/test_on_demand_csfle.py b/test/test_on_demand_csfle.py index 023feca8c2..648e46815a 100644 --- a/test/test_on_demand_csfle.py +++ b/test/test_on_demand_csfle.py @@ -26,18 +26,20 @@ from test import IntegrationTest, client_context from bson.codec_options import CodecOptions -from pymongo.synchronous.encryption import _HAVE_PYMONGOCRYPT, ClientEncryption, EncryptionError +from pymongo.synchronous.encryption import ( + _HAVE_PYMONGOCRYPT, + ClientEncryption, + EncryptionError, +) -pytestmark = pytest.mark.csfle +_IS_SYNC = True + +pytestmark = pytest.mark.kms class TestonDemandGCPCredentials(IntegrationTest): - @classmethod @unittest.skipUnless(_HAVE_PYMONGOCRYPT, "pymongocrypt is not installed") @client_context.require_version_min(4, 2, -1) - def setUpClass(cls): - super().setUpClass() - def setUp(self): super().setUp() self.master_key = { @@ -74,12 +76,8 @@ def test_02_success(self): class TestonDemandAzureCredentials(IntegrationTest): - @classmethod @unittest.skipUnless(_HAVE_PYMONGOCRYPT, "pymongocrypt is not installed") @client_context.require_version_min(4, 2, -1) - def setUpClass(cls): - super().setUpClass() - def setUp(self): super().setUp() self.master_key = { diff --git a/test/test_pooling.py b/test/test_pooling.py index 3b867965bd..cb5b206996 100644 --- a/test/test_pooling.py +++ b/test/test_pooling.py @@ -15,42 +15,42 @@ """Test built in connection-pooling with threads.""" from __future__ import annotations +import asyncio import gc import random import socket import sys -import threading import time +from test.utils import flaky, get_pool, joinall from bson.codec_options import DEFAULT_CODEC_OPTIONS from bson.son import SON from pymongo import MongoClient, message, timeout from pymongo.errors import AutoReconnect, ConnectionFailure, DuplicateKeyError from pymongo.hello import HelloCompat +from pymongo.lock import _create_lock sys.path[0:0] = [""] from test import IntegrationTest, client_context, unittest -from test.utils import delay, get_pool, joinall +from test.helpers import ConcurrentRunner +from test.utils_shared import delay from pymongo.socket_checker import SocketChecker from pymongo.synchronous.pool import Pool, PoolOptions - -@client_context.require_connection -def setUpModule(): - pass +_IS_SYNC = True N = 10 DB = "pymongo-pooling-tests" -def gc_collect_until_done(threads, timeout=60): +def gc_collect_until_done(tasks, timeout=60): start = time.time() - running = list(threads) + running = list(tasks) while running: - assert (time.time() - start) < timeout, "Threads timed out" + assert (time.time() - start) < timeout, "Tasks timed out" for t in running: t.join(0.1) if not t.is_alive(): @@ -58,12 +58,12 @@ def gc_collect_until_done(threads, timeout=60): gc.collect() -class MongoThread(threading.Thread): - """A thread that uses a MongoClient.""" +class MongoTask(ConcurrentRunner): + """A thread/Task that uses a MongoClient.""" def __init__(self, client): super().__init__() - self.daemon = True # Don't hang 
whole test if thread hangs. + self.daemon = True # Don't hang whole test if task hangs. self.client = client self.db = self.client[DB] self.passed = False @@ -76,21 +76,21 @@ def run_mongo_thread(self): raise NotImplementedError -class InsertOneAndFind(MongoThread): +class InsertOneAndFind(MongoTask): def run_mongo_thread(self): for _ in range(N): rand = random.randint(0, N) - _id = self.db.sf.insert_one({"x": rand}).inserted_id - assert rand == self.db.sf.find_one(_id)["x"] + _id = (self.db.sf.insert_one({"x": rand})).inserted_id + assert rand == (self.db.sf.find_one(_id))["x"] -class Unique(MongoThread): +class Unique(MongoTask): def run_mongo_thread(self): for _ in range(N): self.db.unique.insert_one({}) # no error -class NonUnique(MongoThread): +class NonUnique(MongoTask): def run_mongo_thread(self): for _ in range(N): try: @@ -101,7 +101,7 @@ def run_mongo_thread(self): raise AssertionError("Should have raised DuplicateKeyError") -class SocketGetter(MongoThread): +class SocketGetter(MongoTask): """Utility for TestPooling. Checks out a socket and holds it forever. Used in @@ -124,31 +124,35 @@ def run_mongo_thread(self): self.state = "connection" - def __del__(self): + def release_conn(self): if self.sock: - self.sock.close_conn(None) + self.sock.unpin() + self.sock = None + return True + return False def run_cases(client, cases): - threads = [] + tasks = [] n_runs = 5 for case in cases: for _i in range(n_runs): t = case(client) t.start() - threads.append(t) + tasks.append(t) - for t in threads: + for t in tasks: t.join() - for t in threads: + for t in tasks: assert t.passed, "%s.run() threw an exception" % repr(t) class _TestPoolingBase(IntegrationTest): """Base class for all connection-pool tests.""" + @client_context.require_connection def setUp(self): super().setUp() self.c = self.rs_or_single_client() @@ -158,11 +162,9 @@ def setUp(self): db.unique.insert_one({"_id": "jesse"}) db.test.insert_many([{} for _ in range(10)]) - def tearDown(self): - self.c.close() - super().tearDown() - - def create_pool(self, pair=(client_context.host, client_context.port), *args, **kwargs): + def create_pool(self, pair=None, *args, **kwargs): + if pair is None: + pair = (client_context.host, client_context.port) # Start the pool with the correct ssl options. 
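run_cases and TestPoolMaxSize below fan work out to ConcurrentRunner tasks and tally successes under a lock. The same shape using only the standard library, with plain threads standing in for ConcurrentRunner and a placeholder worker body:

import threading

lock = threading.Lock()
n_passed = 0


def worker():
    global n_passed
    # Placeholder for per-task client work (e.g., one find_one per task).
    with lock:  # the lock makes the shared counter safe across threads
        n_passed += 1


threads = [threading.Thread(target=worker) for _ in range(10)]
for t in threads:
    t.start()
for t in threads:
    t.join()
assert n_passed == 10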
+        while tasks:
+            to_remove = []
+            for t in tasks:
+                if t.release_conn():
+                    to_remove.append(t)
+                    t.join()
+            for t in to_remove:
+                tasks.remove(t)
+            time.sleep(0.05)
+        pool.close()

     def test_maxConnecting(self):
         client = self.rs_or_single_client()
-        self.addCleanup(client.close)
         self.client.test.test.insert_one({})
         self.addCleanup(self.client.test.test.delete_many, {})

         pool = get_pool(client)
@@ -389,11 +405,11 @@ def find_one():
             docs.append(client.test.test.find_one({}))

-        threads = [threading.Thread(target=find_one) for _ in range(50)]
-        for thread in threads:
-            thread.start()
-        for thread in threads:
-            thread.join(10)
+        tasks = [ConcurrentRunner(target=find_one) for _ in range(50)]
+        for task in tasks:
+            task.start()
+        for task in tasks:
+            task.join(10)

         self.assertEqual(len(docs), 50)
         self.assertLessEqual(len(pool.conns), 50)
@@ -416,7 +432,6 @@ def find_one():
     @client_context.require_failCommand_appName
     def test_csot_timeout_message(self):
         client = self.rs_or_single_client(appName="connectionTimeoutApp")
-        self.addCleanup(client.close)

         # Mock an operation failing due to pymongo.timeout().
         mock_connection_timeout = {
             "configureFailPoint": "failCommand",
@@ -436,12 +451,11 @@ def test_csot_timeout_message(self):
             with timeout(0.5):
                 client.db.t.find_one({"$where": delay(2)})

-        self.assertTrue("(configured timeouts: timeoutMS: 500.0ms" in str(error.exception))
+        self.assertIn("(configured timeouts: timeoutMS: 500.0ms", str(error.exception))

     @client_context.require_failCommand_appName
     def test_socket_timeout_message(self):
         client = self.rs_or_single_client(socketTimeoutMS=500, appName="connectionTimeoutApp")
-        self.addCleanup(client.close)

         # Mock an operation failing due to socketTimeoutMS.
         mock_connection_timeout = {
             "configureFailPoint": "failCommand",
@@ -460,9 +474,9 @@ def test_socket_timeout_message(self):
         with self.assertRaises(Exception) as error:
             client.db.t.find_one({"$where": delay(2)})

-        self.assertTrue(
-            "(configured timeouts: socketTimeoutMS: 500.0ms, connectTimeoutMS: 20000.0ms)"
-            in str(error.exception)
+        self.assertIn(
+            "(configured timeouts: socketTimeoutMS: 500.0ms, connectTimeoutMS: 20000.0ms)",
+            str(error.exception),
         )

     @client_context.require_failCommand_appName
@@ -485,7 +499,6 @@ def test_connection_timeout_message(self):
             appName="connectionTimeoutApp",
             heartbeatFrequencyMS=1000000,
         )
-        self.addCleanup(client.close)
         client.admin.command("ping")
         pool = get_pool(client)
         pool.reset_without_pause()
@@ -493,9 +506,9 @@ def test_connection_timeout_message(self):
         with self.assertRaises(Exception) as error:
             client.admin.command("ping")

-        self.assertTrue(
-            "(configured timeouts: socketTimeoutMS: 500.0ms, connectTimeoutMS: 500.0ms)"
-            in str(error.exception)
+        self.assertIn(
+            "(configured timeouts: socketTimeoutMS: 500.0ms, connectTimeoutMS: 500.0ms)",
+            str(error.exception),
         )


@@ -503,20 +516,19 @@ class TestPoolMaxSize(_TestPoolingBase):
     def test_max_pool_size(self):
         max_pool_size = 4
         c = self.rs_or_single_client(maxPoolSize=max_pool_size)
-        self.addCleanup(c.close)
         collection = c[DB].test

         # Need one document.
         collection.drop()
         collection.insert_one({})

-        # nthreads had better be much larger than max_pool_size to ensure that
+        # ntasks had better be much larger than max_pool_size to ensure that
         # max_pool_size connections are actually required at some point in this
         # test's execution.
         cx_pool = get_pool(c)
-        nthreads = 10
-        threads = []
-        lock = threading.Lock()
+        ntasks = 10
+        tasks = []
+        lock = _create_lock()
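+        # _create_lock keeps this source in step with the async variant;
+        # in the synchronous build it is a plain threading.Lock.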
         self.n_passed = 0

         def f():
@@ -527,19 +539,18 @@ def f():
             with lock:
                 self.n_passed += 1

-        for _i in range(nthreads):
-            t = threading.Thread(target=f)
-            threads.append(t)
+        for _i in range(ntasks):
+            t = ConcurrentRunner(target=f)
+            tasks.append(t)
             t.start()

-        joinall(threads)
-        self.assertEqual(nthreads, self.n_passed)
-        self.assertTrue(len(cx_pool.conns) > 1)
+        joinall(tasks)
+        self.assertEqual(ntasks, self.n_passed)
+        self.assertGreater(len(cx_pool.conns), 1)
         self.assertEqual(0, cx_pool.requests)

     def test_max_pool_size_none(self):
         c = self.rs_or_single_client(maxPoolSize=None)
-        self.addCleanup(c.close)
         collection = c[DB].test

         # Need one document.
@@ -547,9 +558,9 @@ def test_max_pool_size_none(self):
         collection.insert_one({})

         cx_pool = get_pool(c)
-        nthreads = 10
-        threads = []
-        lock = threading.Lock()
+        ntasks = 10
+        tasks = []
+        lock = _create_lock()
         self.n_passed = 0

         def f():
@@ -559,19 +570,18 @@ def f():
             with lock:
                 self.n_passed += 1

-        for _i in range(nthreads):
-            t = threading.Thread(target=f)
-            threads.append(t)
+        for _i in range(ntasks):
+            t = ConcurrentRunner(target=f)
+            tasks.append(t)
             t.start()

-        joinall(threads)
-        self.assertEqual(nthreads, self.n_passed)
-        self.assertTrue(len(cx_pool.conns) > 1)
+        joinall(tasks)
+        self.assertEqual(ntasks, self.n_passed)
+        self.assertGreater(len(cx_pool.conns), 1)
         self.assertEqual(cx_pool.max_pool_size, float("inf"))

     def test_max_pool_size_zero(self):
         c = self.rs_or_single_client(maxPoolSize=0)
-        self.addCleanup(c.close)
         pool = get_pool(c)
         self.assertEqual(pool.max_pool_size, float("inf"))
diff --git a/test/test_read_concern.py b/test/test_read_concern.py
index f7c0901422..62b2491475 100644
--- a/test/test_read_concern.py
+++ b/test/test_read_concern.py
@@ -21,12 +21,14 @@

 sys.path[0:0] = [""]

 from test import IntegrationTest, client_context
-from test.utils import OvertCommandListener
+from test.utils_shared import OvertCommandListener

 from bson.son import SON
 from pymongo.errors import OperationFailure
 from pymongo.read_concern import ReadConcern

+_IS_SYNC = True
+

 class TestReadConcern(IntegrationTest):
     listener: OvertCommandListener

@@ -71,14 +73,14 @@ def test_invalid_read_concern(self):
     def test_find_command(self):
         # readConcern not sent in command if not specified.
         coll = self.db.coll
-        tuple(coll.find({"field": "value"}))
+        coll.find({"field": "value"}).to_list()

         self.assertNotIn("readConcern", self.listener.started_events[0].command)
         self.listener.reset()

         # Explicitly set readConcern to 'local'.
         coll = self.db.get_collection("coll", read_concern=ReadConcern("local"))
-        tuple(coll.find({"field": "value"}))
+        coll.find({"field": "value"}).to_list()

         self.assertEqualCommand(
             SON(
                 [
@@ -93,19 +95,19 @@ def test_command_cursor(self):
         # readConcern not sent in command if not specified.
         coll = self.db.coll
-        tuple(coll.aggregate([{"$match": {"field": "value"}}]))
+        (coll.aggregate([{"$match": {"field": "value"}}])).to_list()

         self.assertNotIn("readConcern", self.listener.started_events[0].command)
         self.listener.reset()

         # Explicitly set readConcern to 'local'.
         coll = self.db.get_collection("coll", read_concern=ReadConcern("local"))
-        tuple(coll.aggregate([{"$match": {"field": "value"}}]))
+        (coll.aggregate([{"$match": {"field": "value"}}])).to_list()

         self.assertEqual({"level": "local"}, self.listener.started_events[0].command["readConcern"])

     def test_aggregate_out(self):
         coll = self.db.get_collection("coll", read_concern=ReadConcern("local"))
-        tuple(coll.aggregate([{"$match": {"field": "value"}}, {"$out": "output_collection"}]))
+        (coll.aggregate([{"$match": {"field": "value"}}, {"$out": "output_collection"}])).to_list()

         # Aggregate with $out supports readConcern MongoDB 4.2 onwards.
         if client_context.version >= (4, 1):
diff --git a/test/test_read_preferences.py b/test/test_read_preferences.py
index 32883399e1..084abdf3e1 100644
--- a/test/test_read_preferences.py
+++ b/test/test_read_preferences.py
@@ -26,9 +26,16 @@

 sys.path[0:0] = [""]

-from test import IntegrationTest, SkipTest, client_context, connected, unittest
-from test.utils import (
+from test import (
+    IntegrationTest,
+    SkipTest,
+    client_context,
+    connected,
+    unittest,
+)
+from test.utils_shared import (
     OvertCommandListener,
+    _ignore_deprecations,
     one,
     wait_until,
 )
@@ -52,13 +59,18 @@
 from pymongo.synchronous.mongo_client import MongoClient
 from pymongo.write_concern import WriteConcern

+_IS_SYNC = True
+

 class TestSelections(IntegrationTest):
     @client_context.require_connection
     def test_bool(self):
         client = self.single_client()

-        wait_until(lambda: client.address, "discover primary")
+        def predicate():
+            return client.address
+
+        wait_until(predicate, "discover primary")

         selection = Selection.from_topology_description(client._topology.description)

         self.assertTrue(selection)
@@ -88,11 +100,7 @@ def test_deepcopy(self):

 class TestReadPreferencesBase(IntegrationTest):
-    @classmethod
     @client_context.require_secondaries_count(1)
-    def setUpClass(cls):
-        super().setUpClass()
-
     def setUp(self):
         super().setUp()
         # Insert some data so we can use cursors in read_from_which_host
@@ -123,11 +131,14 @@ def read_from_which_kind(self, client):
                 f"Cursor used address {address}, expected either primary "
                 f"{client.primary} or secondaries {client.secondaries}"
             )
-        return None

     def assertReadsFrom(self, expected, **kwargs):
         c = self.rs_client(**kwargs)
-        wait_until(lambda: len(c.nodes - c.arbiters) == client_context.w, "discovered all nodes")
+
+        def predicate():
+            return len(c.nodes - c.arbiters) == client_context.w
+
+        wait_until(predicate, "discovered all nodes")
         used = self.read_from_which_kind(c)
         self.assertEqual(expected, used, f"Cursor used {used}, expected {expected}")

@@ -150,7 +161,7 @@ def test_reads_from_secondary(self):

         # Test find and find_one.
         self.assertIsNotNone(coll.find_one())
-        self.assertEqual(10, len(list(coll.find())))
+        self.assertEqual(10, len(coll.find().to_list()))

         # Test some database helpers.
         self.assertIsNotNone(db.list_collection_names())
@@ -173,20 +184,22 @@ def test_mode_validation(self):
             ReadPreference.SECONDARY_PREFERRED,
             ReadPreference.NEAREST,
         ):
-            self.assertEqual(mode, self.rs_client(read_preference=mode).read_preference)
+            self.assertEqual(mode, (self.rs_client(read_preference=mode)).read_preference)

-        self.assertRaises(TypeError, self.rs_client, read_preference="foo")
+        with self.assertRaises(TypeError):
+            self.rs_client(read_preference="foo")

     def test_tag_sets_validation(self):
         S = Secondary(tag_sets=[{}])
-        self.assertEqual([{}], self.rs_client(read_preference=S).read_preference.tag_sets)
+        self.assertEqual([{}], (self.rs_client(read_preference=S)).read_preference.tag_sets)

         S = Secondary(tag_sets=[{"k": "v"}])
-        self.assertEqual([{"k": "v"}], self.rs_client(read_preference=S).read_preference.tag_sets)
+        self.assertEqual([{"k": "v"}], (self.rs_client(read_preference=S)).read_preference.tag_sets)

         S = Secondary(tag_sets=[{"k": "v"}, {}])
         self.assertEqual(
-            [{"k": "v"}, {}], self.rs_client(read_preference=S).read_preference.tag_sets
+            [{"k": "v"}, {}],
+            (self.rs_client(read_preference=S)).read_preference.tag_sets,
         )

         self.assertRaises(ValueError, Secondary, tag_sets=[])
@@ -200,22 +213,27 @@ def test_tag_sets_validation(self):

     def test_threshold_validation(self):
         self.assertEqual(
-            17, self.rs_client(localThresholdMS=17, connect=False).options.local_threshold_ms
+            17,
+            (self.rs_client(localThresholdMS=17, connect=False)).options.local_threshold_ms,
         )

         self.assertEqual(
-            42, self.rs_client(localThresholdMS=42, connect=False).options.local_threshold_ms
+            42,
+            (self.rs_client(localThresholdMS=42, connect=False)).options.local_threshold_ms,
        )

         self.assertEqual(
-            666, self.rs_client(localThresholdMS=666, connect=False).options.local_threshold_ms
+            666,
+            (self.rs_client(localThresholdMS=666, connect=False)).options.local_threshold_ms,
         )

         self.assertEqual(
-            0, self.rs_client(localThresholdMS=0, connect=False).options.local_threshold_ms
+            0,
+            (self.rs_client(localThresholdMS=0, connect=False)).options.local_threshold_ms,
         )

-        self.assertRaises(ValueError, self.rs_client, localthresholdms=-1)
+        with self.assertRaises(ValueError):
+            self.rs_client(localthresholdms=-1)

     def test_zero_latency(self):
         ping_times: set = set()
@@ -238,7 +256,8 @@ def test_primary(self):

     def test_primary_with_tags(self):
         # Tags not allowed with PRIMARY
-        self.assertRaises(ConfigurationError, self.rs_client, tag_sets=[{"dc": "ny"}])
+        with self.assertRaises(ConfigurationError):
+            self.rs_client(tag_sets=[{"dc": "ny"}])

     def test_primary_preferred(self):
         self.assertReadsFrom("primary", read_preference=ReadPreference.PRIMARY_PREFERRED)
@@ -272,7 +291,7 @@ def test_nearest(self):
         not_used = data_members.difference(used)
         latencies = ", ".join(
             "%s: %sms" % (server.description.address, server.description.round_trip_time)
-            for server in c._get_topology().select_servers(readable_server_selector, _Op.TEST)
+            for server in (c._get_topology()).select_servers(readable_server_selector, _Op.TEST)
         )

         self.assertFalse(
@@ -289,12 +308,9 @@ def __init__(self, *args, **kwargs):
         client_options.update(kwargs)
         super().__init__(*args, **client_options)

-    @contextlib.contextmanager
     def _conn_for_reads(self, read_preference, session, operation):
         context = super()._conn_for_reads(read_preference, session, operation)
-        with context as (conn, read_preference):
-            self.record_a_read(conn.address)
-            yield conn, read_preference
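+        # Reads are still recorded: the base class routes _conn_for_reads
+        # through _conn_from_server, which is overridden below.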
+        return context

     @contextlib.contextmanager
     def _conn_from_server(self, read_preference, server, session):
@@ -304,7 +320,7 @@ def _conn_from_server(self, read_preference, server, session):
             yield conn, read_preference

     def record_a_read(self, address):
-        server = self._get_topology().select_server_by_address(address, _Op.TEST, 0)
+        server = (self._get_topology()).select_server_by_address(address, _Op.TEST, 0)
         self.has_read_from.add(server)

@@ -321,25 +337,23 @@ class TestCommandAndReadPreference(IntegrationTest):
     c: ReadPrefTester
     client_version: Version

-    @classmethod
     @client_context.require_secondaries_count(1)
-    def setUpClass(cls):
-        super().setUpClass()
-        cls.c = ReadPrefTester(
+    def setUp(self):
+        super().setUp()
+        self.c = ReadPrefTester(
             # Ignore round trip times, to test ReadPreference modes only.
             localThresholdMS=1000 * 1000,
         )
-        cls.client_version = Version.from_client(cls.c)
+        self.client_version = Version.from_client(self.c)
         # mapReduce fails if the collection does not exist.
-        coll = cls.c.pymongo_test.get_collection(
+        coll = self.c.pymongo_test.get_collection(
             "test", write_concern=WriteConcern(w=client_context.w)
         )
         coll.insert_one({})

-    @classmethod
-    def tearDownClass(cls):
-        cls.c.drop_database("pymongo_test")
-        cls.c.close()
+    def tearDown(self):
+        self.c.drop_database("pymongo_test")
+        self.c.close()

     def executed_on_which_server(self, client, fn, *args, **kwargs):
         """Execute fn(*args, **kwargs) and return the Server instance used."""
@@ -366,7 +380,7 @@ def _test_fn(self, server_type, fn):
                 break

         assert self.c.primary is not None
-        unused = self.c.secondaries.union({self.c.primary}).difference(used)
+        unused = (self.c.secondaries).union({self.c.primary}).difference(used)
         if unused:
             self.fail("Some members not used for NEAREST: %s" % (unused))
         else:
@@ -401,11 +415,12 @@ def func():

     def test_create_collection(self):
         # create_collection runs listCollections on the primary to check if
         # the collection already exists.
-        self._test_primary_helper(
-            lambda: self.c.pymongo_test.create_collection(
+        def func():
+            return self.c.pymongo_test.create_collection(
                 "some_collection%s" % random.randint(0, sys.maxsize)
             )
-        )
+
+        self._test_primary_helper(func)

     def test_count_documents(self):
         self._test_coll_helper(True, self.c.pymongo_test.test, "count_documents", {})
@@ -507,33 +522,44 @@ def test_read_preference_document_hedge(self):
         for mode, cls in cases.items():
             with self.assertRaises(TypeError):
                 cls(hedge=[])  # type: ignore
-
-            pref = cls(hedge={})
-            self.assertEqual(pref.document, {"mode": mode})
-            out = _maybe_add_read_preference({}, pref)
-            if cls == SecondaryPreferred:
-                # SecondaryPreferred without hedge doesn't add $readPreference.
-                self.assertEqual(out, {})
-            else:
+            with _ignore_deprecations():
+                pref = cls(hedge={})
+                self.assertEqual(pref.document, {"mode": mode})
+                out = _maybe_add_read_preference({}, pref)
+                if cls == SecondaryPreferred:
+                    # SecondaryPreferred without hedge doesn't add $readPreference.
+                    self.assertEqual(out, {})
+                else:
+                    self.assertEqual(out, SON([("$query", {}), ("$readPreference", pref.document)]))
+
+                hedge: dict[str, Any] = {"enabled": True}
+                pref = cls(hedge=hedge)
+                self.assertEqual(pref.document, {"mode": mode, "hedge": hedge})
+                out = _maybe_add_read_preference({}, pref)
                 self.assertEqual(out, SON([("$query", {}), ("$readPreference", pref.document)]))

-            hedge: dict[str, Any] = {"enabled": True}
-            pref = cls(hedge=hedge)
-            self.assertEqual(pref.document, {"mode": mode, "hedge": hedge})
-            out = _maybe_add_read_preference({}, pref)
-            self.assertEqual(out, SON([("$query", {}), ("$readPreference", pref.document)]))
+                hedge = {"enabled": False}
+                pref = cls(hedge=hedge)
+                self.assertEqual(pref.document, {"mode": mode, "hedge": hedge})
+                out = _maybe_add_read_preference({}, pref)
+                self.assertEqual(out, SON([("$query", {}), ("$readPreference", pref.document)]))

-            hedge = {"enabled": False}
-            pref = cls(hedge=hedge)
-            self.assertEqual(pref.document, {"mode": mode, "hedge": hedge})
-            out = _maybe_add_read_preference({}, pref)
-            self.assertEqual(out, SON([("$query", {}), ("$readPreference", pref.document)]))
+                hedge = {"enabled": False, "extra": "option"}
+                pref = cls(hedge=hedge)
+                self.assertEqual(pref.document, {"mode": mode, "hedge": hedge})
+                out = _maybe_add_read_preference({}, pref)
+                self.assertEqual(out, SON([("$query", {}), ("$readPreference", pref.document)]))

-            hedge = {"enabled": False, "extra": "option"}
-            pref = cls(hedge=hedge)
-            self.assertEqual(pref.document, {"mode": mode, "hedge": hedge})
-            out = _maybe_add_read_preference({}, pref)
-            self.assertEqual(out, SON([("$query", {}), ("$readPreference", pref.document)]))
+    def test_read_preference_hedge_deprecated(self):
+        cases = {
+            "primaryPreferred": PrimaryPreferred,
+            "secondary": Secondary,
+            "secondaryPreferred": SecondaryPreferred,
+            "nearest": Nearest,
+        }
+        for _, cls in cases.items():
+            with self.assertRaises(DeprecationWarning):
+                cls(hedge={"enabled": True})

     def test_send_hedge(self):
         cases = {
@@ -545,10 +571,10 @@ def test_send_hedge(self):
             cases["secondary"] = Secondary
         listener = OvertCommandListener()
         client = self.rs_or_single_client(event_listeners=[listener])
-        self.addCleanup(client.close)
         client.admin.command("ping")
         for _mode, cls in cases.items():
-            pref = cls(hedge={"enabled": True})
+            with _ignore_deprecations():
+                pref = cls(hedge={"enabled": True})
             coll = client.test.get_collection("test", read_preference=pref)
             listener.reset()
             coll.find_one()
@@ -645,10 +671,10 @@ def test_mongos(self):
         # tell what shard member a query ran on.
         for pref in (Primary(), PrimaryPreferred(), Secondary(), SecondaryPreferred(), Nearest()):
             qcoll = coll.with_options(read_preference=pref)
-            results = list(qcoll.find().sort([("_id", 1)]))
+            results = qcoll.find().sort([("_id", 1)]).to_list()
             self.assertEqual(first_id, results[0]["_id"])
             self.assertEqual(last_id, results[-1]["_id"])
-            results = list(qcoll.find().sort([("_id", -1)]))
+            results = qcoll.find().sort([("_id", -1)]).to_list()
             self.assertEqual(first_id, results[-1]["_id"])
             self.assertEqual(last_id, results[0]["_id"])

@@ -671,14 +697,14 @@ def test_mongos_max_staleness(self):
         else:
             self.fail("mongos accepted invalid staleness")

-        coll = self.single_client(
-            readPreference="secondaryPreferred", maxStalenessSeconds=120
+        coll = (
+            self.single_client(readPreference="secondaryPreferred", maxStalenessSeconds=120)
         ).pymongo_test.test
         # No error
         coll.find_one()

-        coll = self.single_client(
-            readPreference="secondaryPreferred", maxStalenessSeconds=10
+        coll = (
+            self.single_client(readPreference="secondaryPreferred", maxStalenessSeconds=10)
         ).pymongo_test.test
         try:
             coll.find_one()
diff --git a/test/test_read_write_concern_spec.py b/test/test_read_write_concern_spec.py
index db53b67ae4..4b816b7af9 100644
--- a/test/test_read_write_concern_spec.py
+++ b/test/test_read_write_concern_spec.py
@@ -19,12 +19,13 @@
 import os
 import sys
 import warnings
+from pathlib import Path

 sys.path[0:0] = [""]

 from test import IntegrationTest, client_context, unittest
 from test.unified_format import generate_test_classes
-from test.utils import OvertCommandListener
+from test.utils_shared import OvertCommandListener

 from pymongo import DESCENDING
 from pymongo.errors import (
@@ -39,7 +40,13 @@
 from pymongo.synchronous.mongo_client import MongoClient
 from pymongo.write_concern import WriteConcern

-_TEST_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), "read_write_concern")
+_IS_SYNC = True
+
+# Location of JSON test specifications.
+if _IS_SYNC:
+    TEST_PATH = os.path.join(Path(__file__).resolve().parent, "read_write_concern")
+else:
+    TEST_PATH = os.path.join(Path(__file__).resolve().parent.parent, "read_write_concern")
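+# The asynchronous variant of this module lives one directory deeper
+# (test/asynchronous), so it reaches the shared JSON specs via parent.parent.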

 class TestReadWriteConcernSpec(IntegrationTest):
@@ -47,7 +54,6 @@ def test_omit_default_read_write_concern(self):
         listener = OvertCommandListener()
         # Client with default readConcern and writeConcern
         client = self.rs_or_single_client(event_listeners=[listener])
-        self.addCleanup(client.close)
         collection = client.pymongo_test.collection
         # Prepare for tests of find() and aggregate().
         collection.insert_many([{} for _ in range(10)])
@@ -66,9 +72,12 @@ def insert_command_default_write_concern():
                 "insert", "collection", documents=[{}], write_concern=WriteConcern()
             )

+        def aggregate_op():
+            (collection.aggregate([])).to_list()
+
         ops = [
-            ("aggregate", lambda: list(collection.aggregate([]))),
-            ("find", lambda: list(collection.find())),
+            ("aggregate", aggregate_op),
+            ("find", lambda: collection.find().to_list()),
             ("insert_one", lambda: collection.insert_one({})),
             ("update_one", lambda: collection.update_one({}, {"$set": {"x": 1}})),
             ("update_many", lambda: collection.update_many({}, {"$set": {"x": 1}})),
@@ -169,6 +178,8 @@ def test_raise_wtimeout(self):
         self.disable_replication(client_context.client)
         self.assertWriteOpsRaise(WriteConcern(w=client_context.w, wtimeout=1), WTimeoutError)

+    # https://github.com/mongodb/specifications/tree/master/source/crud/tests
+    # Test 1 (included here instead of test_client_bulk_write.py)
     @client_context.require_failCommand_fail_point
     def test_error_includes_errInfo(self):
         expected_wce = {
@@ -203,11 +214,12 @@ def test_error_includes_errInfo(self):
         }
         self.assertEqual(ctx.exception.details, expected_details)

+    # https://github.com/mongodb/specifications/tree/master/source/crud/tests
+    # Test 2 (included here instead of test_client_bulk_write.py)
     @client_context.require_version_min(4, 9)
     def test_write_error_details_exposes_errinfo(self):
         listener = OvertCommandListener()
         client = self.rs_or_single_client(event_listeners=[listener])
-        self.addCleanup(client.close)
         db = client.errinfotest
         self.addCleanup(client.drop_database, "errinfotest")
         validator = {"x": {"$type": "string"}}
@@ -286,7 +298,7 @@ def run_test(self):


 def create_tests():
-    for dirpath, _, filenames in os.walk(_TEST_PATH):
+    for dirpath, _, filenames in os.walk(TEST_PATH):
         dirname = os.path.split(dirpath)[-1]

         if dirname == "operation":
@@ -321,7 +333,7 @@ def create_tests():
 # PyMongo does not support MapReduce.
 globals().update(
     generate_test_classes(
-        os.path.join(_TEST_PATH, "operation"),
+        os.path.join(TEST_PATH, "operation"),
         module=__name__,
         expected_failures=["MapReduce .*"],
     )
diff --git a/test/test_replica_set_reconfig.py b/test/test_replica_set_reconfig.py
index 4c23d71b69..3371543f27 100644
--- a/test/test_replica_set_reconfig.py
+++ b/test/test_replica_set_reconfig.py
@@ -21,7 +21,7 @@

 from test import MockClientTest, client_context, client_knobs, unittest
 from test.pymongo_mocks import MockClient
-from test.utils import wait_until
+from test.utils_shared import wait_until
 from pymongo import ReadPreference
 from pymongo.errors import ConnectionFailure, ServerSelectionTimeoutError

diff --git a/test/test_retryable_reads.py b/test/test_retryable_reads.py
index 9c3f6b170f..c9f72ae547 100644
--- a/test/test_retryable_reads.py
+++ b/test/test_retryable_reads.py
@@ -19,8 +19,9 @@
 import pprint
 import sys
 import threading
+from test.utils import set_fail_point

-from pymongo.errors import AutoReconnect
+from pymongo.errors import OperationFailure

 sys.path[0:0] = [""]

@@ -31,10 +32,9 @@
     client_knobs,
     unittest,
 )
-from test.utils import (
+from test.utils_shared import (
     CMAPListener,
     OvertCommandListener,
-    set_fail_point,
 )

 from pymongo.monitoring import (
@@ -80,7 +80,6 @@ def run(self):

 class TestPoolPausedError(IntegrationTest):
     # Pools don't get paused in load balanced mode.
     RUN_ON_LOAD_BALANCER = False
-    RUN_ON_SERVERLESS = False

     @client_context.require_sync
     @client_context.require_failCommand_blockConnection
@@ -88,7 +87,7 @@ class TestPoolPausedError(IntegrationTest):
     def test_pool_paused_error_is_retryable(self):
         if "PyPy" in sys.version:
             # Tracked in PYTHON-3519
-            self.skipTest("Test is flakey on PyPy")
+            self.skipTest("Test is flaky on PyPy")
         cmap_listener = CMAPListener()
         cmd_listener = OvertCommandListener()
         client = self.rs_or_single_client(
@@ -148,15 +147,11 @@ def test_pool_paused_error_is_retryable(self):
 class TestRetryableReads(IntegrationTest):
     @client_context.require_multiple_mongoses
     @client_context.require_failCommand_fail_point
-    def test_retryable_reads_in_sharded_cluster_multiple_available(self):
+    def test_retryable_reads_are_retried_on_a_different_mongos_when_one_is_available(self):
         fail_command = {
             "configureFailPoint": "failCommand",
             "mode": {"times": 1},
-            "data": {
-                "failCommands": ["find"],
-                "closeConnection": True,
-                "appName": "retryableReadTest",
-            },
+            "data": {"failCommands": ["find"], "errorCode": 6},
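+            # errorCode 6 (HostUnreachable) is a retryable read error.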
         }

         mongos_clients = []

@@ -169,12 +164,11 @@ def test_retryable_reads_in_sharded_cluster_multiple_available(self):
         listener = OvertCommandListener()
         client = self.rs_or_single_client(
             client_context.mongos_seeds(),
-            appName="retryableReadTest",
             event_listeners=[listener],
             retryReads=True,
         )

-        with self.assertRaises(AutoReconnect):
+        with self.assertRaises(OperationFailure):
             client.t.t.find_one({})

         # Disable failpoints on each mongos
@@ -185,6 +179,86 @@ def test_retryable_reads_in_sharded_cluster_multiple_available(self):
         self.assertEqual(len(listener.failed_events), 2)
         self.assertEqual(len(listener.succeeded_events), 0)

+        # Assert that both events occurred on different mongos.
+        assert listener.failed_events[0].connection_id != listener.failed_events[1].connection_id
+
+    @client_context.require_multiple_mongoses
+    @client_context.require_failCommand_fail_point
+    def test_retryable_reads_are_retried_on_the_same_mongos_when_no_others_are_available(self):
+        fail_command = {
+            "configureFailPoint": "failCommand",
+            "mode": {"times": 1},
+            "data": {"failCommands": ["find"], "errorCode": 6},
+        }
+
+        host = client_context.mongos_seeds().split(",")[0]
+        mongos_client = self.rs_or_single_client(host)
+        set_fail_point(mongos_client, fail_command)
+
+        listener = OvertCommandListener()
+        client = self.rs_or_single_client(
+            host,
+            directConnection=False,
+            event_listeners=[listener],
+            retryReads=True,
+        )
+
+        client.t.t.find_one({})
+
+        # Disable failpoint.
+        fail_command["mode"] = "off"
+        set_fail_point(mongos_client, fail_command)
+
+        # Assert that exactly one failed command event and one succeeded command event occurred.
+        self.assertEqual(len(listener.failed_events), 1)
+        self.assertEqual(len(listener.succeeded_events), 1)
+
+        # Assert that both events occurred on the same mongos.
+        assert listener.succeeded_events[0].connection_id == listener.failed_events[0].connection_id
+
+    @client_context.require_failCommand_fail_point
+    def test_retryable_reads_are_retried_on_the_same_implicit_session(self):
+        listener = OvertCommandListener()
+        client = self.rs_or_single_client(
+            directConnection=False,
+            event_listeners=[listener],
+            retryReads=True,
+        )
+
+        client.t.t.insert_one({"x": 1})
+
+        commands = [
+            ("aggregate", lambda: client.t.t.count_documents({})),
+            ("aggregate", lambda: client.t.t.aggregate([{"$match": {}}])),
+            ("count", lambda: client.t.t.estimated_document_count()),
+            ("distinct", lambda: client.t.t.distinct("x")),
+            ("find", lambda: client.t.t.find_one({})),
+            ("listDatabases", lambda: client.list_databases()),
+            ("listCollections", lambda: client.t.list_collections()),
+            ("listIndexes", lambda: client.t.t.list_indexes()),
+        ]
+
+        for command_name, operation in commands:
+            listener.reset()
+            fail_command = {
+                "configureFailPoint": "failCommand",
+                "mode": {"times": 1},
+                "data": {"failCommands": [command_name], "errorCode": 6},
+            }
+
+            with self.fail_point(fail_command):
+                operation()
+
+            # Assert that both events occurred on the same session.
+            command_docs = [
+                event.command
+                for event in listener.started_events
+                if event.command_name == command_name
+            ]
+            self.assertEqual(len(command_docs), 2)
+            self.assertEqual(command_docs[0]["lsid"], command_docs[1]["lsid"])
+            self.assertIsNot(command_docs[0], command_docs[1])
+

 if __name__ == "__main__":
     unittest.main()
diff --git a/test/test_retryable_reads_unified.py b/test/test_retryable_reads_unified.py
index 3f8740cf4b..b1c6435c9a 100644
--- a/test/test_retryable_reads_unified.py
+++ b/test/test_retryable_reads_unified.py
@@ -15,6 +15,7 @@
 """Test the Retryable Reads unified spec tests."""
 from __future__ import annotations

+import os
 import sys
 from pathlib import Path

@@ -23,8 +24,13 @@
 from test import unittest
 from test.unified_format import generate_test_classes

+_IS_SYNC = True
+
 # Location of JSON test specifications.
-TEST_PATH = Path(__file__).parent / "retryable_reads/unified"
+if _IS_SYNC:
+    TEST_PATH = os.path.join(Path(__file__).resolve().parent, "retryable_reads/unified")
+else:
+    TEST_PATH = os.path.join(Path(__file__).resolve().parent.parent, "retryable_reads/unified")

 # Generate unified tests.
 # PyMongo does not support MapReduce, ListDatabaseObjects or ListCollectionObjects.
diff --git a/test/test_retryable_writes.py b/test/test_retryable_writes.py
index 07bd1db0ba..a74a3e8030 100644
--- a/test/test_retryable_writes.py
+++ b/test/test_retryable_writes.py
@@ -20,6 +20,7 @@
 import pprint
 import sys
 import threading
+from test.utils import flaky, set_fail_point

 sys.path[0:0] = [""]

@@ -30,12 +31,11 @@
     unittest,
 )
 from test.helpers import client_knobs
-from test.utils import (
+from test.utils_shared import (
     CMAPListener,
     DeprecationFilter,
     EventListener,
     OvertCommandListener,
-    set_fail_point,
 )
 from test.version import Version

@@ -129,7 +129,6 @@ def non_retryable_single_statement_ops(coll):

 class IgnoreDeprecationsTest(IntegrationTest):
     RUN_ON_LOAD_BALANCER = True
-    RUN_ON_SERVERLESS = True
     deprecation_filter: DeprecationFilter

     def setUp(self) -> None:
@@ -137,43 +136,14 @@ def setUp(self) -> None:
         self.deprecation_filter = DeprecationFilter()

     def tearDown(self) -> None:
+        super().tearDown()
         self.deprecation_filter.stop()


-class TestRetryableWritesMMAPv1(IgnoreDeprecationsTest):
-    knobs: client_knobs
-
-    def setUp(self) -> None:
-        super().setUp()
-        # Speed up the tests by decreasing the heartbeat frequency.
-        self.knobs = client_knobs(heartbeat_frequency=0.1, min_heartbeat_interval=0.1)
-        self.knobs.enable()
-        self.client = self.rs_or_single_client(retryWrites=True)
-        self.db = self.client.pymongo_test
-
-    def tearDown(self) -> None:
-        self.knobs.disable()
-
-    @client_context.require_no_standalone
-    def test_actionable_error_message(self):
-        if client_context.storage_engine != "mmapv1":
-            raise SkipTest("This cluster is not running MMAPv1")
-
-        expected_msg = (
-            "This MongoDB deployment does not support retryable "
-            "writes. Please add retryWrites=false to your "
-            "connection string."
-        )
-        for method, args, kwargs in retryable_single_statement_ops(self.db.retryable_write_test):
-            with self.assertRaisesRegex(OperationFailure, expected_msg):
-                method(*args, **kwargs)
-
-
 class TestRetryableWrites(IgnoreDeprecationsTest):
     listener: OvertCommandListener
     knobs: client_knobs

-    @client_context.require_no_mmap
     def setUp(self) -> None:
         super().setUp()
         # Speed up the tests by decreasing the heartbeat frequency.
@@ -194,6 +164,7 @@ def tearDown(self):
                 SON([("configureFailPoint", "onPrimaryTransactionalWrite"), ("mode", "off")])
             )
         self.knobs.disable()
+        super().tearDown()

     def test_supported_single_statement_no_retry(self):
         listener = OvertCommandListener()
@@ -419,11 +390,9 @@ def test_retryable_writes_in_sharded_cluster_multiple_available(self):

 class TestWriteConcernError(IntegrationTest):
     RUN_ON_LOAD_BALANCER = True
-    RUN_ON_SERVERLESS = True
     fail_insert: dict

     @client_context.require_replica_set
-    @client_context.require_no_mmap
     @client_context.require_failCommand_fail_point
     def setUp(self) -> None:
         super().setUp()
@@ -490,12 +459,12 @@ def run(self):

 class TestPoolPausedError(IntegrationTest):
     # Pools don't get paused in load balanced mode.
     RUN_ON_LOAD_BALANCER = False
-    RUN_ON_SERVERLESS = False

     @client_context.require_sync
     @client_context.require_failCommand_blockConnection
     @client_context.require_retryable_writes
     @client_knobs(heartbeat_frequency=0.05, min_heartbeat_interval=0.05)
+    @flaky(reason="PYTHON-5291")
     def test_pool_paused_error_is_retryable(self):
         cmap_listener = CMAPListener()
         cmd_listener = OvertCommandListener()
@@ -593,7 +562,6 @@ def test_returns_original_error_code(


 # TODO: Make this a real integration test where we stepdown the primary.
 class TestRetryableWritesTxnNumber(IgnoreDeprecationsTest):
     @client_context.require_replica_set
-    @client_context.require_no_mmap
     def test_increment_transaction_id_without_sending_command(self):
         """Test that the txnNumber field is properly incremented, even when
         the first attempt fails before sending the command.

diff --git a/test/test_retryable_writes_unified.py b/test/test_retryable_writes_unified.py
index da16166ec6..036c410e24 100644
--- a/test/test_retryable_writes_unified.py
+++ b/test/test_retryable_writes_unified.py
@@ -17,14 +17,20 @@

 import os
 import sys
+from pathlib import Path

 sys.path[0:0] = [""]

 from test import unittest
 from test.unified_format import generate_test_classes

+_IS_SYNC = True
+
 # Location of JSON test specifications.
-TEST_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), "retryable_writes", "unified")
+if _IS_SYNC:
+    TEST_PATH = os.path.join(Path(__file__).resolve().parent, "retryable_writes/unified")
+else:
+    TEST_PATH = os.path.join(Path(__file__).resolve().parent.parent, "retryable_writes/unified")

 # Generate unified tests.
 globals().update(generate_test_classes(TEST_PATH, module=__name__))
diff --git a/test/test_run_command.py b/test/test_run_command.py
index 486a4c7e39..d2ef43b97e 100644
--- a/test/test_run_command.py
+++ b/test/test_run_command.py
@@ -1,15 +1,37 @@
+# Copyright 2024-Present MongoDB, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Run Command unified tests."""
 from __future__ import annotations

 import os
 import unittest
+from pathlib import Path
 from test.unified_format import generate_test_classes

-_TEST_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), "run_command")
+_IS_SYNC = True
+
+# Location of JSON test specifications.
+if _IS_SYNC:
+    TEST_PATH = os.path.join(Path(__file__).resolve().parent, "run_command")
+else:
+    TEST_PATH = os.path.join(Path(__file__).resolve().parent.parent, "run_command")

 globals().update(
     generate_test_classes(
-        os.path.join(_TEST_PATH, "unified"),
+        os.path.join(TEST_PATH, "unified"),
         module=__name__,
     )
 )
diff --git a/test/test_sdam_monitoring_spec.py b/test/test_sdam_monitoring_spec.py
index 6b808b159d..2167e561cf 100644
--- a/test/test_sdam_monitoring_spec.py
+++ b/test/test_sdam_monitoring_spec.py
@@ -15,15 +15,17 @@
 """Run the sdam monitoring spec tests."""
 from __future__ import annotations

+import asyncio
 import json
 import os
 import sys
 import time
+from pathlib import Path

 sys.path[0:0] = [""]

 from test import IntegrationTest, client_context, client_knobs, unittest
-from test.utils import (
+from test.utils_shared import (
     ServerAndTopologyEventListener,
     server_name_to_type,
     wait_until,
 )
@@ -39,8 +41,13 @@
 from pymongo.synchronous.monitor import Monitor
 from pymongo.topology_description import TOPOLOGY_TYPE

+_IS_SYNC = True
+
 # Location of JSON test specifications.
-_TEST_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), "sdam_monitoring")
+if _IS_SYNC:
+    TEST_PATH = os.path.join(Path(__file__).resolve().parent, "sdam_monitoring")
+else:
+    TEST_PATH = os.path.join(Path(__file__).resolve().parent.parent, "sdam_monitoring")


 def compare_server_descriptions(expected, actual):
@@ -247,7 +254,7 @@ def _run(self):


 def create_tests():
-    for dirpath, _, filenames in os.walk(_TEST_PATH):
+    for dirpath, _, filenames in os.walk(TEST_PATH):
         for filename in filenames:
             with open(os.path.join(dirpath, filename)) as scenario_stream:
                 scenario_def = json.load(scenario_stream, object_hook=object_hook)
@@ -268,31 +275,33 @@ class TestSdamMonitoring(IntegrationTest):
     coll: Collection

     @classmethod
-    @client_context.require_failCommand_fail_point
     def setUpClass(cls):
-        super().setUp(cls)
         # Speed up the tests by decreasing the event publish frequency.
         cls.knobs = client_knobs(
             events_queue_frequency=0.1, heartbeat_frequency=0.1, min_heartbeat_interval=0.1
         )
         cls.knobs.enable()
         cls.listener = ServerAndTopologyEventListener()
-        retry_writes = client_context.supports_transactions()
-        cls.test_client = cls.unmanaged_rs_or_single_client(
-            event_listeners=[cls.listener], retryWrites=retry_writes
-        )
-        cls.coll = cls.test_client[cls.client.db.name].test
-        cls.coll.insert_one({})

     @classmethod
     def tearDownClass(cls):
-        cls.test_client.close()
         cls.knobs.disable()
-        super().tearDownClass()

+    @client_context.require_failCommand_fail_point
     def setUp(self):
+        super().setUp()
+
+        retry_writes = client_context.supports_transactions()
+        self.test_client = self.rs_or_single_client(
+            event_listeners=[self.listener], retryWrites=retry_writes
+        )
+        self.coll = self.test_client[self.client.db.name].test
+        self.coll.insert_one({})
         self.listener.reset()

+    def tearDown(self):
+        super().tearDown()
+
     def _test_app_error(self, fail_command_opts, expected_error):
         address = self.test_client.address

@@ -334,7 +343,7 @@ def marked_unknown_and_rediscovered():
             and len(self.listener.matching(discovered_node)) >= 1
         )

-        # Topology events are published asynchronously
+        # Topology events are not published synchronously
         wait_until(marked_unknown_and_rediscovered, "rediscover node")

         # Expect a single ServerDescriptionChangedEvent for the network error.
diff --git a/test/test_server.py b/test/test_server.py
index 45d01c10de..ab5a40a79b 100644
--- a/test/test_server.py
+++ b/test/test_server.py
@@ -31,7 +31,7 @@ def test_repr(self):
         hello = Hello({"ok": 1})
         sd = ServerDescription(("localhost", 27017), hello)
         server = Server(sd, pool=object(), monitor=object())  # type: ignore[arg-type]
-        self.assertTrue("Standalone" in str(server))
+        self.assertIn("Standalone", str(server))


 if __name__ == "__main__":
diff --git a/test/test_server_description.py b/test/test_server_description.py
index fe7a5f7119..e8c0098cb6 100644
--- a/test/test_server_description.py
+++ b/test/test_server_description.py
@@ -23,6 +23,7 @@

 from bson.int64 import Int64
 from bson.objectid import ObjectId
+from pymongo import common
 from pymongo.hello import Hello, HelloCompat
 from pymongo.server_description import ServerDescription
 from pymongo.server_type import SERVER_TYPE
@@ -132,11 +133,13 @@ def test_fields(self):
         self.assertEqual(4, s.min_wire_version)
         self.assertEqual(25, s.max_wire_version)

-    def test_default_max_message_size(self):
-        s = parse_hello_response({"ok": 1, HelloCompat.LEGACY_CMD: True, "maxBsonObjectSize": 2})
-
-        # Twice max_bson_size.
-        self.assertEqual(4, s.max_message_size)
+    def test_defaults(self):
+        s = parse_hello_response({"ok": 1, HelloCompat.LEGACY_CMD: True})
+        self.assertEqual(common.MAX_BSON_SIZE, s.max_bson_size)
+        self.assertEqual(common.MAX_MESSAGE_SIZE, s.max_message_size)
+        self.assertEqual(common.MIN_WIRE_VERSION, s.min_wire_version)
+        self.assertEqual(common.MAX_WIRE_VERSION, s.max_wire_version)
+        self.assertEqual(common.MAX_WRITE_BATCH_SIZE, s.max_write_batch_size)

     def test_standalone(self):
         s = parse_hello_response({"ok": 1, HelloCompat.LEGACY_CMD: True})
diff --git a/test/test_server_selection.py b/test/test_server_selection.py
index 984b967f50..4384deac2b 100644
--- a/test/test_server_selection.py
+++ b/test/test_server_selection.py
@@ -17,6 +17,7 @@

 import os
 import sys
+from pathlib import Path

 from pymongo import MongoClient, ReadPreference
 from pymongo.errors import ServerSelectionTimeoutError
@@ -30,24 +31,31 @@

 sys.path[0:0] = [""]

 from test import IntegrationTest, client_context, unittest
-from test.utils import (
-    EventListener,
-    FunctionCallRecorder,
-    OvertCommandListener,
-    wait_until,
-)
+from test.utils import wait_until
 from test.utils_selection_tests import (
     create_selection_tests,
-    get_addresses,
     get_topology_settings_dict,
+)
+from test.utils_selection_tests_shared import (
+    get_addresses,
     make_server_description,
 )
+from test.utils_shared import (
+    FunctionCallRecorder,
+    OvertCommandListener,
+)
+
+_IS_SYNC = True

 # Location of JSON test specifications.
-_TEST_PATH = os.path.join(
-    os.path.dirname(os.path.realpath(__file__)),
-    os.path.join("server_selection", "server_selection"),
-)
+if _IS_SYNC:
+    TEST_PATH = os.path.join(
+        Path(__file__).resolve().parent, "server_selection", "server_selection"
+    )
+else:
+    TEST_PATH = os.path.join(
+        Path(__file__).resolve().parent.parent, "server_selection", "server_selection"
+    )


 class SelectionStoreSelector:
@@ -61,7 +69,7 @@ def __call__(self, selection):
         return selection


-class TestAllScenarios(create_selection_tests(_TEST_PATH)):  # type: ignore
+class TestAllScenarios(create_selection_tests(TEST_PATH)):  # type: ignore
     pass


@@ -79,13 +87,12 @@ def custom_selector(servers):
         client = self.rs_or_single_client(
             server_selector=custom_selector, event_listeners=[listener]
         )
-        self.addCleanup(client.close)
         coll = client.get_database("testdb", read_preference=ReadPreference.NEAREST).coll
         self.addCleanup(client.drop_database, "testdb")

         # Wait the node list to be fully populated.
         def all_hosts_started():
-            return len(client.admin.command(HelloCompat.LEGACY_CMD)["hosts"]) == len(
+            return len((client.admin.command(HelloCompat.LEGACY_CMD))["hosts"]) == len(
                 client._topology._description.readable_servers
             )

@@ -121,15 +128,14 @@ def test_selector_called(self):
         # Client setup.
         mongo_client = self.rs_or_single_client(server_selector=selector)
         test_collection = mongo_client.testdb.test_collection
-        self.addCleanup(mongo_client.close)
         self.addCleanup(mongo_client.drop_database, "testdb")

-        # Do N operations and test selector is called at least N times.
+        # Do N operations and test selector is called at least N-1 times due to fast path.
test_collection.insert_one({"age": 20, "name": "John"}) test_collection.insert_one({"age": 31, "name": "Jane"}) test_collection.update_one({"name": "Jane"}, {"$set": {"age": 21}}) test_collection.find_one({"name": "Roe"}) - self.assertGreaterEqual(selector.call_count, 4) + self.assertGreaterEqual(selector.call_count, 3) @client_context.require_replica_set def test_latency_threshold_application(self): diff --git a/test/test_server_selection_in_window.py b/test/test_server_selection_in_window.py index 05772fa385..fcf2cce0e0 100644 --- a/test/test_server_selection_in_window.py +++ b/test/test_server_selection_in_window.py @@ -15,16 +15,19 @@ """Test the topology module's Server Selection Spec implementation.""" from __future__ import annotations +import asyncio import os import threading +from pathlib import Path from test import IntegrationTest, client_context, unittest -from test.utils import ( +from test.helpers import ConcurrentRunner +from test.utils import flaky +from test.utils_selection_tests import create_topology +from test.utils_shared import ( CMAPListener, OvertCommandListener, - get_pool, wait_until, ) -from test.utils_selection_tests import create_topology from test.utils_spec_runner import SpecTestCreator from pymongo.common import clean_node @@ -32,10 +35,14 @@ from pymongo.operations import _Op from pymongo.read_preferences import ReadPreference +_IS_SYNC = True # Location of JSON test specifications. -TEST_PATH = os.path.join( - os.path.dirname(os.path.realpath(__file__)), os.path.join("server_selection", "in_window") -) +if _IS_SYNC: + TEST_PATH = os.path.join(Path(__file__).resolve().parent, "server_selection", "in_window") +else: + TEST_PATH = os.path.join( + Path(__file__).resolve().parent.parent, "server_selection", "in_window" + ) class TestAllScenarios(unittest.TestCase): @@ -92,7 +99,7 @@ def tests(self, scenario_def): CustomSpecTestCreator(create_test, TestAllScenarios, TEST_PATH).create_tests() -class FinderThread(threading.Thread): +class FinderTask(ConcurrentRunner): def __init__(self, collection, iterations): super().__init__() self.daemon = True @@ -109,17 +116,17 @@ def run(self): class TestProse(IntegrationTest): def frequencies(self, client, listener, n_finds=10): coll = client.test.test - N_THREADS = 10 - threads = [FinderThread(coll, n_finds) for _ in range(N_THREADS)] - for thread in threads: - thread.start() - for thread in threads: - thread.join() - for thread in threads: - self.assertTrue(thread.passed) + N_TASKS = 10 + tasks = [FinderTask(coll, n_finds) for _ in range(N_TASKS)] + for task in tasks: + task.start() + for task in tasks: + task.join() + for task in tasks: + self.assertTrue(task.passed) events = listener.started_events - self.assertEqual(len(events), n_finds * N_THREADS) + self.assertEqual(len(events), n_finds * N_TASKS) nodes = client.nodes self.assertEqual(len(nodes), 2) freqs = {address: 0.0 for address in nodes} @@ -131,6 +138,7 @@ def frequencies(self, client, listener, n_finds=10): @client_context.require_failCommand_appName @client_context.require_multiple_mongoses + @flaky(reason="PYTHON-3689") def test_load_balancing(self): listener = OvertCommandListener() cmap_listener = CMAPListener() diff --git a/test/test_server_selection_logging.py b/test/test_server_selection_logging.py index 2df749cb10..d53d8dc84f 100644 --- a/test/test_server_selection_logging.py +++ b/test/test_server_selection_logging.py @@ -17,19 +17,25 @@ import os import sys +from pathlib import Path sys.path[0:0] = [""] from test import unittest from 
 from test.unified_format import generate_test_classes

+_IS_SYNC = True
+
 # Location of JSON test specifications.
-_TEST_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), "server_selection_logging")
+if _IS_SYNC:
+    TEST_PATH = os.path.join(Path(__file__).resolve().parent, "server_selection_logging")
+else:
+    TEST_PATH = os.path.join(Path(__file__).resolve().parent.parent, "server_selection_logging")

 globals().update(
     generate_test_classes(
-        _TEST_PATH,
+        TEST_PATH,
         module=__name__,
     )
 )
diff --git a/test/test_server_selection_rtt.py b/test/test_server_selection_rtt.py
index a129af4585..2aef36a585 100644
--- a/test/test_server_selection_rtt.py
+++ b/test/test_server_selection_rtt.py
@@ -18,18 +18,24 @@
 import json
 import os
 import sys
+from pathlib import Path

 sys.path[0:0] = [""]

-from test import unittest
+from test import PyMongoTestCase, unittest

 from pymongo.read_preferences import MovingAverage

+_IS_SYNC = True
+
 # Location of JSON test specifications.
-_TEST_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), "server_selection/rtt")
+if _IS_SYNC:
+    TEST_PATH = os.path.join(Path(__file__).resolve().parent, "server_selection/rtt")
+else:
+    TEST_PATH = os.path.join(Path(__file__).resolve().parent.parent, "server_selection/rtt")


-class TestAllScenarios(unittest.TestCase):
+class TestAllScenarios(PyMongoTestCase):
     pass


@@ -49,7 +55,7 @@ def run_scenario(self):

 def create_tests():
-    for dirpath, _, filenames in os.walk(_TEST_PATH):
+    for dirpath, _, filenames in os.walk(TEST_PATH):
         dirname = os.path.split(dirpath)[-1]

         for filename in filenames:
diff --git a/test/test_session.py b/test/test_session.py
index 634efa11c0..9aa56a711e 100644
--- a/test/test_session.py
+++ b/test/test_session.py
@@ -15,10 +15,13 @@
 """Test the client_session module."""
 from __future__ import annotations

+import asyncio
 import copy
 import sys
 import time
+from inspect import iscoroutinefunction
 from io import BytesIO
+from test.helpers import ExceptionCatchingTask
 from typing import Any, Callable, List, Set, Tuple

 from pymongo.synchronous.mongo_client import MongoClient

@@ -27,29 +30,28 @@

 from test import (
     IntegrationTest,
-    PyMongoTestCase,
     SkipTest,
     UnitTest,
     client_context,
     unittest,
 )
-from test.utils import (
+from test.helpers import client_knobs
+from test.utils_shared import (
     EventListener,
-    ExceptionCatchingThread,
+    HeartbeatEventListener,
     OvertCommandListener,
     wait_until,
 )

 from bson import DBRef
 from gridfs.synchronous.grid_file import GridFS, GridFSBucket
-from pymongo import ASCENDING, MongoClient, monitoring
+from pymongo import ASCENDING, MongoClient, _csot, monitoring
 from pymongo.common import _MAX_END_SESSIONS
 from pymongo.errors import ConfigurationError, InvalidOperation, OperationFailure
 from pymongo.operations import IndexModel, InsertOne, UpdateOne
 from pymongo.read_concern import ReadConcern
 from pymongo.synchronous.command_cursor import CommandCursor
 from pymongo.synchronous.cursor import Cursor
-from pymongo.synchronous.helpers import next

 _IS_SYNC = True

@@ -131,8 +133,9 @@ def _test_ops(self, client, *ops):
                 f(*args, **kw)
             self.assertGreaterEqual(len(listener.started_events), 1)
             for event in listener.started_events:
-                self.assertTrue(
-                    "lsid" in event.command,
+                self.assertIn(
+                    "lsid",
+                    event.command,
                     f"{f.__name__} sent no lsid with {event.command_name}",
                 )

@@ -167,8 +170,9 @@ def _test_ops(self, client, *ops):
             self.assertGreaterEqual(len(listener.started_events), 1)
             lsids = []
             for event in listener.started_events:
-                self.assertTrue(
-                    "lsid" in event.command,
+                self.assertIn(
+                    "lsid",
+                    event.command,
                     f"{f.__name__} sent no lsid with {event.command_name}",
                 )

@@ -184,16 +188,16 @@ def _test_ops(self, client, *ops):
                 f"{f.__name__} did not return implicit session to pool",
             )

-    @client_context.require_sync
     def test_implicit_sessions_checkout(self):
         # "To confirm that implicit sessions only allocate their server session after a
         # successful connection checkout" test from Driver Sessions Spec.
         succeeded = False
         lsid_set = set()
-        failures = 0
-        for _ in range(5):
-            listener = OvertCommandListener()
-            client = self.rs_or_single_client(event_listeners=[listener], maxPoolSize=1)
+        listener = OvertCommandListener()
+        client = self.rs_or_single_client(event_listeners=[listener], maxPoolSize=1)
+        # Retry up to 10 times because there is a known race condition that can cause
+        # multiple sessions to be used: connection check-in happens before session check-in.
+        for _ in range(10):
             cursor = client.db.test.find({})
             ops: List[Tuple[Callable, List[Any]]] = [
                 (client.db.test.find_one, [{"_id": 1}]),
@@ -210,34 +214,34 @@ def test_implicit_sessions_checkout(self):
                 (cursor.distinct, ["_id"]),
                 (client.db.list_collections, []),
             ]
-            threads = []
+            tasks = []
             listener.reset()

-            def thread_target(op, *args):
-                res = op(*args)
+            def target(op, *args):
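+                # The async variant awaits coroutine ops here; in this
+                # synchronous version both branches invoke op directly.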
+                if iscoroutinefunction(op):
+                    res = op(*args)
+                else:
+                    res = op(*args)
                 if isinstance(res, (Cursor, CommandCursor)):
-                    list(res)  # type: ignore[call-overload]
+                    res.to_list()

             for op, args in ops:
-                threads.append(
-                    ExceptionCatchingThread(
-                        target=thread_target, args=[op, *args], name=op.__name__
-                    )
+                tasks.append(
+                    ExceptionCatchingTask(target=target, args=[op, *args], name=op.__name__)
                 )
-                threads[-1].start()
-            self.assertEqual(len(threads), len(ops))
-            for thread in threads:
-                thread.join()
-                self.assertIsNone(thread.exc)
-            client.close()
+                tasks[-1].start()
+            self.assertEqual(len(tasks), len(ops))
+            for t in tasks:
+                t.join()
+                self.assertIsNone(t.exc)
             lsid_set.clear()
             for i in listener.started_events:
                 if i.command.get("lsid"):
                     lsid_set.add(i.command.get("lsid")["id"])
             if len(lsid_set) == 1:
+                # Break on first success.
                 succeeded = True
-            else:
-                failures += 1
+                break
         self.assertTrue(succeeded, lsid_set)

     def test_pool_lifo(self):
@@ -373,9 +377,9 @@ def test_cursor_clone(self):

         with self.client.start_session() as s:
             cursor = coll.find(session=s)
-            self.assertTrue(cursor.session is s)
+            self.assertIs(cursor.session, s)
             clone = cursor.clone()
-            self.assertTrue(clone.session is s)
+            self.assertIs(clone.session, s)

         # No explicit session.
         cursor = coll.find(batch_size=2)
@@ -387,7 +391,7 @@ def test_cursor_clone(self):
         next(clone)
         self.assertIsNone(clone.session)
         self.assertIsNotNone(clone._session)
-        self.assertFalse(cursor._session is clone._session)
+        self.assertIsNot(cursor._session, clone._session)
         cursor.close()
         clone.close()

@@ -419,8 +423,9 @@ def test_cursor(self):
                 f(session=s)
             self.assertGreaterEqual(len(listener.started_events), 1)
             for event in listener.started_events:
-                self.assertTrue(
-                    "lsid" in event.command,
+                self.assertIn(
+                    "lsid",
+                    event.command,
                     f"{name} sent no lsid with {event.command_name}",
                 )

@@ -438,15 +443,13 @@ def test_cursor(self):
             listener.reset()
             f(session=None)
             event0 = listener.first_command_started()
-            self.assertTrue(
-                "lsid" in event0.command, f"{name} sent no lsid with {event0.command_name}"
-            )
+            self.assertIn("lsid", event0.command, f"{name} sent no lsid with {event0.command_name}")

             lsid = event0.command["lsid"]
             for event in listener.started_events[1:]:
-                self.assertTrue(
-                    "lsid" in event.command, f"{name} sent no lsid with {event.command_name}"
+                self.assertIn(
+                    "lsid", event.command, f"{name} sent no lsid with {event.command_name}"
                 )

                 self.assertEqual(
@@ -538,9 +541,10 @@ def find(session=None):
             (bucket.download_to_stream_by_name, ["f", sio], {}),
             (find, [], {}),
             (bucket.rename, [1, "f2"], {}),
+            (bucket.rename_by_name, ["f2", "f3"], {}),
             # Delete both files so _test_ops can run these operations twice.
             (bucket.delete, [1], {}),
-            (bucket.delete, [2], {}),
+            (bucket.delete_by_name, ["f"], {}),
         )

     def test_gridfsbucket_cursor(self):
@@ -1026,14 +1030,6 @@ def test_writes_do_not_include_read_concern(self):
         # Not a write, but explain also doesn't support readConcern.
         self._test_no_read_concern(lambda coll, session: coll.find({}, session=session).explain())

-    @client_context.require_no_standalone
-    @client_context.require_version_max(4, 1, 0)
-    def test_aggregate_out_does_not_include_read_concern(self):
-        def alambda(coll, session):
-            (coll.aggregate([{"$out": "aggout"}], session=session)).to_list()
-
-        self._test_no_read_concern(alambda)
-
     @client_context.require_no_standalone
     def test_get_more_does_not_include_read_concern(self):
         coll = self.client.pymongo_test.test
@@ -1076,7 +1072,6 @@ def test_server_not_causal(self):
         self.assertIsNone(act)

     @client_context.require_no_standalone
-    @client_context.require_no_mmap
     def test_read_concern(self):
         with self.client.start_session(causal_consistency=True) as s:
             coll = self.client.pymongo_test.test
@@ -1119,10 +1114,10 @@ def setUp(self):
         if "$clusterTime" not in (client_context.hello):
             raise SkipTest("$clusterTime not supported")

+    # Sessions prose test: 3) $clusterTime in commands
     def test_cluster_time(self):
         listener = SessionTestListener()
-        # Prevent heartbeats from updating $clusterTime between operations.
-        client = self.rs_or_single_client(event_listeners=[listener], heartbeatFrequencyMS=999999)
+        client = self.rs_or_single_client(event_listeners=[listener])
         collection = client.pymongo_test.collection
         # Prepare for tests of find() and aggregate().
collection.insert_many([{} for _ in range(10)]) @@ -1183,15 +1178,17 @@ def aggregate(): self.assertGreaterEqual(len(listener.started_events), 1) for i, event in enumerate(listener.started_events): - self.assertTrue( - "$clusterTime" in event.command, + self.assertIn( + "$clusterTime", + event.command, f"{f.__name__} sent no $clusterTime with {event.command_name}", ) if i > 0: succeeded = listener.succeeded_events[i - 1] - self.assertTrue( - "$clusterTime" in succeeded.reply, + self.assertIn( + "$clusterTime", + succeeded.reply, f"{f.__name__} received no $clusterTime with {succeeded.command_name}", ) @@ -1201,6 +1198,38 @@ def aggregate(): f"{f.__name__} sent wrong $clusterTime with {event.command_name}", ) + # Sessions prose test: 20) Drivers do not gossip `$clusterTime` on SDAM commands + def test_cluster_time_not_used_by_sdam(self): + heartbeat_listener = HeartbeatEventListener() + cmd_listener = OvertCommandListener() + with client_knobs(min_heartbeat_interval=0.01): + c1 = self.single_client( + event_listeners=[heartbeat_listener, cmd_listener], heartbeatFrequencyMS=10 + ) + cluster_time = (c1.admin.command({"ping": 1}))["$clusterTime"] + self.assertEqual(c1._topology.max_cluster_time(), cluster_time) + + # Advance the server's $clusterTime by performing an insert via another client. + self.db.test.insert_one({"advance": "$clusterTime"}) + # Wait until the client C1 processes the next pair of SDAM heartbeat started + succeeded events. + heartbeat_listener.reset() + + def next_heartbeat(): + events = heartbeat_listener.events + for i in range(len(events) - 1): + if isinstance(events[i], monitoring.ServerHeartbeatStartedEvent): + if isinstance(events[i + 1], monitoring.ServerHeartbeatSucceededEvent): + return True + return False + + wait_until(next_heartbeat, "never found pair of heartbeat started + succeeded events") + # Assert that C1's max $clusterTime is still the same and has not been updated by SDAM. + cmd_listener.reset() + c1.admin.command({"ping": 1}) + started = cmd_listener.started_events[0] + self.assertEqual(started.command_name, "ping") + self.assertEqual(started.command["$clusterTime"], cluster_time) + if __name__ == "__main__": unittest.main() diff --git a/test/test_sessions_unified.py b/test/test_sessions_unified.py index c51b4642e7..3c80c70d38 100644 --- a/test/test_sessions_unified.py +++ b/test/test_sessions_unified.py @@ -17,14 +17,21 @@ import os import sys +from pathlib import Path sys.path[0:0] = [""] from test import unittest from test.unified_format import generate_test_classes +_IS_SYNC = True + # Location of JSON test specifications. -TEST_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), "sessions") +if _IS_SYNC: + TEST_PATH = os.path.join(Path(__file__).resolve().parent, "sessions") +else: + TEST_PATH = os.path.join(Path(__file__).resolve().parent.parent, "sessions") + # Generate unified tests. 
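# generate_test_classes builds one unittest.TestCase subclass per JSON spec
# file under TEST_PATH and returns them keyed by class name; injecting them
# into the module globals is what lets unittest discovery find them. The call
# below is equivalent to this unrolled form (a sketch, same behavior):
for _name, _cls in generate_test_classes(TEST_PATH, module=__name__).items():
    globals()[_name] = _cls  # each _cls is a generated unittest.TestCase subclass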
globals().update(generate_test_classes(TEST_PATH, module=__name__)) diff --git a/test/test_son.py b/test/test_son.py index a06d92bcb2..36a6834889 100644 --- a/test/test_son.py +++ b/test/test_son.py @@ -148,8 +148,8 @@ def test_contains_has(self): """has_key and __contains__""" test_son = SON([(1, 100), (2, 200), (3, 300)]) self.assertIn(1, test_son) - self.assertTrue(2 in test_son, "in failed") - self.assertFalse(22 in test_son, "in succeeded when it shouldn't") + self.assertIn(2, test_son, "in failed") + self.assertNotIn(22, test_son, "in succeeded when it shouldn't") self.assertTrue(test_son.has_key(2), "has_key failed") self.assertFalse(test_son.has_key(22), "has_key succeeded when it shouldn't") diff --git a/test/test_srv_polling.py b/test/test_srv_polling.py index e01552bf7d..f5096bea01 100644 --- a/test/test_srv_polling.py +++ b/test/test_srv_polling.py @@ -15,20 +15,24 @@ """Run the SRV support tests.""" from __future__ import annotations +import asyncio import sys -from time import sleep +import time +from test.utils import flaky +from test.utils_shared import FunctionCallRecorder from typing import Any sys.path[0:0] = [""] from test import PyMongoTestCase, client_knobs, unittest -from test.utils import FunctionCallRecorder, wait_until +from test.utils import wait_until import pymongo from pymongo import common from pymongo.errors import ConfigurationError -from pymongo.srv_resolver import _have_dnspython -from pymongo.synchronous.mongo_client import MongoClient +from pymongo.synchronous.srv_resolver import _have_dnspython + +_IS_SYNC = True WAIT_TIME = 0.1 @@ -51,7 +55,9 @@ def __init__( def enable(self): self.old_min_srv_rescan_interval = common.MIN_SRV_RESCAN_INTERVAL - self.old_dns_resolver_response = pymongo.srv_resolver._SrvResolver.get_hosts_and_min_ttl + self.old_dns_resolver_response = ( + pymongo.synchronous.srv_resolver._SrvResolver.get_hosts_and_min_ttl + ) if self.min_srv_rescan_interval is not None: common.MIN_SRV_RESCAN_INTERVAL = self.min_srv_rescan_interval @@ -71,14 +77,14 @@ def mock_get_hosts_and_min_ttl(resolver, *args): else: patch_func = mock_get_hosts_and_min_ttl - pymongo.srv_resolver._SrvResolver.get_hosts_and_min_ttl = patch_func # type: ignore + pymongo.synchronous.srv_resolver._SrvResolver.get_hosts_and_min_ttl = patch_func # type: ignore def __enter__(self): self.enable() def disable(self): common.MIN_SRV_RESCAN_INTERVAL = self.old_min_srv_rescan_interval # type: ignore - pymongo.srv_resolver._SrvResolver.get_hosts_and_min_ttl = ( # type: ignore + pymongo.synchronous.srv_resolver._SrvResolver.get_hosts_and_min_ttl = ( # type: ignore self.old_dns_resolver_response ) @@ -131,7 +137,10 @@ def assert_nodelist_nochange(self, expected_nodelist, client, timeout=(100 * WAI def predicate(): if set(expected_nodelist) == set(self.get_nodelist(client)): - return pymongo.srv_resolver._SrvResolver.get_hosts_and_min_ttl.call_count >= 1 + return ( + pymongo.synchronous.srv_resolver._SrvResolver.get_hosts_and_min_ttl.call_count + >= 1 + ) return False wait_until(predicate, "Node list equals expected nodelist", timeout=timeout) @@ -141,7 +150,7 @@ def predicate(): msg = "Client nodelist %s changed unexpectedly (expected %s)" raise self.fail(msg % (nodelist, expected_nodelist)) self.assertGreaterEqual( - pymongo.srv_resolver._SrvResolver.get_hosts_and_min_ttl.call_count, # type: ignore + pymongo.synchronous.srv_resolver._SrvResolver.get_hosts_and_min_ttl.call_count, # type: ignore 1, "resolver was never called", ) @@ -168,6 +177,7 @@ def dns_resolver_response(): # Patch 
timeouts to ensure short test running times. with SrvPollingKnobs(ttl_time=WAIT_TIME, min_srv_rescan_interval=WAIT_TIME): client = self.simple_client(self.CONNECTION_STRING) + client._connect() self.assert_nodelist_change(self.BASE_SRV_RESPONSE, client) # Patch list of hosts returned by DNS query. with SrvPollingKnobs( @@ -175,6 +185,9 @@ def dns_resolver_response(): ): assertion_method(expected_response, client) + # Close the client early to avoid affecting the next scenario run. + client.close() + def test_addition(self): response = self.BASE_SRV_RESPONSE[:] response.append(("localhost.test.build.10gen.cc", 27019)) @@ -212,6 +225,20 @@ def response_callback(*args): self.run_scenario(response_callback, False) + @flaky(reason="PYTHON-5500", max_runs=3) + def test_dns_failures_logging(self): + from dns import exception + + with self.assertLogs("pymongo.topology", level="DEBUG") as cm: + + def response_callback(*args): + raise exception.Timeout("DNS Failure!") + + self.run_scenario(response_callback, False) + + srv_failure_logs = [r for r in cm.records if "SRV monitor check failed" in r.getMessage()] + self.assertEqual(len(srv_failure_logs), 1) + def test_dns_record_lookup_empty(self): response: list = [] self.run_scenario(response, False) @@ -232,6 +259,7 @@ def final_callback(): ): # Client uses unpatched method to get initial nodelist client = self.simple_client(self.CONNECTION_STRING) + client._connect() # Invalid DNS resolver response should not change nodelist. self.assert_nodelist_nochange(self.BASE_SRV_RESPONSE, client) @@ -241,12 +269,14 @@ def final_callback(): # Nodelist should reflect new valid DNS resolver response. self.assert_nodelist_change(response_final, client) + @flaky(reason="PYTHON-5315") def test_recover_from_initially_empty_seedlist(self): def empty_seedlist(): return [] self._test_recover_from_initial(empty_seedlist) + @flaky(reason="PYTHON-5315") def test_recover_from_initially_erroring_seedlist(self): def erroring_seedlist(): raise ConfigurationError @@ -265,6 +295,7 @@ def nodelist_callback(): with SrvPollingKnobs(ttl_time=WAIT_TIME, min_srv_rescan_interval=WAIT_TIME): client = self.simple_client(self.CONNECTION_STRING, srvMaxHosts=0) + client._connect() with SrvPollingKnobs(nodelist_callback=nodelist_callback): self.assert_nodelist_change(response, client) @@ -279,6 +310,7 @@ def nodelist_callback(): with SrvPollingKnobs(ttl_time=WAIT_TIME, min_srv_rescan_interval=WAIT_TIME): client = self.simple_client(self.CONNECTION_STRING, srvMaxHosts=2) + client._connect() with SrvPollingKnobs(nodelist_callback=nodelist_callback): self.assert_nodelist_change(response, client) @@ -294,8 +326,9 @@ def nodelist_callback(): with SrvPollingKnobs(ttl_time=WAIT_TIME, min_srv_rescan_interval=WAIT_TIME): client = self.simple_client(self.CONNECTION_STRING, srvMaxHosts=2) + client._connect() with SrvPollingKnobs(nodelist_callback=nodelist_callback): - sleep(2 * common.MIN_SRV_RESCAN_INTERVAL) + time.sleep(2 * common.MIN_SRV_RESCAN_INTERVAL) final_topology = set(client.topology_description.server_descriptions()) self.assertIn(("localhost.test.build.10gen.cc", 27017), final_topology) self.assertEqual(len(final_topology), 2) @@ -303,8 +336,9 @@ def nodelist_callback(): def test_does_not_flipflop(self): with SrvPollingKnobs(ttl_time=WAIT_TIME, min_srv_rescan_interval=WAIT_TIME): client = self.simple_client(self.CONNECTION_STRING, srvMaxHosts=1) + client._connect() old = set(client.topology_description.server_descriptions()) - sleep(4 * WAIT_TIME) + time.sleep(4 * WAIT_TIME) new = 
set(client.topology_description.server_descriptions()) self.assertSetEqual(old, new) @@ -322,6 +356,7 @@ def nodelist_callback(): client = self.simple_client( "mongodb+srv://test22.test.build.10gen.cc/?srvServiceName=customname" ) + client._connect() with SrvPollingKnobs(nodelist_callback=nodelist_callback): self.assert_nodelist_change(response, client) @@ -337,15 +372,15 @@ def resolver_response(): nodelist_callback=resolver_response, ): client = self.simple_client(self.CONNECTION_STRING) - self.assertRaises( - AssertionError, self.assert_nodelist_change, modified, client, timeout=WAIT_TIME / 2 - ) + client._connect() + with self.assertRaises(AssertionError): + self.assert_nodelist_change(modified, client, timeout=WAIT_TIME / 2) def test_import_dns_resolver(self): # Regression test for PYTHON-4407 import dns.resolver - self.assertTrue(hasattr(dns.resolver, "resolve")) + self.assertTrue(hasattr(dns.resolver, "resolve") or hasattr(dns.resolver, "query")) if __name__ == "__main__": diff --git a/test/test_ssl.py b/test/test_ssl.py index 04db9b61a4..b1e9a65eb5 100644 --- a/test/test_ssl.py +++ b/test/test_ssl.py @@ -16,6 +16,7 @@ from __future__ import annotations import os +import pathlib import socket import sys @@ -31,7 +32,7 @@ remove_all_users, unittest, ) -from test.utils import ( +from test.utils_shared import ( EventListener, OvertCommandListener, cat_files, @@ -42,7 +43,7 @@ from pymongo import MongoClient, ssl_support from pymongo.errors import ConfigurationError, ConnectionFailure, OperationFailure from pymongo.hello import HelloCompat -from pymongo.ssl_support import HAVE_SSL, _ssl, get_ssl_context +from pymongo.ssl_support import HAVE_PYSSL, HAVE_SSL, _ssl, get_ssl_context from pymongo.write_concern import WriteConcern _HAVE_PYOPENSSL = False @@ -65,7 +66,13 @@ if HAVE_SSL: import ssl -CERT_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), "certificates") +_IS_SYNC = True + +if _IS_SYNC: + CERT_PATH = os.path.join(pathlib.Path(__file__).resolve().parent, "certificates") +else: + CERT_PATH = os.path.join(pathlib.Path(__file__).resolve().parent.parent, "certificates") + CLIENT_PEM = os.path.join(CERT_PATH, "client.pem") CLIENT_ENCRYPTED_PEM = os.path.join(CERT_PATH, "password_protected.pem") CA_PEM = os.path.join(CERT_PATH, "ca.pem") @@ -127,7 +134,7 @@ def test_config_ssl(self): @unittest.skipUnless(_HAVE_PYOPENSSL, "PyOpenSSL is not available.") def test_use_pyopenssl_when_available(self): - self.assertTrue(_ssl.IS_PYOPENSSL) + self.assertTrue(HAVE_PYSSL) @unittest.skipUnless(_HAVE_PYOPENSSL, "Cannot test without PyOpenSSL") def test_load_trusted_ca_certs(self): @@ -144,36 +151,36 @@ def assertClientWorks(self, client): ) coll.drop() coll.insert_one({"ssl": True}) - self.assertTrue(coll.find_one()["ssl"]) + self.assertTrue((coll.find_one())["ssl"]) coll.drop() - @classmethod @unittest.skipUnless(HAVE_SSL, "The ssl module is not available.") - def setUpClass(cls): - super().setUpClass() + def setUp(self): + super().setUp() # MongoClient should connect to the primary by default. 
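# The setUp/tearDown pair below temporarily overrides the class-level default
# port so that clients built without an explicit port reach the test
# deployment, then restores it afterwards. The save/restore idiom in
# isolation (a sketch, not part of the patch):
saved = MongoClient.PORT  # stock default, normally 27017
MongoClient.PORT = client_context.port
try:
    pass  # exercise clients that rely on the default port here
finally:
    MongoClient.PORT = saved  # always restore the stock default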
- cls.saved_port = MongoClient.PORT + self.saved_port = MongoClient.PORT MongoClient.PORT = client_context.port - @classmethod - def tearDownClass(cls): - MongoClient.PORT = cls.saved_port - super().tearDownClass() + def tearDown(self): + MongoClient.PORT = self.saved_port @client_context.require_tls def test_simple_ssl(self): + if "PyPy" in sys.version: + self.skipTest("Test is flaky on PyPy") # Expects the server to be running with ssl and with # no --sslPEMKeyFile or with --sslWeakCertificateValidation self.assertClientWorks(self.client) @client_context.require_tlsCertificateKeyFile + @client_context.require_no_api_version @ignore_deprecations def test_tlsCertificateKeyFilePassword(self): # Expects the server to be running with server.pem and ca.pem # # --sslPEMKeyFile=/path/to/pymongo/test/certificates/server.pem # --sslCAFile=/path/to/pymongo/test/certificates/ca.pem - if not hasattr(ssl, "SSLContext") and not _ssl.IS_PYOPENSSL: + if not hasattr(ssl, "SSLContext") and not HAVE_PYSSL: self.assertRaises( ConfigurationError, self.simple_client, @@ -297,26 +304,31 @@ def test_cert_ssl_uri_support(self): client = self.simple_client(uri_fmt % (CLIENT_PEM, "true", CA_PEM)) self.assertClientWorks(client) + @unittest.skipIf( + "PyPy" in sys.version and not _IS_SYNC, + "https://github.com/pypy/pypy/issues/5131 flaky on async PyPy due to SSL EOF", + ) @client_context.require_tlsCertificateKeyFile @client_context.require_server_resolvable + @client_context.require_no_api_version @ignore_deprecations def test_cert_ssl_validation_hostname_matching(self): # Expects the server to be running with server.pem and ca.pem # # --sslPEMKeyFile=/path/to/pymongo/test/certificates/server.pem # --sslCAFile=/path/to/pymongo/test/certificates/ca.pem - ctx = get_ssl_context(None, None, None, None, True, True, False) + ctx = get_ssl_context(None, None, None, None, True, True, False, _IS_SYNC) self.assertFalse(ctx.check_hostname) - ctx = get_ssl_context(None, None, None, None, True, False, False) + ctx = get_ssl_context(None, None, None, None, True, False, False, _IS_SYNC) self.assertFalse(ctx.check_hostname) - ctx = get_ssl_context(None, None, None, None, False, True, False) + ctx = get_ssl_context(None, None, None, None, False, True, False, _IS_SYNC) self.assertFalse(ctx.check_hostname) - ctx = get_ssl_context(None, None, None, None, False, False, False) + ctx = get_ssl_context(None, None, None, None, False, False, False, _IS_SYNC) self.assertTrue(ctx.check_hostname) response = self.client.admin.command(HelloCompat.LEGACY_CMD) - with self.assertRaises(ConnectionFailure): + with self.assertRaises(ConnectionFailure) as cm: connected( self.simple_client( "server", @@ -328,6 +340,8 @@ def test_cert_ssl_validation_hostname_matching(self): **self.credentials, # type: ignore[arg-type] ) ) + # PYTHON-5414 Check for "module service_identity has no attribute SICertificateError" + self.assertNotIn("has no attribute", str(cm.exception)) connected( self.simple_client( @@ -372,9 +386,11 @@ def test_cert_ssl_validation_hostname_matching(self): ) @client_context.require_tlsCertificateKeyFile + @client_context.require_sync + @client_context.require_no_api_version @ignore_deprecations def test_tlsCRLFile_support(self): - if not hasattr(ssl, "VERIFY_CRL_CHECK_LEAF") or _ssl.IS_PYOPENSSL: + if not hasattr(ssl, "VERIFY_CRL_CHECK_LEAF") or HAVE_PYSSL: self.assertRaises( ConfigurationError, self.simple_client, @@ -419,8 +435,13 @@ def test_tlsCRLFile_support(self): self.simple_client(uri_fmt % (CRL_PEM, CA_PEM), 
**self.credentials) # type: ignore[arg-type] ) + @unittest.skipIf( + "PyPy" in sys.version and not _IS_SYNC, + "https://github.com/pypy/pypy/issues/5131 flaky on async PyPy due to SSL EOF", + ) @client_context.require_tlsCertificateKeyFile @client_context.require_server_resolvable + @client_context.require_no_api_version @ignore_deprecations def test_validation_with_system_ca_certs(self): # Expects the server to be running with server.pem and ca.pem. @@ -465,7 +486,7 @@ def test_validation_with_system_ca_certs(self): ) def test_system_certs_config_error(self): - ctx = get_ssl_context(None, None, None, None, True, True, False) + ctx = get_ssl_context(None, None, None, None, True, True, False, _IS_SYNC) if (sys.platform != "win32" and hasattr(ctx, "set_default_verify_paths")) or hasattr( ctx, "load_default_certs" ): @@ -496,11 +517,11 @@ def test_certifi_support(self): # Force the test on Windows, regardless of environment. ssl_support.HAVE_WINCERTSTORE = False try: - ctx = get_ssl_context(None, None, CA_PEM, None, False, False, False) + ctx = get_ssl_context(None, None, CA_PEM, None, False, False, False, _IS_SYNC) ssl_sock = ctx.wrap_socket(socket.socket()) self.assertEqual(ssl_sock.ca_certs, CA_PEM) - ctx = get_ssl_context(None, None, None, None, False, False, False) + ctx = get_ssl_context(None, None, None, None, False, False, False, _IS_SYNC) ssl_sock = ctx.wrap_socket(socket.socket()) self.assertEqual(ssl_sock.ca_certs, ssl_support.certifi.where()) finally: @@ -517,16 +538,17 @@ def test_wincertstore(self): if not ssl_support.HAVE_WINCERTSTORE: raise SkipTest("Need wincertstore to test wincertstore.") - ctx = get_ssl_context(None, None, CA_PEM, None, False, False, False) + ctx = get_ssl_context(None, None, CA_PEM, None, False, False, False, _IS_SYNC) ssl_sock = ctx.wrap_socket(socket.socket()) self.assertEqual(ssl_sock.ca_certs, CA_PEM) - ctx = get_ssl_context(None, None, None, None, False, False, False) + ctx = get_ssl_context(None, None, None, None, False, False, False, _IS_SYNC) ssl_sock = ctx.wrap_socket(socket.socket()) self.assertEqual(ssl_sock.ca_certs, ssl_support._WINCERTS.name) @client_context.require_auth @client_context.require_tlsCertificateKeyFile + @client_context.require_no_api_version @ignore_deprecations def test_mongodb_x509_auth(self): host, port = client_context.host, client_context.port @@ -548,7 +570,6 @@ def test_mongodb_x509_auth(self): tlsAllowInvalidCertificates=True, tlsCertificateKeyFile=CLIENT_PEM, ) - self.addCleanup(noauth.close) with self.assertRaises(OperationFailure): noauth.pymongo_test.test.find_one() @@ -562,7 +583,6 @@ def test_mongodb_x509_auth(self): tlsCertificateKeyFile=CLIENT_PEM, event_listeners=[listener], ) - self.addCleanup(auth.close) # No error auth.pymongo_test.test.find_one() @@ -581,7 +601,6 @@ def test_mongodb_x509_auth(self): client = self.simple_client( uri, ssl=True, tlsAllowInvalidCertificates=True, tlsCertificateKeyFile=CLIENT_PEM ) - self.addCleanup(client.close) # No error client.pymongo_test.test.find_one() @@ -589,7 +608,6 @@ def test_mongodb_x509_auth(self): client = self.simple_client( uri, ssl=True, tlsAllowInvalidCertificates=True, tlsCertificateKeyFile=CLIENT_PEM ) - self.addCleanup(client.close) # No error client.pymongo_test.test.find_one() # Auth should fail if username and certificate do not match @@ -602,7 +620,6 @@ def test_mongodb_x509_auth(self): bad_client = self.simple_client( uri, ssl=True, tlsAllowInvalidCertificates=True, tlsCertificateKeyFile=CLIENT_PEM ) - self.addCleanup(bad_client.close) 
with self.assertRaises(OperationFailure): bad_client.pymongo_test.test.find_one() @@ -615,7 +632,6 @@ def test_mongodb_x509_auth(self): tlsAllowInvalidCertificates=True, tlsCertificateKeyFile=CLIENT_PEM, ) - self.addCleanup(bad_client.close) with self.assertRaises(OperationFailure): bad_client.pymongo_test.test.find_one() @@ -642,6 +658,7 @@ def test_mongodb_x509_auth(self): self.fail("Invalid certificate accepted.") @client_context.require_tlsCertificateKeyFile + @client_context.require_no_api_version @ignore_deprecations def test_connect_with_ca_bundle(self): def remove(path): @@ -659,6 +676,14 @@ def remove(path): ) as client: self.assertTrue(client.admin.command("ping")) + @client_context.require_async + @unittest.skipUnless(_HAVE_PYOPENSSL, "PyOpenSSL is not available.") + @unittest.skipUnless(HAVE_SSL, "The ssl module is not available.") + def test_pyopenssl_ignored_in_async(self): + client = MongoClient("mongodb://localhost:27017?tls=true&tlsAllowInvalidCertificates=true") + client.admin.command("ping") # command doesn't matter, just needs it to connect + client.close() + if __name__ == "__main__": unittest.main() diff --git a/test/test_streaming_protocol.py b/test/test_streaming_protocol.py index d782aa1dd7..927230091f 100644 --- a/test/test_streaming_protocol.py +++ b/test/test_streaming_protocol.py @@ -21,7 +21,7 @@ sys.path[0:0] = [""] from test import IntegrationTest, client_context, unittest -from test.utils import ( +from test.utils_shared import ( HeartbeatEventListener, ServerEventListener, wait_until, @@ -30,6 +30,8 @@ from pymongo import monitoring from pymongo.hello import HelloCompat +_IS_SYNC = True + class TestStreamingProtocol(IntegrationTest): @client_context.require_failCommand_appName @@ -41,7 +43,6 @@ def test_failCommand_streaming(self): heartbeatFrequencyMS=500, appName="failingHeartbeatTest", ) - self.addCleanup(client.close) # Force a connection. client.admin.command("ping") address = client.address @@ -78,7 +79,7 @@ def marked_unknown(): def rediscovered(): return len(listener.matching(_discovered_node)) >= 1 - # Topology events are published asynchronously + # Topology events are not published synchronously wait_until(marked_unknown, "mark node unknown") wait_until(rediscovered, "rediscover node") @@ -108,7 +109,6 @@ def test_streaming_rtt(self): client = self.rs_or_single_client( event_listeners=[listener, hb_listener], heartbeatFrequencyMS=500, appName=name ) - self.addCleanup(client.close) # Force a connection. client.admin.command("ping") address = client.address @@ -156,7 +156,6 @@ def test_monitor_waits_after_server_check_error(self): client = self.single_client( appName="SDAMMinHeartbeatFrequencyTest", serverSelectionTimeoutMS=5000 ) - self.addCleanup(client.close) # Force a connection. client.admin.command("ping") duration = time.time() - start @@ -173,7 +172,7 @@ def test_monitor_waits_after_server_check_error(self): # 2504ms: application handshake succeeds # 2505ms: ping command succeeds self.assertGreaterEqual(duration, 2) - self.assertLessEqual(duration, 3.5) + self.assertLessEqual(duration, 4.0) @client_context.require_failCommand_appName def test_heartbeat_awaited_flag(self): @@ -183,7 +182,6 @@ def test_heartbeat_awaited_flag(self): heartbeatFrequencyMS=500, appName="heartbeatEventAwaitedFlag", ) - self.addCleanup(client.close) # Force a connection. 
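# Constructing a client does not block on a connection; the first command
# drives server selection and the connection handshake, which is why these
# tests issue a throwaway "ping" before asserting on events. The idiom in
# isolation (a sketch):
lazy = self.rs_or_single_client()  # no blocking network I/O yet
lazy.admin.command("ping")  # first round trip: select a server, handshake, execute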
client.admin.command("ping") diff --git a/test/test_timestamp.py b/test/test_timestamp.py index 7495d2ec9f..ef7d8bde15 100644 --- a/test/test_timestamp.py +++ b/test/test_timestamp.py @@ -33,7 +33,7 @@ def test_timestamp(self): t = Timestamp(123, 456) self.assertEqual(t.time, 123) self.assertEqual(t.inc, 456) - self.assertTrue(isinstance(t, Timestamp)) + self.assertIsInstance(t, Timestamp) def test_datetime(self): d = datetime.datetime(2010, 5, 5, tzinfo=utc) diff --git a/test/test_topology.py b/test/test_topology.py index 86aa87c2cc..d3bbcd9060 100644 --- a/test/test_topology.py +++ b/test/test_topology.py @@ -23,13 +23,14 @@ from test import client_knobs, unittest from test.pymongo_mocks import DummyMonitor -from test.utils import MockPool, wait_until +from test.utils import MockPool, flaky +from test.utils_shared import wait_until from bson.objectid import ObjectId from pymongo import common from pymongo.errors import AutoReconnect, ConfigurationError, ConnectionFailure from pymongo.hello import Hello, HelloCompat -from pymongo.read_preferences import ReadPreference, Secondary +from pymongo.read_preferences import Primary, ReadPreference, Secondary from pymongo.server_description import ServerDescription from pymongo.server_selectors import any_server_selector, writable_server_selector from pymongo.server_type import SERVER_TYPE @@ -50,7 +51,10 @@ def get_topology_type(self): def create_mock_topology( - seeds=None, replica_set_name=None, monitor_class=DummyMonitor, direct_connection=False + seeds=None, + replica_set_name=None, + monitor_class=DummyMonitor, + direct_connection=False, ): partitioned_seeds = list(map(common.partition_node, seeds or ["a"])) topology_settings = TopologySettings( @@ -120,7 +124,26 @@ def test_timeout_configuration(self): self.assertEqual(1, monitor._pool.opts.socket_timeout) # The monitor, not its pool, is responsible for calling hello. - self.assertFalse(monitor._pool.handshake) + self.assertTrue(monitor._pool.is_sdam) + + def test_selector_fast_path(self): + topology = create_mock_topology(seeds=["a", "b:27018"], replica_set_name="foo") + description = topology.description + description._topology_type = TOPOLOGY_TYPE.ReplicaSetWithPrimary + + # There is no primary yet, so it should give an empty list. + self.assertEqual(description.apply_selector(Primary()), []) + + # If we set a primary server, we should get it back. + sd = list(description._server_descriptions.values())[0] + sd._server_type = SERVER_TYPE.RSPrimary + self.assertEqual(description.apply_selector(Primary()), [sd]) + + # If there is a custom selector, it should be applied. + def custom_selector(servers): + return [] + + self.assertEqual(description.apply_selector(Primary(), custom_selector=custom_selector), []) class TestSingleServerTopology(TopologyTest): @@ -558,7 +581,7 @@ def test_wire_version(self): ) self.assertEqual(server.description.min_wire_version, 1) - self.assertEqual(server.description.max_wire_version, 7) + self.assertEqual(server.description.max_wire_version, 8) t.select_servers(any_server_selector, _Op.TEST) # Incompatible. @@ -749,6 +772,7 @@ def get_primary(): class TestTopologyErrors(TopologyTest): # Errors when calling hello. + @flaky(reason="PYTHON-5366") def test_pool_reset(self): # hello succeeds at first, then always raises socket error. 
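# The mock pattern used here: a closure over a mutable counter, so the
# patched hello check can succeed once and then fail forever after (a list
# is used so the nested function can mutate it without nonlocal). A sketch
# with illustrative names:
calls = [0]

def flaky_hello():
    calls[0] += 1
    if calls[0] == 1:
        return Hello({"ok": 1})  # first server check succeeds
    raise ConnectionFailure("simulated socket error")  # every later check fails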
hello_count = [0] diff --git a/test/test_transactions.py b/test/test_transactions.py index 949b88e60b..813d6a688d 100644 --- a/test/test_transactions.py +++ b/test/test_transactions.py @@ -20,11 +20,13 @@ from test.utils_spec_runner import SpecRunner from gridfs.synchronous.grid_file import GridFS, GridFSBucket +from pymongo.server_selectors import writable_server_selector +from pymongo.synchronous.pool import PoolState sys.path[0:0] = [""] from test import IntegrationTest, client_context, unittest -from test.utils import ( +from test.utils_shared import ( OvertCommandListener, wait_until, ) @@ -32,8 +34,9 @@ from bson import encode from bson.raw_bson import RawBSONDocument -from pymongo import WriteConcern +from pymongo import WriteConcern, _csot from pymongo.errors import ( + AutoReconnect, CollectionInvalid, ConfigurationError, ConnectionFailure, @@ -47,7 +50,6 @@ from pymongo.synchronous.client_session import TransactionOptions from pymongo.synchronous.command_cursor import CommandCursor from pymongo.synchronous.cursor import Cursor -from pymongo.synchronous.helpers import next _IS_SYNC = True @@ -70,8 +72,6 @@ def maybe_skip_scenario(self, test): class TestTransactions(TransactionsBase): - RUN_ON_SERVERLESS = True - @client_context.require_transactions def test_transaction_options_validation(self): default_options = TransactionOptions() @@ -287,6 +287,14 @@ def gridfs_open_upload_stream(*args, **kwargs): "new-name", ), ), + ( + bucket.rename_by_name, + ( + "new-name", + "new-name2", + ), + ), + (bucket.delete_by_name, ("new-name2",)), ] with client.start_session() as s, s.start_transaction(): @@ -378,6 +386,22 @@ def find_raw_batches(*args, **kwargs): if isinstance(res, (CommandCursor, Cursor)): res.to_list() + @client_context.require_transactions + def test_transaction_pool_cleared_error_labelled_transient(self): + c = self.single_client() + + with self.assertRaises(AutoReconnect) as context: + with c.start_session() as session: + with session.start_transaction(): + server = c._select_server(writable_server_selector, session, "test") + # Pause the server's pool, causing it to fail connection checkout. + server.pool.state = PoolState.PAUSED + with c._checkout(server, session): + pass + + # Verify that the TransientTransactionError label is present in the error. + self.assertTrue(context.exception.has_error_label("TransientTransactionError")) + class PatchSessionTimeout: """Patches the client_session's with_transaction timeout for testing.""" @@ -402,15 +426,10 @@ def setUp(self) -> None: for address in client_context.mongoses: self.mongos_clients.append(self.single_client("{}:{}".format(*address))) - def _set_fail_point(self, client, command_args): - cmd = {"configureFailPoint": "failCommand"} - cmd.update(command_args) - client.admin.command(cmd) - def set_fail_point(self, command_args): clients = self.mongos_clients if self.mongos_clients else [self.client] for client in clients: - self._set_fail_point(client, command_args) + self.configure_fail_point(client, command_args) @client_context.require_transactions def test_callback_raises_custom_error(self): @@ -571,5 +590,29 @@ def callback(session): self.assertFalse(s.in_transaction) +class TestOptionsInsideTransactionProse(TransactionsBase): + @client_context.require_transactions + @client_context.require_no_standalone + def test_case_1(self): + # Write concern not inherited from collection object inside transaction + # Create a MongoClient running against a configured sharded/replica set/load balanced cluster. 
+ client = client_context.client + coll = client[self.db.name].test + coll.delete_many({}) + # Start a new session on the client. + with client.start_session() as s: + # Start a transaction on the session. + s.start_transaction() + # Instantiate a collection object in the driver with a default write concern of { w: 0 }. + inner_coll = coll.with_options(write_concern=WriteConcern(w=0)) + # Insert the document { n: 1 } on the instantiated collection. + result = inner_coll.insert_one({"n": 1}, session=s) + # Commit the transaction. + s.commit_transaction() + # End the session. + # Ensure the document was inserted and no error was thrown from the transaction. + assert result.inserted_id is not None + + if __name__ == "__main__": unittest.main() diff --git a/test/test_transactions_unified.py b/test/test_transactions_unified.py index 81137bf658..4ab4885e2a 100644 --- a/test/test_transactions_unified.py +++ b/test/test_transactions_unified.py @@ -17,28 +17,36 @@ import os import sys +from pathlib import Path sys.path[0:0] = [""] from test import client_context, unittest from test.unified_format import generate_test_classes +_IS_SYNC = True + -@client_context.require_no_mmap def setUpModule(): pass # Location of JSON test specifications. -TEST_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), "transactions", "unified") +if _IS_SYNC: + TEST_PATH = os.path.join(Path(__file__).resolve().parent, "transactions/unified") +else: + TEST_PATH = os.path.join(Path(__file__).resolve().parent.parent, "transactions/unified") # Generate unified tests. globals().update(generate_test_classes(TEST_PATH, module=__name__)) # Location of JSON test specifications for transactions-convenient-api. -TEST_PATH = os.path.join( - os.path.dirname(os.path.realpath(__file__)), "transactions-convenient-api", "unified" -) +if _IS_SYNC: + TEST_PATH = os.path.join(Path(__file__).resolve().parent, "transactions-convenient-api/unified") +else: + TEST_PATH = os.path.join( + Path(__file__).resolve().parent.parent, "transactions-convenient-api/unified" + ) # Generate unified tests. globals().update(generate_test_classes(TEST_PATH, module=__name__)) diff --git a/test/test_typing.py b/test/test_typing.py index bfe4d032c1..17dc21b4e0 100644 --- a/test/test_typing.py +++ b/test/test_typing.py @@ -15,6 +15,7 @@ """Test that each file in mypy_fails/ actually fails mypy, and test some sample client code that uses PyMongo typings. 
""" + from __future__ import annotations import os @@ -37,7 +38,8 @@ if TYPE_CHECKING: from typing_extensions import NotRequired, TypedDict - from bson import ObjectId + from bson import Binary, ObjectId + from bson.binary import BinaryVector, BinaryVectorDtype class Movie(TypedDict): name: str @@ -67,7 +69,7 @@ class ImplicitMovie(TypedDict): from test import IntegrationTest, PyMongoTestCase, client_context -from bson import CodecOptions, decode, decode_all, decode_file_iter, decode_iter, encode +from bson import CodecOptions, ObjectId, decode, decode_all, decode_file_iter, decode_iter, encode from bson.raw_bson import RawBSONDocument from bson.son import SON from pymongo import ASCENDING, MongoClient @@ -139,6 +141,32 @@ def to_list(iterable: Iterable[Dict[str, Any]]) -> List[Dict[str, Any]]: docs = to_list(cursor) self.assertTrue(docs) + def test_distinct(self) -> None: + self.coll.delete_many({}) + self.coll.insert_many( + [ + {"_id": None}, + {"_id": 0}, + {"_id": ""}, + {"_id": ObjectId()}, + {"_id": True}, + ] + ) + + def collection_distinct( + collection: Collection, + ) -> list[None | int | str | ObjectId | bool]: + return collection.distinct("_id") + + def cursor_distinct( + collection: Collection, + ) -> list[None | int | str | ObjectId | bool]: + cursor = collection.find() + return cursor.distinct("_id") + + collection_distinct(self.coll) + cursor_distinct(self.coll) + @only_type_check def test_bulk_write(self) -> None: self.coll.insert_one({}) @@ -168,7 +196,7 @@ def test_bulk_write_heterogeneous(self): InsertOne(Movie(name="American Graffiti", year=1973)), ReplaceOne( {}, - {"name": "American Graffiti", "year": "WRONG_TYPE"}, # type:ignore[typeddict-item] + {"name": "American Graffiti", "year": "WRONG_TYPE"}, # type:ignore[arg-type] ), DeleteOne({}), ] @@ -241,7 +269,7 @@ def test_with_options(self) -> None: assert retrieved is not None assert retrieved["name"] == "foo" # We expect a type error here. - assert retrieved["other"] == 1 # type:ignore[typeddict-item] + assert retrieved["other"] == 1 # type:ignore[misc] class TestDecode(unittest.TestCase): @@ -414,11 +442,11 @@ def test_typeddict_document_type_insertion(self) -> None: bad_mov = {"name": "THX-1138", "year": "WRONG TYPE"} bad_movie = Movie(name="THX-1138", year="WRONG TYPE") # type: ignore[typeddict-item] coll.insert_one(bad_mov) # type:ignore[arg-type] - coll.insert_one({"name": "THX-1138", "year": "WRONG TYPE"}) # type: ignore[typeddict-item] + coll.insert_one({"name": "THX-1138", "year": "WRONG TYPE"}) # type: ignore[arg-type] coll.insert_one(bad_movie) coll.insert_many([bad_mov]) # type: ignore[list-item] coll.insert_many( - [{"name": "THX-1138", "year": "WRONG TYPE"}] # type: ignore[typeddict-item] + [{"name": "THX-1138", "year": "WRONG TYPE"}] # type: ignore[list-item] ) coll.insert_many([bad_movie]) @@ -473,7 +501,7 @@ def test_typeddict_not_required_document_type(self) -> None: # This should fail because the output is a Movie. 
assert out["foo"] # type:ignore[typeddict-item] # pyright gives reportTypedDictNotRequiredAccess for the following: - assert out["_id"] # type:ignore + assert out["_id"] # type:ignore[unused-ignore] @only_type_check def test_typeddict_empty_document_type(self) -> None: @@ -494,7 +522,7 @@ def test_typeddict_find_notrequired(self): out = coll.find_one({}) assert out is not None # pyright gives reportTypedDictNotRequiredAccess for the following: - assert out["_id"] # type:ignore + assert out["_id"] # type:ignore[unused-ignore] @only_type_check def test_raw_bson_document_type(self) -> None: @@ -591,5 +619,22 @@ def test_son_document_type(self) -> None: obj["a"] = 1 +class TestBSONFromVectorType(unittest.TestCase): + @only_type_check + def test_from_vector_binaryvector(self): + list_vector = BinaryVector([127, 7], BinaryVectorDtype.INT8) + Binary.from_vector(list_vector) + + @only_type_check + def test_from_vector_list_int(self): + list_vector = [127, 7] + Binary.from_vector(list_vector, BinaryVectorDtype.INT8) + + @only_type_check + def test_from_vector_list_float(self): + list_vector = [127.0, 7.0] + Binary.from_vector(list_vector, BinaryVectorDtype.INT8) + + if __name__ == "__main__": unittest.main() diff --git a/test/test_unified_format.py b/test/test_unified_format.py index 1b3a134237..f1cfd0139b 100644 --- a/test/test_unified_format.py +++ b/test/test_unified_format.py @@ -15,46 +15,51 @@ import os import sys +from pathlib import Path from typing import Any sys.path[0:0] = [""] -from test import unittest +from test import UnitTest, unittest from test.unified_format import MatchEvaluatorUtil, generate_test_classes from bson import ObjectId -_TEST_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), "unified-test-format") +_IS_SYNC = True + +# Location of JSON test specifications. 
+if _IS_SYNC: + TEST_PATH = os.path.join(Path(__file__).resolve().parent, "unified-test-format") +else: + TEST_PATH = os.path.join(Path(__file__).resolve().parent.parent, "unified-test-format") globals().update( generate_test_classes( - os.path.join(_TEST_PATH, "valid-pass"), + os.path.join(TEST_PATH, "valid-pass"), module=__name__, class_name_prefix="UnifiedTestFormat", expected_failures=[ "Client side error in command starting transaction", # PYTHON-1894 ], - RUN_ON_SERVERLESS=False, ) ) globals().update( generate_test_classes( - os.path.join(_TEST_PATH, "valid-fail"), + os.path.join(TEST_PATH, "valid-fail"), module=__name__, class_name_prefix="UnifiedTestFormat", bypass_test_generation_errors=True, expected_failures=[ ".*", # All tests expected to fail ], - RUN_ON_SERVERLESS=False, ) ) -class TestMatchEvaluatorUtil(unittest.TestCase): +class TestMatchEvaluatorUtil(UnitTest): def setUp(self): self.match_evaluator = MatchEvaluatorUtil(self) diff --git a/test/test_uri_parser.py b/test/test_uri_parser.py index f95717e95f..502faf82b0 100644 --- a/test/test_uri_parser.py +++ b/test/test_uri_parser.py @@ -24,12 +24,13 @@ sys.path[0:0] = [""] from test import unittest +from unittest.mock import patch from bson.binary import JAVA_LEGACY from pymongo import ReadPreference from pymongo.errors import ConfigurationError, InvalidURI -from pymongo.uri_parser import ( - parse_uri, +from pymongo.synchronous.uri_parser import parse_uri +from pymongo.uri_parser_shared import ( parse_userinfo, split_hosts, split_options, @@ -134,16 +135,16 @@ def test_split_options(self): self.assertEqual({"connecttimeoutms": 0.3}, split_options("connectTimeoutMS=300")) self.assertEqual({"connecttimeoutms": 0.0001}, split_options("connectTimeoutMS=0.1")) self.assertTrue(split_options("connectTimeoutMS=300")) - self.assertTrue(isinstance(split_options("w=5")["w"], int)) - self.assertTrue(isinstance(split_options("w=5.5")["w"], str)) + self.assertIsInstance(split_options("w=5")["w"], int) + self.assertIsInstance(split_options("w=5.5")["w"], str) self.assertTrue(split_options("w=foo")) self.assertTrue(split_options("w=majority")) self.assertTrue(split_options("wtimeoutms=500")) self.assertEqual({"fsync": True}, split_options("fsync=true")) self.assertEqual({"fsync": False}, split_options("fsync=false")) - self.assertEqual({"authmechanism": "GSSAPI"}, split_options("authMechanism=GSSAPI")) + self.assertEqual({"authMechanism": "GSSAPI"}, split_options("authMechanism=GSSAPI")) self.assertEqual( - {"authmechanism": "SCRAM-SHA-1"}, split_options("authMechanism=SCRAM-SHA-1") + {"authMechanism": "SCRAM-SHA-1"}, split_options("authMechanism=SCRAM-SHA-1") ) self.assertEqual({"authsource": "foobar"}, split_options("authSource=foobar")) self.assertEqual({"maxpoolsize": 50}, split_options("maxpoolsize=50")) @@ -289,12 +290,12 @@ def test_parse_uri(self): self.assertEqual(res, parse_uri('mongodb://localhost/test.name/with "delimiters')) res = copy.deepcopy(orig) - res["options"] = {"readpreference": ReadPreference.SECONDARY.mongos_mode} + res["options"] = {"readPreference": ReadPreference.SECONDARY.mongos_mode} self.assertEqual(res, parse_uri("mongodb://localhost/?readPreference=secondary")) # Various authentication tests res = copy.deepcopy(orig) - res["options"] = {"authmechanism": "SCRAM-SHA-256"} + res["options"] = {"authMechanism": "SCRAM-SHA-256"} res["username"] = "user" res["password"] = "password" self.assertEqual( @@ -302,7 +303,7 @@ def test_parse_uri(self): ) res = copy.deepcopy(orig) - res["options"] = {"authmechanism": 
"SCRAM-SHA-256", "authsource": "bar"} + res["options"] = {"authMechanism": "SCRAM-SHA-256", "authSource": "bar"} res["username"] = "user" res["password"] = "password" res["database"] = "foo" @@ -314,7 +315,7 @@ def test_parse_uri(self): ) res = copy.deepcopy(orig) - res["options"] = {"authmechanism": "SCRAM-SHA-256"} + res["options"] = {"authMechanism": "SCRAM-SHA-256"} res["username"] = "user" res["password"] = "" self.assertEqual(res, parse_uri("mongodb://user:@localhost/?authMechanism=SCRAM-SHA-256")) @@ -326,7 +327,7 @@ def test_parse_uri(self): self.assertEqual(res, parse_uri("mongodb://user%40domain.com:password@localhost/foo")) res = copy.deepcopy(orig) - res["options"] = {"authmechanism": "GSSAPI"} + res["options"] = {"authMechanism": "GSSAPI"} res["username"] = "user@domain.com" res["password"] = "password" res["database"] = "foo" @@ -336,7 +337,7 @@ def test_parse_uri(self): ) res = copy.deepcopy(orig) - res["options"] = {"authmechanism": "GSSAPI"} + res["options"] = {"authMechanism": "GSSAPI"} res["username"] = "user@domain.com" res["password"] = "" res["database"] = "foo" @@ -346,8 +347,8 @@ def test_parse_uri(self): res = copy.deepcopy(orig) res["options"] = { - "readpreference": ReadPreference.SECONDARY.mongos_mode, - "readpreferencetags": [ + "readPreference": ReadPreference.SECONDARY.mongos_mode, + "readPreferenceTags": [ {"dc": "west", "use": "website"}, {"dc": "east", "use": "website"}, ], @@ -367,8 +368,8 @@ def test_parse_uri(self): res = copy.deepcopy(orig) res["options"] = { - "readpreference": ReadPreference.SECONDARY.mongos_mode, - "readpreferencetags": [ + "readPreference": ReadPreference.SECONDARY.mongos_mode, + "readPreferenceTags": [ {"dc": "west", "use": "website"}, {"dc": "east", "use": "website"}, {}, @@ -461,6 +462,7 @@ def test_tlsinsecure_simple(self): "tlsInsecure": True, "tlsDisableOCSPEndpointCheck": True, } + print(parse_uri(uri)["options"]) self.assertEqual(res, parse_uri(uri)["options"]) def test_normalize_options(self): @@ -478,8 +480,8 @@ def test_unquote_during_parsing(self): ) res = parse_uri(uri) options: dict[str, Any] = { - "authmechanism": "MONGODB-AWS", - "authmechanismproperties": {"AWS_SESSION_TOKEN": unquoted_val}, + "authMechanism": "MONGODB-AWS", + "authMechanismProperties": {"AWS_SESSION_TOKEN": unquoted_val}, } self.assertEqual(options, res["options"]) @@ -490,8 +492,8 @@ def test_unquote_during_parsing(self): ) res = parse_uri(uri) options = { - "readpreference": ReadPreference.SECONDARY.mongos_mode, - "readpreferencetags": [ + "readPreference": ReadPreference.SECONDARY.mongos_mode, + "readPreferenceTags": [ {"dc": "west", unquoted_val: unquoted_val}, {"dc": "east", "use": unquoted_val}, ], @@ -518,7 +520,7 @@ def test_handle_colon(self): ) res = parse_uri(uri) options = { - "authmechanism": "MONGODB-AWS", + "authMechanism": "MONGODB-AWS", "authMechanismProperties": {"AWS_SESSION_TOKEN": token}, } self.assertEqual(options, res["options"]) @@ -553,6 +555,10 @@ def test_port_with_whitespace(self): with self.assertRaisesRegex(ValueError, r"Port contains whitespace character: '\\n'"): parse_uri("mongodb://localhost:27\n017") + def test_parse_uri_options_type(self): + opts = parse_uri("mongodb://localhost:27017")["options"] + self.assertIsInstance(opts, dict) + if __name__ == "__main__": unittest.main() diff --git a/test/test_uri_spec.py b/test/test_uri_spec.py index 29cde7e078..3d8f7b2b75 100644 --- a/test/test_uri_spec.py +++ b/test/test_uri_spec.py @@ -25,11 +25,11 @@ sys.path[0:0] = [""] from test import unittest -from test.helpers 
import clear_warning_registry +from test.helpers_shared import clear_warning_registry -from pymongo.common import INTERNAL_URI_OPTION_NAME_MAP, validate +from pymongo.common import INTERNAL_URI_OPTION_NAME_MAP, _CaseInsensitiveDictionary, validate from pymongo.compression_support import _have_snappy -from pymongo.uri_parser import parse_uri +from pymongo.synchronous.uri_parser import parse_uri CONN_STRING_TEST_PATH = os.path.join( os.path.dirname(os.path.realpath(__file__)), os.path.join("connection_string", "test") @@ -169,7 +169,8 @@ def run_scenario(self): # Compare URI options. err_msg = "For option %s expected %s but got %s" if test["options"]: - opts = options["options"] + opts = _CaseInsensitiveDictionary() + opts.update(options["options"]) for opt in test["options"]: lopt = opt.lower() optname = INTERNAL_URI_OPTION_NAME_MAP.get(lopt, lopt) diff --git a/test/test_versioned_api.py b/test/test_versioned_api.py index 7a25a507dc..19b125770f 100644 --- a/test/test_versioned_api.py +++ b/test/test_versioned_api.py @@ -13,28 +13,18 @@ # limitations under the License. from __future__ import annotations -import os import sys +from test import UnitTest sys.path[0:0] = [""] -from test import IntegrationTest, client_context, unittest -from test.unified_format import generate_test_classes -from test.utils import OvertCommandListener +from test import unittest +from pymongo.mongo_client import MongoClient from pymongo.server_api import ServerApi, ServerApiVersion -from pymongo.synchronous.mongo_client import MongoClient -TEST_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), "versioned-api") - -# Generate unified tests. -globals().update(generate_test_classes(TEST_PATH, module=__name__)) - - -class TestServerApi(IntegrationTest): - RUN_ON_LOAD_BALANCER = True - RUN_ON_SERVERLESS = True +class TestServerApi(UnitTest): def test_server_api_defaults(self): api = ServerApi(ServerApiVersion.V1) self.assertEqual(api.version, "1") @@ -74,35 +64,6 @@ def assertServerApiInAllCommands(self, events): for event in events: self.assertServerApi(event) - @client_context.require_version_min(4, 7) - def test_command_options(self): - listener = OvertCommandListener() - client = self.rs_or_single_client(server_api=ServerApi("1"), event_listeners=[listener]) - self.addCleanup(client.close) - coll = client.test.test - coll.insert_many([{} for _ in range(100)]) - self.addCleanup(coll.delete_many, {}) - list(coll.find(batch_size=25)) - client.admin.command("ping") - self.assertServerApiInAllCommands(listener.started_events) - - @client_context.require_version_min(4, 7) - @client_context.require_transactions - def test_command_options_txn(self): - listener = OvertCommandListener() - client = self.rs_or_single_client(server_api=ServerApi("1"), event_listeners=[listener]) - self.addCleanup(client.close) - coll = client.test.test - coll.insert_many([{} for _ in range(100)]) - self.addCleanup(coll.delete_many, {}) - - listener.reset() - with client.start_session() as s, s.start_transaction(): - coll.insert_many([{} for _ in range(100)], session=s) - list(coll.find(batch_size=25, session=s)) - client.test.command("find", "test", session=s) - self.assertServerApiInAllCommands(listener.started_events) - if __name__ == "__main__": unittest.main() diff --git a/test/test_versioned_api_integration.py b/test/test_versioned_api_integration.py new file mode 100644 index 0000000000..066a1935ca --- /dev/null +++ b/test/test_versioned_api_integration.py @@ -0,0 +1,81 @@ +# Copyright 2020-present MongoDB, Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import annotations + +import os +import sys +from pathlib import Path +from test.unified_format import generate_test_classes + +sys.path[0:0] = [""] + +from test import IntegrationTest, client_context, unittest +from test.utils_shared import OvertCommandListener + +from pymongo.server_api import ServerApi + +_IS_SYNC = True + +# Location of JSON test specifications. +if _IS_SYNC: + TEST_PATH = os.path.join(Path(__file__).resolve().parent, "versioned-api") +else: + TEST_PATH = os.path.join(Path(__file__).resolve().parent.parent, "versioned-api") + + +# Generate unified tests. +globals().update(generate_test_classes(TEST_PATH, module=__name__)) + + +class TestServerApiIntegration(IntegrationTest): + RUN_ON_LOAD_BALANCER = True + + def assertServerApi(self, event): + self.assertIn("apiVersion", event.command) + self.assertEqual(event.command["apiVersion"], "1") + + def assertServerApiInAllCommands(self, events): + for event in events: + self.assertServerApi(event) + + @client_context.require_version_min(4, 7) + def test_command_options(self): + listener = OvertCommandListener() + client = self.rs_or_single_client(server_api=ServerApi("1"), event_listeners=[listener]) + coll = client.test.test + coll.insert_many([{} for _ in range(100)]) + self.addCleanup(coll.delete_many, {}) + coll.find(batch_size=25).to_list() + client.admin.command("ping") + self.assertServerApiInAllCommands(listener.started_events) + + @client_context.require_version_min(4, 7) + @client_context.require_transactions + def test_command_options_txn(self): + listener = OvertCommandListener() + client = self.rs_or_single_client(server_api=ServerApi("1"), event_listeners=[listener]) + coll = client.test.test + coll.insert_many([{} for _ in range(100)]) + self.addCleanup(coll.delete_many, {}) + + listener.reset() + with client.start_session() as s, s.start_transaction(): + coll.insert_many([{} for _ in range(100)], session=s) + coll.find(batch_size=25, session=s).to_list() + client.test.command("find", "test", session=s) + self.assertServerApiInAllCommands(listener.started_events) + + +if __name__ == "__main__": + unittest.main() diff --git a/test/test_write_concern.py b/test/test_write_concern.py index e22c7e7a8c..02a7cb6e5c 100644 --- a/test/test_write_concern.py +++ b/test/test_write_concern.py @@ -67,6 +67,19 @@ def test_equality_incompatible_type(self): _fake_type = collections.namedtuple("NotAWriteConcern", ["document"]) # type: ignore self.assertNotEqual(WriteConcern(j=True), _fake_type({"j": True})) + def assertRepr(self, obj): + new_obj = eval(repr(obj)) + self.assertEqual(type(new_obj), type(obj)) + self.assertEqual(repr(new_obj), repr(obj)) + + def test_repr(self): + concern = WriteConcern(j=True, wtimeout=3000, w="majority", fsync=False) + self.assertRepr(concern) + self.assertEqual( + repr(concern), + "WriteConcern(wtimeout=3000, j=True, fsync=False, w='majority')", + ) + if __name__ == "__main__": unittest.main() diff --git 
a/test/transactions-convenient-api/unified/commit-retry.json b/test/transactions-convenient-api/unified/commit-retry.json index cc80201167..928f0167e4 100644 --- a/test/transactions-convenient-api/unified/commit-retry.json +++ b/test/transactions-convenient-api/unified/commit-retry.json @@ -422,11 +422,6 @@ }, { "description": "commit is not retried after MaxTimeMSExpired error", - "runOnRequirements": [ - { - "serverless": "forbid" - } - ], "operations": [ { "name": "failPoint", diff --git a/test/transactions-convenient-api/unified/commit-writeconcernerror.json b/test/transactions-convenient-api/unified/commit-writeconcernerror.json index a455a450bf..568f7ede42 100644 --- a/test/transactions-convenient-api/unified/commit-writeconcernerror.json +++ b/test/transactions-convenient-api/unified/commit-writeconcernerror.json @@ -1,6 +1,6 @@ { "description": "commit-writeconcernerror", - "schemaVersion": "1.4", + "schemaVersion": "1.3", "runOnRequirements": [ { "minServerVersion": "4.0", @@ -56,7 +56,7 @@ ], "tests": [ { - "description": "commitTransaction is retried after WriteConcernFailed timeout error", + "description": "commitTransaction is retried after WriteConcernTimeout timeout error", "operations": [ { "name": "failPoint", @@ -74,7 +74,6 @@ ], "writeConcernError": { "code": 64, - "codeName": "WriteConcernFailed", "errmsg": "waiting for replication timed out", "errInfo": { "wtimeout": true @@ -236,7 +235,7 @@ ] }, { - "description": "commitTransaction is retried after WriteConcernFailed non-timeout error", + "description": "commitTransaction is retried after WriteConcernTimeout non-timeout error", "operations": [ { "name": "failPoint", @@ -254,7 +253,6 @@ ], "writeConcernError": { "code": 64, - "codeName": "WriteConcernFailed", "errmsg": "multiple errors reported" } } @@ -414,11 +412,6 @@ }, { "description": "commitTransaction is not retried after UnknownReplWriteConcern error", - "runOnRequirements": [ - { - "serverless": "forbid" - } - ], "operations": [ { "name": "failPoint", @@ -551,11 +544,6 @@ }, { "description": "commitTransaction is not retried after UnsatisfiableWriteConcern error", - "runOnRequirements": [ - { - "serverless": "forbid" - } - ], "operations": [ { "name": "failPoint", @@ -688,11 +676,6 @@ }, { "description": "commitTransaction is not retried after MaxTimeMSExpired error", - "runOnRequirements": [ - { - "serverless": "forbid" - } - ], "operations": [ { "name": "failPoint", diff --git a/test/transactions/unified/error-labels.json b/test/transactions/unified/error-labels.json index be8df10ed3..74ed750b07 100644 --- a/test/transactions/unified/error-labels.json +++ b/test/transactions/unified/error-labels.json @@ -1176,7 +1176,7 @@ ] }, { - "description": "add UnknownTransactionCommitResult label to writeConcernError WriteConcernFailed", + "description": "add UnknownTransactionCommitResult label to writeConcernError WriteConcernTimeout", "operations": [ { "object": "testRunner", @@ -1338,7 +1338,7 @@ ] }, { - "description": "add UnknownTransactionCommitResult label to writeConcernError WriteConcernFailed with wtimeout", + "description": "add UnknownTransactionCommitResult label to writeConcernError WriteConcernTimeout with wtimeout", "operations": [ { "object": "testRunner", @@ -1356,7 +1356,6 @@ ], "writeConcernError": { "code": 64, - "codeName": "WriteConcernFailed", "errmsg": "waiting for replication timed out", "errInfo": { "wtimeout": true diff --git a/test/transactions/unified/findOneAndReplace.json b/test/transactions/unified/findOneAndReplace.json index 
d9248244b3..f0742f0c60 100644 --- a/test/transactions/unified/findOneAndReplace.json +++ b/test/transactions/unified/findOneAndReplace.json @@ -127,7 +127,9 @@ "update": { "x": 1 }, - "new": false, + "new": { + "$$unsetOrMatches": false + }, "lsid": { "$$sessionLsid": "session0" }, @@ -299,7 +301,9 @@ "update": { "x": 1 }, - "new": false, + "new": { + "$$unsetOrMatches": false + }, "lsid": { "$$sessionLsid": "session0" }, diff --git a/test/transactions/unified/findOneAndUpdate.json b/test/transactions/unified/findOneAndUpdate.json index 34a40bb570..f5308efef3 100644 --- a/test/transactions/unified/findOneAndUpdate.json +++ b/test/transactions/unified/findOneAndUpdate.json @@ -189,7 +189,9 @@ "x": 1 } }, - "new": false, + "new": { + "$$unsetOrMatches": false + }, "lsid": { "$$sessionLsid": "session0" }, @@ -281,7 +283,9 @@ "x": 1 } }, - "new": false, + "new": { + "$$unsetOrMatches": false + }, "lsid": { "$$sessionLsid": "session0" }, @@ -340,7 +344,9 @@ "x": 1 } }, - "new": false, + "new": { + "$$unsetOrMatches": false + }, "lsid": { "$$sessionLsid": "session0" }, @@ -485,7 +491,9 @@ "x": 1 } }, - "new": false, + "new": { + "$$unsetOrMatches": false + }, "lsid": { "$$sessionLsid": "session0" }, diff --git a/test/transactions/unified/mongos-recovery-token.json b/test/transactions/unified/mongos-recovery-token.json index 00909c4218..bb88aa16bd 100644 --- a/test/transactions/unified/mongos-recovery-token.json +++ b/test/transactions/unified/mongos-recovery-token.json @@ -232,7 +232,8 @@ "id": "client1", "useMultipleMongoses": true, "uriOptions": { - "heartbeatFrequencyMS": 30000 + "heartbeatFrequencyMS": 30000, + "appName": "transactionsClient" }, "observeEvents": [ "commandStartedEvent" @@ -299,7 +300,8 @@ "isMaster", "hello" ], - "closeConnection": true + "closeConnection": true, + "appName": "transactionsClient" } } } diff --git a/test/transactions/unified/pin-mongos.json b/test/transactions/unified/pin-mongos.json index 5f2ecca5c1..c96f3f341f 100644 --- a/test/transactions/unified/pin-mongos.json +++ b/test/transactions/unified/pin-mongos.json @@ -1249,7 +1249,8 @@ "id": "client1", "useMultipleMongoses": true, "uriOptions": { - "heartbeatFrequencyMS": 30000 + "heartbeatFrequencyMS": 30000, + "appName": "transactionsClient" }, "observeEvents": [ "commandStartedEvent" @@ -1316,7 +1317,8 @@ "isMaster", "hello" ], - "closeConnection": true + "closeConnection": true, + "appName": "transactionsClient" } } } diff --git a/test/transactions/unified/retryable-commit.json b/test/transactions/unified/retryable-commit.json index 7d7e52495d..b794c1c55c 100644 --- a/test/transactions/unified/retryable-commit.json +++ b/test/transactions/unified/retryable-commit.json @@ -89,11 +89,6 @@ "tests": [ { "description": "commitTransaction fails after Interrupted", - "runOnRequirements": [ - { - "serverless": "forbid" - } - ], "operations": [ { "object": "testRunner", diff --git a/test/transactions/unified/write-concern.json b/test/transactions/unified/write-concern.json index 7acdd54066..29d1977a82 100644 --- a/test/transactions/unified/write-concern.json +++ b/test/transactions/unified/write-concern.json @@ -1417,7 +1417,9 @@ "update": { "x": 1 }, - "new": false, + "new": { + "$$unsetOrMatches": false + }, "lsid": { "$$sessionLsid": "session0" }, @@ -1522,7 +1524,9 @@ "x": 1 } }, - "new": false, + "new": { + "$$unsetOrMatches": false + }, "lsid": { "$$sessionLsid": "session0" }, diff --git a/test/unified-test-format/invalid/entity-client-storeEventsAsEntities-minItems.json 
b/test/unified-test-format/invalid/entity-client-storeEventsAsEntities-minItems.json deleted file mode 100644 index d94863ed11..0000000000 --- a/test/unified-test-format/invalid/entity-client-storeEventsAsEntities-minItems.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "description": "entity-client-storeEventsAsEntities-minItems", - "schemaVersion": "1.2", - "createEntities": [ - { - "client": { - "id": "client0", - "storeEventsAsEntities": [] - } - } - ], - "tests": [ - { - "description": "foo", - "operations": [] - } - ] -} diff --git a/test/unified-test-format/invalid/entity-client-storeEventsAsEntities-type.json b/test/unified-test-format/invalid/entity-client-storeEventsAsEntities-type.json deleted file mode 100644 index 79f6b85ed2..0000000000 --- a/test/unified-test-format/invalid/entity-client-storeEventsAsEntities-type.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "description": "entity-client-storeEventsAsEntities-type", - "schemaVersion": "1.2", - "createEntities": [ - { - "client": { - "id": "client0", - "storeEventsAsEntities": 0 - } - } - ], - "tests": [ - { - "description": "foo", - "operations": [] - } - ] -} diff --git a/test/unified-test-format/invalid/runOnRequirement-csfle-minLibmongocryptVersion-pattern.json b/test/unified-test-format/invalid/runOnRequirement-csfle-minLibmongocryptVersion-pattern.json new file mode 100644 index 0000000000..1db023bf68 --- /dev/null +++ b/test/unified-test-format/invalid/runOnRequirement-csfle-minLibmongocryptVersion-pattern.json @@ -0,0 +1,17 @@ +{ + "description": "runOnRequirement-csfle-minLibmongocryptVersion-pattern", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "csfle": { + "minLibmongocryptVersion": "1.2.3.4" + } + } + ], + "tests": [ + { + "description": "foo", + "operations": [] + } + ] +} diff --git a/test/unified-test-format/invalid/runOnRequirement-csfle-minLibmongocryptVersion-type.json b/test/unified-test-format/invalid/runOnRequirement-csfle-minLibmongocryptVersion-type.json new file mode 100644 index 0000000000..8de7b293f1 --- /dev/null +++ b/test/unified-test-format/invalid/runOnRequirement-csfle-minLibmongocryptVersion-type.json @@ -0,0 +1,17 @@ +{ + "description": "runOnRequirement-csfle-minLibmongocryptVersion-type", + "schemaVersion": "1.25", + "runOnRequirements": [ + { + "csfle": { + "minLibmongocryptVersion": 0 + } + } + ], + "tests": [ + { + "description": "foo", + "operations": [] + } + ] +} diff --git a/test/unified-test-format/invalid/storeEventsAsEntity-additionalProperties.json b/test/unified-test-format/invalid/storeEventsAsEntity-additionalProperties.json deleted file mode 100644 index 5357da8d8d..0000000000 --- a/test/unified-test-format/invalid/storeEventsAsEntity-additionalProperties.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "description": "storeEventsAsEntity-additionalProperties", - "schemaVersion": "1.2", - "createEntities": [ - { - "client": { - "id": "client0", - "storeEventsAsEntities": [ - { - "id": "client0_events", - "events": [ - "CommandStartedEvent" - ], - "foo": 0 - } - ] - } - } - ], - "tests": [ - { - "description": "foo", - "operations": [] - } - ] -} diff --git a/test/unified-test-format/invalid/storeEventsAsEntity-events-enum.json b/test/unified-test-format/invalid/storeEventsAsEntity-events-enum.json deleted file mode 100644 index ee99a55381..0000000000 --- a/test/unified-test-format/invalid/storeEventsAsEntity-events-enum.json +++ /dev/null @@ -1,25 +0,0 @@ -{ - "description": "storeEventsAsEntity-events-enum", - "schemaVersion": "1.2", - "createEntities": [ - { - "client": { - "id": 
"client0", - "storeEventsAsEntities": [ - { - "id": "client0_events", - "events": [ - "foo" - ] - } - ] - } - } - ], - "tests": [ - { - "description": "foo", - "operations": [] - } - ] -} diff --git a/test/unified-test-format/invalid/storeEventsAsEntity-events-minItems.json b/test/unified-test-format/invalid/storeEventsAsEntity-events-minItems.json deleted file mode 100644 index ddab042b1b..0000000000 --- a/test/unified-test-format/invalid/storeEventsAsEntity-events-minItems.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "description": "storeEventsAsEntity-events-minItems", - "schemaVersion": "1.2", - "createEntities": [ - { - "client": { - "id": "client0", - "storeEventsAsEntities": [ - { - "id": "client0_events", - "events": [] - } - ] - } - } - ], - "tests": [ - { - "description": "foo", - "operations": [] - } - ] -} diff --git a/test/unified-test-format/invalid/storeEventsAsEntity-events-required.json b/test/unified-test-format/invalid/storeEventsAsEntity-events-required.json deleted file mode 100644 index 90b45918ce..0000000000 --- a/test/unified-test-format/invalid/storeEventsAsEntity-events-required.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "description": "storeEventsAsEntity-events-required", - "schemaVersion": "1.2", - "createEntities": [ - { - "client": { - "id": "client0", - "storeEventsAsEntities": [ - { - "id": "client0_events" - } - ] - } - } - ], - "tests": [ - { - "description": "foo", - "operations": [] - } - ] -} diff --git a/test/unified-test-format/invalid/storeEventsAsEntity-events-type.json b/test/unified-test-format/invalid/storeEventsAsEntity-events-type.json deleted file mode 100644 index 1b920ebd5d..0000000000 --- a/test/unified-test-format/invalid/storeEventsAsEntity-events-type.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "description": "storeEventsAsEntity-events-type", - "schemaVersion": "1.2", - "createEntities": [ - { - "client": { - "id": "client0", - "storeEventsAsEntities": [ - { - "id": "client0_events", - "events": 0 - } - ] - } - } - ], - "tests": [ - { - "description": "foo", - "operations": [] - } - ] -} diff --git a/test/unified-test-format/invalid/storeEventsAsEntity-id-required.json b/test/unified-test-format/invalid/storeEventsAsEntity-id-required.json deleted file mode 100644 index 71387c5315..0000000000 --- a/test/unified-test-format/invalid/storeEventsAsEntity-id-required.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "description": "storeEventsAsEntity-id-required", - "schemaVersion": "1.2", - "createEntities": [ - { - "client": { - "id": "client0", - "storeEventsAsEntities": [ - { - "events": [ - "CommandStartedEvent" - ] - } - ] - } - } - ], - "tests": [ - { - "description": "foo", - "operations": [] - } - ] -} diff --git a/test/unified-test-format/invalid/storeEventsAsEntity-id-type.json b/test/unified-test-format/invalid/storeEventsAsEntity-id-type.json deleted file mode 100644 index 4f52dc2533..0000000000 --- a/test/unified-test-format/invalid/storeEventsAsEntity-id-type.json +++ /dev/null @@ -1,25 +0,0 @@ -{ - "description": "storeEventsAsEntity-id-type", - "schemaVersion": "1.2", - "createEntities": [ - { - "client": { - "id": "client0", - "storeEventsAsEntities": [ - { - "id": 0, - "events": [ - "CommandStartedEvent" - ] - } - ] - } - } - ], - "tests": [ - { - "description": "foo", - "operations": [] - } - ] -} diff --git a/test/unified-test-format/valid-fail/entity-client-storeEventsAsEntities-conflict_with_client_id.json b/test/unified-test-format/valid-fail/entity-client-storeEventsAsEntities-conflict_with_client_id.json deleted file mode 100644 
index 8c0c4d2041..0000000000 --- a/test/unified-test-format/valid-fail/entity-client-storeEventsAsEntities-conflict_with_client_id.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "description": "entity-client-storeEventsAsEntities-conflict_with_client_id", - "schemaVersion": "1.2", - "createEntities": [ - { - "client": { - "id": "client0", - "storeEventsAsEntities": [ - { - "id": "client0", - "events": [ - "PoolCreatedEvent", - "PoolReadyEvent", - "PoolClearedEvent", - "PoolClosedEvent" - ] - } - ] - } - } - ], - "tests": [ - { - "description": "foo", - "operations": [] - } - ] -} diff --git a/test/unified-test-format/valid-fail/entity-client-storeEventsAsEntities-conflict_within_different_array.json b/test/unified-test-format/valid-fail/entity-client-storeEventsAsEntities-conflict_within_different_array.json deleted file mode 100644 index 77bc4abf2e..0000000000 --- a/test/unified-test-format/valid-fail/entity-client-storeEventsAsEntities-conflict_within_different_array.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "description": "entity-client-storeEventsAsEntities-conflict_within_different_array", - "schemaVersion": "1.2", - "createEntities": [ - { - "client": { - "id": "client0", - "storeEventsAsEntities": [ - { - "id": "events", - "events": [ - "PoolCreatedEvent", - "PoolReadyEvent", - "PoolClearedEvent", - "PoolClosedEvent" - ] - } - ] - } - }, - { - "client": { - "id": "client1", - "storeEventsAsEntities": [ - { - "id": "events", - "events": [ - "CommandStartedEvent", - "CommandSucceededEvent", - "CommandFailedEvent" - ] - } - ] - } - } - ], - "tests": [ - { - "description": "foo", - "operations": [] - } - ] -} diff --git a/test/unified-test-format/valid-fail/entity-client-storeEventsAsEntities-conflict_within_same_array.json b/test/unified-test-format/valid-fail/entity-client-storeEventsAsEntities-conflict_within_same_array.json deleted file mode 100644 index e1a9499883..0000000000 --- a/test/unified-test-format/valid-fail/entity-client-storeEventsAsEntities-conflict_within_same_array.json +++ /dev/null @@ -1,36 +0,0 @@ -{ - "description": "entity-client-storeEventsAsEntities-conflict_within_same_array", - "schemaVersion": "1.2", - "createEntities": [ - { - "client": { - "id": "client0", - "storeEventsAsEntities": [ - { - "id": "events", - "events": [ - "PoolCreatedEvent", - "PoolReadyEvent", - "PoolClearedEvent", - "PoolClosedEvent" - ] - }, - { - "id": "events", - "events": [ - "CommandStartedEvent", - "CommandSucceededEvent", - "CommandFailedEvent" - ] - } - ] - } - } - ], - "tests": [ - { - "description": "foo", - "operations": [] - } - ] -} diff --git a/test/unified-test-format/valid-pass/entity-client-storeEventsAsEntities.json b/test/unified-test-format/valid-pass/entity-client-storeEventsAsEntities.json deleted file mode 100644 index e37e5a1acd..0000000000 --- a/test/unified-test-format/valid-pass/entity-client-storeEventsAsEntities.json +++ /dev/null @@ -1,67 +0,0 @@ -{ - "description": "entity-client-storeEventsAsEntities", - "schemaVersion": "1.2", - "createEntities": [ - { - "client": { - "id": "client0", - "storeEventsAsEntities": [ - { - "id": "client0_events", - "events": [ - "CommandStartedEvent", - "CommandSucceededEvent", - "CommandFailedEvent" - ] - } - ] - } - }, - { - "database": { - "id": "database0", - "client": "client0", - "databaseName": "test" - } - }, - { - "collection": { - "id": "collection0", - "database": "database0", - "collectionName": "coll0" - } - } - ], - "initialData": [ - { - "collectionName": "coll0", - "databaseName": "test", - "documents": [ - { - 
"_id": 1, - "x": 11 - } - ] - } - ], - "tests": [ - { - "description": "storeEventsAsEntities captures events", - "operations": [ - { - "name": "find", - "object": "collection0", - "arguments": { - "filter": {} - }, - "expectResult": [ - { - "_id": 1, - "x": 11 - } - ] - } - ] - } - ] -} diff --git a/test/unified-test-format/valid-pass/expectedError-isClientError.json b/test/unified-test-format/valid-pass/expectedError-isClientError.json new file mode 100644 index 0000000000..9c6beda588 --- /dev/null +++ b/test/unified-test-format/valid-pass/expectedError-isClientError.json @@ -0,0 +1,74 @@ +{ + "description": "expectedError-isClientError", + "schemaVersion": "1.3", + "runOnRequirements": [ + { + "minServerVersion": "4.0", + "topologies": [ + "single", + "replicaset" + ] + }, + { + "minServerVersion": "4.1.7", + "topologies": [ + "sharded", + "load-balanced" + ] + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "useMultipleMongoses": false + } + }, + { + "database": { + "id": "database0", + "client": "client0", + "databaseName": "test" + } + } + ], + "tests": [ + { + "description": "isClientError considers network errors", + "operations": [ + { + "name": "failPoint", + "object": "testRunner", + "arguments": { + "client": "client0", + "failPoint": { + "configureFailPoint": "failCommand", + "mode": { + "times": 1 + }, + "data": { + "failCommands": [ + "ping" + ], + "closeConnection": true + } + } + } + }, + { + "name": "runCommand", + "object": "database0", + "arguments": { + "commandName": "ping", + "command": { + "ping": 1 + } + }, + "expectError": { + "isClientError": true + } + } + ] + } + ] +} diff --git a/test/unified-test-format/valid-pass/operation-empty_array.json b/test/unified-test-format/valid-pass/operation-empty_array.json new file mode 100644 index 0000000000..93b25c983c --- /dev/null +++ b/test/unified-test-format/valid-pass/operation-empty_array.json @@ -0,0 +1,10 @@ +{ + "description": "operation-empty_array", + "schemaVersion": "1.0", + "tests": [ + { + "description": "Empty operations array", + "operations": [] + } + ] +} diff --git a/test/unified-test-format/valid-pass/operator-lte.json b/test/unified-test-format/valid-pass/operator-lte.json index 4a13b16d15..7a6a8057ad 100644 --- a/test/unified-test-format/valid-pass/operator-lte.json +++ b/test/unified-test-format/valid-pass/operator-lte.json @@ -42,7 +42,9 @@ "arguments": { "document": { "_id": 1, - "y": 1 + "x": 2, + "y": 3, + "z": 4 } } } @@ -58,10 +60,18 @@ "documents": [ { "_id": { - "$$lte": 1 + "$$lte": 2 + }, + "x": { + "$$lte": 2.1 }, "y": { - "$$lte": 2 + "$$lte": { + "$numberLong": "3" + } + }, + "z": { + "$$lte": 4 } } ] diff --git a/test/unified-test-format/valid-pass/operator-type-number_alias.json b/test/unified-test-format/valid-pass/operator-type-number_alias.json new file mode 100644 index 0000000000..e628d0d777 --- /dev/null +++ b/test/unified-test-format/valid-pass/operator-type-number_alias.json @@ -0,0 +1,174 @@ +{ + "description": "operator-type-number_alias", + "schemaVersion": "1.0", + "createEntities": [ + { + "client": { + "id": "client0" + } + }, + { + "database": { + "id": "database0", + "client": "client0", + "databaseName": "test" + } + }, + { + "collection": { + "id": "collection0", + "database": "database0", + "collectionName": "coll0" + } + } + ], + "initialData": [ + { + "collectionName": "coll0", + "databaseName": "test", + "documents": [] + } + ], + "tests": [ + { + "description": "type number alias matches int32", + "operations": [ + { + "name": "insertOne", + 
"object": "collection0", + "arguments": { + "document": { + "_id": 1, + "x": { + "$numberInt": "2147483647" + } + } + } + }, + { + "name": "find", + "object": "collection0", + "arguments": { + "filter": { + "_id": 1 + }, + "limit": 1 + }, + "expectResult": [ + { + "_id": 1, + "x": { + "$$type": "number" + } + } + ] + } + ] + }, + { + "description": "type number alias matches int64", + "operations": [ + { + "name": "insertOne", + "object": "collection0", + "arguments": { + "document": { + "_id": 1, + "x": { + "$numberLong": "9223372036854775807" + } + } + } + }, + { + "name": "find", + "object": "collection0", + "arguments": { + "filter": { + "_id": 1 + }, + "limit": 1 + }, + "expectResult": [ + { + "_id": 1, + "x": { + "$$type": "number" + } + } + ] + } + ] + }, + { + "description": "type number alias matches double", + "operations": [ + { + "name": "insertOne", + "object": "collection0", + "arguments": { + "document": { + "_id": 1, + "x": { + "$numberDouble": "2.71828" + } + } + } + }, + { + "name": "find", + "object": "collection0", + "arguments": { + "filter": { + "_id": 1 + }, + "limit": 1 + }, + "expectResult": [ + { + "_id": 1, + "x": { + "$$type": "number" + } + } + ] + } + ] + }, + { + "description": "type number alias matches decimal128", + "operations": [ + { + "name": "insertOne", + "object": "collection0", + "arguments": { + "document": { + "_id": 1, + "x": { + "$numberDecimal": "3.14159" + } + } + } + }, + { + "name": "find", + "object": "collection0", + "arguments": { + "filter": { + "_id": 1 + }, + "limit": 1 + }, + "expectResult": [ + { + "_id": 1, + "x": { + "$$type": "number" + } + } + ] + } + ] + } + ] +} diff --git a/test/unified-test-format/valid-pass/poc-queryable-encryption.json b/test/unified-test-format/valid-pass/poc-queryable-encryption.json new file mode 100644 index 0000000000..309d1d3b4b --- /dev/null +++ b/test/unified-test-format/valid-pass/poc-queryable-encryption.json @@ -0,0 +1,193 @@ +{ + "description": "poc-queryable-encryption", + "schemaVersion": "1.23", + "runOnRequirements": [ + { + "minServerVersion": "7.0", + "csfle": true, + "topologies": [ + "replicaset", + "load-balanced", + "sharded" + ] + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "autoEncryptOpts": { + "keyVaultNamespace": "keyvault.datakeys", + "kmsProviders": { + "local": { + "key": "Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk" + } + } + } + } + }, + { + "database": { + "id": "encryptedDB", + "client": "client0", + "databaseName": "poc-queryable-encryption" + } + }, + { + "collection": { + "id": "encryptedColl", + "database": "encryptedDB", + "collectionName": "encrypted" + } + }, + { + "client": { + "id": "client1" + } + }, + { + "database": { + "id": "unencryptedDB", + "client": "client1", + "databaseName": "poc-queryable-encryption" + } + }, + { + "collection": { + "id": "unencryptedColl", + "database": "unencryptedDB", + "collectionName": "encrypted" + } + } + ], + "initialData": [ + { + "databaseName": "keyvault", + "collectionName": "datakeys", + "documents": [ + { + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { 
+ "$numberLong": "1641024000000" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1641024000000" + } + }, + "status": 1, + "masterKey": { + "provider": "local" + } + } + ] + }, + { + "databaseName": "poc-queryable-encryption", + "collectionName": "encrypted", + "documents": [], + "createOptions": { + "encryptedFields": { + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "encryptedInt", + "bsonType": "int", + "queries": { + "queryType": "equality", + "contention": { + "$numberLong": "0" + } + } + } + ] + } + } + } + ], + "tests": [ + { + "description": "insert, replace, and find with queryable encryption", + "operations": [ + { + "object": "encryptedColl", + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "encryptedInt": 11 + } + } + }, + { + "object": "encryptedColl", + "name": "replaceOne", + "arguments": { + "filter": { + "encryptedInt": 11 + }, + "replacement": { + "encryptedInt": 22 + } + } + }, + { + "object": "encryptedColl", + "name": "find", + "arguments": { + "filter": { + "encryptedInt": 22 + } + }, + "expectResult": [ + { + "_id": 1, + "encryptedInt": 22 + } + ] + }, + { + "object": "unencryptedColl", + "name": "find", + "arguments": { + "filter": {} + }, + "expectResult": [ + { + "_id": 1, + "encryptedInt": { + "$$type": "binData" + }, + "__safeContent__": [ + { + "$binary": { + "base64": "rhS16TJojgDDBtbluxBokvcotP1mQTGeYpNt8xd3MJQ=", + "subType": "00" + } + } + ] + } + ] + } + ] + } + ] +} diff --git a/test/unified_format.py b/test/unified_format.py index 372eb8abba..0c5f68edd3 100644 --- a/test/unified_format.py +++ b/test/unified_format.py @@ -27,18 +27,18 @@ import sys import time import traceback -from asyncio import iscoroutinefunction from collections import defaultdict +from inspect import iscoroutinefunction from test import ( IntegrationTest, client_context, client_knobs, unittest, ) +from test.helpers_shared import ALL_KMS_PROVIDERS, DEFAULT_KMS_TLS from test.unified_format_shared import ( KMS_TLS_OPTS, PLACEHOLDER_MAP, - SKIP_CSOT_TESTS, EventListenerUtil, MatchEvaluatorUtil, coerce_result, @@ -48,10 +48,10 @@ parse_collection_or_database_options, with_metaclass, ) -from test.utils import ( +from test.utils import flaky, get_pool +from test.utils_shared import ( camel_to_snake, camel_to_snake_args, - get_pool, parse_spec_options, prepare_spec_arguments, snake_to_camel, @@ -61,13 +61,17 @@ from test.version import Version from typing import Any, Dict, List, Mapping, Optional +import pytest + import pymongo from bson import SON, json_util from bson.codec_options import DEFAULT_CODEC_OPTIONS from bson.objectid import ObjectId -from gridfs import GridFSBucket, GridOut +from gridfs import GridFSBucket, GridOut, NoFile +from gridfs.errors import CorruptGridFile from pymongo import ASCENDING, CursorType, MongoClient, _csot -from pymongo.encryption_options import _HAVE_PYMONGOCRYPT +from pymongo.driver_info import DriverInfo +from pymongo.encryption_options import _HAVE_PYMONGOCRYPT, AutoEncryptionOpts from pymongo.errors import ( AutoReconnect, BulkWriteError, @@ -97,7 +101,6 @@ from pymongo.synchronous.command_cursor import CommandCursor from pymongo.synchronous.database import Database from pymongo.synchronous.encryption import ClientEncryption -from pymongo.synchronous.helpers import next from pymongo.topology_description import TopologyDescription from pymongo.typings import _Address from pymongo.write_concern import WriteConcern @@ -130,14 +133,6 @@ def 
is_run_on_requirement_satisfied(requirement): if req_max_server_version: max_version_satisfied = Version.from_string(req_max_server_version) >= server_version - serverless = requirement.get("serverless") - if serverless == "require": - serverless_satisfied = client_context.serverless - elif serverless == "forbid": - serverless_satisfied = not client_context.serverless - else: # unset or "allow" - serverless_satisfied = True - params_satisfied = True params = requirement.get("serverParameters") if params: @@ -160,14 +155,23 @@ def is_run_on_requirement_satisfied(requirement): csfle_satisfied = True req_csfle = requirement.get("csfle") if req_csfle is True: - min_version_satisfied = Version.from_string("4.2") <= server_version + # Don't overwrite unsatisfied minimum version requirements. + if min_version_satisfied: + min_version_satisfied = Version.from_string("4.2") <= server_version csfle_satisfied = _HAVE_PYMONGOCRYPT and min_version_satisfied + elif isinstance(req_csfle, dict) and "minLibmongocryptVersion" in req_csfle: + csfle_satisfied = False + req_version = req_csfle["minLibmongocryptVersion"] + if _HAVE_PYMONGOCRYPT: + from pymongocrypt import libmongocrypt_version + + if Version.from_string(libmongocrypt_version()) >= Version.from_string(req_version): + csfle_satisfied = True return ( topology_satisfied and min_version_satisfied and max_version_satisfied - and serverless_satisfied and params_satisfied and auth_satisfied and csfle_satisfied @@ -221,7 +225,6 @@ def __init__(self, test_class): self._listeners: Dict[str, EventListenerUtil] = {} self._session_lsids: Dict[str, Mapping[str, Any]] = {} self.test: UnifiedSpecTestMixinV1 = test_class - self._cluster_time: Mapping[str, Any] = {} def __contains__(self, item): return item in self._entities @@ -250,6 +253,10 @@ def _handle_placeholders(self, spec: dict, current: dict, path: str) -> Any: raise ValueError(f"Could not find a placeholder value for {path}") return PLACEHOLDER_MAP[path] + # Distinguish between temp and non-temp aws credentials. 
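+ # Temporary AWS credentials carry a "sessionToken", so they map to the separate "aws_temp" placeholder entries rather than the standard "aws" ones.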
+ if path.endswith("/kmsProviders/aws") and "sessionToken" in current: + path = path.replace("aws", "aws_temp") + for key in list(current): value = current[key] if isinstance(value, dict): @@ -267,6 +274,21 @@ def _create_entity(self, entity_spec, uri=None): kwargs: dict = {} observe_events = spec.get("observeEvents", []) + if "autoEncryptOpts" in spec: + auto_encrypt_opts = spec["autoEncryptOpts"].copy() + auto_encrypt_kwargs: dict = dict(kms_tls_options=DEFAULT_KMS_TLS) + kms_providers = auto_encrypt_opts.pop("kmsProviders", ALL_KMS_PROVIDERS.copy()) + key_vault_namespace = auto_encrypt_opts.pop("keyVaultNamespace") + extra_opts = auto_encrypt_opts.pop("extraOptions", {}) + for key, value in extra_opts.items(): + auto_encrypt_kwargs[camel_to_snake(key)] = value + for key, value in auto_encrypt_opts.items(): + auto_encrypt_kwargs[camel_to_snake(key)] = value + auto_encryption_opts = AutoEncryptionOpts( + kms_providers, key_vault_namespace, **auto_encrypt_kwargs + ) + kwargs["auto_encryption_opts"] = auto_encryption_opts + # The unified tests use topologyOpeningEvent, we use topologyOpenedEvent for i in range(len(observe_events)): if "topologyOpeningEvent" == observe_events[i]: @@ -284,7 +306,7 @@ def _create_entity(self, entity_spec, uri=None): self._listeners[spec["id"]] = listener kwargs["event_listeners"] = [listener] if spec.get("useMultipleMongoses"): - if client_context.load_balancer or client_context.serverless: + if client_context.load_balancer: kwargs["h"] = client_context.MULTI_MONGOS_LB_URI elif client_context.is_mongos: kwargs["h"] = client_context.mongos_seeds() @@ -303,6 +325,7 @@ def _create_entity(self, entity_spec, uri=None): if uri: kwargs["h"] = uri client = self.test.rs_or_single_client(**kwargs) + client._connect() self[spec["id"]] = client return elif entity_type == "database": @@ -377,12 +400,14 @@ def drop(self: GridFSBucket, *args: Any, **kwargs: Any) -> None: opts["key_vault_client"], DEFAULT_CODEC_OPTIONS, opts.get("kms_tls_options", kms_tls_options), + opts.get("key_expiration_ms"), ) return elif entity_type == "thread": name = spec["id"] thread = SpecRunnerThread(name) thread.start() + self.test.addCleanup(thread.join, 5) self[name] = thread return @@ -418,13 +443,11 @@ def get_lsid_for_session(self, session_name): # session has been closed. return self._session_lsids[session_name] - def advance_cluster_times(self) -> None: + def advance_cluster_times(self, cluster_time) -> None: """Manually synchronize entities when desired""" - if not self._cluster_time: - self._cluster_time = (self.test.client.admin.command("ping")).get("$clusterTime") for entity in self._entities.values(): - if isinstance(entity, ClientSession) and self._cluster_time: - entity.advance_cluster_time(self._cluster_time) + if isinstance(entity, ClientSession) and cluster_time: + entity.advance_cluster_time(cluster_time) class UnifiedSpecTestMixinV1(IntegrationTest): @@ -437,9 +460,8 @@ class UnifiedSpecTestMixinV1(IntegrationTest): a class attribute ``TEST_SPEC``. """ - SCHEMA_VERSION = Version.from_string("1.21") + SCHEMA_VERSION = Version.from_string("1.25") RUN_ON_LOAD_BALANCER = True - RUN_ON_SERVERLESS = True TEST_SPEC: Any TEST_PATH = "" # This gets filled in by generate_test_classes mongos_clients: list[MongoClient] = [] @@ -470,6 +492,13 @@ def insert_initial_data(self, initial_data): wc = WriteConcern(w="majority") else: wc = WriteConcern(w=1) + + # Remove any encryption collections associated with the collection. 
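+ # Queryable Encryption creates "enxcol_.<name>.esc" and "enxcol_.<name>.ecoc" metadata collections; drop them so state does not leak between tests.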
+ collections = db.list_collection_names() + for collection in collections: + if collection in [f"enxcol_.{coll_name}.esc", f"enxcol_.{coll_name}.ecoc"]: + db.drop_collection(collection) + if documents: if opts: db.create_collection(coll_name, **opts) @@ -502,19 +531,10 @@ def setUp(self): raise unittest.SkipTest(f"{self.__class__.__name__} runOnRequirements not satisfied") # add any special-casing for skipping tests here - if client_context.storage_engine == "mmapv1": - if "retryable-writes" in self.TEST_SPEC["description"] or "retryable_writes" in str( - self.TEST_PATH - ): - raise unittest.SkipTest("MMAPv1 does not support retryWrites=True") # Handle mongos_clients for transactions tests. self.mongos_clients = [] - if ( - client_context.supports_transactions() - and not client_context.load_balancer - and not client_context.serverless - ): + if client_context.supports_transactions() and not client_context.load_balancer: for address in client_context.mongoses: self.mongos_clients.append(self.single_client("{}:{}".format(*address))) @@ -532,32 +552,63 @@ def setUp(self): def maybe_skip_test(self, spec): # add any special-casing for skipping tests here - if client_context.storage_engine == "mmapv1": - if ( - "Dirty explicit session is discarded" in spec["description"] - or "Dirty implicit session is discarded" in spec["description"] - or "Cancel server check" in spec["description"] - ): - self.skipTest("MMAPv1 does not support retryWrites=True") - if "Client side error in command starting transaction" in spec["description"]: + class_name = self.__class__.__name__.lower() + description = spec["description"].lower() + + if "client side error in command starting transaction" in description: self.skipTest("Implement PYTHON-1894") - if "timeoutMS applied to entire download" in spec["description"]: + if "type=symbol" in description: + self.skipTest("PyMongo does not support the symbol type") + if "timeoutms applied to entire download" in description: self.skipTest("PyMongo's open_download_stream does not cap the stream's lifetime") + if any( + x in description + for x in [ + "first insertone is never committed", + "second updateone is never committed", + "third updateone is never committed", + ] + ): + self.skipTest("Implement PYTHON-4597") - class_name = self.__class__.__name__.lower() - description = spec["description"].lower() if "csot" in class_name: - if "gridfs" in class_name and sys.platform == "win32": - self.skipTest("PYTHON-3522 CSOT GridFS tests are flaky on Windows") - if client_context.storage_engine == "mmapv1": - self.skipTest( - "MMAPv1 does not support retryable writes which is required for CSOT tests" - ) + # Skip tests that are too slow to run on a given platform. 
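+ # Each entry below is a regular expression matched against the lowercased test description; the skips apply only when the "CI" environment variable is set.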
+ slow_macos = [ + "operation fails after two consecutive socket timeouts.*", + "operation succeeds after one socket timeout.*", + "Non-tailable cursor lifetime remaining timeoutMS applied to getMore if timeoutMode is unset", + ] + slow_win32 = [ + *slow_macos, + "maxTimeMS value in the command is less than timeoutMS", + "timeoutMS applies to whole operation.*", + ] + slow_pypy = [ + "timeoutMS applies to whole operation.*", + ] + if "CI" in os.environ and sys.platform == "win32" and "gridfs" in class_name: + self.skipTest("PYTHON-3522 CSOT GridFS test runs too slow on Windows") + if "CI" in os.environ and sys.platform == "win32": + for pat in slow_win32: + if re.match(pat.lower(), description): + self.skipTest("PYTHON-3522 CSOT test runs too slow on Windows") + if "CI" in os.environ and sys.platform == "darwin": + for pat in slow_macos: + if re.match(pat.lower(), description): + self.skipTest("PYTHON-3522 CSOT test runs too slow on MacOS") + if "CI" in os.environ and sys.implementation.name.lower() == "pypy": + for pat in slow_pypy: + if re.match(pat.lower(), description): + self.skipTest("PYTHON-3522 CSOT test runs too slow on PyPy") if "change" in description or "change" in class_name: self.skipTest("CSOT not implemented for watch()") if "cursors" in class_name: self.skipTest("CSOT not implemented for cursors") - if "tailable" in class_name: + if ( + "tailable" in class_name + or "tailable" in description + and "non-tailable" not in description + ): self.skipTest("CSOT not implemented for tailable cursors") if "sessions" in class_name: self.skipTest("CSOT not implemented for sessions") @@ -573,11 +624,6 @@ def maybe_skip_test(self, spec): self.skipTest("PyMongo does not support count()") if name == "listIndexNames": self.skipTest("PyMongo does not support list_index_names()") - if client_context.storage_engine == "mmapv1": - if name == "createChangeStream": - self.skipTest("MMAPv1 does not support change streams") - if name == "withTransaction" or name == "startTransaction": - self.skipTest("MMAPv1 does not support document-level locking") if not client_context.test_commands_enabled: if name == "failPoint" or name == "targetedFailPoint": self.skipTest("Test commands must be enabled to use fail points") @@ -617,7 +663,9 @@ def process_error(self, exception, spec): # Connection errors are considered client errors. 
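# NotPrimaryError is the one exception: it is a server-side error even though it subclasses ConnectionFailure, so it is asserted against below.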
if isinstance(error, ConnectionFailure): self.assertNotIsInstance(error, NotPrimaryError) - elif isinstance(error, (InvalidOperation, ConfigurationError, EncryptionError)): + elif isinstance(error, CorruptGridFile): + pass + elif isinstance(error, (InvalidOperation, ConfigurationError, EncryptionError, NoFile)): pass else: self.assertNotIsInstance(error, PyMongoError) @@ -673,7 +721,7 @@ def process_error(self, exception, spec): self.match_evaluator.match_result(expect_result, result) else: self.fail( - f"expectResult can only be specified with {BulkWriteError} or {ClientBulkWriteException} exceptions" + f"expectResult can only be specified with {BulkWriteError} or {ClientBulkWriteException} exceptions, got {exception}" ) return exception @@ -683,8 +731,6 @@ def __raise_if_unsupported(self, opname, target, *target_types): self.fail(f"Operation {opname} not supported for entity of type {type(target)}") def __entityOperation_createChangeStream(self, target, *args, **kwargs): - if client_context.storage_engine == "mmapv1": - self.skipTest("MMAPv1 does not support change streams") self.__raise_if_unsupported("createChangeStream", target, MongoClient, Database, Collection) stream = target.watch(*args, **kwargs) self.addCleanup(stream.close) @@ -708,7 +754,7 @@ def _databaseOperation_runCommand(self, target, **kwargs): return target.command(**kwargs) def _databaseOperation_runCursorCommand(self, target, **kwargs): - return list(self._databaseOperation_createCommandCursor(target, **kwargs)) + return (self._databaseOperation_createCommandCursor(target, **kwargs)).to_list() def _databaseOperation_createCommandCursor(self, target, **kwargs): self.__raise_if_unsupported("createCommandCursor", target, Database) @@ -740,6 +786,38 @@ def _databaseOperation_createCommandCursor(self, target, **kwargs): return cursor + def _collectionOperation_assertIndexExists(self, target, **kwargs): + collection = self.client[kwargs["database_name"]][kwargs["collection_name"]] + index_names = [idx["name"] for idx in collection.list_indexes()] + self.assertIn(kwargs["index_name"], index_names) + + def _collectionOperation_assertIndexNotExists(self, target, **kwargs): + collection = self.client[kwargs["database_name"]][kwargs["collection_name"]] + for index in collection.list_indexes(): + self.assertNotEqual(kwargs["indexName"], index["name"]) + + def _collectionOperation_assertCollectionExists(self, target, **kwargs): + database_name = kwargs["database_name"] + collection_name = kwargs["collection_name"] + collection_name_list = self.client.get_database(database_name).list_collection_names() + self.assertIn(collection_name, collection_name_list) + + def _databaseOperation_assertIndexExists(self, target, **kwargs): + collection = self.client[kwargs["database_name"]][kwargs["collection_name"]] + index_names = [idx["name"] for idx in collection.list_indexes()] + self.assertIn(kwargs["index_name"], index_names) + + def _databaseOperation_assertIndexNotExists(self, target, **kwargs): + collection = self.client[kwargs["database_name"]][kwargs["collection_name"]] + for index in collection.list_indexes(): + self.assertNotEqual(kwargs["indexName"], index["name"]) + + def _databaseOperation_assertCollectionExists(self, target, **kwargs): + database_name = kwargs["database_name"] + collection_name = kwargs["collection_name"] + collection_name_list = self.client.get_database(database_name).list_collection_names() + self.assertIn(collection_name, collection_name_list) + def kill_all_sessions(self): if getattr(self, "client", None) is 
None: return @@ -809,14 +887,10 @@ def _collectionOperation_listSearchIndexes(self, target, *args, **kwargs): return (target.list_search_indexes(name, **agg_kwargs)).to_list() def _sessionOperation_withTransaction(self, target, *args, **kwargs): - if client_context.storage_engine == "mmapv1": - self.skipTest("MMAPv1 does not support document-level locking") self.__raise_if_unsupported("withTransaction", target, ClientSession) return target.with_transaction(*args, **kwargs) def _sessionOperation_startTransaction(self, target, *args, **kwargs): - if client_context.storage_engine == "mmapv1": - self.skipTest("MMAPv1 does not support document-level locking") self.__raise_if_unsupported("startTransaction", target, ClientSession) return target.start_transaction(*args, **kwargs) @@ -839,6 +913,11 @@ def _cursor_close(self, target, *args, **kwargs): self.__raise_if_unsupported("close", target, NonLazyCursor, CommandCursor) return target.close() + def _clientOperation_appendMetadata(self, target, *args, **kwargs): + info_opts = kwargs["driver_info_options"] + driver_info = DriverInfo(info_opts["name"], info_opts["version"], info_opts["platform"]) + target.append_metadata(driver_info) + def _clientEncryptionOperation_createDataKey(self, target, *args, **kwargs): if "opts" in kwargs: kwargs.update(camel_to_snake_args(kwargs.pop("opts"))) @@ -976,15 +1055,11 @@ def run_entity_operation(self, spec): if ignore and isinstance(exc, (PyMongoError,)): return exc if expect_error: - if method_name == "_collectionOperation_bulkWrite": - self.skipTest("Skipping test pending PYTHON-4598") return self.process_error(exc, expect_error) raise else: - if method_name == "_collectionOperation_bulkWrite": - self.skipTest("Skipping test pending PYTHON-4598") if expect_error: - self.fail(f'Excepted error {expect_error} but "{opname}" succeeded: {result}') + self.fail(f'Expected error {expect_error} but "{opname}" succeeded: {result}') if expect_result: actual = coerce_result(opname, result) @@ -999,12 +1074,8 @@ def __set_fail_point(self, client, command_args): if not client_context.test_commands_enabled: self.skipTest("Test commands must be enabled") - cmd_on = SON([("configureFailPoint", "failCommand")]) - cmd_on.update(command_args) - client.admin.command(cmd_on) - self.addCleanup( - client.admin.command, "configureFailPoint", cmd_on["configureFailPoint"], mode="off" - ) + self.configure_fail_point(client, command_args) + self.addCleanup(self.configure_fail_point, client, command_args, off=True) def _testOperation_failPoint(self, spec): self.__set_fail_point( @@ -1025,7 +1096,7 @@ def _testOperation_targetedFailPoint(self, spec): def _testOperation_createEntities(self, spec): self.entity_map.create_entities_from_spec(spec["entities"], uri=self._uri) - self.entity_map.advance_cluster_times() + self.entity_map.advance_cluster_times(self._cluster_time) def _testOperation_assertSessionTransactionState(self, spec): session = self.entity_map[spec["session"]] @@ -1167,7 +1238,7 @@ def primary_changed() -> bool: def _testOperation_runOnThread(self, spec): """Run the 'runOnThread' operation.""" thread = self.entity_map[spec["thread"]] - thread.schedule(lambda: self.run_entity_operation(spec["operation"])) + thread.schedule(functools.partial(self.run_entity_operation, spec["operation"])) def _testOperation_waitForThread(self, spec): """Run the 'waitForThread' operation.""" @@ -1367,35 +1438,31 @@ def verify_outcome(self, spec): self.assertListEqual(sorted_expected_documents, actual_documents) def run_scenario(self, spec, 
uri=None): - if "csot" in self.id().lower() and SKIP_CSOT_TESTS: - raise unittest.SkipTest("SKIP_CSOT_TESTS is set, skipping...") - # Kill all sessions before and after each test to prevent an open # transaction (from a test failure) from blocking collection/database # operations during test set up and tear down. self.kill_all_sessions() - self.addCleanup(self.kill_all_sessions) - - if "csot" in self.id().lower(): - # Retry CSOT tests up to 2 times to deal with flakey tests. - attempts = 3 - for i in range(attempts): - try: - return self._run_scenario(spec, uri) - except AssertionError: - if i < attempts - 1: - print( - f"Retrying after attempt {i+1} of {self.id()} failed with:\n" - f"{traceback.format_exc()}", - file=sys.stderr, - ) - self.setUp() - continue - raise - return None - else: - self._run_scenario(spec, uri) - return None + + # Handle flaky tests. + flaky_tests = [ + ("PYTHON-5170", ".*test_discovery_and_monitoring.*"), + ("PYTHON-5174", ".*Driver_extends_timeout_while_streaming"), + ("PYTHON-5315", ".*TestSrvPolling.test_recover_from_initially_.*"), + ("PYTHON-4987", ".*UnknownTransactionCommitResult_labels_to_connection_errors"), + ("PYTHON-3689", ".*TestProse.test_load_balancing"), + ("PYTHON-3522", ".*csot.*"), + ] + for reason, flaky_test in flaky_tests: + if re.match(flaky_test.lower(), self.id().lower()) is not None: + func_name = self.id() + options = dict(reason=reason, reset_func=self.setUp, func_name=func_name) + if "csot" in func_name.lower(): + options["max_runs"] = 3 + options["affects_cpython_linux"] = True + decorator = flaky(**options) + decorator(self._run_scenario)(spec, uri) + return + self._run_scenario(spec, uri) def _run_scenario(self, spec, uri=None): # maybe skip test manually @@ -1415,11 +1482,12 @@ def _run_scenario(self, spec, uri=None): self._uri = uri self.entity_map = EntityMapUtil(self) self.entity_map.create_entities_from_spec(self.TEST_SPEC.get("createEntities", []), uri=uri) + self._cluster_time = None # process initialData if "initialData" in self.TEST_SPEC: self.insert_initial_data(self.TEST_SPEC["initialData"]) - self._cluster_time = (self.client.admin.command("ping")).get("$clusterTime") - self.entity_map.advance_cluster_times() + self._cluster_time = self.client._topology.max_cluster_time() + self.entity_map.advance_cluster_times(self._cluster_time) if "expectLogMessages" in spec: expect_log_messages = spec["expectLogMessages"] @@ -1504,7 +1572,14 @@ class SpecTestBase(with_metaclass(UnifiedSpecTestMeta)): # type: ignore TEST_SPEC = test_spec EXPECTED_FAILURES = expected_failures - return SpecTestBase + base = SpecTestBase + + # Add "encryption" marker if the "csfle" runOnRequirement is set. 
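+ # The marker lets encryption-dependent generated tests be selected or deselected with "pytest -m encryption".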
+ for req in test_spec.get("runOnRequirements", []): + if "csfle" in req: + base = pytest.mark.encryption(base) + + return base for dirpath, _, filenames in os.walk(test_path): dirname = os.path.split(dirpath)[-1] diff --git a/test/unified_format_shared.py b/test/unified_format_shared.py index 0c685366f4..5aa989cb24 100644 --- a/test/unified_format_shared.py +++ b/test/unified_format_shared.py @@ -25,9 +25,10 @@ import time import types from collections import abc -from test.helpers import ( +from test.helpers_shared import ( AWS_CREDS, AWS_CREDS_2, + AWS_TEMP_CREDS, AZURE_CREDS, CA_PEM, CLIENT_PEM, @@ -35,8 +36,8 @@ KMIP_CREDS, LOCAL_MASTER_KEY, ) -from test.utils import CMAPListener, camel_to_snake, parse_collection_options -from typing import Any, Union +from test.utils_shared import CMAPListener, camel_to_snake, parse_collection_options +from typing import Any, MutableMapping, Union from bson import ( RE_TYPE, @@ -91,8 +92,6 @@ from pymongo.server_description import ServerDescription from pymongo.topology_description import TopologyDescription -SKIP_CSOT_TESTS = os.getenv("SKIP_CSOT_TESTS") - JSON_OPTS = json_util.JSONOptions(tz_aware=False) IS_INTERRUPTED = False @@ -110,6 +109,7 @@ for provider_name, provider_data in [ ("local", {"key": LOCAL_MASTER_KEY}), ("local:name1", {"key": LOCAL_MASTER_KEY}), + ("aws_temp", AWS_TEMP_CREDS), ("aws", AWS_CREDS), ("aws:name1", AWS_CREDS), ("aws:name2", AWS_CREDS_2), @@ -124,6 +124,9 @@ placeholder = f"/clientEncryptionOpts/kmsProviders/{provider_name}/{key}" PLACEHOLDER_MAP[placeholder] = value + placeholder = f"/autoEncryptOpts/kmsProviders/{provider_name}/{key}" + PLACEHOLDER_MAP[placeholder] = value + OIDC_ENV = os.environ.get("OIDC_ENV", "test") if OIDC_ENV == "test": PLACEHOLDER_MAP["/uriOptions/authMechanismProperties"] = {"ENVIRONMENT": "test"} @@ -159,7 +162,9 @@ def __new__(cls, name, this_bases, d): return meta(name, resolved_bases, d) @classmethod - def __prepare__(cls, name, this_bases): + def __prepare__( + cls, name: str, this_bases: tuple[type, ...], /, **kwds: Any + ) -> MutableMapping[str, object]: return meta.__prepare__(name, bases) return type.__new__(metaclass, "temporary_class", (), {}) @@ -363,6 +368,7 @@ def closed(self, event: Union[ServerClosedEvent, TopologyClosedEvent]) -> None: "decimal": (Decimal128,), "maxKey": (MaxKey,), "minKey": (MinKey,), + "number": (float, int, Int64, Decimal128), } diff --git a/test/utils.py b/test/utils.py index 69154bc63b..bfc606fe83 100644 --- a/test/utils.py +++ b/test/utils.py @@ -12,418 +12,80 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-"""Utilities for testing pymongo""" +"""Utilities for testing pymongo that require synchronization.""" from __future__ import annotations import asyncio import contextlib -import copy -import functools import os import random -import re -import shutil import sys -import threading +import threading # Used in the synchronized version of this file import time -import unittest -import warnings -from asyncio import iscoroutinefunction -from collections import abc, defaultdict -from functools import partial -from test import client_context, db_pwd, db_user -from test.asynchronous import async_client_context -from typing import Any, List +import traceback +from functools import wraps +from inspect import iscoroutinefunction -from bson import json_util -from bson.objectid import ObjectId from bson.son import SON -from pymongo import AsyncMongoClient, monitoring, operations, read_preferences -from pymongo.cursor_shared import CursorType -from pymongo.errors import ConfigurationError, OperationFailure +from pymongo import MongoClient +from pymongo.errors import ConfigurationError from pymongo.hello import HelloCompat -from pymongo.helpers_shared import _SENSITIVE_COMMANDS from pymongo.lock import _create_lock -from pymongo.monitoring import ( - ConnectionCheckedInEvent, - ConnectionCheckedOutEvent, - ConnectionCheckOutFailedEvent, - ConnectionCheckOutStartedEvent, - ConnectionClosedEvent, - ConnectionCreatedEvent, - ConnectionReadyEvent, - PoolClearedEvent, - PoolClosedEvent, - PoolCreatedEvent, - PoolReadyEvent, -) from pymongo.operations import _Op -from pymongo.read_concern import ReadConcern from pymongo.read_preferences import ReadPreference from pymongo.server_selectors import any_server_selector, writable_server_selector -from pymongo.server_type import SERVER_TYPE -from pymongo.synchronous.collection import ReturnDocument -from pymongo.synchronous.mongo_client import MongoClient from pymongo.synchronous.pool import _CancellationContext, _PoolGeneration -from pymongo.uri_parser import parse_uri -from pymongo.write_concern import WriteConcern -IMPOSSIBLE_WRITE_CONCERN = WriteConcern(w=50) +_IS_SYNC = True -class BaseListener: - def __init__(self): - self.events = [] - - def reset(self): - self.events = [] - - def add_event(self, event): - self.events.append(event) - - def event_count(self, event_type): - return len(self.events_by_type(event_type)) - - def events_by_type(self, event_type): - """Return the matching events by event class. - - event_type can be a single class or a tuple of classes. 
- """ - return self.matching(lambda e: isinstance(e, event_type)) - - def matching(self, matcher): - """Return the matching events.""" - return [event for event in self.events[:] if matcher(event)] - - def wait_for_event(self, event, count): - """Wait for a number of events to be published, or fail.""" - wait_until(lambda: self.event_count(event) >= count, f"find {count} {event} event(s)") - - async def async_wait_for_event(self, event, count): - """Wait for a number of events to be published, or fail.""" - await async_wait_until( - lambda: self.event_count(event) >= count, f"find {count} {event} event(s)" - ) - - -class CMAPListener(BaseListener, monitoring.ConnectionPoolListener): - def connection_created(self, event): - assert isinstance(event, ConnectionCreatedEvent) - self.add_event(event) - - def connection_ready(self, event): - assert isinstance(event, ConnectionReadyEvent) - self.add_event(event) - - def connection_closed(self, event): - assert isinstance(event, ConnectionClosedEvent) - self.add_event(event) - - def connection_check_out_started(self, event): - assert isinstance(event, ConnectionCheckOutStartedEvent) - self.add_event(event) - - def connection_check_out_failed(self, event): - assert isinstance(event, ConnectionCheckOutFailedEvent) - self.add_event(event) - - def connection_checked_out(self, event): - assert isinstance(event, ConnectionCheckedOutEvent) - self.add_event(event) - - def connection_checked_in(self, event): - assert isinstance(event, ConnectionCheckedInEvent) - self.add_event(event) - - def pool_created(self, event): - assert isinstance(event, PoolCreatedEvent) - self.add_event(event) - - def pool_ready(self, event): - assert isinstance(event, PoolReadyEvent) - self.add_event(event) - - def pool_cleared(self, event): - assert isinstance(event, PoolClearedEvent) - self.add_event(event) - - def pool_closed(self, event): - assert isinstance(event, PoolClosedEvent) - self.add_event(event) - - -class EventListener(BaseListener, monitoring.CommandListener): - def __init__(self): - super().__init__() - self.results = defaultdict(list) - - @property - def started_events(self) -> List[monitoring.CommandStartedEvent]: - return self.results["started"] - - @property - def succeeded_events(self) -> List[monitoring.CommandSucceededEvent]: - return self.results["succeeded"] - - @property - def failed_events(self) -> List[monitoring.CommandFailedEvent]: - return self.results["failed"] - - def started(self, event: monitoring.CommandStartedEvent) -> None: - self.started_events.append(event) - self.add_event(event) - - def succeeded(self, event: monitoring.CommandSucceededEvent) -> None: - self.succeeded_events.append(event) - self.add_event(event) - - def failed(self, event: monitoring.CommandFailedEvent) -> None: - self.failed_events.append(event) - self.add_event(event) - - def started_command_names(self) -> List[str]: - """Return list of command names started.""" - return [event.command_name for event in self.started_events] - - def reset(self) -> None: - """Reset the state of this listener.""" - self.results.clear() - super().reset() - - -class TopologyEventListener(monitoring.TopologyListener): - def __init__(self): - self.results = defaultdict(list) - - def closed(self, event): - self.results["closed"].append(event) - - def description_changed(self, event): - self.results["description_changed"].append(event) - - def opened(self, event): - self.results["opened"].append(event) - - def reset(self): - """Reset the state of this listener.""" - self.results.clear() - - 
-class AllowListEventListener(EventListener): - def __init__(self, *commands): - self.commands = set(commands) - super().__init__() - - def started(self, event): - if event.command_name in self.commands: - super().started(event) - - def succeeded(self, event): - if event.command_name in self.commands: - super().succeeded(event) - - def failed(self, event): - if event.command_name in self.commands: - super().failed(event) - - -class OvertCommandListener(EventListener): - """A CommandListener that ignores sensitive commands.""" - - ignore_list_collections = False - - def started(self, event): - if event.command_name.lower() not in _SENSITIVE_COMMANDS: - super().started(event) - - def succeeded(self, event): - if event.command_name.lower() not in _SENSITIVE_COMMANDS: - super().succeeded(event) - - def failed(self, event): - if event.command_name.lower() not in _SENSITIVE_COMMANDS: - super().failed(event) - - -class _ServerEventListener: - """Listens to all events.""" - - def __init__(self): - self.results = [] - - def opened(self, event): - self.results.append(event) - - def description_changed(self, event): - self.results.append(event) - - def closed(self, event): - self.results.append(event) - - def matching(self, matcher): - """Return the matching events.""" - results = self.results[:] - return [event for event in results if matcher(event)] - - def reset(self): - self.results = [] - - -class ServerEventListener(_ServerEventListener, monitoring.ServerListener): - """Listens to Server events.""" - - -class ServerAndTopologyEventListener( # type: ignore[misc] - ServerEventListener, monitoring.TopologyListener -): - """Listens to Server and Topology events.""" - - -class HeartbeatEventListener(BaseListener, monitoring.ServerHeartbeatListener): - """Listens to only server heartbeat events.""" - - def started(self, event): - self.add_event(event) - - def succeeded(self, event): - self.add_event(event) - - def failed(self, event): - self.add_event(event) - - -class HeartbeatEventsListListener(HeartbeatEventListener): - """Listens to only server heartbeat events and publishes them to a provided list.""" - - def __init__(self, events): - super().__init__() - self.event_list = events - - def started(self, event): - self.add_event(event) - self.event_list.append("serverHeartbeatStartedEvent") - - def succeeded(self, event): - self.add_event(event) - self.event_list.append("serverHeartbeatSucceededEvent") - - def failed(self, event): - self.add_event(event) - self.event_list.append("serverHeartbeatFailedEvent") - - -class MockConnection: - def __init__(self): - self.cancel_context = _CancellationContext() - self.more_to_come = False - self.id = random.randint(0, 100) - - def close_conn(self, reason): - pass - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - pass - - -class MockPool: - def __init__(self, address, options, handshake=True, client_id=None): - self.gen = _PoolGeneration() - self._lock = _create_lock() - self.opts = options - self.operation_count = 0 - self.conns = [] - - def stale_generation(self, gen, service_id): - return self.gen.stale(gen, service_id) - - def checkout(self, handler=None): - return MockConnection() - - def checkin(self, *args, **kwargs): - pass - - def _reset(self, service_id=None): - with self._lock: - self.gen.inc(service_id) - - def ready(self): - pass - - def reset(self, service_id=None, interrupt_connections=False): - self._reset() - - def reset_without_pause(self): - self._reset() - - def close(self): - self._reset() - - 
def update_is_writable(self, is_writable): - pass - - def remove_stale_sockets(self, *args, **kwargs): - pass - - -class ScenarioDict(dict): - """Dict that returns {} for any unknown key, recursively.""" - - def __init__(self, data): - def convert(v): - if isinstance(v, abc.Mapping): - return ScenarioDict(v) - if isinstance(v, (str, bytes)): - return v - if isinstance(v, abc.Sequence): - return [convert(item) for item in v] - return v - - dict.__init__(self, [(k, convert(v)) for k, v in data.items()]) +def get_pool(client): + """Get the standalone, primary, or mongos pool.""" + topology = client._get_topology() + server = topology._select_server(writable_server_selector, _Op.TEST) + return server.pool - def __getitem__(self, item): - try: - return dict.__getitem__(self, item) - except KeyError: - # Unlike a defaultdict, don't set the key, just return a dict. - return ScenarioDict({}) +def get_pools(client): + """Get all pools.""" + return [ + server.pool + for server in (client._get_topology()).select_servers(any_server_selector, _Op.TEST) + ] -class CompareType: - """Class that compares equal to any object of the given type(s).""" - def __init__(self, types): - self.types = types +def wait_until(predicate, success_description, timeout=10): + """Wait up to 10 seconds (by default) for predicate to be true. - def __eq__(self, other): - return isinstance(other, self.types) + E.g.: + wait_until(lambda: client.primary == ('a', 1), + 'connect to the primary') -class FunctionCallRecorder: - """Utility class to wrap a callable and record its invocations.""" + If the lambda-expression isn't true after 10 seconds, we raise + AssertionError("Didn't ever connect to the primary"). - def __init__(self, function): - self._function = function - self._call_list = [] + Returns the predicate's first true value. + """ + start = time.time() + interval = min(float(timeout) / 100, 0.1) + while True: + if iscoroutinefunction(predicate): + retval = predicate() + else: + retval = predicate() + if retval: + return retval - def __call__(self, *args, **kwargs): - self._call_list.append((args, kwargs)) - return self._function(*args, **kwargs) + if time.time() - start > timeout: + raise AssertionError("Didn't ever %s" % success_description) - def reset(self): - """Wipes the call list.""" - self._call_list = [] + time.sleep(interval) - def call_list(self): - """Returns a copy of the call list.""" - return self._call_list[:] - @property - def call_count(self): - """Returns the number of times the function has been called.""" - return len(self._call_list) +def is_mongos(client): + res = client.admin.command(HelloCompat.LEGACY_CMD) + return res.get("msg", "") == "isdbgrid" def ensure_all_connected(client: MongoClient) -> None: @@ -452,227 +114,18 @@ def discover(): i += 1 return connected_host_list - try: - wait_until(lambda: target_host_list == discover(), "connected to all hosts") - except AssertionError as exc: - raise AssertionError( - f"{exc}, {connected_host_list} != {target_host_list}, {client.topology_description}" - ) - - -async def async_ensure_all_connected(client: AsyncMongoClient) -> None: - """Ensure that the client's connection pool has socket connections to all - members of a replica set. Raises ConfigurationError when called with a - non-replica set client. - - Depending on the use-case, the caller may need to clear any event listeners - that are configured on the client. 
- """ - hello: dict = await client.admin.command(HelloCompat.LEGACY_CMD) - if "setName" not in hello: - raise ConfigurationError("cluster is not a replica set") - - target_host_list = set(hello["hosts"] + hello.get("passives", [])) - connected_host_list = {hello["me"]} - - # Run hello until we have connected to each host at least once. - async def discover(): - i = 0 - while i < 100 and connected_host_list != target_host_list: - hello: dict = await client.admin.command( - HelloCompat.LEGACY_CMD, read_preference=ReadPreference.SECONDARY - ) - connected_host_list.update([hello["me"]]) - i += 1 - return connected_host_list - try: - async def predicate(): - return target_host_list == await discover() + def predicate(): + return target_host_list == discover() - await async_wait_until(predicate, "connected to all hosts") + wait_until(predicate, "connected to all hosts") except AssertionError as exc: raise AssertionError( f"{exc}, {connected_host_list} != {target_host_list}, {client.topology_description}" ) -def one(s): - """Get one element of a set""" - return next(iter(s)) - - -def oid_generated_on_process(oid): - """Makes a determination as to whether the given ObjectId was generated - by the current process, based on the 5-byte random number in the ObjectId. - """ - return ObjectId._random() == oid.binary[4:9] - - -def delay(sec): - return """function() { sleep(%f * 1000); return true; }""" % sec - - -def get_command_line(client): - command_line = client.admin.command("getCmdLineOpts") - assert command_line["ok"] == 1, "getCmdLineOpts() failed" - return command_line - - -def camel_to_snake(camel): - # Regex to convert CamelCase to snake_case. - snake = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", camel) - return re.sub("([a-z0-9])([A-Z])", r"\1_\2", snake).lower() - - -def camel_to_upper_camel(camel): - return camel[0].upper() + camel[1:] - - -def camel_to_snake_args(arguments): - for arg_name in list(arguments): - c2s = camel_to_snake(arg_name) - arguments[c2s] = arguments.pop(arg_name) - return arguments - - -def snake_to_camel(snake): - # Regex to convert snake_case to lowerCamelCase. - return re.sub(r"_([a-z])", lambda m: m.group(1).upper(), snake) - - -def parse_collection_options(opts): - if "readPreference" in opts: - opts["read_preference"] = parse_read_preference(opts.pop("readPreference")) - - if "writeConcern" in opts: - opts["write_concern"] = WriteConcern(**dict(opts.pop("writeConcern"))) - - if "readConcern" in opts: - opts["read_concern"] = ReadConcern(**dict(opts.pop("readConcern"))) - - if "timeoutMS" in opts: - opts["timeout"] = int(opts.pop("timeoutMS")) / 1000.0 - return opts - - -def server_started_with_option(client, cmdline_opt, config_opt): - """Check if the server was started with a particular option. - - :Parameters: - - `cmdline_opt`: The command line option (i.e. --nojournal) - - `config_opt`: The config file option (i.e. nojournal) - """ - command_line = get_command_line(client) - if "parsed" in command_line: - parsed = command_line["parsed"] - if config_opt in parsed: - return parsed[config_opt] - argv = command_line["argv"] - return cmdline_opt in argv - - -def server_started_with_auth(client): - try: - command_line = get_command_line(client) - except OperationFailure as e: - assert e.details is not None - msg = e.details.get("errmsg", "") - if e.code == 13 or "unauthorized" in msg or "login" in msg: - # Unauthorized. 
- return True - raise - - # MongoDB >= 2.0 - if "parsed" in command_line: - parsed = command_line["parsed"] - # MongoDB >= 2.6 - if "security" in parsed: - security = parsed["security"] - # >= rc3 - if "authorization" in security: - return security["authorization"] == "enabled" - # < rc3 - return security.get("auth", False) or bool(security.get("keyFile")) - return parsed.get("auth", False) or bool(parsed.get("keyFile")) - # Legacy - argv = command_line["argv"] - return "--auth" in argv or "--keyFile" in argv - - -def joinall(threads): - """Join threads with a 5-minute timeout, assert joins succeeded""" - for t in threads: - t.join(300) - assert not t.is_alive(), "Thread %s hung" % t - - -def wait_until(predicate, success_description, timeout=10): - """Wait up to 10 seconds (by default) for predicate to be true. - - E.g.: - - wait_until(lambda: client.primary == ('a', 1), - 'connect to the primary') - - If the lambda-expression isn't true after 10 seconds, we raise - AssertionError("Didn't ever connect to the primary"). - - Returns the predicate's first true value. - """ - start = time.time() - interval = min(float(timeout) / 100, 0.1) - while True: - retval = predicate() - if retval: - return retval - - if time.time() - start > timeout: - raise AssertionError("Didn't ever %s" % success_description) - - time.sleep(interval) - - -async def async_wait_until(predicate, success_description, timeout=10): - """Wait up to 10 seconds (by default) for predicate to be true. - - E.g.: - - wait_until(lambda: client.primary == ('a', 1), - 'connect to the primary') - - If the lambda-expression isn't true after 10 seconds, we raise - AssertionError("Didn't ever connect to the primary"). - - Returns the predicate's first true value. - """ - start = time.time() - interval = min(float(timeout) / 100, 0.1) - while True: - if iscoroutinefunction(predicate): - retval = await predicate() - else: - retval = predicate() - if retval: - return retval - - if time.time() - start > timeout: - raise AssertionError("Didn't ever %s" % success_description) - - await asyncio.sleep(interval) - - -def is_mongos(client): - res = client.admin.command(HelloCompat.LEGACY_CMD) - return res.get("msg", "") == "isdbgrid" - - -async def async_is_mongos(client): - res = await client.admin.command(HelloCompat.LEGACY_CMD) - return res.get("msg", "") == "isdbgrid" - - def assertRaisesExactly(cls, fn, *args, **kwargs): """ Unlike the standard assertRaises, this checks that a function raises a @@ -687,338 +140,135 @@ def assertRaisesExactly(cls, fn, *args, **kwargs): raise AssertionError("%s not raised" % cls) -async def asyncAssertRaisesExactly(cls, fn, *args, **kwargs): - """ - Unlike the standard assertRaises, this checks that a function raises a - specific class of exception, and not a subclass. E.g., check that - MongoClient() raises ConnectionFailure but not its subclass, AutoReconnect. 
- """ - try: - await fn(*args, **kwargs) - except Exception as e: - assert e.__class__ == cls, f"got {e.__class__.__name__}, expected {cls.__name__}" - else: - raise AssertionError("%s not raised" % cls) - - -@contextlib.contextmanager -def _ignore_deprecations(): - with warnings.catch_warnings(): - warnings.simplefilter("ignore", DeprecationWarning) - yield - - -def ignore_deprecations(wrapped=None): - """A context manager or a decorator.""" - if wrapped: - if iscoroutinefunction(wrapped): - - @functools.wraps(wrapped) - async def wrapper(*args, **kwargs): - with _ignore_deprecations(): - return await wrapped(*args, **kwargs) - else: +def set_fail_point(client, command_args): + cmd = SON([("configureFailPoint", "failCommand")]) + cmd.update(command_args) + client.admin.command(cmd) - @functools.wraps(wrapped) - def wrapper(*args, **kwargs): - with _ignore_deprecations(): - return wrapped(*args, **kwargs) - - return wrapper +def joinall(tasks): + """Join threads with a 5-minute timeout, assert joins succeeded""" + if _IS_SYNC: + for t in tasks: + t.join(300) + assert not t.is_alive(), "Thread %s hung" % t else: - return _ignore_deprecations() - - -class DeprecationFilter: - def __init__(self, action="ignore"): - """Start filtering deprecations.""" - self.warn_context = warnings.catch_warnings() - self.warn_context.__enter__() - warnings.simplefilter(action, DeprecationWarning) - - def stop(self): - """Stop filtering deprecations.""" - self.warn_context.__exit__() # type: ignore - self.warn_context = None # type: ignore - - -def get_pool(client): - """Get the standalone, primary, or mongos pool.""" - topology = client._get_topology() - server = topology._select_server(writable_server_selector, _Op.TEST) - return server.pool - - -async def async_get_pool(client): - """Get the standalone, primary, or mongos pool.""" - topology = await client._get_topology() - server = await topology._select_server(writable_server_selector, _Op.TEST) - return server.pool - - -def get_pools(client): - """Get all pools.""" - return [ - server.pool - for server in client._get_topology().select_servers(any_server_selector, _Op.TEST) - ] - - -async def async_get_pools(client): - """Get all pools.""" - return [ - server.pool - async for server in await (await client._get_topology()).select_servers( - any_server_selector, _Op.TEST - ) - ] - - -# Constants for run_threads and lazy_client_trial. -NTRIALS = 5 -NTHREADS = 10 - - -def run_threads(collection, target): - """Run a target function in many threads. - - target is a function taking a Collection and an integer. - """ - threads = [] - for i in range(NTHREADS): - bound_target = partial(target, collection, i) - threads.append(threading.Thread(target=bound_target)) - - for t in threads: - t.start() - - for t in threads: - t.join(60) - assert not t.is_alive() - - -@contextlib.contextmanager -def frequent_thread_switches(): - """Make concurrency bugs more likely to manifest.""" - interval = sys.getswitchinterval() - sys.setswitchinterval(1e-6) - - try: - yield - finally: - sys.setswitchinterval(interval) - - -def lazy_client_trial(reset, target, test, get_client): - """Test concurrent operations on a lazily-connecting client. - - `reset` takes a collection and resets it for the next trial. + asyncio.wait([t.task for t in tasks if t is not None], timeout=300) + + +def flaky( + *, + reason=None, + max_runs=2, + min_passes=1, + delay=1, + affects_cpython_linux=False, + func_name=None, + reset_func=None, +): + """Decorate a test as flaky. 
-    `target` takes a lazily-connecting collection and an index from
-    0 to NTHREADS, and performs some operation, e.g. an insert.
+    :param reason: the reason why the test is flaky
+    :param max_runs: the maximum number of runs before raising an error
+    :param min_passes: the minimum number of passing runs
+    :param delay: the delay in seconds between retries
+    :param affects_cpython_linux: whether the test is flaky on CPython on Linux
+    :param func_name: the name of the function, used for the retry message
+    :param reset_func: a function to call before retrying

-    `test` takes the lazily-connecting collection and asserts a
-    post-condition to prove `target` succeeded.
     """
-    collection = client_context.client.pymongo_test.test
-
-    with frequent_thread_switches():
-        for _i in range(NTRIALS):
-            reset(collection)
-            lazy_client = get_client()
-            lazy_collection = lazy_client.pymongo_test.test
-            run_threads(lazy_collection, target)
-            test(lazy_collection)
-
-
-def gevent_monkey_patched():
-    """Check if gevent's monkey patching is active."""
-    try:
-        import socket
-
-        import gevent.socket  # type:ignore[import]
-
-        return socket.socket is gevent.socket.socket
-    except ImportError:
-        return False
-
-
-def eventlet_monkey_patched():
-    """Check if eventlet's monkey patching is active."""
-    import threading
-
-    return threading.current_thread.__module__ == "eventlet.green.threading"
-
-
-def is_greenthread_patched():
-    return gevent_monkey_patched() or eventlet_monkey_patched()
-
-
-class ExceptionCatchingThread(threading.Thread):
-    """A thread that stores any exception encountered from run()."""
+    if reason is None:
+        raise ValueError("flaky requires a reason input")
+    is_cpython_linux = sys.platform == "linux" and sys.implementation.name == "cpython"
+    disable_flaky = "DISABLE_FLAKY" in os.environ
+    if "CI" not in os.environ and "ENABLE_FLAKY" not in os.environ:
+        disable_flaky = True
+
+    if disable_flaky or (is_cpython_linux and not affects_cpython_linux):
+        max_runs = 1
+        min_passes = 1
+
+    def decorator(target_func):
+        @wraps(target_func)
+        def wrapper(*args, **kwargs):
+            passes = 0
+            for i in range(max_runs):
+                try:
+                    result = target_func(*args, **kwargs)
+                    passes += 1
+                    if passes == min_passes:
+                        return result
+                except Exception as e:
+                    if i == max_runs - 1:
+                        raise e
+                    print(
+                        f"Retrying after attempt {i+1} of {func_name or target_func.__name__} failed with ({reason}):\n"
+                        f"{traceback.format_exc()}",
+                        file=sys.stderr,
+                    )
+                    time.sleep(delay)
+                    if reset_func:
+                        reset_func()

-    def __init__(self, *args, **kwargs):
-        self.exc = None
-        super().__init__(*args, **kwargs)
-
-    def run(self):
-        try:
-            super().run()
-        except BaseException as exc:
-            self.exc = exc
-            raise
-
-
-def parse_read_preference(pref):
-    # Make first letter lowercase to match read_pref's modes.
-    mode_string = pref.get("mode", "primary")
-    mode_string = mode_string[:1].lower() + mode_string[1:]
-    mode = read_preferences.read_pref_mode_from_name(mode_string)
-    max_staleness = pref.get("maxStalenessSeconds", -1)
-    tag_sets = pref.get("tagSets") or pref.get("tag_sets")
-    return read_preferences.make_read_preference(
-        mode, tag_sets=tag_sets, max_staleness=max_staleness
-    )
-
-
-def server_name_to_type(name):
-    """Convert a ServerType name to the corresponding value. For SDAM tests."""
-    # Special case, some tests in the spec include the PossiblePrimary
-    # type, but only single-threaded drivers need that type. We call
-    # possible primaries Unknown.
- if name == "PossiblePrimary": - return SERVER_TYPE.Unknown - return getattr(SERVER_TYPE, name) + return wrapper + return decorator -def cat_files(dest, *sources): - """Cat multiple files into dest.""" - with open(dest, "wb") as fdst: - for src in sources: - with open(src, "rb") as fsrc: - shutil.copyfileobj(fsrc, fdst) +class MockConnection: + def __init__(self): + self.cancel_context = _CancellationContext() + self.more_to_come = False + self.id = random.randint(0, 100) + self.is_sdam = False + self.server_connection_id = random.randint(0, 100) -@contextlib.contextmanager -def assertion_context(msg): - """A context manager that adds info to an assertion failure.""" - try: - yield - except AssertionError as exc: - raise AssertionError(f"{msg}: {exc}") + def close_conn(self, reason): + pass + def __enter__(self): + return self -def parse_spec_options(opts): - if "readPreference" in opts: - opts["read_preference"] = parse_read_preference(opts.pop("readPreference")) + def __exit__(self, exc_type, exc_val, exc_tb): + pass - if "writeConcern" in opts: - w_opts = opts.pop("writeConcern") - if "journal" in w_opts: - w_opts["j"] = w_opts.pop("journal") - if "wtimeoutMS" in w_opts: - w_opts["wtimeout"] = w_opts.pop("wtimeoutMS") - opts["write_concern"] = WriteConcern(**dict(w_opts)) - if "readConcern" in opts: - opts["read_concern"] = ReadConcern(**dict(opts.pop("readConcern"))) +class MockPool: + def __init__(self, address, options, is_sdam=False, client_id=None): + self.gen = _PoolGeneration() + self._lock = _create_lock() + self.opts = options + self.operation_count = 0 + self.conns = [] - if "timeoutMS" in opts: - assert isinstance(opts["timeoutMS"], int) - opts["timeout"] = int(opts.pop("timeoutMS")) / 1000.0 + def stale_generation(self, gen, service_id): + return self.gen.stale(gen, service_id) - if "maxTimeMS" in opts: - opts["max_time_ms"] = opts.pop("maxTimeMS") + @contextlib.contextmanager + def checkout(self, handler=None): + yield MockConnection() - if "maxCommitTimeMS" in opts: - opts["max_commit_time_ms"] = opts.pop("maxCommitTimeMS") + def checkin(self, *args, **kwargs): + pass - return dict(opts) + def _reset(self, service_id=None): + with self._lock: + self.gen.inc(service_id) + def ready(self): + pass -def prepare_spec_arguments(spec, arguments, opname, entity_map, with_txn_callback): - for arg_name in list(arguments): - c2s = camel_to_snake(arg_name) - # Named "key" instead not fieldName. - if arg_name == "fieldName": - arguments["key"] = arguments.pop(arg_name) - # Aggregate uses "batchSize", while find uses batch_size. - elif (arg_name == "batchSize" or arg_name == "allowDiskUse") and opname == "aggregate": - continue - elif arg_name == "timeoutMode": - raise unittest.SkipTest("PyMongo does not support timeoutMode") - # Requires boolean returnDocument. - elif arg_name == "returnDocument": - arguments[c2s] = getattr(ReturnDocument, arguments.pop(arg_name).upper()) - elif "bulk_write" in opname and (c2s == "requests" or c2s == "models"): - # Parse each request into a bulk write model. 
- requests = [] - for request in arguments[c2s]: - if "name" in request: - # CRUD v2 format - bulk_model = camel_to_upper_camel(request["name"]) - bulk_class = getattr(operations, bulk_model) - bulk_arguments = camel_to_snake_args(request["arguments"]) - else: - # Unified test format - bulk_model, spec = next(iter(request.items())) - bulk_class = getattr(operations, camel_to_upper_camel(bulk_model)) - bulk_arguments = camel_to_snake_args(spec) - requests.append(bulk_class(**dict(bulk_arguments))) - arguments[c2s] = requests - elif arg_name == "session": - arguments["session"] = entity_map[arguments["session"]] - elif opname == "open_download_stream" and arg_name == "id": - arguments["file_id"] = arguments.pop(arg_name) - elif opname not in ("find", "find_one") and c2s == "max_time_ms": - # find is the only method that accepts snake_case max_time_ms. - # All other methods take kwargs which must use the server's - # camelCase maxTimeMS. See PYTHON-1855. - arguments["maxTimeMS"] = arguments.pop("max_time_ms") - elif opname == "with_transaction" and arg_name == "callback": - if "operations" in arguments[arg_name]: - # CRUD v2 format - callback_ops = arguments[arg_name]["operations"] - else: - # Unified test format - callback_ops = arguments[arg_name] - arguments["callback"] = lambda _: with_txn_callback(copy.deepcopy(callback_ops)) - elif opname == "drop_collection" and arg_name == "collection": - arguments["name_or_collection"] = arguments.pop(arg_name) - elif opname == "create_collection": - if arg_name == "collection": - arguments["name"] = arguments.pop(arg_name) - arguments["check_exists"] = False - # Any other arguments to create_collection are passed through - # **kwargs. - elif opname == "create_index" and arg_name == "keys": - arguments["keys"] = list(arguments.pop(arg_name).items()) - elif opname == "drop_index" and arg_name == "name": - arguments["index_or_name"] = arguments.pop(arg_name) - elif opname == "rename" and arg_name == "to": - arguments["new_name"] = arguments.pop(arg_name) - elif opname == "rename" and arg_name == "dropTarget": - arguments["dropTarget"] = arguments.pop(arg_name) - elif arg_name == "cursorType": - cursor_type = arguments.pop(arg_name) - if cursor_type == "tailable": - arguments["cursor_type"] = CursorType.TAILABLE - elif cursor_type == "tailableAwait": - arguments["cursor_type"] = CursorType.TAILABLE - else: - raise AssertionError(f"Unsupported cursorType: {cursor_type}") - else: - arguments[c2s] = arguments.pop(arg_name) + def reset(self, service_id=None, interrupt_connections=False): + self._reset() + def reset_without_pause(self): + self._reset() -def set_fail_point(client, command_args): - cmd = SON([("configureFailPoint", "failCommand")]) - cmd.update(command_args) - client.admin.command(cmd) + def close(self): + self._reset() + def update_is_writable(self, is_writable): + pass -async def async_set_fail_point(client, command_args): - cmd = SON([("configureFailPoint", "failCommand")]) - cmd.update(command_args) - await client.admin.command(cmd) + def remove_stale_sockets(self, *args, **kwargs): + pass diff --git a/test/utils_selection_tests.py b/test/utils_selection_tests.py index 2d21888e27..2772f06070 100644 --- a/test/utils_selection_tests.py +++ b/test/utils_selection_tests.py @@ -18,96 +18,29 @@ import datetime import os import sys +from test import PyMongoTestCase +from test.utils import MockPool sys.path[0:0] = [""] from test import unittest from test.pymongo_mocks import DummyMonitor -from test.utils import MockPool, parse_read_preference 
+from test.utils_selection_tests_shared import ( + get_addresses, + get_topology_type_name, + make_server_description, +) +from test.utils_shared import parse_read_preference from bson import json_util -from pymongo.common import HEARTBEAT_FREQUENCY, MIN_SUPPORTED_WIRE_VERSION, clean_node +from pymongo.common import HEARTBEAT_FREQUENCY from pymongo.errors import AutoReconnect, ConfigurationError -from pymongo.hello import Hello, HelloCompat from pymongo.operations import _Op -from pymongo.server_description import ServerDescription from pymongo.server_selectors import writable_server_selector from pymongo.synchronous.settings import TopologySettings from pymongo.synchronous.topology import Topology - -def get_addresses(server_list): - seeds = [] - hosts = [] - for server in server_list: - seeds.append(clean_node(server["address"])) - hosts.append(server["address"]) - return seeds, hosts - - -def make_last_write_date(server): - epoch = datetime.datetime.fromtimestamp(0, tz=datetime.timezone.utc).replace(tzinfo=None) - millis = server.get("lastWrite", {}).get("lastWriteDate") - if millis: - diff = ((millis % 1000) + 1000) % 1000 - seconds = (millis - diff) / 1000 - micros = diff * 1000 - return epoch + datetime.timedelta(seconds=seconds, microseconds=micros) - else: - # "Unknown" server. - return epoch - - -def make_server_description(server, hosts): - """Make a ServerDescription from server info in a JSON test.""" - server_type = server["type"] - if server_type in ("Unknown", "PossiblePrimary"): - return ServerDescription(clean_node(server["address"]), Hello({})) - - hello_response = {"ok": True, "hosts": hosts} - if server_type not in ("Standalone", "Mongos", "RSGhost"): - hello_response["setName"] = "rs" - - if server_type == "RSPrimary": - hello_response[HelloCompat.LEGACY_CMD] = True - elif server_type == "RSSecondary": - hello_response["secondary"] = True - elif server_type == "Mongos": - hello_response["msg"] = "isdbgrid" - elif server_type == "RSGhost": - hello_response["isreplicaset"] = True - elif server_type == "RSArbiter": - hello_response["arbiterOnly"] = True - - hello_response["lastWrite"] = {"lastWriteDate": make_last_write_date(server)} - - for field in "maxWireVersion", "tags", "idleWritePeriodMillis": - if field in server: - hello_response[field] = server[field] - - hello_response.setdefault("maxWireVersion", MIN_SUPPORTED_WIRE_VERSION) - - # Sets _last_update_time to now. - sd = ServerDescription( - clean_node(server["address"]), - Hello(hello_response), - round_trip_time=server["avg_rtt_ms"] / 1000.0, - ) - - if "lastUpdateTime" in server: - sd._last_update_time = server["lastUpdateTime"] / 1000.0 # ms to sec. - - return sd - - -def get_topology_type_name(scenario_def): - td = scenario_def["topology_description"] - name = td["type"] - if name == "Unknown": - # PyMongo never starts a topology in type Unknown. - return "Sharded" if len(td["servers"]) > 1 else "Single" - else: - return name +_IS_SYNC = True def get_topology_settings_dict(**kwargs): @@ -244,7 +177,7 @@ def run_scenario(self): def create_selection_tests(test_dir): - class TestAllScenarios(unittest.TestCase): + class TestAllScenarios(PyMongoTestCase): pass for dirpath, _, filenames in os.walk(test_dir): diff --git a/test/utils_selection_tests_shared.py b/test/utils_selection_tests_shared.py new file mode 100644 index 0000000000..dbaed1034f --- /dev/null +++ b/test/utils_selection_tests_shared.py @@ -0,0 +1,100 @@ +# Copyright 2015-present MongoDB, Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Utilities for testing Server Selection and Max Staleness.""" +from __future__ import annotations + +import datetime +import os +import sys + +sys.path[0:0] = [""] + +from pymongo.common import MIN_SUPPORTED_WIRE_VERSION, clean_node +from pymongo.hello import Hello, HelloCompat +from pymongo.server_description import ServerDescription + + +def get_addresses(server_list): + seeds = [] + hosts = [] + for server in server_list: + seeds.append(clean_node(server["address"])) + hosts.append(server["address"]) + return seeds, hosts + + +def make_last_write_date(server): + epoch = datetime.datetime.fromtimestamp(0, tz=datetime.timezone.utc).replace(tzinfo=None) + millis = server.get("lastWrite", {}).get("lastWriteDate") + if millis: + diff = ((millis % 1000) + 1000) % 1000 + seconds = (millis - diff) / 1000 + micros = diff * 1000 + return epoch + datetime.timedelta(seconds=seconds, microseconds=micros) + else: + # "Unknown" server. + return epoch + + +def make_server_description(server, hosts): + """Make a ServerDescription from server info in a JSON test.""" + server_type = server["type"] + if server_type in ("Unknown", "PossiblePrimary"): + return ServerDescription(clean_node(server["address"]), Hello({})) + + hello_response = {"ok": True, "hosts": hosts} + if server_type not in ("Standalone", "Mongos", "RSGhost"): + hello_response["setName"] = "rs" + + if server_type == "RSPrimary": + hello_response[HelloCompat.LEGACY_CMD] = True + elif server_type == "RSSecondary": + hello_response["secondary"] = True + elif server_type == "Mongos": + hello_response["msg"] = "isdbgrid" + elif server_type == "RSGhost": + hello_response["isreplicaset"] = True + elif server_type == "RSArbiter": + hello_response["arbiterOnly"] = True + + hello_response["lastWrite"] = {"lastWriteDate": make_last_write_date(server)} + + for field in "maxWireVersion", "tags", "idleWritePeriodMillis": + if field in server: + hello_response[field] = server[field] + + hello_response.setdefault("maxWireVersion", MIN_SUPPORTED_WIRE_VERSION) + + # Sets _last_update_time to now. + sd = ServerDescription( + clean_node(server["address"]), + Hello(hello_response), + round_trip_time=server["avg_rtt_ms"] / 1000.0, + ) + + if "lastUpdateTime" in server: + sd._last_update_time = server["lastUpdateTime"] / 1000.0 # ms to sec. + + return sd + + +def get_topology_type_name(scenario_def): + td = scenario_def["topology_description"] + name = td["type"] + if name == "Unknown": + # PyMongo never starts a topology in type Unknown. + return "Sharded" if len(td["servers"]) > 1 else "Single" + else: + return name diff --git a/test/utils_shared.py b/test/utils_shared.py new file mode 100644 index 0000000000..72fb943fc1 --- /dev/null +++ b/test/utils_shared.py @@ -0,0 +1,702 @@ +# Copyright 2012-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Shared utilities for testing pymongo""" +from __future__ import annotations + +import asyncio +import contextlib +import copy +import functools +import random +import re +import shutil +import sys +import threading +import unittest +import warnings +from collections import abc, defaultdict +from functools import partial +from inspect import iscoroutinefunction +from test import client_context +from test.asynchronous.utils import async_wait_until +from test.utils import wait_until +from typing import List + +from bson.objectid import ObjectId +from pymongo import monitoring, operations, read_preferences +from pymongo.cursor_shared import CursorType +from pymongo.errors import OperationFailure +from pymongo.helpers_shared import _SENSITIVE_COMMANDS +from pymongo.lock import _async_create_lock, _create_lock +from pymongo.monitoring import ( + ConnectionCheckedInEvent, + ConnectionCheckedOutEvent, + ConnectionCheckOutFailedEvent, + ConnectionCheckOutStartedEvent, + ConnectionClosedEvent, + ConnectionCreatedEvent, + ConnectionReadyEvent, + PoolClearedEvent, + PoolClosedEvent, + PoolCreatedEvent, + PoolReadyEvent, +) +from pymongo.read_concern import ReadConcern +from pymongo.server_type import SERVER_TYPE +from pymongo.synchronous.collection import ReturnDocument +from pymongo.synchronous.pool import _CancellationContext, _PoolGeneration +from pymongo.write_concern import WriteConcern + +IMPOSSIBLE_WRITE_CONCERN = WriteConcern(w=50) + + +class BaseListener: + def __init__(self): + self.events = [] + + def reset(self): + self.events = [] + + def add_event(self, event): + self.events.append(event) + + def event_count(self, event_type): + return len(self.events_by_type(event_type)) + + def events_by_type(self, event_type): + """Return the matching events by event class. + + event_type can be a single class or a tuple of classes. 
+ """ + return self.matching(lambda e: isinstance(e, event_type)) + + def matching(self, matcher): + """Return the matching events.""" + return [event for event in self.events[:] if matcher(event)] + + def wait_for_event(self, event, count): + """Wait for a number of events to be published, or fail.""" + wait_until(lambda: self.event_count(event) >= count, f"find {count} {event} event(s)") + + async def async_wait_for_event(self, event, count): + """Wait for a number of events to be published, or fail.""" + await async_wait_until( + lambda: self.event_count(event) >= count, f"find {count} {event} event(s)" + ) + + +class CMAPListener(BaseListener, monitoring.ConnectionPoolListener): + def connection_created(self, event): + assert isinstance(event, ConnectionCreatedEvent) + self.add_event(event) + + def connection_ready(self, event): + assert isinstance(event, ConnectionReadyEvent) + self.add_event(event) + + def connection_closed(self, event): + assert isinstance(event, ConnectionClosedEvent) + self.add_event(event) + + def connection_check_out_started(self, event): + assert isinstance(event, ConnectionCheckOutStartedEvent) + self.add_event(event) + + def connection_check_out_failed(self, event): + assert isinstance(event, ConnectionCheckOutFailedEvent) + self.add_event(event) + + def connection_checked_out(self, event): + assert isinstance(event, ConnectionCheckedOutEvent) + self.add_event(event) + + def connection_checked_in(self, event): + assert isinstance(event, ConnectionCheckedInEvent) + self.add_event(event) + + def pool_created(self, event): + assert isinstance(event, PoolCreatedEvent) + self.add_event(event) + + def pool_ready(self, event): + assert isinstance(event, PoolReadyEvent) + self.add_event(event) + + def pool_cleared(self, event): + assert isinstance(event, PoolClearedEvent) + self.add_event(event) + + def pool_closed(self, event): + assert isinstance(event, PoolClosedEvent) + self.add_event(event) + + +class EventListener(BaseListener, monitoring.CommandListener): + def __init__(self): + super().__init__() + self.results = defaultdict(list) + + @property + def started_events(self) -> List[monitoring.CommandStartedEvent]: + return self.results["started"] + + @property + def succeeded_events(self) -> List[monitoring.CommandSucceededEvent]: + return self.results["succeeded"] + + @property + def failed_events(self) -> List[monitoring.CommandFailedEvent]: + return self.results["failed"] + + def started(self, event: monitoring.CommandStartedEvent) -> None: + self.started_events.append(event) + self.add_event(event) + + def succeeded(self, event: monitoring.CommandSucceededEvent) -> None: + self.succeeded_events.append(event) + self.add_event(event) + + def failed(self, event: monitoring.CommandFailedEvent) -> None: + self.failed_events.append(event) + self.add_event(event) + + def started_command_names(self) -> List[str]: + """Return list of command names started.""" + return [event.command_name for event in self.started_events] + + def reset(self) -> None: + """Reset the state of this listener.""" + self.results.clear() + super().reset() + + +class TopologyEventListener(monitoring.TopologyListener): + def __init__(self): + self.results = defaultdict(list) + + def closed(self, event): + self.results["closed"].append(event) + + def description_changed(self, event): + self.results["description_changed"].append(event) + + def opened(self, event): + self.results["opened"].append(event) + + def reset(self): + """Reset the state of this listener.""" + self.results.clear() + + 
+class AllowListEventListener(EventListener): + def __init__(self, *commands): + self.commands = set(commands) + super().__init__() + + def started(self, event): + if event.command_name in self.commands: + super().started(event) + + def succeeded(self, event): + if event.command_name in self.commands: + super().succeeded(event) + + def failed(self, event): + if event.command_name in self.commands: + super().failed(event) + + +class OvertCommandListener(EventListener): + """A CommandListener that ignores sensitive commands.""" + + ignore_list_collections = False + + def started(self, event): + if event.command_name.lower() not in _SENSITIVE_COMMANDS: + super().started(event) + + def succeeded(self, event): + if event.command_name.lower() not in _SENSITIVE_COMMANDS: + super().succeeded(event) + + def failed(self, event): + if event.command_name.lower() not in _SENSITIVE_COMMANDS: + super().failed(event) + + +class _ServerEventListener: + """Listens to all events.""" + + def __init__(self): + self.results = [] + + def opened(self, event): + self.results.append(event) + + def description_changed(self, event): + self.results.append(event) + + def closed(self, event): + self.results.append(event) + + def matching(self, matcher): + """Return the matching events.""" + results = self.results[:] + return [event for event in results if matcher(event)] + + def reset(self): + self.results = [] + + +class ServerEventListener(_ServerEventListener, monitoring.ServerListener): + """Listens to Server events.""" + + +class ServerAndTopologyEventListener( # type: ignore[misc] + ServerEventListener, monitoring.TopologyListener +): + """Listens to Server and Topology events.""" + + +class HeartbeatEventListener(BaseListener, monitoring.ServerHeartbeatListener): + """Listens to only server heartbeat events.""" + + def started(self, event): + self.add_event(event) + + def succeeded(self, event): + self.add_event(event) + + def failed(self, event): + self.add_event(event) + + +class HeartbeatEventsListListener(HeartbeatEventListener): + """Listens to only server heartbeat events and publishes them to a provided list.""" + + def __init__(self, events): + super().__init__() + self.event_list = events + + def started(self, event): + self.add_event(event) + self.event_list.append("serverHeartbeatStartedEvent") + + def succeeded(self, event): + self.add_event(event) + self.event_list.append("serverHeartbeatSucceededEvent") + + def failed(self, event): + self.add_event(event) + self.event_list.append("serverHeartbeatFailedEvent") + + +class ScenarioDict(dict): + """Dict that returns {} for any unknown key, recursively.""" + + def __init__(self, data): + def convert(v): + if isinstance(v, abc.Mapping): + return ScenarioDict(v) + if isinstance(v, (str, bytes)): + return v + if isinstance(v, abc.Sequence): + return [convert(item) for item in v] + return v + + dict.__init__(self, [(k, convert(v)) for k, v in data.items()]) + + def __getitem__(self, item): + try: + return dict.__getitem__(self, item) + except KeyError: + # Unlike a defaultdict, don't set the key, just return a dict. 
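+            # e.g. ScenarioDict({})["a"]["b"] == {} and the dict stays empty.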
+ return ScenarioDict({}) + + +class CompareType: + """Class that compares equal to any object of the given type(s).""" + + def __init__(self, types): + self.types = types + + def __eq__(self, other): + return isinstance(other, self.types) + + +class FunctionCallRecorder: + """Utility class to wrap a callable and record its invocations.""" + + def __init__(self, function): + self._function = function + self._call_list = [] + + def __call__(self, *args, **kwargs): + self._call_list.append((args, kwargs)) + if iscoroutinefunction(self._function): + return self._function(*args, **kwargs) + else: + return self._function(*args, **kwargs) + + def reset(self): + """Wipes the call list.""" + self._call_list = [] + + def call_list(self): + """Returns a copy of the call list.""" + return self._call_list[:] + + @property + def call_count(self): + """Returns the number of times the function has been called.""" + return len(self._call_list) + + +def one(s): + """Get one element of a set""" + return next(iter(s)) + + +def oid_generated_on_process(oid): + """Makes a determination as to whether the given ObjectId was generated + by the current process, based on the 5-byte random number in the ObjectId. + """ + return ObjectId._random() == oid.binary[4:9] + + +def delay(sec): + return """function() { sleep(%f * 1000); return true; }""" % sec + + +def camel_to_snake(camel): + # Regex to convert CamelCase to snake_case. + snake = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", camel) + return re.sub("([a-z0-9])([A-Z])", r"\1_\2", snake).lower() + + +def camel_to_upper_camel(camel): + return camel[0].upper() + camel[1:] + + +def camel_to_snake_args(arguments): + for arg_name in list(arguments): + c2s = camel_to_snake(arg_name) + arguments[c2s] = arguments.pop(arg_name) + return arguments + + +def snake_to_camel(snake): + # Regex to convert snake_case to lowerCamelCase. + return re.sub(r"_([a-z])", lambda m: m.group(1).upper(), snake) + + +def parse_collection_options(opts): + if "readPreference" in opts: + opts["read_preference"] = parse_read_preference(opts.pop("readPreference")) + + if "writeConcern" in opts: + opts["write_concern"] = WriteConcern(**dict(opts.pop("writeConcern"))) + + if "readConcern" in opts: + opts["read_concern"] = ReadConcern(**dict(opts.pop("readConcern"))) + + if "timeoutMS" in opts: + opts["timeout"] = int(opts.pop("timeoutMS")) / 1000.0 + return opts + + +@contextlib.contextmanager +def _ignore_deprecations(): + with warnings.catch_warnings(): + warnings.simplefilter("ignore", DeprecationWarning) + yield + + +def ignore_deprecations(wrapped=None): + """A context manager or a decorator.""" + if wrapped: + if iscoroutinefunction(wrapped): + + @functools.wraps(wrapped) + async def wrapper(*args, **kwargs): + with _ignore_deprecations(): + return await wrapped(*args, **kwargs) + else: + + @functools.wraps(wrapped) + def wrapper(*args, **kwargs): + with _ignore_deprecations(): + return wrapped(*args, **kwargs) + + return wrapper + + else: + return _ignore_deprecations() + + +class DeprecationFilter: + def __init__(self, action="ignore"): + """Start filtering deprecations.""" + self.warn_context = warnings.catch_warnings() + self.warn_context.__enter__() + warnings.simplefilter(action, DeprecationWarning) + + def stop(self): + """Stop filtering deprecations.""" + self.warn_context.__exit__() # type: ignore + self.warn_context = None # type: ignore + + +# Constants for run_threads and lazy_client_trial. 
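+# run_threads spawns NTHREADS threads; lazy_client_trial repeats for NTRIALS trials.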
+NTRIALS = 5 +NTHREADS = 10 + + +def run_threads(collection, target): + """Run a target function in many threads. + + target is a function taking a Collection and an integer. + """ + threads = [] + for i in range(NTHREADS): + bound_target = partial(target, collection, i) + threads.append(threading.Thread(target=bound_target)) + + for t in threads: + t.start() + + for t in threads: + t.join(60) + assert not t.is_alive() + + +@contextlib.contextmanager +def frequent_thread_switches(): + """Make concurrency bugs more likely to manifest.""" + interval = sys.getswitchinterval() + sys.setswitchinterval(1e-6) + + try: + yield + finally: + sys.setswitchinterval(interval) + + +def lazy_client_trial(reset, target, test, get_client): + """Test concurrent operations on a lazily-connecting client. + + `reset` takes a collection and resets it for the next trial. + + `target` takes a lazily-connecting collection and an index from + 0 to NTHREADS, and performs some operation, e.g. an insert. + + `test` takes the lazily-connecting collection and asserts a + post-condition to prove `target` succeeded. + """ + collection = client_context.client.pymongo_test.test + + with frequent_thread_switches(): + for _i in range(NTRIALS): + reset(collection) + lazy_client = get_client() + lazy_collection = lazy_client.pymongo_test.test + run_threads(lazy_collection, target) + test(lazy_collection) + + +def gevent_monkey_patched(): + """Check if gevent's monkey patching is active.""" + try: + import socket + + import gevent.socket # type:ignore[import] + + return socket.socket is gevent.socket.socket + except ImportError: + return False + + +def is_greenthread_patched(): + return gevent_monkey_patched() + + +def parse_read_preference(pref): + # Make first letter lowercase to match read_pref's modes. + mode_string = pref.get("mode", "primary") + mode_string = mode_string[:1].lower() + mode_string[1:] + mode = read_preferences.read_pref_mode_from_name(mode_string) + max_staleness = pref.get("maxStalenessSeconds", -1) + tag_sets = pref.get("tagSets") or pref.get("tag_sets") + return read_preferences.make_read_preference( + mode, tag_sets=tag_sets, max_staleness=max_staleness + ) + + +def server_name_to_type(name): + """Convert a ServerType name to the corresponding value. For SDAM tests.""" + # Special case, some tests in the spec include the PossiblePrimary + # type, but only single-threaded drivers need that type. We call + # possible primaries Unknown. 
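+    # e.g. server_name_to_type("RSPrimary") == SERVER_TYPE.RSPrimary.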
+ if name == "PossiblePrimary": + return SERVER_TYPE.Unknown + return getattr(SERVER_TYPE, name) + + +def cat_files(dest, *sources): + """Cat multiple files into dest.""" + with open(dest, "wb") as fdst: + for src in sources: + with open(src, "rb") as fsrc: + shutil.copyfileobj(fsrc, fdst) + + +@contextlib.contextmanager +def assertion_context(msg): + """A context manager that adds info to an assertion failure.""" + try: + yield + except AssertionError as exc: + raise AssertionError(f"{msg}: {exc}") + + +def parse_spec_options(opts): + if "readPreference" in opts: + opts["read_preference"] = parse_read_preference(opts.pop("readPreference")) + + if "writeConcern" in opts: + w_opts = opts.pop("writeConcern") + if "journal" in w_opts: + w_opts["j"] = w_opts.pop("journal") + if "wtimeoutMS" in w_opts: + w_opts["wtimeout"] = w_opts.pop("wtimeoutMS") + opts["write_concern"] = WriteConcern(**dict(w_opts)) + + if "readConcern" in opts: + opts["read_concern"] = ReadConcern(**dict(opts.pop("readConcern"))) + + if "timeoutMS" in opts: + assert isinstance(opts["timeoutMS"], int) + opts["timeout"] = int(opts.pop("timeoutMS")) / 1000.0 + + if "maxTimeMS" in opts: + opts["max_time_ms"] = opts.pop("maxTimeMS") + + if "maxCommitTimeMS" in opts: + opts["max_commit_time_ms"] = opts.pop("maxCommitTimeMS") + + return dict(opts) + + +def prepare_spec_arguments(spec, arguments, opname, entity_map, with_txn_callback): + for arg_name in list(arguments): + c2s = camel_to_snake(arg_name) + # Named "key" instead not fieldName. + if arg_name == "fieldName": + arguments["key"] = arguments.pop(arg_name) + # Aggregate uses "batchSize", while find uses batch_size. + elif (arg_name == "batchSize" or arg_name == "allowDiskUse") and opname == "aggregate": + continue + elif arg_name == "bypassDocumentValidation" and ( + opname == "aggregate" or "find_one_and" in opname + ): + continue + elif arg_name == "timeoutMode": + raise unittest.SkipTest("PyMongo does not support timeoutMode") + # Requires boolean returnDocument. + elif arg_name == "returnDocument": + arguments[c2s] = getattr(ReturnDocument, arguments.pop(arg_name).upper()) + elif "bulk_write" in opname and (c2s == "requests" or c2s == "models"): + # Parse each request into a bulk write model. + requests = [] + for request in arguments[c2s]: + if "name" in request: + # CRUD v2 format + bulk_model = camel_to_upper_camel(request["name"]) + bulk_class = getattr(operations, bulk_model) + bulk_arguments = camel_to_snake_args(request["arguments"]) + else: + # Unified test format + bulk_model, spec = next(iter(request.items())) + bulk_class = getattr(operations, camel_to_upper_camel(bulk_model)) + bulk_arguments = camel_to_snake_args(spec) + requests.append(bulk_class(**dict(bulk_arguments))) + arguments[c2s] = requests + elif arg_name == "session": + arguments["session"] = entity_map[arguments["session"]] + elif opname == "open_download_stream" and arg_name == "id": + arguments["file_id"] = arguments.pop(arg_name) + elif opname not in ("find", "find_one") and c2s == "max_time_ms": + # find is the only method that accepts snake_case max_time_ms. + # All other methods take kwargs which must use the server's + # camelCase maxTimeMS. See PYTHON-1855. 
+ arguments["maxTimeMS"] = arguments.pop("max_time_ms") + elif opname == "with_transaction" and arg_name == "callback": + if "operations" in arguments[arg_name]: + # CRUD v2 format + callback_ops = arguments[arg_name]["operations"] + else: + # Unified test format + callback_ops = arguments[arg_name] + arguments["callback"] = lambda _: with_txn_callback(copy.deepcopy(callback_ops)) + elif opname == "drop_collection" and arg_name == "collection": + arguments["name_or_collection"] = arguments.pop(arg_name) + elif opname == "create_collection": + if arg_name == "collection": + arguments["name"] = arguments.pop(arg_name) + arguments["check_exists"] = False + # Any other arguments to create_collection are passed through + # **kwargs. + elif opname == "create_index" and arg_name == "keys": + arguments["keys"] = list(arguments.pop(arg_name).items()) + elif opname == "drop_index" and arg_name == "name": + arguments["index_or_name"] = arguments.pop(arg_name) + elif opname == "rename" and arg_name == "to": + arguments["new_name"] = arguments.pop(arg_name) + elif opname == "rename" and arg_name == "dropTarget": + arguments["dropTarget"] = arguments.pop(arg_name) + elif arg_name == "cursorType": + cursor_type = arguments.pop(arg_name) + if cursor_type == "tailable": + arguments["cursor_type"] = CursorType.TAILABLE + elif cursor_type == "tailableAwait": + arguments["cursor_type"] = CursorType.TAILABLE + else: + raise AssertionError(f"Unsupported cursorType: {cursor_type}") + else: + arguments[c2s] = arguments.pop(arg_name) + + +def create_async_event(): + return asyncio.Event() + + +def create_event(): + return threading.Event() + + +def async_create_barrier(n_tasks: int): + return asyncio.Barrier(n_tasks) + + +def create_barrier(n_tasks: int, timeout: float | None = None): + return threading.Barrier(n_tasks, timeout=timeout) + + +async def async_barrier_wait(barrier, timeout: float | None = None): + await asyncio.wait_for(barrier.wait(), timeout=timeout) + + +def barrier_wait(barrier, timeout: float | None = None): + barrier.wait(timeout=timeout) diff --git a/test/utils_spec_runner.py b/test/utils_spec_runner.py index 4508502cd0..46adeaefb5 100644 --- a/test/utils_spec_runner.py +++ b/test/utils_spec_runner.py @@ -18,12 +18,13 @@ import asyncio import functools import os -import threading +import time import unittest -from asyncio import iscoroutinefunction from collections import abc +from inspect import iscoroutinefunction from test import IntegrationTest, client_context, client_knobs -from test.utils import ( +from test.helpers import ConcurrentRunner +from test.utils_shared import ( CMAPListener, CompareType, EventListener, @@ -44,6 +45,7 @@ from gridfs import GridFSBucket from gridfs.synchronous.grid_file import GridFSBucket from pymongo.errors import AutoReconnect, BulkWriteError, OperationFailure, PyMongoError +from pymongo.lock import _cond_wait, _create_condition, _create_lock from pymongo.read_concern import ReadConcern from pymongo.read_preferences import ReadPreference from pymongo.results import BulkWriteResult, _WriteResult @@ -55,15 +57,13 @@ _IS_SYNC = True -class SpecRunnerThread(threading.Thread): +class SpecRunnerThread(ConcurrentRunner): def __init__(self, name): - super().__init__() - self.name = name + super().__init__(name=name) self.exc = None self.daemon = True - self.cond = threading.Condition() + self.cond = _create_condition(_create_lock()) self.ops = [] - self.stopped = False def schedule(self, work): self.ops.append(work) @@ -79,7 +79,7 @@ def run(self): while not 
self.stopped or self.ops: if not self.ops: with self.cond: - self.cond.wait(10) + _cond_wait(self.cond, 10) if self.ops: try: work = self.ops.pop(0) @@ -124,18 +124,6 @@ def _ensure_min_max_server_version(self, scenario_def, method): if max_ver is not None: method = client_context.require_version_max(*max_ver)(method) - if "serverless" in scenario_def: - serverless = scenario_def["serverless"] - if serverless == "require": - serverless_satisfied = client_context.serverless - elif serverless == "forbid": - serverless_satisfied = not client_context.serverless - else: # unset or "allow" - serverless_satisfied = True - method = unittest.skipUnless( - serverless_satisfied, "Serverless requirement not satisfied" - )(method) - return method @staticmethod @@ -168,16 +156,6 @@ def valid_auth_enabled(run_on_req): return not client_context.auth_enabled return True - @staticmethod - def serverless_ok(run_on_req): - serverless = run_on_req["serverless"] - if serverless == "require": - return client_context.serverless - elif serverless == "forbid": - return not client_context.serverless - else: # unset or "allow" - return True - def should_run_on(self, scenario_def): run_on = scenario_def.get("runOn", []) if not run_on: @@ -190,7 +168,6 @@ def should_run_on(self, scenario_def): and self.min_server_version(req) and self.max_server_version(req) and self.valid_auth_enabled(req) - and self.serverless_ok(req) ): return True return False @@ -265,15 +242,10 @@ def setUp(self) -> None: def tearDown(self) -> None: self.knobs.disable() - def _set_fail_point(self, client, command_args): - cmd = SON([("configureFailPoint", "failCommand")]) - cmd.update(command_args) - client.admin.command(cmd) - def set_fail_point(self, command_args): clients = self.mongos_clients if self.mongos_clients else [self.client] for client in clients: - self._set_fail_point(client, command_args) + self.configure_fail_point(client, command_args) def targeted_fail_point(self, session, fail_point): """Run the targetedFailPoint test operation. @@ -282,7 +254,7 @@ def targeted_fail_point(self, session, fail_point): """ clients = {c.address: c for c in self.mongos_clients} client = clients[session._pinned_address] - self._set_fail_point(client, fail_point) + self.configure_fail_point(client, fail_point) self.addCleanup(self.set_fail_point, {"mode": "off"}) def assert_session_pinned(self, session): @@ -320,6 +292,10 @@ def assert_index_not_exists(self, database, collection, index): coll = self.client[database][collection] self.assertNotIn(index, [doc["name"] for doc in coll.list_indexes()]) + def wait(self, ms): + """Run the "wait" test operation.""" + time.sleep(ms / 1000.0) + def assertErrorLabelsContain(self, exc, expected_labels): labels = [l for l in expected_labels if exc.has_error_label(l)] self.assertEqual(labels, expected_labels) @@ -672,13 +648,10 @@ def run_scenario(self, scenario_def, test): server_listener = ServerAndTopologyEventListener() # Create a new client, to avoid interference from pooled sessions. client_options = self.parse_client_options(test["clientOptions"]) - # MMAPv1 does not support retryable writes. 
- if client_options.get("retryWrites") is True and client_context.storage_engine == "mmapv1": - self.skipTest("MMAPv1 does not support retryWrites=True") use_multi_mongos = test["useMultipleMongoses"] host = None if use_multi_mongos: - if client_context.load_balancer or client_context.serverless: + if client_context.load_balancer: host = client_context.MULTI_MONGOS_LB_URI elif client_context.is_mongos: host = client_context.mongos_seeds() diff --git a/test/version.py b/test/version.py index 42d53cfcf4..ae6ecb331f 100644 --- a/test/version.py +++ b/test/version.py @@ -15,64 +15,10 @@ """Some tools for running tests based on MongoDB server version.""" from __future__ import annotations +from pymongo.common import Version as BaseVersion -class Version(tuple): - def __new__(cls, *version): - padded_version = cls._padded(version, 4) - return super().__new__(cls, tuple(padded_version)) - - @classmethod - def _padded(cls, iter, length, padding=0): - l = list(iter) - if len(l) < length: - for _ in range(length - len(l)): - l.append(padding) - return l - - @classmethod - def from_string(cls, version_string): - mod = 0 - bump_patch_level = False - if version_string.endswith("+"): - version_string = version_string[0:-1] - mod = 1 - elif version_string.endswith("-pre-"): - version_string = version_string[0:-5] - mod = -1 - elif version_string.endswith("-"): - version_string = version_string[0:-1] - mod = -1 - # Deal with '-rcX' substrings - if "-rc" in version_string: - version_string = version_string[0 : version_string.find("-rc")] - mod = -1 - # Deal with git describe generated substrings - elif "-" in version_string: - version_string = version_string[0 : version_string.find("-")] - mod = -1 - bump_patch_level = True - - version = [int(part) for part in version_string.split(".")] - version = cls._padded(version, 3) - # Make from_string and from_version_array agree. 
For example: - # MongoDB Enterprise > db.runCommand('buildInfo').versionArray - # [ 3, 2, 1, -100 ] - # MongoDB Enterprise > db.runCommand('buildInfo').version - # 3.2.0-97-g1ef94fe - if bump_patch_level: - version[-1] += 1 - version.append(mod) - - return Version(*version) - - @classmethod - def from_version_array(cls, version_array): - version = list(version_array) - if version[-1] < 0: - version[-1] = -1 - version = cls._padded(version, 3) - return Version(*version) +class Version(BaseVersion): @classmethod def from_client(cls, client): info = client.server_info() @@ -86,9 +32,3 @@ async def async_from_client(cls, client): if "versionArray" in info: return cls.from_version_array(info["versionArray"]) return cls.from_string(info["version"]) - - def at_least(self, *other_version): - return self >= Version(*other_version) - - def __str__(self): - return ".".join(map(str, self)) diff --git a/test/versioned-api/transaction-handling.json b/test/versioned-api/transaction-handling.json index c00c5240ae..32031296af 100644 --- a/test/versioned-api/transaction-handling.json +++ b/test/versioned-api/transaction-handling.json @@ -6,7 +6,7 @@ "minServerVersion": "4.9", "topologies": [ "replicaset", - "sharded-replicaset", + "sharded", "load-balanced" ] } @@ -92,7 +92,7 @@ { "topologies": [ "replicaset", - "sharded-replicaset", + "sharded", "load-balanced" ] } @@ -221,7 +221,7 @@ { "topologies": [ "replicaset", - "sharded-replicaset", + "sharded", "load-balanced" ] } diff --git a/tools/convert_test_to_async.py b/tools/convert_test_to_async.py index dbdb217c84..6c68c34bf3 100644 --- a/tools/convert_test_to_async.py +++ b/tools/convert_test_to_async.py @@ -1,6 +1,6 @@ from __future__ import annotations -import asyncio +import inspect import sys from pymongo import AsyncMongoClient @@ -83,7 +83,7 @@ def get_async_methods() -> set[str]: for k, v in vars(x).items() if callable(v) and not isinstance(v, classmethod) - and asyncio.iscoroutinefunction(v) + and inspect.iscoroutinefunction(v) and v.__name__[0] != "_" } result = result | methods diff --git a/tools/fail_if_no_c.py b/tools/fail_if_no_c.py index 6848e155aa..64280a81d2 100644 --- a/tools/fail_if_no_c.py +++ b/tools/fail_if_no_c.py @@ -18,34 +18,30 @@ """ from __future__ import annotations -import os -import subprocess +import logging import sys -from pathlib import Path + +LOGGER = logging.getLogger(__name__) +logging.basicConfig(level=logging.INFO, format="%(levelname)-8s %(message)s") sys.path[0:0] = [""] import bson # noqa: E402 import pymongo # noqa: E402 -if not pymongo.has_c() or not bson.has_c(): - try: - from pymongo import _cmessage # type:ignore[attr-defined] # noqa: F401 - except Exception as e: - print(e) - try: - from bson import _cbson # type:ignore[attr-defined] # noqa: F401 - except Exception as e: - print(e) - sys.exit("could not load C extensions") - -if os.environ.get("ENSURE_UNIVERSAL2") == "1": - parent_dir = Path(pymongo.__path__[0]).parent - for pkg in ["pymongo", "bson", "grifs"]: - for so_file in Path(f"{parent_dir}/{pkg}").glob("*.so"): - print(f"Checking universal2 compatibility in {so_file}...") - output = subprocess.check_output(["file", so_file]) # noqa: S603, S607 - if "arm64" not in output.decode("utf-8"): - sys.exit("Universal wheel was not compiled with arm64 support") - if "x86_64" not in output.decode("utf-8"): - sys.exit("Universal wheel was not compiled with x86_64 support") + +def main() -> None: + if not pymongo.has_c() or not bson.has_c(): + try: + from pymongo import _cmessage # type:ignore[attr-defined] # 
noqa: F401 + except Exception as e: + LOGGER.exception(e) + try: + from bson import _cbson # type:ignore[attr-defined] # noqa: F401 + except Exception as e: + LOGGER.exception(e) + sys.exit("could not load C extensions") + + +if __name__ == "__main__": + main() diff --git a/tools/ocsptest.py b/tools/ocsptest.py index 521d048f79..8596db226d 100644 --- a/tools/ocsptest.py +++ b/tools/ocsptest.py @@ -35,6 +35,7 @@ def check_ocsp(host: str, port: int, capath: str) -> None: False, # allow_invalid_certificates False, # allow_invalid_hostnames False, + True, # is sync ) # disable_ocsp_endpoint_check # Ensure we're using pyOpenSSL. diff --git a/tools/synchro.py b/tools/synchro.py index 577e82d14e..e3d4835502 100644 --- a/tools/synchro.py +++ b/tools/synchro.py @@ -47,6 +47,7 @@ "async_receive_message": "receive_message", "async_receive_data": "receive_data", "async_sendall": "sendall", + "async_socket_sendall": "sendall", "asynchronous": "synchronous", "Asynchronous": "Synchronous", "AsyncBulkTestBase": "BulkTestBase", @@ -119,6 +120,21 @@ "_async_create_lock": "_create_lock", "_async_create_condition": "_create_condition", "_async_cond_wait": "_cond_wait", + "AsyncNetworkingInterface": "NetworkingInterface", + "_configured_protocol_interface": "_configured_socket_interface", + "_async_configured_socket": "_configured_socket", + "SpecRunnerTask": "SpecRunnerThread", + "AsyncMockConnection": "MockConnection", + "AsyncMockPool": "MockPool", + "StopAsyncIteration": "StopIteration", + "create_async_event": "create_event", + "async_create_barrier": "create_barrier", + "async_barrier_wait": "barrier_wait", + "async_joinall": "joinall", + "async_simple_test_client": "simple_test_client", + "_async_create_connection": "_create_connection", + "pymongo.asynchronous.srv_resolver._SrvResolver.get_hosts": "pymongo.synchronous.srv_resolver._SrvResolver.get_hosts", + "dns.asyncresolver.resolve": "dns.resolver.resolve", } docstring_replacements: dict[tuple[str, str], str] = { @@ -149,6 +165,10 @@ _gridfs_dest_base = "./gridfs/synchronous/" _test_dest_base = "./test/" +if not Path.exists(Path(_pymongo_dest_base)): + Path.mkdir(Path(_pymongo_dest_base)) +if not Path.exists(Path(_gridfs_dest_base)): + Path.mkdir(Path(_gridfs_dest_base)) async_files = [ _pymongo_base + f for f in listdir(_pymongo_base) if (Path(_pymongo_base) / f).is_file() @@ -161,7 +181,14 @@ def async_only_test(f: str) -> bool: """Return True for async tests that should not be converted to sync.""" - return f in ["test_locks.py", "test_concurrency.py"] + return f in [ + "test_locks.py", + "test_concurrency.py", + "test_async_cancellation.py", + "test_async_loop_safety.py", + "test_async_contextvars_reset.py", + "test_async_loop_unblocked.py", + ] test_files = [ @@ -170,18 +197,6 @@ def async_only_test(f: str) -> bool: if (Path(_test_base) / f).is_file() and not async_only_test(f) ] -sync_files = [ - _pymongo_dest_base + f - for f in listdir(_pymongo_dest_base) - if (Path(_pymongo_dest_base) / f).is_file() -] - -sync_gridfs_files = [ - _gridfs_dest_base + f - for f in listdir(_gridfs_dest_base) - if (Path(_gridfs_dest_base) / f).is_file() -] - # Add each asynchronized test here as part of the converting PR converted_tests = [ "__init__.py", @@ -191,12 +206,14 @@ def async_only_test(f: str) -> bool: "utils_spec_runner.py", "qcheck.py", "test_auth.py", + "test_auth_oidc.py", "test_auth_spec.py", "test_bulk.py", "test_change_stream.py", "test_client.py", "test_client_bulk_write.py", "test_client_context.py", + "test_client_metadata.py", 
"test_collation.py", "test_collection.py", "test_collection_management.py", @@ -206,32 +223,65 @@ def async_only_test(f: str) -> bool: "test_comment.py", "test_common.py", "test_connection_logging.py", + "test_connection_monitoring.py", "test_connections_survive_primary_stepdown_spec.py", "test_create_entities.py", "test_crud_unified.py", + "test_csot.py", "test_cursor.py", + "test_custom_types.py", "test_database.py", + "test_discovery_and_monitoring.py", + "test_dns.py", "test_encryption.py", + "test_examples.py", "test_grid_file.py", + "test_gridfs.py", + "test_gridfs_bucket.py", + "test_gridfs_spec.py", + "test_heartbeat_monitoring.py", + "test_index_management.py", + "test_json_util_integration.py", + "test_load_balancer.py", "test_logger.py", + "test_max_staleness.py", + "test_monitor.py", "test_monitoring.py", + "test_mongos_load_balancing.py", + "test_on_demand_csfle.py", + "test_pooling.py", "test_raw_bson.py", + "test_read_concern.py", + "test_read_preferences.py", + "test_read_write_concern_spec.py", "test_retryable_reads.py", + "test_retryable_reads_unified.py", "test_retryable_writes.py", + "test_retryable_writes_unified.py", + "test_run_command.py", + "test_sdam_monitoring_spec.py", + "test_server_selection.py", + "test_server_selection_in_window.py", + "test_server_selection_logging.py", + "test_server_selection_rtt.py", "test_session.py", + "test_sessions_unified.py", + "test_srv_polling.py", + "test_ssl.py", + "test_streaming_protocol.py", "test_transactions.py", + "test_transactions_unified.py", + "test_unified_format.py", + "test_versioned_api_integration.py", "unified_format.py", + "utils_selection_tests.py", + "utils.py", ] -sync_test_files = [ - _test_dest_base + f for f in converted_tests if (Path(_test_dest_base) / f).is_file() -] - - -docstring_translate_files = sync_files + sync_gridfs_files + sync_test_files - -def process_files(files: list[str]) -> None: +def process_files( + files: list[str], docstring_translate_files: list[str], sync_test_files: list[str] +) -> None: for file in files: if "__init__" not in file or "__init__" and "test" in file: with open(file, "r+") as f: @@ -242,7 +292,8 @@ def process_files(files: list[str]) -> None: if file in docstring_translate_files: lines = translate_docstrings(lines) if file in sync_test_files: - translate_imports(lines) + lines = translate_imports(lines) + lines = process_ignores(lines) f.seek(0) f.writelines(lines) f.truncate() @@ -344,6 +395,14 @@ def translate_docstrings(lines: list[str]) -> list[str]: return [line for line in lines if line != "DOCSTRING_REMOVED"] +def process_ignores(lines: list[str]) -> list[str]: + for i in range(len(lines)): + for k, v in replacements.items(): + if "unasync: off" in lines[i] and v in lines[i]: + lines[i] = lines[i].replace(v, k) + return lines + + def unasync_directory(files: list[str], src: str, dest: str, replacements: dict[str, str]) -> None: unasync_files( files, @@ -360,13 +419,18 @@ def unasync_directory(files: list[str], src: str, dest: str, replacements: dict[ def main() -> None: modified_files = [f"./{f}" for f in sys.argv[1:]] errored = False - for fname in async_files + gridfs_files: + for fname in async_files + gridfs_files + test_files: # If the async file was modified, we don't need to check if the sync file was also modified. 
         if str(fname) in modified_files:
             continue
         sync_name = str(fname).replace("asynchronous", "synchronous")
-        if sync_name in modified_files and "OVERRIDE_SYNCHRO_CHECK" not in os.environ:
-            print(f"Refusing to overwrite {sync_name}")
+        test_sync_name = str(fname).replace("/asynchronous", "")
+        if (
+            sync_name in modified_files or test_sync_name in modified_files
+        ) and "OVERRIDE_SYNCHRO_CHECK" not in os.environ:
+            print(f"Refusing to overwrite {test_sync_name}")
             errored = True
     if errored:
         raise ValueError("Aborting synchro due to errors")
@@ -374,7 +438,27 @@ def main() -> None:
     unasync_directory(async_files, _pymongo_base, _pymongo_dest_base, replacements)
     unasync_directory(gridfs_files, _gridfs_base, _gridfs_dest_base, replacements)
     unasync_directory(test_files, _test_base, _test_dest_base, replacements)
-    process_files(sync_files + sync_gridfs_files + sync_test_files)
+
+    sync_files = [
+        _pymongo_dest_base + f
+        for f in listdir(_pymongo_dest_base)
+        if (Path(_pymongo_dest_base) / f).is_file()
+    ]
+
+    sync_gridfs_files = [
+        _gridfs_dest_base + f
+        for f in listdir(_gridfs_dest_base)
+        if (Path(_gridfs_dest_base) / f).is_file()
+    ]
+    sync_test_files = [
+        _test_dest_base + f for f in converted_tests if (Path(_test_dest_base) / f).is_file()
+    ]
+
+    docstring_translate_files = sync_files + sync_gridfs_files + sync_test_files
+
+    process_files(
+        sync_files + sync_gridfs_files + sync_test_files, docstring_translate_files, sync_test_files
+    )
 
 
 if __name__ == "__main__":
diff --git a/tools/synchro.sh b/tools/synchro.sh
index 51c51a9548..28b9c6d6c4 100755
--- a/tools/synchro.sh
+++ b/tools/synchro.sh
@@ -1,5 +1,5 @@
 #!/bin/bash
-
+# Keep the synchronous folders in sync with their async counterparts.
 set -eu
 
 python ./tools/synchro.py "$@"
diff --git a/uv.lock b/uv.lock
new file mode 100644
index 0000000000..f9a389c896
--- /dev/null
+++ b/uv.lock
@@ -0,0 +1,2208 @@
+version = 1
+revision = 3
+requires-python = ">=3.9"
+resolution-markers = [
+    "python_full_version == '3.14.*'",
+    "python_full_version >= '3.15' or (python_full_version >= '3.11' and python_full_version < '3.14')",
+    "python_full_version == '3.10.*'",
+    "python_full_version < '3.10'",
+]
+
+[[package]]
+name = "accessible-pygments"
+version = "0.0.5"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "pygments" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/bc/c1/bbac6a50d02774f91572938964c582fff4270eee73ab822a4aeea4d8b11b/accessible_pygments-0.0.5.tar.gz", hash = "sha256:40918d3e6a2b619ad424cb91e556bd3bd8865443d9f22f1dcdf79e33c8046872", size = 1377899, upload-time = "2024-05-10T11:23:10.216Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/8d/3f/95338030883d8c8b91223b4e21744b04d11b161a3ef117295d8241f50ab4/accessible_pygments-0.0.5-py3-none-any.whl", hash = "sha256:88ae3211e68a1d0b011504b2ffc1691feafce124b845bd072ab6f9f66f34d4b7", size = 1395903, upload-time = "2024-05-10T11:23:08.421Z" },
+]
+
+[[package]]
+name = "alabaster"
+version = "0.7.16"
+source = { registry = "https://pypi.org/simple" }
+resolution-markers = [
+    "python_full_version < '3.10'",
+]
+sdist = { url = "https://files.pythonhosted.org/packages/c9/3e/13dd8e5ed9094e734ac430b5d0eb4f2bb001708a8b7856cbf8e084e001ba/alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65", size = 23776, upload-time = "2024-01-10T00:56:10.189Z" }
+wheels = [
+    { url = 
"https://files.pythonhosted.org/packages/32/34/d4e1c02d3bee589efb5dfa17f88ea08bdb3e3eac12bc475462aec52ed223/alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92", size = 13511, upload-time = "2024-01-10T00:56:08.388Z" }, +] + +[[package]] +name = "alabaster" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version == '3.14.*'", + "python_full_version >= '3.15' or (python_full_version >= '3.11' and python_full_version < '3.14')", + "python_full_version == '3.10.*'", +] +sdist = { url = "https://files.pythonhosted.org/packages/a6/f8/d9c74d0daf3f742840fd818d69cfae176fa332022fd44e3469487d5a9420/alabaster-1.0.0.tar.gz", hash = "sha256:c00dca57bca26fa62a6d7d0a9fcce65f3e026e9bfe33e9c538fd3fbb2144fd9e", size = 24210, upload-time = "2024-07-26T18:15:03.762Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/b3/6b4067be973ae96ba0d615946e314c5ae35f9f993eca561b356540bb0c2b/alabaster-1.0.0-py3-none-any.whl", hash = "sha256:fc6786402dc3fcb2de3cabd5fe455a2db534b371124f1f21de8731783dec828b", size = 13929, upload-time = "2024-07-26T18:15:02.05Z" }, +] + +[[package]] +name = "anyio" +version = "4.9.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "idna" }, + { name = "sniffio" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/95/7d/4c1bd541d4dffa1b52bd83fb8527089e097a106fc90b467a7313b105f840/anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028", size = 190949, upload-time = "2025-03-17T00:02:54.77Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a1/ee/48ca1a7c89ffec8b6a0c5d02b89c305671d5ffd8d3c94acf8b8c408575bb/anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c", size = 100916, upload-time = "2025-03-17T00:02:52.713Z" }, +] + +[[package]] +name = "attrs" +version = "25.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b", size = 812032, upload-time = "2025-03-13T11:10:22.779Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815, upload-time = "2025-03-13T11:10:21.14Z" }, +] + +[[package]] +name = "babel" +version = "2.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7d/6b/d52e42361e1aa00709585ecc30b3f9684b3ab62530771402248b1b1d6240/babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d", size = 9951852, upload-time = "2025-02-01T15:17:41.026Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/b8/3fe70c75fe32afc4bb507f75563d39bc5642255d1d94f1f23604725780bf/babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2", size = 10182537, upload-time = "2025-02-01T15:17:37.39Z" }, +] + +[[package]] +name = "backports-asyncio-runner" +version = "1.2.0" 
+source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8e/ff/70dca7d7cb1cbc0edb2c6cc0c38b65cba36cccc491eca64cabd5fe7f8670/backports_asyncio_runner-1.2.0.tar.gz", hash = "sha256:a5aa7b2b7d8f8bfcaa2b57313f70792df84e32a2a746f585213373f900b42162", size = 69893, upload-time = "2025-07-02T02:27:15.685Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/59/76ab57e3fe74484f48a53f8e337171b4a2349e506eabe136d7e01d059086/backports_asyncio_runner-1.2.0-py3-none-any.whl", hash = "sha256:0da0a936a8aeb554eccb426dc55af3ba63bcdc69fa1a600b5bb305413a4477b5", size = 12313, upload-time = "2025-07-02T02:27:14.263Z" }, +] + +[[package]] +name = "beautifulsoup4" +version = "4.13.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "soupsieve" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d8/e4/0c4c39e18fd76d6a628d4dd8da40543d136ce2d1752bd6eeeab0791f4d6b/beautifulsoup4-4.13.4.tar.gz", hash = "sha256:dbb3c4e1ceae6aefebdaf2423247260cd062430a410e38c66f2baa50a8437195", size = 621067, upload-time = "2025-04-15T17:05:13.836Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/50/cd/30110dc0ffcf3b131156077b90e9f60ed75711223f306da4db08eff8403b/beautifulsoup4-4.13.4-py3-none-any.whl", hash = "sha256:9bbbb14bfde9d79f38b8cd5f8c7c85f4b8f2523190ebed90e950a8dea4cb1c4b", size = 187285, upload-time = "2025-04-15T17:05:12.221Z" }, +] + +[[package]] +name = "boto3" +version = "1.40.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "botocore" }, + { name = "jmespath" }, + { name = "s3transfer" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7b/34/298ef2023d7d88069776c9cc26b42ba6f05d143a1c9b44a0f65cd795c65b/boto3-1.40.0.tar.gz", hash = "sha256:fc1b3ca3baf3d8820c6faddf47cbba8ad3cd16f8e8d7e2f76d304bf995932eb7", size = 111847, upload-time = "2025-07-31T19:21:06.735Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5d/44/158581021038c5fc886ffa27fa4731fb4939258da7a23e0bc70b2d5757c9/boto3-1.40.0-py3-none-any.whl", hash = "sha256:959443055d2af676c336cc6033b3f870a8a924384b70d0b2905081d649378179", size = 139882, upload-time = "2025-07-31T19:21:04.65Z" }, +] + +[[package]] +name = "botocore" +version = "1.40.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jmespath" }, + { name = "python-dateutil" }, + { name = "urllib3", version = "1.26.20", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "urllib3", version = "2.5.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8f/e7/770ce910457ac6c68ea79b83892ab7a7cb08528f5d1dd77e51bf02a8529e/botocore-1.40.0.tar.gz", hash = "sha256:850242560dc8e74d542045a81eb6cc15f1b730b4ba55ba5b30e6d686548dfcaf", size = 14262316, upload-time = "2025-07-31T19:20:56.662Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/38/5a/bebc53f022514412613615b09aef20fbe804abb3ea26ec27e504a2d21c8f/botocore-1.40.0-py3-none-any.whl", hash = "sha256:2063e6d035a6a382b2ae37e40f5144044e55d4e091910d0c9f1be3121ad3e4e6", size = 13921768, upload-time = "2025-07-31T19:20:51.487Z" }, +] + +[[package]] +name = "certifi" +version = "2025.7.14" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/b3/76/52c535bcebe74590f296d6c77c86dabf761c41980e1347a2422e4aa2ae41/certifi-2025.7.14.tar.gz", hash = "sha256:8ea99dbdfaaf2ba2f9bac77b9249ef62ec5218e7c2b2e903378ed5fccf765995", size = 163981, upload-time = "2025-07-14T03:29:28.449Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4f/52/34c6cf5bb9285074dc3531c437b3919e825d976fde097a7a73f79e726d03/certifi-2025.7.14-py3-none-any.whl", hash = "sha256:6b31f564a415d79ee77df69d757bb49a5bb53bd9f756cbbe24394ffd6fc1f4b2", size = 162722, upload-time = "2025-07-14T03:29:26.863Z" }, +] + +[[package]] +name = "cffi" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pycparser", marker = "implementation_name != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/93/d7/516d984057745a6cd96575eea814fe1edd6646ee6efd552fb7b0921dec83/cffi-2.0.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:0cf2d91ecc3fcc0625c2c530fe004f82c110405f101548512cce44322fa8ac44", size = 184283, upload-time = "2025-09-08T23:22:08.01Z" }, + { url = "https://files.pythonhosted.org/packages/9e/84/ad6a0b408daa859246f57c03efd28e5dd1b33c21737c2db84cae8c237aa5/cffi-2.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f73b96c41e3b2adedc34a7356e64c8eb96e03a3782b535e043a986276ce12a49", size = 180504, upload-time = "2025-09-08T23:22:10.637Z" }, + { url = "https://files.pythonhosted.org/packages/50/bd/b1a6362b80628111e6653c961f987faa55262b4002fcec42308cad1db680/cffi-2.0.0-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:53f77cbe57044e88bbd5ed26ac1d0514d2acf0591dd6bb02a3ae37f76811b80c", size = 208811, upload-time = "2025-09-08T23:22:12.267Z" }, + { url = "https://files.pythonhosted.org/packages/4f/27/6933a8b2562d7bd1fb595074cf99cc81fc3789f6a6c05cdabb46284a3188/cffi-2.0.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3e837e369566884707ddaf85fc1744b47575005c0a229de3327f8f9a20f4efeb", size = 216402, upload-time = "2025-09-08T23:22:13.455Z" }, + { url = "https://files.pythonhosted.org/packages/05/eb/b86f2a2645b62adcfff53b0dd97e8dfafb5c8aa864bd0d9a2c2049a0d551/cffi-2.0.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:5eda85d6d1879e692d546a078b44251cdd08dd1cfb98dfb77b670c97cee49ea0", size = 203217, upload-time = "2025-09-08T23:22:14.596Z" }, + { url = "https://files.pythonhosted.org/packages/9f/e0/6cbe77a53acf5acc7c08cc186c9928864bd7c005f9efd0d126884858a5fe/cffi-2.0.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9332088d75dc3241c702d852d4671613136d90fa6881da7d770a483fd05248b4", size = 203079, upload-time = "2025-09-08T23:22:15.769Z" }, + { url = "https://files.pythonhosted.org/packages/98/29/9b366e70e243eb3d14a5cb488dfd3a0b6b2f1fb001a203f653b93ccfac88/cffi-2.0.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fc7de24befaeae77ba923797c7c87834c73648a05a4bde34b3b7e5588973a453", size = 216475, upload-time = "2025-09-08T23:22:17.427Z" }, + { url = "https://files.pythonhosted.org/packages/21/7a/13b24e70d2f90a322f2900c5d8e1f14fa7e2a6b3332b7309ba7b2ba51a5a/cffi-2.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:cf364028c016c03078a23b503f02058f1814320a56ad535686f90565636a9495", size = 218829, upload-time = "2025-09-08T23:22:19.069Z" }, + { url = "https://files.pythonhosted.org/packages/60/99/c9dc110974c59cc981b1f5b66e1d8af8af764e00f0293266824d9c4254bc/cffi-2.0.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e11e82b744887154b182fd3e7e8512418446501191994dbf9c9fc1f32cc8efd5", size = 211211, upload-time = "2025-09-08T23:22:20.588Z" }, + { url = "https://files.pythonhosted.org/packages/49/72/ff2d12dbf21aca1b32a40ed792ee6b40f6dc3a9cf1644bd7ef6e95e0ac5e/cffi-2.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8ea985900c5c95ce9db1745f7933eeef5d314f0565b27625d9a10ec9881e1bfb", size = 218036, upload-time = "2025-09-08T23:22:22.143Z" }, + { url = "https://files.pythonhosted.org/packages/e2/cc/027d7fb82e58c48ea717149b03bcadcbdc293553edb283af792bd4bcbb3f/cffi-2.0.0-cp310-cp310-win32.whl", hash = "sha256:1f72fb8906754ac8a2cc3f9f5aaa298070652a0ffae577e0ea9bd480dc3c931a", size = 172184, upload-time = "2025-09-08T23:22:23.328Z" }, + { url = "https://files.pythonhosted.org/packages/33/fa/072dd15ae27fbb4e06b437eb6e944e75b068deb09e2a2826039e49ee2045/cffi-2.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:b18a3ed7d5b3bd8d9ef7a8cb226502c6bf8308df1525e1cc676c3680e7176739", size = 182790, upload-time = "2025-09-08T23:22:24.752Z" }, + { url = "https://files.pythonhosted.org/packages/12/4a/3dfd5f7850cbf0d06dc84ba9aa00db766b52ca38d8b86e3a38314d52498c/cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe", size = 184344, upload-time = "2025-09-08T23:22:26.456Z" }, + { url = "https://files.pythonhosted.org/packages/4f/8b/f0e4c441227ba756aafbe78f117485b25bb26b1c059d01f137fa6d14896b/cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c", size = 180560, upload-time = "2025-09-08T23:22:28.197Z" }, + { url = "https://files.pythonhosted.org/packages/b1/b7/1200d354378ef52ec227395d95c2576330fd22a869f7a70e88e1447eb234/cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92", size = 209613, upload-time = "2025-09-08T23:22:29.475Z" }, + { url = "https://files.pythonhosted.org/packages/b8/56/6033f5e86e8cc9bb629f0077ba71679508bdf54a9a5e112a3c0b91870332/cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93", size = 216476, upload-time = "2025-09-08T23:22:31.063Z" }, + { url = "https://files.pythonhosted.org/packages/dc/7f/55fecd70f7ece178db2f26128ec41430d8720f2d12ca97bf8f0a628207d5/cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5", size = 203374, upload-time = "2025-09-08T23:22:32.507Z" }, + { url = "https://files.pythonhosted.org/packages/84/ef/a7b77c8bdc0f77adc3b46888f1ad54be8f3b7821697a7b89126e829e676a/cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664", size = 202597, upload-time = "2025-09-08T23:22:34.132Z" }, + { url = "https://files.pythonhosted.org/packages/d7/91/500d892b2bf36529a75b77958edfcd5ad8e2ce4064ce2ecfeab2125d72d1/cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = 
"sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26", size = 215574, upload-time = "2025-09-08T23:22:35.443Z" }, + { url = "https://files.pythonhosted.org/packages/44/64/58f6255b62b101093d5df22dcb752596066c7e89dd725e0afaed242a61be/cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9", size = 218971, upload-time = "2025-09-08T23:22:36.805Z" }, + { url = "https://files.pythonhosted.org/packages/ab/49/fa72cebe2fd8a55fbe14956f9970fe8eb1ac59e5df042f603ef7c8ba0adc/cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414", size = 211972, upload-time = "2025-09-08T23:22:38.436Z" }, + { url = "https://files.pythonhosted.org/packages/0b/28/dd0967a76aab36731b6ebfe64dec4e981aff7e0608f60c2d46b46982607d/cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743", size = 217078, upload-time = "2025-09-08T23:22:39.776Z" }, + { url = "https://files.pythonhosted.org/packages/2b/c0/015b25184413d7ab0a410775fdb4a50fca20f5589b5dab1dbbfa3baad8ce/cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5", size = 172076, upload-time = "2025-09-08T23:22:40.95Z" }, + { url = "https://files.pythonhosted.org/packages/ae/8f/dc5531155e7070361eb1b7e4c1a9d896d0cb21c49f807a6c03fd63fc877e/cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5", size = 182820, upload-time = "2025-09-08T23:22:42.463Z" }, + { url = "https://files.pythonhosted.org/packages/95/5c/1b493356429f9aecfd56bc171285a4c4ac8697f76e9bbbbb105e537853a1/cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d", size = 177635, upload-time = "2025-09-08T23:22:43.623Z" }, + { url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271, upload-time = "2025-09-08T23:22:44.795Z" }, + { url = "https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048, upload-time = "2025-09-08T23:22:45.938Z" }, + { url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529, upload-time = "2025-09-08T23:22:47.349Z" }, + { url = "https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097, upload-time = "2025-09-08T23:22:48.677Z" }, + { url = "https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983, 
upload-time = "2025-09-08T23:22:50.06Z" }, + { url = "https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037", size = 206519, upload-time = "2025-09-08T23:22:51.364Z" }, + { url = "https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572, upload-time = "2025-09-08T23:22:52.902Z" }, + { url = "https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963, upload-time = "2025-09-08T23:22:54.518Z" }, + { url = "https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 221361, upload-time = "2025-09-08T23:22:55.867Z" }, + { url = "https://files.pythonhosted.org/packages/7b/2b/2b6435f76bfeb6bbf055596976da087377ede68df465419d192acf00c437/cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18", size = 172932, upload-time = "2025-09-08T23:22:57.188Z" }, + { url = "https://files.pythonhosted.org/packages/f8/ed/13bd4418627013bec4ed6e54283b1959cf6db888048c7cf4b4c3b5b36002/cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5", size = 183557, upload-time = "2025-09-08T23:22:58.351Z" }, + { url = "https://files.pythonhosted.org/packages/95/31/9f7f93ad2f8eff1dbc1c3656d7ca5bfd8fb52c9d786b4dcf19b2d02217fa/cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6", size = 177762, upload-time = "2025-09-08T23:22:59.668Z" }, + { url = "https://files.pythonhosted.org/packages/4b/8d/a0a47a0c9e413a658623d014e91e74a50cdd2c423f7ccfd44086ef767f90/cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb", size = 185230, upload-time = "2025-09-08T23:23:00.879Z" }, + { url = "https://files.pythonhosted.org/packages/4a/d2/a6c0296814556c68ee32009d9c2ad4f85f2707cdecfd7727951ec228005d/cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca", size = 181043, upload-time = "2025-09-08T23:23:02.231Z" }, + { url = "https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b", size = 212446, upload-time = "2025-09-08T23:23:03.472Z" }, + { url = "https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b", size = 220101, upload-time = "2025-09-08T23:23:04.792Z" }, + { url = 
"https://files.pythonhosted.org/packages/f2/7f/e6647792fc5850d634695bc0e6ab4111ae88e89981d35ac269956605feba/cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2", size = 207948, upload-time = "2025-09-08T23:23:06.127Z" }, + { url = "https://files.pythonhosted.org/packages/cb/1e/a5a1bd6f1fb30f22573f76533de12a00bf274abcdc55c8edab639078abb6/cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3", size = 206422, upload-time = "2025-09-08T23:23:07.753Z" }, + { url = "https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26", size = 219499, upload-time = "2025-09-08T23:23:09.648Z" }, + { url = "https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c", size = 222928, upload-time = "2025-09-08T23:23:10.928Z" }, + { url = "https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b", size = 221302, upload-time = "2025-09-08T23:23:12.42Z" }, + { url = "https://files.pythonhosted.org/packages/eb/6d/bf9bda840d5f1dfdbf0feca87fbdb64a918a69bca42cfa0ba7b137c48cb8/cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27", size = 172909, upload-time = "2025-09-08T23:23:14.32Z" }, + { url = "https://files.pythonhosted.org/packages/37/18/6519e1ee6f5a1e579e04b9ddb6f1676c17368a7aba48299c3759bbc3c8b3/cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75", size = 183402, upload-time = "2025-09-08T23:23:15.535Z" }, + { url = "https://files.pythonhosted.org/packages/cb/0e/02ceeec9a7d6ee63bb596121c2c8e9b3a9e150936f4fbef6ca1943e6137c/cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91", size = 177780, upload-time = "2025-09-08T23:23:16.761Z" }, + { url = "https://files.pythonhosted.org/packages/92/c4/3ce07396253a83250ee98564f8d7e9789fab8e58858f35d07a9a2c78de9f/cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5", size = 185320, upload-time = "2025-09-08T23:23:18.087Z" }, + { url = "https://files.pythonhosted.org/packages/59/dd/27e9fa567a23931c838c6b02d0764611c62290062a6d4e8ff7863daf9730/cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13", size = 181487, upload-time = "2025-09-08T23:23:19.622Z" }, + { url = "https://files.pythonhosted.org/packages/d6/43/0e822876f87ea8a4ef95442c3d766a06a51fc5298823f884ef87aaad168c/cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b", size = 220049, upload-time = "2025-09-08T23:23:20.853Z" }, + { url = 
"https://files.pythonhosted.org/packages/b4/89/76799151d9c2d2d1ead63c2429da9ea9d7aac304603de0c6e8764e6e8e70/cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c", size = 207793, upload-time = "2025-09-08T23:23:22.08Z" }, + { url = "https://files.pythonhosted.org/packages/bb/dd/3465b14bb9e24ee24cb88c9e3730f6de63111fffe513492bf8c808a3547e/cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef", size = 206300, upload-time = "2025-09-08T23:23:23.314Z" }, + { url = "https://files.pythonhosted.org/packages/47/d9/d83e293854571c877a92da46fdec39158f8d7e68da75bf73581225d28e90/cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775", size = 219244, upload-time = "2025-09-08T23:23:24.541Z" }, + { url = "https://files.pythonhosted.org/packages/2b/0f/1f177e3683aead2bb00f7679a16451d302c436b5cbf2505f0ea8146ef59e/cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205", size = 222828, upload-time = "2025-09-08T23:23:26.143Z" }, + { url = "https://files.pythonhosted.org/packages/c6/0f/cafacebd4b040e3119dcb32fed8bdef8dfe94da653155f9d0b9dc660166e/cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1", size = 220926, upload-time = "2025-09-08T23:23:27.873Z" }, + { url = "https://files.pythonhosted.org/packages/3e/aa/df335faa45b395396fcbc03de2dfcab242cd61a9900e914fe682a59170b1/cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f", size = 175328, upload-time = "2025-09-08T23:23:44.61Z" }, + { url = "https://files.pythonhosted.org/packages/bb/92/882c2d30831744296ce713f0feb4c1cd30f346ef747b530b5318715cc367/cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25", size = 185650, upload-time = "2025-09-08T23:23:45.848Z" }, + { url = "https://files.pythonhosted.org/packages/9f/2c/98ece204b9d35a7366b5b2c6539c350313ca13932143e79dc133ba757104/cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad", size = 180687, upload-time = "2025-09-08T23:23:47.105Z" }, + { url = "https://files.pythonhosted.org/packages/3e/61/c768e4d548bfa607abcda77423448df8c471f25dbe64fb2ef6d555eae006/cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9", size = 188773, upload-time = "2025-09-08T23:23:29.347Z" }, + { url = "https://files.pythonhosted.org/packages/2c/ea/5f76bce7cf6fcd0ab1a1058b5af899bfbef198bea4d5686da88471ea0336/cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d", size = 185013, upload-time = "2025-09-08T23:23:30.63Z" }, + { url = "https://files.pythonhosted.org/packages/be/b4/c56878d0d1755cf9caa54ba71e5d049479c52f9e4afc230f06822162ab2f/cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c", size = 221593, upload-time = "2025-09-08T23:23:31.91Z" }, + { url = 
"https://files.pythonhosted.org/packages/e0/0d/eb704606dfe8033e7128df5e90fee946bbcb64a04fcdaa97321309004000/cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8", size = 209354, upload-time = "2025-09-08T23:23:33.214Z" }, + { url = "https://files.pythonhosted.org/packages/d8/19/3c435d727b368ca475fb8742ab97c9cb13a0de600ce86f62eab7fa3eea60/cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc", size = 208480, upload-time = "2025-09-08T23:23:34.495Z" }, + { url = "https://files.pythonhosted.org/packages/d0/44/681604464ed9541673e486521497406fadcc15b5217c3e326b061696899a/cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592", size = 221584, upload-time = "2025-09-08T23:23:36.096Z" }, + { url = "https://files.pythonhosted.org/packages/25/8e/342a504ff018a2825d395d44d63a767dd8ebc927ebda557fecdaca3ac33a/cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512", size = 224443, upload-time = "2025-09-08T23:23:37.328Z" }, + { url = "https://files.pythonhosted.org/packages/e1/5e/b666bacbbc60fbf415ba9988324a132c9a7a0448a9a8f125074671c0f2c3/cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4", size = 223437, upload-time = "2025-09-08T23:23:38.945Z" }, + { url = "https://files.pythonhosted.org/packages/a0/1d/ec1a60bd1a10daa292d3cd6bb0b359a81607154fb8165f3ec95fe003b85c/cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e", size = 180487, upload-time = "2025-09-08T23:23:40.423Z" }, + { url = "https://files.pythonhosted.org/packages/bf/41/4c1168c74fac325c0c8156f04b6749c8b6a8f405bbf91413ba088359f60d/cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6", size = 191726, upload-time = "2025-09-08T23:23:41.742Z" }, + { url = "https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9", size = 184195, upload-time = "2025-09-08T23:23:43.004Z" }, + { url = "https://files.pythonhosted.org/packages/c0/cc/08ed5a43f2996a16b462f64a7055c6e962803534924b9b2f1371d8c00b7b/cffi-2.0.0-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:fe562eb1a64e67dd297ccc4f5addea2501664954f2692b69a76449ec7913ecbf", size = 184288, upload-time = "2025-09-08T23:23:48.404Z" }, + { url = "https://files.pythonhosted.org/packages/3d/de/38d9726324e127f727b4ecc376bc85e505bfe61ef130eaf3f290c6847dd4/cffi-2.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:de8dad4425a6ca6e4e5e297b27b5c824ecc7581910bf9aee86cb6835e6812aa7", size = 180509, upload-time = "2025-09-08T23:23:49.73Z" }, + { url = "https://files.pythonhosted.org/packages/9b/13/c92e36358fbcc39cf0962e83223c9522154ee8630e1df7c0b3a39a8124e2/cffi-2.0.0-cp39-cp39-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:4647afc2f90d1ddd33441e5b0e85b16b12ddec4fca55f0d9671fef036ecca27c", size = 208813, upload-time = "2025-09-08T23:23:51.263Z" }, + { url = 
"https://files.pythonhosted.org/packages/15/12/a7a79bd0df4c3bff744b2d7e52cc1b68d5e7e427b384252c42366dc1ecbc/cffi-2.0.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3f4d46d8b35698056ec29bca21546e1551a205058ae1a181d871e278b0b28165", size = 216498, upload-time = "2025-09-08T23:23:52.494Z" }, + { url = "https://files.pythonhosted.org/packages/a3/ad/5c51c1c7600bdd7ed9a24a203ec255dccdd0ebf4527f7b922a0bde2fb6ed/cffi-2.0.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:e6e73b9e02893c764e7e8d5bb5ce277f1a009cd5243f8228f75f842bf937c534", size = 203243, upload-time = "2025-09-08T23:23:53.836Z" }, + { url = "https://files.pythonhosted.org/packages/32/f2/81b63e288295928739d715d00952c8c6034cb6c6a516b17d37e0c8be5600/cffi-2.0.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:cb527a79772e5ef98fb1d700678fe031e353e765d1ca2d409c92263c6d43e09f", size = 203158, upload-time = "2025-09-08T23:23:55.169Z" }, + { url = "https://files.pythonhosted.org/packages/1f/74/cc4096ce66f5939042ae094e2e96f53426a979864aa1f96a621ad128be27/cffi-2.0.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:61d028e90346df14fedc3d1e5441df818d095f3b87d286825dfcbd6459b7ef63", size = 216548, upload-time = "2025-09-08T23:23:56.506Z" }, + { url = "https://files.pythonhosted.org/packages/e8/be/f6424d1dc46b1091ffcc8964fa7c0ab0cd36839dd2761b49c90481a6ba1b/cffi-2.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0f6084a0ea23d05d20c3edcda20c3d006f9b6f3fefeac38f59262e10cef47ee2", size = 218897, upload-time = "2025-09-08T23:23:57.825Z" }, + { url = "https://files.pythonhosted.org/packages/f7/e0/dda537c2309817edf60109e39265f24f24aa7f050767e22c98c53fe7f48b/cffi-2.0.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1cd13c99ce269b3ed80b417dcd591415d3372bcac067009b6e0f59c7d4015e65", size = 211249, upload-time = "2025-09-08T23:23:59.139Z" }, + { url = "https://files.pythonhosted.org/packages/2b/e7/7c769804eb75e4c4b35e658dba01de1640a351a9653c3d49ca89d16ccc91/cffi-2.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:89472c9762729b5ae1ad974b777416bfda4ac5642423fa93bd57a09204712322", size = 218041, upload-time = "2025-09-08T23:24:00.496Z" }, + { url = "https://files.pythonhosted.org/packages/aa/d9/6218d78f920dcd7507fc16a766b5ef8f3b913cc7aa938e7fc80b9978d089/cffi-2.0.0-cp39-cp39-win32.whl", hash = "sha256:2081580ebb843f759b9f617314a24ed5738c51d2aee65d31e02f6f7a2b97707a", size = 172138, upload-time = "2025-09-08T23:24:01.7Z" }, + { url = "https://files.pythonhosted.org/packages/54/8f/a1e836f82d8e32a97e6b29cc8f641779181ac7363734f12df27db803ebda/cffi-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:b882b3df248017dba09d6b16defe9b5c407fe32fc7c65a9c69798e6175601be9", size = 182794, upload-time = "2025-09-08T23:24:02.943Z" }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e4/33/89c2ced2b67d1c2a61c19c6751aa8902d46ce3dacb23600a283619f5a12d/charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63", size = 126367, upload-time = "2025-05-02T08:34:42.01Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/95/28/9901804da60055b406e1a1c5ba7aac1276fb77f1dde635aabfc7fd84b8ab/charset_normalizer-3.4.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c48ed483eb946e6c04ccbe02c6b4d1d48e51944b6db70f697e089c193404941", size = 201818, upload-time = 
"2025-05-02T08:31:46.725Z" }, + { url = "https://files.pythonhosted.org/packages/d9/9b/892a8c8af9110935e5adcbb06d9c6fe741b6bb02608c6513983048ba1a18/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2d318c11350e10662026ad0eb71bb51c7812fc8590825304ae0bdd4ac283acd", size = 144649, upload-time = "2025-05-02T08:31:48.889Z" }, + { url = "https://files.pythonhosted.org/packages/7b/a5/4179abd063ff6414223575e008593861d62abfc22455b5d1a44995b7c101/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9cbfacf36cb0ec2897ce0ebc5d08ca44213af24265bd56eca54bee7923c48fd6", size = 155045, upload-time = "2025-05-02T08:31:50.757Z" }, + { url = "https://files.pythonhosted.org/packages/3b/95/bc08c7dfeddd26b4be8c8287b9bb055716f31077c8b0ea1cd09553794665/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18dd2e350387c87dabe711b86f83c9c78af772c748904d372ade190b5c7c9d4d", size = 147356, upload-time = "2025-05-02T08:31:52.634Z" }, + { url = "https://files.pythonhosted.org/packages/a8/2d/7a5b635aa65284bf3eab7653e8b4151ab420ecbae918d3e359d1947b4d61/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8075c35cd58273fee266c58c0c9b670947c19df5fb98e7b66710e04ad4e9ff86", size = 149471, upload-time = "2025-05-02T08:31:56.207Z" }, + { url = "https://files.pythonhosted.org/packages/ae/38/51fc6ac74251fd331a8cfdb7ec57beba8c23fd5493f1050f71c87ef77ed0/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5bf4545e3b962767e5c06fe1738f951f77d27967cb2caa64c28be7c4563e162c", size = 151317, upload-time = "2025-05-02T08:31:57.613Z" }, + { url = "https://files.pythonhosted.org/packages/b7/17/edee1e32215ee6e9e46c3e482645b46575a44a2d72c7dfd49e49f60ce6bf/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7a6ab32f7210554a96cd9e33abe3ddd86732beeafc7a28e9955cdf22ffadbab0", size = 146368, upload-time = "2025-05-02T08:31:59.468Z" }, + { url = "https://files.pythonhosted.org/packages/26/2c/ea3e66f2b5f21fd00b2825c94cafb8c326ea6240cd80a91eb09e4a285830/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b33de11b92e9f75a2b545d6e9b6f37e398d86c3e9e9653c4864eb7e89c5773ef", size = 154491, upload-time = "2025-05-02T08:32:01.219Z" }, + { url = "https://files.pythonhosted.org/packages/52/47/7be7fa972422ad062e909fd62460d45c3ef4c141805b7078dbab15904ff7/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8755483f3c00d6c9a77f490c17e6ab0c8729e39e6390328e42521ef175380ae6", size = 157695, upload-time = "2025-05-02T08:32:03.045Z" }, + { url = "https://files.pythonhosted.org/packages/2f/42/9f02c194da282b2b340f28e5fb60762de1151387a36842a92b533685c61e/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:68a328e5f55ec37c57f19ebb1fdc56a248db2e3e9ad769919a58672958e8f366", size = 154849, upload-time = "2025-05-02T08:32:04.651Z" }, + { url = "https://files.pythonhosted.org/packages/67/44/89cacd6628f31fb0b63201a618049be4be2a7435a31b55b5eb1c3674547a/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:21b2899062867b0e1fde9b724f8aecb1af14f2778d69aacd1a5a1853a597a5db", size = 150091, upload-time = "2025-05-02T08:32:06.719Z" }, + { url = 
"https://files.pythonhosted.org/packages/1f/79/4b8da9f712bc079c0f16b6d67b099b0b8d808c2292c937f267d816ec5ecc/charset_normalizer-3.4.2-cp310-cp310-win32.whl", hash = "sha256:e8082b26888e2f8b36a042a58307d5b917ef2b1cacab921ad3323ef91901c71a", size = 98445, upload-time = "2025-05-02T08:32:08.66Z" }, + { url = "https://files.pythonhosted.org/packages/7d/d7/96970afb4fb66497a40761cdf7bd4f6fca0fc7bafde3a84f836c1f57a926/charset_normalizer-3.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:f69a27e45c43520f5487f27627059b64aaf160415589230992cec34c5e18a509", size = 105782, upload-time = "2025-05-02T08:32:10.46Z" }, + { url = "https://files.pythonhosted.org/packages/05/85/4c40d00dcc6284a1c1ad5de5e0996b06f39d8232f1031cd23c2f5c07ee86/charset_normalizer-3.4.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:be1e352acbe3c78727a16a455126d9ff83ea2dfdcbc83148d2982305a04714c2", size = 198794, upload-time = "2025-05-02T08:32:11.945Z" }, + { url = "https://files.pythonhosted.org/packages/41/d9/7a6c0b9db952598e97e93cbdfcb91bacd89b9b88c7c983250a77c008703c/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa88ca0b1932e93f2d961bf3addbb2db902198dca337d88c89e1559e066e7645", size = 142846, upload-time = "2025-05-02T08:32:13.946Z" }, + { url = "https://files.pythonhosted.org/packages/66/82/a37989cda2ace7e37f36c1a8ed16c58cf48965a79c2142713244bf945c89/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d524ba3f1581b35c03cb42beebab4a13e6cdad7b36246bd22541fa585a56cccd", size = 153350, upload-time = "2025-05-02T08:32:15.873Z" }, + { url = "https://files.pythonhosted.org/packages/df/68/a576b31b694d07b53807269d05ec3f6f1093e9545e8607121995ba7a8313/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28a1005facc94196e1fb3e82a3d442a9d9110b8434fc1ded7a24a2983c9888d8", size = 145657, upload-time = "2025-05-02T08:32:17.283Z" }, + { url = "https://files.pythonhosted.org/packages/92/9b/ad67f03d74554bed3aefd56fe836e1623a50780f7c998d00ca128924a499/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f", size = 147260, upload-time = "2025-05-02T08:32:18.807Z" }, + { url = "https://files.pythonhosted.org/packages/a6/e6/8aebae25e328160b20e31a7e9929b1578bbdc7f42e66f46595a432f8539e/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f5d9ed7f254402c9e7d35d2f5972c9bbea9040e99cd2861bd77dc68263277c7", size = 149164, upload-time = "2025-05-02T08:32:20.333Z" }, + { url = "https://files.pythonhosted.org/packages/8b/f2/b3c2f07dbcc248805f10e67a0262c93308cfa149a4cd3d1fe01f593e5fd2/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:efd387a49825780ff861998cd959767800d54f8308936b21025326de4b5a42b9", size = 144571, upload-time = "2025-05-02T08:32:21.86Z" }, + { url = "https://files.pythonhosted.org/packages/60/5b/c3f3a94bc345bc211622ea59b4bed9ae63c00920e2e8f11824aa5708e8b7/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f0aa37f3c979cf2546b73e8222bbfa3dc07a641585340179d768068e3455e544", size = 151952, upload-time = "2025-05-02T08:32:23.434Z" }, + { url = "https://files.pythonhosted.org/packages/e2/4d/ff460c8b474122334c2fa394a3f99a04cf11c646da895f81402ae54f5c42/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = 
"sha256:e70e990b2137b29dc5564715de1e12701815dacc1d056308e2b17e9095372a82", size = 155959, upload-time = "2025-05-02T08:32:24.993Z" }, + { url = "https://files.pythonhosted.org/packages/a2/2b/b964c6a2fda88611a1fe3d4c400d39c66a42d6c169c924818c848f922415/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0c8c57f84ccfc871a48a47321cfa49ae1df56cd1d965a09abe84066f6853b9c0", size = 153030, upload-time = "2025-05-02T08:32:26.435Z" }, + { url = "https://files.pythonhosted.org/packages/59/2e/d3b9811db26a5ebf444bc0fa4f4be5aa6d76fc6e1c0fd537b16c14e849b6/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6b66f92b17849b85cad91259efc341dce9c1af48e2173bf38a85c6329f1033e5", size = 148015, upload-time = "2025-05-02T08:32:28.376Z" }, + { url = "https://files.pythonhosted.org/packages/90/07/c5fd7c11eafd561bb51220d600a788f1c8d77c5eef37ee49454cc5c35575/charset_normalizer-3.4.2-cp311-cp311-win32.whl", hash = "sha256:daac4765328a919a805fa5e2720f3e94767abd632ae410a9062dff5412bae65a", size = 98106, upload-time = "2025-05-02T08:32:30.281Z" }, + { url = "https://files.pythonhosted.org/packages/a8/05/5e33dbef7e2f773d672b6d79f10ec633d4a71cd96db6673625838a4fd532/charset_normalizer-3.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53efc7c7cee4c1e70661e2e112ca46a575f90ed9ae3fef200f2a25e954f4b28", size = 105402, upload-time = "2025-05-02T08:32:32.191Z" }, + { url = "https://files.pythonhosted.org/packages/d7/a4/37f4d6035c89cac7930395a35cc0f1b872e652eaafb76a6075943754f095/charset_normalizer-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7", size = 199936, upload-time = "2025-05-02T08:32:33.712Z" }, + { url = "https://files.pythonhosted.org/packages/ee/8a/1a5e33b73e0d9287274f899d967907cd0bf9c343e651755d9307e0dbf2b3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3", size = 143790, upload-time = "2025-05-02T08:32:35.768Z" }, + { url = "https://files.pythonhosted.org/packages/66/52/59521f1d8e6ab1482164fa21409c5ef44da3e9f653c13ba71becdd98dec3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a", size = 153924, upload-time = "2025-05-02T08:32:37.284Z" }, + { url = "https://files.pythonhosted.org/packages/86/2d/fb55fdf41964ec782febbf33cb64be480a6b8f16ded2dbe8db27a405c09f/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214", size = 146626, upload-time = "2025-05-02T08:32:38.803Z" }, + { url = "https://files.pythonhosted.org/packages/8c/73/6ede2ec59bce19b3edf4209d70004253ec5f4e319f9a2e3f2f15601ed5f7/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a", size = 148567, upload-time = "2025-05-02T08:32:40.251Z" }, + { url = "https://files.pythonhosted.org/packages/09/14/957d03c6dc343c04904530b6bef4e5efae5ec7d7990a7cbb868e4595ee30/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd", size = 150957, upload-time = "2025-05-02T08:32:41.705Z" }, + { url = 
"https://files.pythonhosted.org/packages/0d/c8/8174d0e5c10ccebdcb1b53cc959591c4c722a3ad92461a273e86b9f5a302/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981", size = 145408, upload-time = "2025-05-02T08:32:43.709Z" }, + { url = "https://files.pythonhosted.org/packages/58/aa/8904b84bc8084ac19dc52feb4f5952c6df03ffb460a887b42615ee1382e8/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c", size = 153399, upload-time = "2025-05-02T08:32:46.197Z" }, + { url = "https://files.pythonhosted.org/packages/c2/26/89ee1f0e264d201cb65cf054aca6038c03b1a0c6b4ae998070392a3ce605/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b", size = 156815, upload-time = "2025-05-02T08:32:48.105Z" }, + { url = "https://files.pythonhosted.org/packages/fd/07/68e95b4b345bad3dbbd3a8681737b4338ff2c9df29856a6d6d23ac4c73cb/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d", size = 154537, upload-time = "2025-05-02T08:32:49.719Z" }, + { url = "https://files.pythonhosted.org/packages/77/1a/5eefc0ce04affb98af07bc05f3bac9094513c0e23b0562d64af46a06aae4/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f", size = 149565, upload-time = "2025-05-02T08:32:51.404Z" }, + { url = "https://files.pythonhosted.org/packages/37/a0/2410e5e6032a174c95e0806b1a6585eb21e12f445ebe239fac441995226a/charset_normalizer-3.4.2-cp312-cp312-win32.whl", hash = "sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c", size = 98357, upload-time = "2025-05-02T08:32:53.079Z" }, + { url = "https://files.pythonhosted.org/packages/6c/4f/c02d5c493967af3eda9c771ad4d2bbc8df6f99ddbeb37ceea6e8716a32bc/charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e", size = 105776, upload-time = "2025-05-02T08:32:54.573Z" }, + { url = "https://files.pythonhosted.org/packages/ea/12/a93df3366ed32db1d907d7593a94f1fe6293903e3e92967bebd6950ed12c/charset_normalizer-3.4.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0", size = 199622, upload-time = "2025-05-02T08:32:56.363Z" }, + { url = "https://files.pythonhosted.org/packages/04/93/bf204e6f344c39d9937d3c13c8cd5bbfc266472e51fc8c07cb7f64fcd2de/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf", size = 143435, upload-time = "2025-05-02T08:32:58.551Z" }, + { url = "https://files.pythonhosted.org/packages/22/2a/ea8a2095b0bafa6c5b5a55ffdc2f924455233ee7b91c69b7edfcc9e02284/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e", size = 153653, upload-time = "2025-05-02T08:33:00.342Z" }, + { url = "https://files.pythonhosted.org/packages/b6/57/1b090ff183d13cef485dfbe272e2fe57622a76694061353c59da52c9a659/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1", size = 146231, upload-time 
= "2025-05-02T08:33:02.081Z" }, + { url = "https://files.pythonhosted.org/packages/e2/28/ffc026b26f441fc67bd21ab7f03b313ab3fe46714a14b516f931abe1a2d8/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c", size = 148243, upload-time = "2025-05-02T08:33:04.063Z" }, + { url = "https://files.pythonhosted.org/packages/c0/0f/9abe9bd191629c33e69e47c6ef45ef99773320e9ad8e9cb08b8ab4a8d4cb/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691", size = 150442, upload-time = "2025-05-02T08:33:06.418Z" }, + { url = "https://files.pythonhosted.org/packages/67/7c/a123bbcedca91d5916c056407f89a7f5e8fdfce12ba825d7d6b9954a1a3c/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0", size = 145147, upload-time = "2025-05-02T08:33:08.183Z" }, + { url = "https://files.pythonhosted.org/packages/ec/fe/1ac556fa4899d967b83e9893788e86b6af4d83e4726511eaaad035e36595/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b", size = 153057, upload-time = "2025-05-02T08:33:09.986Z" }, + { url = "https://files.pythonhosted.org/packages/2b/ff/acfc0b0a70b19e3e54febdd5301a98b72fa07635e56f24f60502e954c461/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff", size = 156454, upload-time = "2025-05-02T08:33:11.814Z" }, + { url = "https://files.pythonhosted.org/packages/92/08/95b458ce9c740d0645feb0e96cea1f5ec946ea9c580a94adfe0b617f3573/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b", size = 154174, upload-time = "2025-05-02T08:33:13.707Z" }, + { url = "https://files.pythonhosted.org/packages/78/be/8392efc43487ac051eee6c36d5fbd63032d78f7728cb37aebcc98191f1ff/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148", size = 149166, upload-time = "2025-05-02T08:33:15.458Z" }, + { url = "https://files.pythonhosted.org/packages/44/96/392abd49b094d30b91d9fbda6a69519e95802250b777841cf3bda8fe136c/charset_normalizer-3.4.2-cp313-cp313-win32.whl", hash = "sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7", size = 98064, upload-time = "2025-05-02T08:33:17.06Z" }, + { url = "https://files.pythonhosted.org/packages/e9/b0/0200da600134e001d91851ddc797809e2fe0ea72de90e09bec5a2fbdaccb/charset_normalizer-3.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980", size = 105641, upload-time = "2025-05-02T08:33:18.753Z" }, + { url = "https://files.pythonhosted.org/packages/28/f8/dfb01ff6cc9af38552c69c9027501ff5a5117c4cc18dcd27cb5259fa1888/charset_normalizer-3.4.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:005fa3432484527f9732ebd315da8da8001593e2cf46a3d817669f062c3d9ed4", size = 201671, upload-time = "2025-05-02T08:34:12.696Z" }, + { url = "https://files.pythonhosted.org/packages/32/fb/74e26ee556a9dbfe3bd264289b67be1e6d616329403036f6507bb9f3f29c/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:e92fca20c46e9f5e1bb485887d074918b13543b1c2a1185e69bb8d17ab6236a7", size = 144744, upload-time = "2025-05-02T08:34:14.665Z" }, + { url = "https://files.pythonhosted.org/packages/ad/06/8499ee5aa7addc6f6d72e068691826ff093329fe59891e83b092ae4c851c/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50bf98d5e563b83cc29471fa114366e6806bc06bc7a25fd59641e41445327836", size = 154993, upload-time = "2025-05-02T08:34:17.134Z" }, + { url = "https://files.pythonhosted.org/packages/f1/a2/5e4c187680728219254ef107a6949c60ee0e9a916a5dadb148c7ae82459c/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:721c76e84fe669be19c5791da68232ca2e05ba5185575086e384352e2c309597", size = 147382, upload-time = "2025-05-02T08:34:19.081Z" }, + { url = "https://files.pythonhosted.org/packages/4c/fe/56aca740dda674f0cc1ba1418c4d84534be51f639b5f98f538b332dc9a95/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d8fd25b7f4675d0c47cf95b594d4e7b158aca33b76aa63d07186e13c0e0ab7", size = 149536, upload-time = "2025-05-02T08:34:21.073Z" }, + { url = "https://files.pythonhosted.org/packages/53/13/db2e7779f892386b589173dd689c1b1e304621c5792046edd8a978cbf9e0/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3daeac64d5b371dea99714f08ffc2c208522ec6b06fbc7866a450dd446f5c0f", size = 151349, upload-time = "2025-05-02T08:34:23.193Z" }, + { url = "https://files.pythonhosted.org/packages/69/35/e52ab9a276186f729bce7a0638585d2982f50402046e4b0faa5d2c3ef2da/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dccab8d5fa1ef9bfba0590ecf4d46df048d18ffe3eec01eeb73a42e0d9e7a8ba", size = 146365, upload-time = "2025-05-02T08:34:25.187Z" }, + { url = "https://files.pythonhosted.org/packages/a6/d8/af7333f732fc2e7635867d56cb7c349c28c7094910c72267586947561b4b/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:aaf27faa992bfee0264dc1f03f4c75e9fcdda66a519db6b957a3f826e285cf12", size = 154499, upload-time = "2025-05-02T08:34:27.359Z" }, + { url = "https://files.pythonhosted.org/packages/7a/3d/a5b2e48acef264d71e036ff30bcc49e51bde80219bb628ba3e00cf59baac/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:eb30abc20df9ab0814b5a2524f23d75dcf83cde762c161917a2b4b7b55b1e518", size = 157735, upload-time = "2025-05-02T08:34:29.798Z" }, + { url = "https://files.pythonhosted.org/packages/85/d8/23e2c112532a29f3eef374375a8684a4f3b8e784f62b01da931186f43494/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:c72fbbe68c6f32f251bdc08b8611c7b3060612236e960ef848e0a517ddbe76c5", size = 154786, upload-time = "2025-05-02T08:34:31.858Z" }, + { url = "https://files.pythonhosted.org/packages/c7/57/93e0169f08ecc20fe82d12254a200dfaceddc1c12a4077bf454ecc597e33/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:982bb1e8b4ffda883b3d0a521e23abcd6fd17418f6d2c4118d257a10199c0ce3", size = 150203, upload-time = "2025-05-02T08:34:33.88Z" }, + { url = "https://files.pythonhosted.org/packages/2c/9d/9bf2b005138e7e060d7ebdec7503d0ef3240141587651f4b445bdf7286c2/charset_normalizer-3.4.2-cp39-cp39-win32.whl", hash = "sha256:43e0933a0eff183ee85833f341ec567c0980dae57c464d8a508e1b2ceb336471", size = 98436, upload-time = "2025-05-02T08:34:35.907Z" }, + { url = 
"https://files.pythonhosted.org/packages/6d/24/5849d46cf4311bbf21b424c443b09b459f5b436b1558c04e45dbb7cc478b/charset_normalizer-3.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:d11b54acf878eef558599658b0ffca78138c8c3655cf4f3a4a673c437e67732e", size = 105772, upload-time = "2025-05-02T08:34:37.935Z" }, + { url = "https://files.pythonhosted.org/packages/20/94/c5790835a017658cbfabd07f3bfb549140c3ac458cfc196323996b10095a/charset_normalizer-3.4.2-py3-none-any.whl", hash = "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0", size = 52626, upload-time = "2025-05-02T08:34:40.053Z" }, +] + +[[package]] +name = "click" +version = "8.1.8" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.10'", +] +dependencies = [ + { name = "colorama", marker = "python_full_version < '3.10' and sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593, upload-time = "2024-12-21T18:38:44.339Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/d4/7ebdbd03970677812aac39c869717059dbb71a4cfc033ca6e5221787892c/click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2", size = 98188, upload-time = "2024-12-21T18:38:41.666Z" }, +] + +[[package]] +name = "click" +version = "8.2.1" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version == '3.14.*'", + "python_full_version >= '3.15' or (python_full_version >= '3.11' and python_full_version < '3.14')", + "python_full_version == '3.10.*'", +] +dependencies = [ + { name = "colorama", marker = "python_full_version >= '3.10' and sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/60/6c/8ca2efa64cf75a977a0d7fac081354553ebe483345c734fb6b6515d96bbc/click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202", size = 286342, upload-time = "2025-05-20T23:19:49.832Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/85/32/10bb5764d90a8eee674e9dc6f4db6a0ab47c8c4d0d83c27f7c39ac415a4d/click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b", size = 102215, upload-time = "2025-05-20T23:19:47.796Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "coverage" +version = "7.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/52/d3/3ec80acdd57a0d6a1111b978ade388824f37126446fd6750d38bfaca949c/coverage-7.5.0.tar.gz", hash = "sha256:cf62d17310f34084c59c01e027259076479128d11e4661bb6c9acb38c5e19bb8", size = 798314, upload-time = 
"2024-04-23T17:42:35.508Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/31/db/08d54dbc12fdfe5857b06105fd1235bdebb7da7c11cd1a0fae936556162a/coverage-7.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:432949a32c3e3f820af808db1833d6d1631664d53dd3ce487aa25d574e18ad1c", size = 210025, upload-time = "2024-04-23T17:40:22.328Z" }, + { url = "https://files.pythonhosted.org/packages/a8/ff/02c4bcff1025b4a788aa3933e1cd1474d79de43e0d859273b3319ef43cd3/coverage-7.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2bd7065249703cbeb6d4ce679c734bef0ee69baa7bff9724361ada04a15b7e3b", size = 210499, upload-time = "2024-04-23T17:40:25.747Z" }, + { url = "https://files.pythonhosted.org/packages/ab/b1/7820a8ef62adeebd37612af9d2369f4467a3bc2641dea1243450def5489e/coverage-7.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbfe6389c5522b99768a93d89aca52ef92310a96b99782973b9d11e80511f932", size = 238399, upload-time = "2024-04-23T17:40:27.591Z" }, + { url = "https://files.pythonhosted.org/packages/2c/0e/23a388f3ce16c5ea01a454fef6a9039115abd40b748027d4fef18b3628a7/coverage-7.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:39793731182c4be939b4be0cdecde074b833f6171313cf53481f869937129ed3", size = 236676, upload-time = "2024-04-23T17:40:30.455Z" }, + { url = "https://files.pythonhosted.org/packages/f8/81/e871b0d58ca5d6cc27d00b2f668ce09c4643ef00512341f3a592a81fb6cd/coverage-7.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85a5dbe1ba1bf38d6c63b6d2c42132d45cbee6d9f0c51b52c59aa4afba057517", size = 237467, upload-time = "2024-04-23T17:40:32.704Z" }, + { url = "https://files.pythonhosted.org/packages/95/cb/42a6d34d5840635394f1e172aaa0e7cbd9346155e5004a8ee75d8e434c6b/coverage-7.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:357754dcdfd811462a725e7501a9b4556388e8ecf66e79df6f4b988fa3d0b39a", size = 243539, upload-time = "2024-04-23T17:40:35.068Z" }, + { url = "https://files.pythonhosted.org/packages/6a/6a/18b3819919fdfd3e2062a75219b363f895f24ae5b80e72ffe5dfb1a7e9c8/coverage-7.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a81eb64feded34f40c8986869a2f764f0fe2db58c0530d3a4afbcde50f314880", size = 241725, upload-time = "2024-04-23T17:40:37.251Z" }, + { url = "https://files.pythonhosted.org/packages/b5/3d/a0650978e8b8f78d269358421b7401acaf7cb89e957b2e1be5205ea5940e/coverage-7.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:51431d0abbed3a868e967f8257c5faf283d41ec882f58413cf295a389bb22e58", size = 242913, upload-time = "2024-04-23T17:40:39.992Z" }, + { url = "https://files.pythonhosted.org/packages/8a/fe/95a74158fa0eda56d39783e918edc6fbb3dd3336be390557fc0a2815ecd4/coverage-7.5.0-cp310-cp310-win32.whl", hash = "sha256:f609ebcb0242d84b7adeee2b06c11a2ddaec5464d21888b2c8255f5fd6a98ae4", size = 212381, upload-time = "2024-04-23T17:40:42.632Z" }, + { url = "https://files.pythonhosted.org/packages/4c/26/b276e0c70cba5059becce2594a268a2731d5b4f2386e9a6afdf37ffa3d44/coverage-7.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:6782cd6216fab5a83216cc39f13ebe30adfac2fa72688c5a4d8d180cd52e8f6a", size = 213225, upload-time = "2024-04-23T17:40:45.175Z" }, + { url = "https://files.pythonhosted.org/packages/71/cf/964bb667ea37d64b25f04d4cfaf6232cdb7a6472e1f4a4faf0459ddcec40/coverage-7.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e768d870801f68c74c2b669fc909839660180c366501d4cc4b87efd6b0eee375", size = 210130, 
upload-time = "2024-04-23T17:40:47.325Z" }, + { url = "https://files.pythonhosted.org/packages/aa/56/31edd4baa132fe2b991437e0acf3e36c50418370044a89b65518e5581f4c/coverage-7.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:84921b10aeb2dd453247fd10de22907984eaf80901b578a5cf0bb1e279a587cb", size = 210617, upload-time = "2024-04-23T17:40:49.82Z" }, + { url = "https://files.pythonhosted.org/packages/26/6d/4cd14bd0221180c307fae4f8ef00dbd86a13507c25081858c620aa6fafd8/coverage-7.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:710c62b6e35a9a766b99b15cdc56d5aeda0914edae8bb467e9c355f75d14ee95", size = 242048, upload-time = "2024-04-23T17:40:52.779Z" }, + { url = "https://files.pythonhosted.org/packages/84/60/7eb84255bd9947b140e0382721b0a1b25fd670b4f0f176f11f90b5632d02/coverage-7.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c379cdd3efc0658e652a14112d51a7668f6bfca7445c5a10dee7eabecabba19d", size = 239619, upload-time = "2024-04-23T17:40:54.847Z" }, + { url = "https://files.pythonhosted.org/packages/76/6b/e8f4696194fdf3c19422f2a80ac10e03a9322f93e6c9ef57a89e03a8c8f7/coverage-7.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fea9d3ca80bcf17edb2c08a4704259dadac196fe5e9274067e7a20511fad1743", size = 241321, upload-time = "2024-04-23T17:40:57.092Z" }, + { url = "https://files.pythonhosted.org/packages/3f/1c/6a6990fd2e6890807775852882b1ed0a8e50519a525252490b0c219aa8a5/coverage-7.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:41327143c5b1d715f5f98a397608f90ab9ebba606ae4e6f3389c2145410c52b1", size = 250419, upload-time = "2024-04-23T17:40:59.051Z" }, + { url = "https://files.pythonhosted.org/packages/1a/be/b6422a1422381704dd015cc23e503acd1a44a6bdc4e59c75f8c6a2b24151/coverage-7.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:565b2e82d0968c977e0b0f7cbf25fd06d78d4856289abc79694c8edcce6eb2de", size = 248794, upload-time = "2024-04-23T17:41:01.803Z" }, + { url = "https://files.pythonhosted.org/packages/9b/93/e8231000754d4a31fe9a6c550f6a436eacd2e50763ba2b418f10b2308e45/coverage-7.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cf3539007202ebfe03923128fedfdd245db5860a36810136ad95a564a2fdffff", size = 249873, upload-time = "2024-04-23T17:41:04.719Z" }, + { url = "https://files.pythonhosted.org/packages/d3/6f/eb5aae80bf9d01d0f293121d4caa660ac968da2cb967f82547a7b5e8d65b/coverage-7.5.0-cp311-cp311-win32.whl", hash = "sha256:bf0b4b8d9caa8d64df838e0f8dcf68fb570c5733b726d1494b87f3da85db3a2d", size = 212380, upload-time = "2024-04-23T17:41:06.879Z" }, + { url = "https://files.pythonhosted.org/packages/30/73/b70ab57f11b62f5ca9a83f43cae752fbbb4417bea651875235c32eb2fc2e/coverage-7.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:9c6384cc90e37cfb60435bbbe0488444e54b98700f727f16f64d8bfda0b84656", size = 213316, upload-time = "2024-04-23T17:41:09.233Z" }, + { url = "https://files.pythonhosted.org/packages/36/db/f4e17ffb5ac2d125c72ee3b235c2e04f85a4296a6a9e17730e218af113d8/coverage-7.5.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fed7a72d54bd52f4aeb6c6e951f363903bd7d70bc1cad64dd1f087980d309ab9", size = 210340, upload-time = "2024-04-23T17:41:11.811Z" }, + { url = "https://files.pythonhosted.org/packages/c3/bc/d7e832280f269be9e8d46cff5c4031b4840f1844674dc53ad93c5a9c1da6/coverage-7.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cbe6581fcff7c8e262eb574244f81f5faaea539e712a058e6707a9d272fe5b64", size = 210612, 
upload-time = "2024-04-23T17:41:14.256Z" }, + { url = "https://files.pythonhosted.org/packages/54/84/543e2cd6c1de30c7522a0afcb040677957bac756dd8677bade8bdd9274ba/coverage-7.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad97ec0da94b378e593ef532b980c15e377df9b9608c7c6da3506953182398af", size = 242926, upload-time = "2024-04-23T17:41:16.284Z" }, + { url = "https://files.pythonhosted.org/packages/ad/06/570533f747141b4fd727a193317e16c6e677ed7945e23a195b8f64e685a2/coverage-7.5.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd4bacd62aa2f1a1627352fe68885d6ee694bdaebb16038b6e680f2924a9b2cc", size = 240294, upload-time = "2024-04-23T17:41:19.099Z" }, + { url = "https://files.pythonhosted.org/packages/fa/d9/ec4ba0913195d240d026670d41b91f3e5b9a8a143a385f93a09e97c90f5c/coverage-7.5.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:adf032b6c105881f9d77fa17d9eebe0ad1f9bfb2ad25777811f97c5362aa07f2", size = 242232, upload-time = "2024-04-23T17:41:21.05Z" }, + { url = "https://files.pythonhosted.org/packages/d9/3f/1a613c32aa1980d20d6ca2f54faf800df04aafad6016d7132b3276d8715d/coverage-7.5.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4ba01d9ba112b55bfa4b24808ec431197bb34f09f66f7cb4fd0258ff9d3711b1", size = 249171, upload-time = "2024-04-23T17:41:23.723Z" }, + { url = "https://files.pythonhosted.org/packages/b9/3b/e16b12693572fd69148453abc6ddcd20cbeae6f0a040b5ed6af2f75b646f/coverage-7.5.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f0bfe42523893c188e9616d853c47685e1c575fe25f737adf473d0405dcfa7eb", size = 247073, upload-time = "2024-04-23T17:41:25.719Z" }, + { url = "https://files.pythonhosted.org/packages/e7/3e/04a05d40bb09f90a312296a32fb2c5ade2dfcf803edf777ad18b97547503/coverage-7.5.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a9a7ef30a1b02547c1b23fa9a5564f03c9982fc71eb2ecb7f98c96d7a0db5cf2", size = 248812, upload-time = "2024-04-23T17:41:27.951Z" }, + { url = "https://files.pythonhosted.org/packages/ba/f7/3a8b7b0affe548227f3d45e248c0f22c5b55bff0ee062b49afc165b3ff25/coverage-7.5.0-cp312-cp312-win32.whl", hash = "sha256:3c2b77f295edb9fcdb6a250f83e6481c679335ca7e6e4a955e4290350f2d22a4", size = 212634, upload-time = "2024-04-23T17:41:30.114Z" }, + { url = "https://files.pythonhosted.org/packages/7c/31/5f5286d2a5e21e1fe5670629bb24c79bf46383a092e74e00077e7a178e5c/coverage-7.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:427e1e627b0963ac02d7c8730ca6d935df10280d230508c0ba059505e9233475", size = 213460, upload-time = "2024-04-23T17:41:32.683Z" }, + { url = "https://files.pythonhosted.org/packages/62/18/5573216d5b8db7d9f29189350dcd81830a03a624966c35f8201ae10df09c/coverage-7.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d0194d654e360b3e6cc9b774e83235bae6b9b2cac3be09040880bb0e8a88f4a1", size = 210014, upload-time = "2024-04-23T17:41:56.535Z" }, + { url = "https://files.pythonhosted.org/packages/7c/0e/e98d6c6d569d65ff3195f095e6b006b3d7780fd6182322a25e7dfe0d53d3/coverage-7.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:33c020d3322662e74bc507fb11488773a96894aa82a622c35a5a28673c0c26f5", size = 210494, upload-time = "2024-04-23T17:41:58.584Z" }, + { url = "https://files.pythonhosted.org/packages/d3/63/98e5a6b7ed1bfca874729ee309cc49a6d6658ab9e479a2b6d223ccc96e03/coverage-7.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbdf2cae14a06827bec50bd58e49249452d211d9caddd8bd80e35b53cb04631", 
size = 237996, upload-time = "2024-04-23T17:42:01.514Z" }, + { url = "https://files.pythonhosted.org/packages/76/e4/d3c67a0a092127b8a3dffa2f75334a8cdb2cefc99e3d75a7f42cf1ff98a9/coverage-7.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3235d7c781232e525b0761730e052388a01548bd7f67d0067a253887c6e8df46", size = 236287, upload-time = "2024-04-23T17:42:03.838Z" }, + { url = "https://files.pythonhosted.org/packages/12/7f/9b787ffc31bc39aa9e98c7005b698e7c6539bd222043e4a9c83b83c782a2/coverage-7.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2de4e546f0ec4b2787d625e0b16b78e99c3e21bc1722b4977c0dddf11ca84e", size = 237070, upload-time = "2024-04-23T17:42:06.993Z" }, + { url = "https://files.pythonhosted.org/packages/31/ee/9998a0d855cad5f8e04062f7428b83c34aa643e5df468409593a480d5585/coverage-7.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4d0e206259b73af35c4ec1319fd04003776e11e859936658cb6ceffdeba0f5be", size = 243115, upload-time = "2024-04-23T17:42:09.281Z" }, + { url = "https://files.pythonhosted.org/packages/16/94/1e348cd4445404c588ec8199adde0b45727b1d7989d8fb097d39c93e3da5/coverage-7.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2055c4fb9a6ff624253d432aa471a37202cd8f458c033d6d989be4499aed037b", size = 241315, upload-time = "2024-04-23T17:42:11.836Z" }, + { url = "https://files.pythonhosted.org/packages/28/17/6fe1695d2a706e586b87a407598f4ed82dd218b2b43cdc790f695f259849/coverage-7.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:075299460948cd12722a970c7eae43d25d37989da682997687b34ae6b87c0ef0", size = 242467, upload-time = "2024-04-23T17:42:14.019Z" }, + { url = "https://files.pythonhosted.org/packages/81/a2/1e550272c8b1f89b980504230b1a929de83d8f3d5ecb268477b32e5996a6/coverage-7.5.0-cp39-cp39-win32.whl", hash = "sha256:280132aada3bc2f0fac939a5771db4fbb84f245cb35b94fae4994d4c1f80dae7", size = 212394, upload-time = "2024-04-23T17:42:17.655Z" }, + { url = "https://files.pythonhosted.org/packages/c9/48/7d3c31064c5adcc743fe5370cf7e198cee06cc0e2d37b5cbe930691a3f54/coverage-7.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:c58536f6892559e030e6924896a44098bc1290663ea12532c78cef71d0df8493", size = 213246, upload-time = "2024-04-23T17:42:19.777Z" }, + { url = "https://files.pythonhosted.org/packages/34/81/f00ce7ef95479085feb01fa9e352b2b5b2b9d24767acf2266d6267a6dba9/coverage-7.5.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:2b57780b51084d5223eee7b59f0d4911c31c16ee5aa12737c7a02455829ff067", size = 202381, upload-time = "2024-04-23T17:42:22.127Z" }, +] + +[package.optional-dependencies] +toml = [ + { name = "tomli", marker = "python_full_version <= '3.11'" }, +] + +[[package]] +name = "cramjam" +version = "2.11.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/14/12/34bf6e840a79130dfd0da7badfb6f7810b8fcfd60e75b0539372667b41b6/cramjam-2.11.0.tar.gz", hash = "sha256:5c82500ed91605c2d9781380b378397012e25127e89d64f460fea6aeac4389b4", size = 99100, upload-time = "2025-07-27T21:25:07.559Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/53/d3/20d0402e4e983b66603117ad3dd3b864a05d7997a830206d3ff9cacef9a2/cramjam-2.11.0-cp310-cp310-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:d0859c65775e8ebf2cbc084bfd51bd0ffda10266da6f9306451123b89f8e5a63", size = 3558999, upload-time = "2025-07-27T21:21:34.105Z" }, + { url = 
"https://files.pythonhosted.org/packages/f5/a8/a6e2744288938ccd320a5c6f6f3653faa790f933f5edd088c6e5782a2354/cramjam-2.11.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:1d77b9b0aca02a3f6eeeff27fcd315ca5972616c0919ee38e522cce257bcd349", size = 1861558, upload-time = "2025-07-27T21:21:36.624Z" }, + { url = "https://files.pythonhosted.org/packages/96/29/7961e09a849eea7d8302e7baa6f829dd3ef3faf199cb25ed29b318ae799b/cramjam-2.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:66425bc25b5481359b12a6719b6e7c90ffe76d85d0691f1da7df304bfb8ce45c", size = 1699431, upload-time = "2025-07-27T21:21:38.396Z" }, + { url = "https://files.pythonhosted.org/packages/7a/60/6665e52f01a8919bf37c43dcf0e03b6dd3866f5c4e95440b357d508ee14e/cramjam-2.11.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:bd748d3407ec63e049b3aea1595e218814fccab329b7fb10bb51120a30e9fb7e", size = 2025262, upload-time = "2025-07-27T21:21:40.417Z" }, + { url = "https://files.pythonhosted.org/packages/d7/80/79bd84dbeb109e2c6efb74e661b7bd4c3ba393208ebcf69e2ae9454ae80c/cramjam-2.11.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6d9a23a35b3a105c42a8de60fc2e80281ae6e758f05a3baea0b68eb1ddcb679", size = 1766177, upload-time = "2025-07-27T21:21:42.224Z" }, + { url = "https://files.pythonhosted.org/packages/28/ef/b43280767ebcde022ba31f1e9902137655a956ae30e920d75630fa67e36e/cramjam-2.11.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:40a75b95e05e38a2a055b2446f09994ce1139151721659315151d4ad6289bbff", size = 1854031, upload-time = "2025-07-27T21:21:43.651Z" }, + { url = "https://files.pythonhosted.org/packages/60/1c/79d522757c494dfd9e9b208b0604cc7e97b481483cc477144f5705a06ab7/cramjam-2.11.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e5d042c376d2025300da37d65192d06a457918b63b31140f697f85fd8e310b29", size = 2035812, upload-time = "2025-07-27T21:21:45.473Z" }, + { url = "https://files.pythonhosted.org/packages/c8/70/3bf0670380069b3abd4c6b53f61d3148f4e08935569c08efbeaf7550e87d/cramjam-2.11.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cb148b35ab20c75b19a06c27f05732e2a321adbd86fadc93f9466dbd7b1154a7", size = 2067661, upload-time = "2025-07-27T21:21:47.901Z" }, + { url = "https://files.pythonhosted.org/packages/db/7e/4f6ca98a4b474348e965a529b359184785d1119ab7c4c9ec1280b8bea50a/cramjam-2.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ee47c220f0f5179ddc923ab91fc9e282c27b29fabc60c433dfe06f08084f798", size = 1981523, upload-time = "2025-07-27T21:21:49.704Z" }, + { url = "https://files.pythonhosted.org/packages/8a/6c/b241511c7ffd5f1da29641429bb0e19b5fbcffafde5ba1bbcbf9394ea456/cramjam-2.11.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0cf1b5a81b21ea175c976c3ab09e00494258f4b49b7995efc86060cced3f0b2e", size = 2034251, upload-time = "2025-07-27T21:21:51.252Z" }, + { url = "https://files.pythonhosted.org/packages/14/5c/4ef926c8c3c1bf6da96f9c53450ff334cdb6d0fc1efced0aea97e2090803/cramjam-2.11.0-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:360c00338ecf48921492455007f904be607fc7818de3d681acbcc542aae2fb36", size = 2155322, upload-time = "2025-07-27T21:21:53.348Z" }, + { url = "https://files.pythonhosted.org/packages/be/fb/eb2aef7fb2730e56c5a2c9000817ee8fb4a95c92f19cc6e441afed42ec29/cramjam-2.11.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f31fcc0d30dc3f3e94ea6b4d8e1a855071757c6abf6a7b1e284050ab7d4c299c", size = 2169094, upload-time = 
"2025-07-27T21:21:55.187Z" }, + { url = "https://files.pythonhosted.org/packages/3b/80/925a5c668dcee1c6f61775067185c5dc9a63c766d5393e5c60d2af4217a7/cramjam-2.11.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:033be66fdceb3d63b2c99b257a98380c4ec22c9e4dca54a2bfec3718cd24e184", size = 2159089, upload-time = "2025-07-27T21:21:57.118Z" }, + { url = "https://files.pythonhosted.org/packages/1f/ac/b2819640eef0592a6de7ca832c0d23c69bd1620f765ce88b60dbc8da9ba2/cramjam-2.11.0-cp310-cp310-win32.whl", hash = "sha256:1c6cea67f6000b81f6bd27d14c8a6f62d00336ca7252fd03ee16f6b70eb5c0d2", size = 1605046, upload-time = "2025-07-27T21:21:58.617Z" }, + { url = "https://files.pythonhosted.org/packages/5a/f4/06af04727b9556721049e2127656d727306d275c518e3d97f9ed4cffd0d8/cramjam-2.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:98aa4a351b047b0f7f9e971585982065028adc2c162c5c23c5d5734c5ccc1077", size = 1710647, upload-time = "2025-07-27T21:22:00.279Z" }, + { url = "https://files.pythonhosted.org/packages/d0/89/8001f6a9b6b6e9fa69bec5319789083475d6f26d52aaea209d3ebf939284/cramjam-2.11.0-cp311-cp311-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:04cfa39118570e70e920a9b75c733299784b6d269733dbc791d9aaed6edd2615", size = 3559272, upload-time = "2025-07-27T21:22:01.988Z" }, + { url = "https://files.pythonhosted.org/packages/0b/f3/001d00070ca92e5fbe6aacc768e455568b0cde46b0eb944561a4ea132300/cramjam-2.11.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:66a18f68506290349a256375d7aa2f645b9f7993c10fc4cc211db214e4e61d2b", size = 1861743, upload-time = "2025-07-27T21:22:03.754Z" }, + { url = "https://files.pythonhosted.org/packages/c9/35/041a3af01bf3f6158f120070f798546d4383b962b63c35cd91dcbf193e17/cramjam-2.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:50e7d65533857736cd56f6509cf2c4866f28ad84dd15b5bdbf2f8a81e77fa28a", size = 1699631, upload-time = "2025-07-27T21:22:05.192Z" }, + { url = "https://files.pythonhosted.org/packages/17/eb/5358b238808abebd0c949c42635c3751204ca7cf82b29b984abe9f5e33c8/cramjam-2.11.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1f71989668458fc327ac15396db28d92df22f8024bb12963929798b2729d2df5", size = 2025603, upload-time = "2025-07-27T21:22:06.726Z" }, + { url = "https://files.pythonhosted.org/packages/0e/79/19dba7c03a27408d8d11b5a7a4a7908459cfd4e6f375b73264dc66517bf6/cramjam-2.11.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee77ac543f1e2b22af1e8be3ae589f729491b6090582340aacd77d1d757d9569", size = 1766283, upload-time = "2025-07-27T21:22:08.568Z" }, + { url = "https://files.pythonhosted.org/packages/a4/ad/40e4b3408501d886d082db465c33971655fe82573c535428e52ab905f4d0/cramjam-2.11.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ad52784120e7e4d8a0b5b0517d185b8bf7f74f5e17272857ddc8951a628d9be1", size = 1854407, upload-time = "2025-07-27T21:22:10.518Z" }, + { url = "https://files.pythonhosted.org/packages/36/6e/c1b60ceb6d7ea6ff8b0bf197520aefe23f878bf2bfb0de65f2b0c2f82cd1/cramjam-2.11.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b86f8e6d9c1b3f9a75b2af870c93ceee0f1b827cd2507387540e053b35d7459", size = 2035793, upload-time = "2025-07-27T21:22:12.504Z" }, + { url = "https://files.pythonhosted.org/packages/9c/ad/32a8d5f4b1e3717787945ec6d71bd1c6e6bccba4b7e903fc0d9d4e4b08c3/cramjam-2.11.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:320d61938950d95da2371b46c406ec433e7955fae9f396c8e1bf148ffc187d11", size = 
2067499, upload-time = "2025-07-27T21:22:14.067Z" }, + { url = "https://files.pythonhosted.org/packages/ff/cd/3b5a662736ea62ff7fa4c4a10a85e050bfdaad375cc53dc80427e8afe41c/cramjam-2.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41eafc8c1653a35a5c7e75ad48138f9f60085cc05cd99d592e5298552d944e9f", size = 1981853, upload-time = "2025-07-27T21:22:15.908Z" }, + { url = "https://files.pythonhosted.org/packages/26/8e/1dbcfaaa7a702ee82ee683ec3a81656934dd7e04a7bc4ee854033686f98a/cramjam-2.11.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03a7316c6bf763dfa34279335b27702321da44c455a64de58112968c0818ec4a", size = 2034514, upload-time = "2025-07-27T21:22:17.352Z" }, + { url = "https://files.pythonhosted.org/packages/50/62/f11709bfdce74af79a88b410dcb76dedc97612166e759136931bf63cfd7b/cramjam-2.11.0-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:244c2ed8bd7ccbb294a2abe7ca6498db7e89d7eb5e744691dc511a7dc82e65ca", size = 2155343, upload-time = "2025-07-27T21:22:18.854Z" }, + { url = "https://files.pythonhosted.org/packages/8a/6d/3b98b61841a5376d9a9b8468ae58753a8e6cf22be9534a0fa5af4d8621cc/cramjam-2.11.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:405f8790bad36ce0b4bbdb964ad51507bfc7942c78447f25cb828b870a1d86a0", size = 2169367, upload-time = "2025-07-27T21:22:20.389Z" }, + { url = "https://files.pythonhosted.org/packages/11/72/bd5db5c49dbebc8b002f1c4983101b28d2e7fc9419753db1c31ec22b03ef/cramjam-2.11.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6b1b751a5411032b08fb3ac556160229ca01c6bbe4757bb3a9a40b951ebaac23", size = 2159334, upload-time = "2025-07-27T21:22:22.254Z" }, + { url = "https://files.pythonhosted.org/packages/34/32/203c57acdb6eea727e7078b2219984e64ed4ad043c996ed56321301ba167/cramjam-2.11.0-cp311-cp311-win32.whl", hash = "sha256:5251585608778b9ac8effed544933df7ad85b4ba21ee9738b551f17798b215ac", size = 1605313, upload-time = "2025-07-27T21:22:24.126Z" }, + { url = "https://files.pythonhosted.org/packages/a9/bd/102d6deb87a8524ac11cddcd31a7612b8f20bf9b473c3c645045e3b957c7/cramjam-2.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:dca88bc8b68ce6d35dafd8c4d5d59a238a56c43fa02b74c2ce5f9dfb0d1ccb46", size = 1710991, upload-time = "2025-07-27T21:22:25.661Z" }, + { url = "https://files.pythonhosted.org/packages/0b/0d/7c84c913a5fae85b773a9dcf8874390f9d68ba0fcc6630efa7ff1541b950/cramjam-2.11.0-cp312-cp312-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:dba5c14b8b4f73ea1e65720f5a3fe4280c1d27761238378be8274135c60bbc6e", size = 3553368, upload-time = "2025-07-27T21:22:27.162Z" }, + { url = "https://files.pythonhosted.org/packages/2b/cc/4f6d185d8a744776f53035e72831ff8eefc2354f46ab836f4bd3c4f6c138/cramjam-2.11.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:11eb40722b3fcf3e6890fba46c711bf60f8dc26360a24876c85e52d76c33b25b", size = 1860014, upload-time = "2025-07-27T21:22:28.738Z" }, + { url = "https://files.pythonhosted.org/packages/1c/a8/626c76263085c6d5ded0e71823b411e9522bfc93ba6cc59855a5869296e7/cramjam-2.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aeb26e2898994b6e8319f19a4d37c481512acdcc6d30e1b5ecc9d8ec57e835cb", size = 1693512, upload-time = "2025-07-27T21:22:30.999Z" }, + { url = "https://files.pythonhosted.org/packages/e9/52/0851a16a62447532e30ba95a80e638926fdea869a34b4b5b9d0a020083ba/cramjam-2.11.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4f8d82081ed7d8fe52c982bd1f06e4c7631a73fe1fb6d4b3b3f2404f87dc40fe", size = 2025285, upload-time = "2025-07-27T21:22:32.954Z" 
}, + { url = "https://files.pythonhosted.org/packages/98/76/122e444f59dbc216451d8e3d8282c9665dc79eaf822f5f1470066be1b695/cramjam-2.11.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:092a3ec26e0a679305018380e4f652eae1b6dfe3fc3b154ee76aa6b92221a17c", size = 1761327, upload-time = "2025-07-27T21:22:34.484Z" }, + { url = "https://files.pythonhosted.org/packages/a3/bc/3a0189aef1af2b29632c039c19a7a1b752bc21a4053582a5464183a0ad3d/cramjam-2.11.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:529d6d667c65fd105d10bd83d1cd3f9869f8fd6c66efac9415c1812281196a92", size = 1854075, upload-time = "2025-07-27T21:22:36.157Z" }, + { url = "https://files.pythonhosted.org/packages/2e/80/8a6343b13778ce52d94bb8d5365a30c3aa951276b1857201fe79d7e2ad25/cramjam-2.11.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:555eb9c90c450e0f76e27d9ff064e64a8b8c6478ab1a5594c91b7bc5c82fd9f0", size = 2032710, upload-time = "2025-07-27T21:22:38.17Z" }, + { url = "https://files.pythonhosted.org/packages/df/6b/cd1778a207c29eda10791e3dfa018b588001928086e179fc71254793c625/cramjam-2.11.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5edf4c9e32493035b514cf2ba0c969d81ccb31de63bd05490cc8bfe3b431674e", size = 2068353, upload-time = "2025-07-27T21:22:39.615Z" }, + { url = "https://files.pythonhosted.org/packages/dc/f0/5c2a5cd5711032f3b191ca50cb786c17689b4a9255f9f768866e6c9f04d9/cramjam-2.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fa2fe41f48c4d58d923803383b0737f048918b5a0d10390de9628bb6272b107", size = 1978104, upload-time = "2025-07-27T21:22:41.106Z" }, + { url = "https://files.pythonhosted.org/packages/f9/8b/b363a5fb2c3347504fe9a64f8d0f1e276844f0e532aa7162c061cd1ffee4/cramjam-2.11.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9ca14cf1cabdb0b77d606db1bb9e9ca593b1dbd421fcaf251ec9a5431ec449f3", size = 2030779, upload-time = "2025-07-27T21:22:42.969Z" }, + { url = "https://files.pythonhosted.org/packages/78/7b/d83dad46adb6c988a74361f81ad9c5c22642be53ad88616a19baedd06243/cramjam-2.11.0-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:309e95bf898829476bccf4fd2c358ec00e7ff73a12f95a3cdeeba4bb1d3683d5", size = 2155297, upload-time = "2025-07-27T21:22:44.6Z" }, + { url = "https://files.pythonhosted.org/packages/1a/be/60d9be4cb33d8740a4aa94c7513f2ef3c4eba4fd13536f086facbafade71/cramjam-2.11.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:86dca35d2f15ef22922411496c220f3c9e315d5512f316fe417461971cc1648d", size = 2169255, upload-time = "2025-07-27T21:22:46.534Z" }, + { url = "https://files.pythonhosted.org/packages/11/b0/4a595f01a243aec8ad272b160b161c44351190c35d98d7787919d962e9e5/cramjam-2.11.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:193c6488bd2f514cbc0bef5c18fad61a5f9c8d059dd56edf773b3b37f0e85496", size = 2155651, upload-time = "2025-07-27T21:22:48.46Z" }, + { url = "https://files.pythonhosted.org/packages/38/47/7776659aaa677046b77f527106e53ddd47373416d8fcdb1e1a881ec5dc06/cramjam-2.11.0-cp312-cp312-win32.whl", hash = "sha256:514e2c008a8b4fa823122ca3ecab896eac41d9aa0f5fc881bd6264486c204e32", size = 1603568, upload-time = "2025-07-27T21:22:50.084Z" }, + { url = "https://files.pythonhosted.org/packages/75/b1/d53002729cfd94c5844ddfaf1233c86d29f2dbfc1b764a6562c41c044199/cramjam-2.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:53fed080476d5f6ad7505883ec5d1ec28ba36c2273db3b3e92d7224fe5e463db", size = 1709287, upload-time = "2025-07-27T21:22:51.534Z" }, + { url = 
"https://files.pythonhosted.org/packages/0a/8b/406c5dc0f8e82385519d8c299c40fd6a56d97eca3fcd6f5da8dad48de75b/cramjam-2.11.0-cp313-cp313-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:2c289729cc1c04e88bafa48b51082fb462b0a57dbc96494eab2be9b14dca62af", size = 3553330, upload-time = "2025-07-27T21:22:53.124Z" }, + { url = "https://files.pythonhosted.org/packages/00/ad/4186884083d6e4125b285903e17841827ab0d6d0cffc86216d27ed91e91d/cramjam-2.11.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:045201ee17147e36cf43d8ae2fa4b4836944ac672df5874579b81cf6d40f1a1f", size = 1859756, upload-time = "2025-07-27T21:22:54.821Z" }, + { url = "https://files.pythonhosted.org/packages/54/01/91b485cf76a7efef638151e8a7d35784dae2c4ff221b1aec2c083e4b106d/cramjam-2.11.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:619cd195d74c9e1d2a3ad78d63451d35379c84bd851aec552811e30842e1c67a", size = 1693609, upload-time = "2025-07-27T21:22:56.331Z" }, + { url = "https://files.pythonhosted.org/packages/cd/84/d0c80d279b2976870fc7d10f15dcb90a3c10c06566c6964b37c152694974/cramjam-2.11.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6eb3ae5ab72edb2ed68bdc0f5710f0a6cad7fd778a610ec2c31ee15e32d3921e", size = 2024912, upload-time = "2025-07-27T21:22:57.915Z" }, + { url = "https://files.pythonhosted.org/packages/d6/70/88f2a5cb904281ed5d3c111b8f7d5366639817a5470f059bcd26833fc870/cramjam-2.11.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df7da3f4b19e3078f9635f132d31b0a8196accb2576e3213ddd7a77f93317c20", size = 1760715, upload-time = "2025-07-27T21:22:59.528Z" }, + { url = "https://files.pythonhosted.org/packages/b2/06/cf5b02081132537d28964fb385fcef9ed9f8a017dd7d8c59d317e53ba50d/cramjam-2.11.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:57286b289cd557ac76c24479d8ecfb6c3d5b854cce54ccc7671f9a2f5e2a2708", size = 1853782, upload-time = "2025-07-27T21:23:01.07Z" }, + { url = "https://files.pythonhosted.org/packages/57/27/63525087ed40a53d1867021b9c4858b80cc86274ffe7225deed067d88d92/cramjam-2.11.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:28952fbbf8b32c0cb7fa4be9bcccfca734bf0d0989f4b509dc7f2f70ba79ae06", size = 2032354, upload-time = "2025-07-27T21:23:03.021Z" }, + { url = "https://files.pythonhosted.org/packages/c3/ef/dbba082c6ebfb6410da4dd39a64e654d7194fcfd4567f85991a83fa4ec32/cramjam-2.11.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78ed2e4099812a438b545dfbca1928ec825e743cd253bc820372d6ef8c3adff4", size = 2068007, upload-time = "2025-07-27T21:23:04.526Z" }, + { url = "https://files.pythonhosted.org/packages/35/ce/d902b9358a46a086938feae83b2251720e030f06e46006f4c1fc0ac9da20/cramjam-2.11.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d9aecd5c3845d415bd6c9957c93de8d93097e269137c2ecb0e5a5256374bdc8", size = 1977485, upload-time = "2025-07-27T21:23:06.058Z" }, + { url = "https://files.pythonhosted.org/packages/e8/03/982f54553244b0afcbdb2ad2065d460f0ab05a72a96896a969a1ca136a1e/cramjam-2.11.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:362fcf4d6f5e1242a4540812455f5a594949190f6fbc04f2ffbfd7ae0266d788", size = 2030447, upload-time = "2025-07-27T21:23:07.679Z" }, + { url = "https://files.pythonhosted.org/packages/74/5f/748e54cdb665ec098ec519e23caacc65fc5ae58718183b071e33fc1c45b4/cramjam-2.11.0-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:13240b3dea41b1174456cb9426843b085dc1a2bdcecd9ee2d8f65ac5703374b0", 
size = 2154949, upload-time = "2025-07-27T21:23:09.366Z" }, + { url = "https://files.pythonhosted.org/packages/69/81/c4e6cb06ed69db0dc81f9a8b1dc74995ebd4351e7a1877143f7031ff2700/cramjam-2.11.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:c54eed83726269594b9086d827decc7d2015696e31b99bf9b69b12d9063584fe", size = 2168925, upload-time = "2025-07-27T21:23:10.976Z" }, + { url = "https://files.pythonhosted.org/packages/13/5b/966365523ce8290a08e163e3b489626c5adacdff2b3da9da1b0823dfb14e/cramjam-2.11.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:f8195006fdd0fc0a85b19df3d64a3ef8a240e483ae1dfc7ac6a4316019eb5df2", size = 2154950, upload-time = "2025-07-27T21:23:12.514Z" }, + { url = "https://files.pythonhosted.org/packages/3a/7d/7f8eb5c534b72b32c6eb79d74585bfee44a9a5647a14040bb65c31c2572d/cramjam-2.11.0-cp313-cp313-win32.whl", hash = "sha256:ccf30e3fe6d770a803dcdf3bb863fa44ba5dc2664d4610ba2746a3c73599f2e4", size = 1603199, upload-time = "2025-07-27T21:23:14.38Z" }, + { url = "https://files.pythonhosted.org/packages/37/05/47b5e0bf7c41a3b1cdd3b7c2147f880c93226a6bef1f5d85183040cbdece/cramjam-2.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:ee36348a204f0a68b03400f4736224e9f61d1c6a1582d7f875c1ca56f0254268", size = 1708924, upload-time = "2025-07-27T21:23:16.332Z" }, + { url = "https://files.pythonhosted.org/packages/de/07/a1051cdbbe6d723df16d756b97f09da7c1adb69e29695c58f0392bc12515/cramjam-2.11.0-cp314-cp314-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:7ba5e38c9fbd06f086f4a5a64a1a5b7b417cd3f8fc07a20e5c03651f72f36100", size = 3554141, upload-time = "2025-07-27T21:23:17.938Z" }, + { url = "https://files.pythonhosted.org/packages/74/66/58487d2e16ef3d04f51a7c7f0e69823e806744b4c21101e89da4873074bc/cramjam-2.11.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:b8adeee57b41fe08e4520698a4b0bd3cc76dbd81f99424b806d70a5256a391d3", size = 1860353, upload-time = "2025-07-27T21:23:19.593Z" }, + { url = "https://files.pythonhosted.org/packages/67/b4/67f6254d166ffbcc9d5fa1b56876eaa920c32ebc8e9d3d525b27296b693b/cramjam-2.11.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:b96a74fa03a636c8a7d76f700d50e9a8bc17a516d6a72d28711225d641e30968", size = 1693832, upload-time = "2025-07-27T21:23:21.185Z" }, + { url = "https://files.pythonhosted.org/packages/55/a3/4e0b31c0d454ae70c04684ed7c13d3c67b4c31790c278c1e788cb804fa4a/cramjam-2.11.0-cp314-cp314-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c3811a56fa32e00b377ef79121c0193311fd7501f0fb378f254c7f083cc1fbe0", size = 2027080, upload-time = "2025-07-27T21:23:23.303Z" }, + { url = "https://files.pythonhosted.org/packages/d9/c7/5e8eed361d1d3b8be14f38a54852c5370cc0ceb2c2d543b8ba590c34f080/cramjam-2.11.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5d927e87461f8a0d448e4ab5eb2bca9f31ca5d8ea86d70c6f470bb5bc666d7e", size = 1761543, upload-time = "2025-07-27T21:23:24.991Z" }, + { url = "https://files.pythonhosted.org/packages/09/0c/06b7f8b0ce9fde89470505116a01fc0b6cb92d406c4fb1e46f168b5d3fa5/cramjam-2.11.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f1f5c450121430fd89cb5767e0a9728ecc65997768fd4027d069cb0368af62f9", size = 1854636, upload-time = "2025-07-27T21:23:26.987Z" }, + { url = "https://files.pythonhosted.org/packages/6f/c6/6ebc02c9d5acdf4e5f2b1ec6e1252bd5feee25762246798ae823b3347457/cramjam-2.11.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:724aa7490be50235d97f07e2ca10067927c5d7f336b786ddbc868470e822aa25", size 
= 2032715, upload-time = "2025-07-27T21:23:28.603Z" }, + { url = "https://files.pythonhosted.org/packages/a2/77/a122971c23f5ca4b53e4322c647ac7554626c95978f92d19419315dddd05/cramjam-2.11.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:54c4637122e7cfd7aac5c1d3d4c02364f446d6923ea34cf9d0e8816d6e7a4936", size = 2069039, upload-time = "2025-07-27T21:23:30.319Z" }, + { url = "https://files.pythonhosted.org/packages/19/0f/f6121b90b86b9093c066889274d26a1de3f29969d45c2ed1ecbe2033cb78/cramjam-2.11.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17eb39b1696179fb471eea2de958fa21f40a2cd8bf6b40d428312d5541e19dc4", size = 1979566, upload-time = "2025-07-27T21:23:32.002Z" }, + { url = "https://files.pythonhosted.org/packages/e0/a3/f95bc57fd7f4166ce6da816cfa917fb7df4bb80e669eb459d85586498414/cramjam-2.11.0-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:36aa5a798aa34e11813a80425a30d8e052d8de4a28f27bfc0368cfc454d1b403", size = 2030905, upload-time = "2025-07-27T21:23:33.696Z" }, + { url = "https://files.pythonhosted.org/packages/fc/52/e429de4e8bc86ee65e090dae0f87f45abd271742c63fb2d03c522ffde28a/cramjam-2.11.0-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:449fca52774dc0199545fbf11f5128933e5a6833946707885cf7be8018017839", size = 2155592, upload-time = "2025-07-27T21:23:35.375Z" }, + { url = "https://files.pythonhosted.org/packages/6c/6c/65a7a0207787ad39ad804af4da7f06a60149de19481d73d270b540657234/cramjam-2.11.0-cp314-cp314-musllinux_1_1_i686.whl", hash = "sha256:d87d37b3d476f4f7623c56a232045d25bd9b988314702ea01bd9b4a94948a778", size = 2170839, upload-time = "2025-07-27T21:23:37.197Z" }, + { url = "https://files.pythonhosted.org/packages/b2/c5/5c5db505ba692bc844246b066e23901d5905a32baf2f33719c620e65887f/cramjam-2.11.0-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:26cb45c47d71982d76282e303931c6dd4baee1753e5d48f9a89b3a63e690b3a3", size = 2157236, upload-time = "2025-07-27T21:23:38.854Z" }, + { url = "https://files.pythonhosted.org/packages/b0/22/88e6693e60afe98901e5bbe91b8dea193e3aa7f42e2770f9c3339f5c1065/cramjam-2.11.0-cp314-cp314-win32.whl", hash = "sha256:4efe919d443c2fd112fe25fe636a52f9628250c9a50d9bddb0488d8a6c09acc6", size = 1604136, upload-time = "2025-07-27T21:23:40.56Z" }, + { url = "https://files.pythonhosted.org/packages/cc/f8/01618801cd59ccedcc99f0f96d20be67d8cfc3497da9ccaaad6b481781dd/cramjam-2.11.0-cp314-cp314-win_amd64.whl", hash = "sha256:ccec3524ea41b9abd5600e3e27001fd774199dbb4f7b9cb248fcee37d4bda84c", size = 1710272, upload-time = "2025-07-27T21:23:42.236Z" }, + { url = "https://files.pythonhosted.org/packages/40/81/6cdb3ed222d13ae86bda77aafe8d50566e81a1169d49ed195b6263610704/cramjam-2.11.0-cp314-cp314t-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:966ac9358b23d21ecd895c418c048e806fd254e46d09b1ff0cdad2eba195ea3e", size = 3559671, upload-time = "2025-07-27T21:23:44.504Z" }, + { url = "https://files.pythonhosted.org/packages/cb/43/52b7e54fe5ba1ef0270d9fdc43dabd7971f70ea2d7179be918c997820247/cramjam-2.11.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:387f09d647a0d38dcb4539f8a14281f8eb6bb1d3e023471eb18a5974b2121c86", size = 1867876, upload-time = "2025-07-27T21:23:46.987Z" }, + { url = "https://files.pythonhosted.org/packages/9d/28/30d5b8d10acd30db3193bc562a313bff722888eaa45cfe32aa09389f2b24/cramjam-2.11.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:665b0d8fbbb1a7f300265b43926457ec78385200133e41fef19d85790fc1e800", size = 1695562, upload-time = 
"2025-07-27T21:23:48.644Z" }, + { url = "https://files.pythonhosted.org/packages/d9/86/ec806f986e01b896a650655024ea52a13e25c3ac8a3a382f493089483cdc/cramjam-2.11.0-cp314-cp314t-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ca905387c7a371531b9622d93471be4d745ef715f2890c3702479cd4fc85aa51", size = 2025056, upload-time = "2025-07-27T21:23:50.404Z" }, + { url = "https://files.pythonhosted.org/packages/09/43/c2c17586b90848d29d63181f7d14b8bd3a7d00975ad46e3edf2af8af7e1f/cramjam-2.11.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c1aa56aef2c8af55a21ed39040a94a12b53fb23beea290f94d19a76027e2ffb", size = 1764084, upload-time = "2025-07-27T21:23:52.265Z" }, + { url = "https://files.pythonhosted.org/packages/2b/a9/68bc334fadb434a61df10071dc8606702aa4f5b6cdb2df62474fc21d2845/cramjam-2.11.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e5db59c1cdfaa2ab85cc988e602d6919495f735ca8a5fd7603608eb1e23c26d5", size = 1854859, upload-time = "2025-07-27T21:23:54.085Z" }, + { url = "https://files.pythonhosted.org/packages/5b/4e/b48e67835b5811ec5e9cb2e2bcba9c3fd76dab3e732569fe801b542c6ca9/cramjam-2.11.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b1f893014f00fe5e89a660a032e813bf9f6d91de74cd1490cdb13b2b59d0c9a3", size = 2035970, upload-time = "2025-07-27T21:23:55.758Z" }, + { url = "https://files.pythonhosted.org/packages/c4/70/d2ac33d572b4d90f7f0f2c8a1d60fb48f06b128fdc2c05f9b49891bb0279/cramjam-2.11.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c26a1eb487947010f5de24943bd7c422dad955b2b0f8650762539778c380ca89", size = 2069320, upload-time = "2025-07-27T21:23:57.494Z" }, + { url = "https://files.pythonhosted.org/packages/1d/4c/85cec77af4a74308ba5fca8e296c4e2f80ec465c537afc7ab1e0ca2f9a00/cramjam-2.11.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d5c8bfb438d94e7b892d1426da5fc4b4a5370cc360df9b8d9d77c33b896c37e", size = 1982668, upload-time = "2025-07-27T21:23:59.126Z" }, + { url = "https://files.pythonhosted.org/packages/55/45/938546d1629e008cc3138df7c424ef892719b1796ff408a2ab8550032e5e/cramjam-2.11.0-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:cb1fb8c9337ab0da25a01c05d69a0463209c347f16512ac43be5986f3d1ebaf4", size = 2034028, upload-time = "2025-07-27T21:24:00.865Z" }, + { url = "https://files.pythonhosted.org/packages/01/76/b5a53e20505555f1640e66dcf70394bcf51a1a3a072aa18ea35135a0f9ed/cramjam-2.11.0-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:1f6449f6de52dde3e2f1038284910c8765a397a25e2d05083870f3f5e7fc682c", size = 2155513, upload-time = "2025-07-27T21:24:02.92Z" }, + { url = "https://files.pythonhosted.org/packages/84/12/8d3f6ceefae81bbe45a347fdfa2219d9f3ac75ebc304f92cd5fcb4fbddc5/cramjam-2.11.0-cp314-cp314t-musllinux_1_1_i686.whl", hash = "sha256:382dec4f996be48ed9c6958d4e30c2b89435d7c2c4dbf32480b3b8886293dd65", size = 2170035, upload-time = "2025-07-27T21:24:04.558Z" }, + { url = "https://files.pythonhosted.org/packages/4b/85/3be6f0a1398f976070672be64f61895f8839857618a2d8cc0d3ab529d3dc/cramjam-2.11.0-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:d388bd5723732c3afe1dd1d181e4213cc4e1be210b080572e7d5749f6e955656", size = 2160229, upload-time = "2025-07-27T21:24:06.729Z" }, + { url = "https://files.pythonhosted.org/packages/57/5e/66cfc3635511b20014bbb3f2ecf0095efb3049e9e96a4a9e478e4f3d7b78/cramjam-2.11.0-cp314-cp314t-win32.whl", hash = "sha256:0a70ff17f8e1d13f322df616505550f0f4c39eda62290acb56f069d4857037c8", size = 
1610267, upload-time = "2025-07-27T21:24:08.428Z" }, + { url = "https://files.pythonhosted.org/packages/ce/c6/c71e82e041c95ffe6a92ac707785500aa2a515a4339c2c7dd67e3c449249/cramjam-2.11.0-cp314-cp314t-win_amd64.whl", hash = "sha256:028400d699442d40dbda02f74158c73d05cb76587a12490d0bfedd958fd49188", size = 1713108, upload-time = "2025-07-27T21:24:10.147Z" }, + { url = "https://files.pythonhosted.org/packages/8c/33/3d7a7fbfb313614d59ae2e512b9dacfc22efb07c20e4af7deb73d3409f7b/cramjam-2.11.0-cp39-cp39-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:2581e82dca742b55d8b1d7f33892394c06b057a74f2853ffcb0802dcddcbf694", size = 3559843, upload-time = "2025-07-27T21:24:11.928Z" }, + { url = "https://files.pythonhosted.org/packages/d4/b0/ccf09697df7fcc750c4913dc4bf3fb91e5b778dda65fb9fa55dde61c03dc/cramjam-2.11.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a9994a42cd12f07ece04eff94dbf6e127b3986f7af9b26db1eb4545c477a6604", size = 1862081, upload-time = "2025-07-27T21:24:13.8Z" }, + { url = "https://files.pythonhosted.org/packages/41/55/d36255f1a9004a3352469143d2b8a5b769e0eb4e484a8192da41ad67e893/cramjam-2.11.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a4963dac24213690183110d6b41125fdc4af871a5a213589d6c6606d49e1b949", size = 1699970, upload-time = "2025-07-27T21:24:15.547Z" }, + { url = "https://files.pythonhosted.org/packages/35/52/722a2efbe104903648185411f9c634e5678035476bc556001d6ef811e191/cramjam-2.11.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c9af16f0b07d851b968c54e52d19430d820bb47c26d10a09cfb5c7127de26773", size = 2025715, upload-time = "2025-07-27T21:24:17.327Z" }, + { url = "https://files.pythonhosted.org/packages/0a/60/75084f30277d5f2481d20a544654894a32528f98f4415c1bd467823ab5b2/cramjam-2.11.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e2400c09ba620e2ca91a903dbe907d75f6a1994d8337e9f3026778daa92b08d", size = 1766999, upload-time = "2025-07-27T21:24:19.163Z" }, + { url = "https://files.pythonhosted.org/packages/89/5c/2663bdfcea6ab06fcac97883b5b574a12236c5d9f70691cc05dd49cb10fb/cramjam-2.11.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b820004db8b22715cee2ef154d4b47b3d76c4677ff217c587dd46f694a3052f9", size = 1854352, upload-time = "2025-07-27T21:24:20.953Z" }, + { url = "https://files.pythonhosted.org/packages/b4/df/1db5b57ccf77e923687b2061766e69c2cbdaf41641204207dbf55ef7ebe9/cramjam-2.11.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:261e9200942189d8201a005ffa1e29339479364b5b0013ab0758b03229d9ac67", size = 2036219, upload-time = "2025-07-27T21:24:23.029Z" }, + { url = "https://files.pythonhosted.org/packages/f7/28/fa3b017668a3264068c893e57a6b923dfd8fa851a1c821c4cc1c95cd47a6/cramjam-2.11.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24c61f1fad56ca68aee53bf67b6a84cd762a2c71ee4b71064378547c2411ae6", size = 2077245, upload-time = "2025-07-27T21:24:25.127Z" }, + { url = "https://files.pythonhosted.org/packages/d1/1d/6f6018ee81acec6c4ef6cda6bd0770959992caf2f1c41e7944a135a53eca/cramjam-2.11.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab86d22f69a21961f35d1a1b02278b5bb9a95c5f5b4722c6904bca343c8d219f", size = 1982235, upload-time = "2025-07-27T21:24:26.851Z" }, + { url = "https://files.pythonhosted.org/packages/31/b4/c38f6077d8ec7c9208d23d4f7f19a618f5b4940170c9deba5d3bdc722eb6/cramjam-2.11.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:a88bc9b191422cd5b22a1521b28607008590628b6b2a8a7db5c54ec04dc82fa1", size = 2034629, upload-time = "2025-07-27T21:24:28.694Z" }, + { url = "https://files.pythonhosted.org/packages/66/3b/3f46a349b1a7a67e2bda10e99403e9163c87c95e34399cc69f4f86a2461a/cramjam-2.11.0-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:7855bc4df5ed5f7fb1c98ea3fd98292e9acd3c097b1b21d596a69e1e60455400", size = 2155552, upload-time = "2025-07-27T21:24:30.572Z" }, + { url = "https://files.pythonhosted.org/packages/ed/86/b431a51162d4c8f33b28bdcca047382e1038757d43625e65c8d29ed6c31f/cramjam-2.11.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:19eb43e21db9dc42613599703c1a8e40b0170514a313f11f4c8be380425a1019", size = 2169651, upload-time = "2025-07-27T21:24:32.331Z" }, + { url = "https://files.pythonhosted.org/packages/6e/d5/9aa69784da58b6bd3f5abcaad2eb76ad2a89efde7929821bad17355fd8da/cramjam-2.11.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cec977d673ad596bae6bdfc0091ee386cef05b515b23f2ce52f9fadd0156186a", size = 2159740, upload-time = "2025-07-27T21:24:34.108Z" }, + { url = "https://files.pythonhosted.org/packages/9c/e1/75706936eb81605a939e15b8b7a1241b35e805ce76a64838b4586c440f61/cramjam-2.11.0-cp39-cp39-win32.whl", hash = "sha256:dcc3b15b97f3054964b47e2a5fcfb4f5ff569e9af0a7af19f1d4c5f4231bbf3b", size = 1605449, upload-time = "2025-07-27T21:24:36.538Z" }, + { url = "https://files.pythonhosted.org/packages/37/6b/ae7626994c7285bfc0ffa0d9929c3c16f2d0aea5b9e151dad82fd0616762/cramjam-2.11.0-cp39-cp39-win_amd64.whl", hash = "sha256:5eb0603d8f8019451fc00e1daf4022dfc9df59c16d2e68f925c77ac94555493b", size = 1710860, upload-time = "2025-07-27T21:24:38.243Z" }, + { url = "https://files.pythonhosted.org/packages/bf/8f/82e35ec3c5387f1864f46b3c24bce89a07af8bb3ef242ae47281db2c1848/cramjam-2.11.0-pp310-pypy310_pp73-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:37bed927abc4a7ae2d2669baa3675e21904d8a038ed8e4313326ea7b3be62b2b", size = 3573104, upload-time = "2025-07-27T21:24:40.069Z" }, + { url = "https://files.pythonhosted.org/packages/f0/4e/0c821918080a32ba1e52c040e12dd02dada67728f07305c5f778b808a807/cramjam-2.11.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:50e4a58635fa8c6897d84847d6e065eb69f92811670fc5e9f2d9e3b6279a02b6", size = 1873441, upload-time = "2025-07-27T21:24:42.333Z" }, + { url = "https://files.pythonhosted.org/packages/a8/fd/848d077bf6abc4ce84273d8e3f3a70d61a2240519a339462f699d8acf829/cramjam-2.11.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:3d1ba626dd5f81f7f09bbf59f70b534e2b75e0d6582b056b7bd31b397f1c13e9", size = 1702589, upload-time = "2025-07-27T21:24:44.305Z" }, + { url = "https://files.pythonhosted.org/packages/9d/1c/899818999bbdb59c601756b413e87d37fd65875d1315346c10e367bb3505/cramjam-2.11.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c71e140d5eb3145d61d59d0be0bf72f07cc4cf4b32cb136b09f712a3b1040f5f", size = 1773646, upload-time = "2025-07-27T21:24:46.495Z" }, + { url = "https://files.pythonhosted.org/packages/5f/26/c2813c5422c43b3dcd8b6645bc359f08870737c44325ee4accc18f24eee0/cramjam-2.11.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a6ed7926a5cca28edebad7d0fedd2ad492710ae3524d25fc59a2b20546d9ce1", size = 1994179, upload-time = "2025-07-27T21:24:49.131Z" }, + { url = "https://files.pythonhosted.org/packages/2e/4f/af984f8d7f963f0301812cdd620ddcfd8276461ed7a786c0f89e82b14739/cramjam-2.11.0-pp310-pypy310_pp73-win_amd64.whl", hash = 
"sha256:5eb4ed3cea945b164b0513fd491884993acac2153a27b93a84019c522e8eda82", size = 1714790, upload-time = "2025-07-27T21:24:51.045Z" }, + { url = "https://files.pythonhosted.org/packages/81/da/b3301962ccd6fce9fefa1ecd8ea479edaeaa38fadb1f34d5391d2587216a/cramjam-2.11.0-pp311-pypy311_pp73-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:52d5db3369f95b27b9f3c14d067acb0b183333613363ed34268c9e04560f997f", size = 3573546, upload-time = "2025-07-27T21:24:52.944Z" }, + { url = "https://files.pythonhosted.org/packages/b6/c2/410ddb8ad4b9dfb129284666293cb6559479645da560f7077dc19d6bee9e/cramjam-2.11.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:4820516366d455b549a44d0e2210ee7c4575882dda677564ce79092588321d54", size = 1873654, upload-time = "2025-07-27T21:24:54.958Z" }, + { url = "https://files.pythonhosted.org/packages/d5/99/f68a443c64f7ce7aff5bed369b0aa5b2fac668fa3dfd441837e316e97a1f/cramjam-2.11.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d9e5db525dc0a950a825202f84ee68d89a072479e07da98795a3469df942d301", size = 1702846, upload-time = "2025-07-27T21:24:57.124Z" }, + { url = "https://files.pythonhosted.org/packages/6c/02/0ff358ab773def1ee3383587906c453d289953171e9c92db84fdd01bf172/cramjam-2.11.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62ab4971199b2270005359cdc379bc5736071dc7c9a228581c5122d9ffaac50c", size = 1773683, upload-time = "2025-07-27T21:24:59.28Z" }, + { url = "https://files.pythonhosted.org/packages/e9/31/3298e15f87c9cf2aabdbdd90b153d8644cf989cb42a45d68a1b71e1f7aaf/cramjam-2.11.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24758375cc5414d3035ca967ebb800e8f24604ececcba3c67d6f0218201ebf2d", size = 1994136, upload-time = "2025-07-27T21:25:01.565Z" }, + { url = "https://files.pythonhosted.org/packages/c7/90/20d1747255f1ee69a412e319da51ea594c18cca195e7a4d4c713f045eff5/cramjam-2.11.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6c2eea545fef1065c7dd4eda991666fd9c783fbc1d226592ccca8d8891c02f23", size = 1714982, upload-time = "2025-07-27T21:25:05.79Z" }, +] + +[[package]] +name = "cryptography" +version = "46.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a9/62/e3664e6ffd7743e1694b244dde70b43a394f6f7fbcacf7014a8ff5197c73/cryptography-46.0.1.tar.gz", hash = "sha256:ed570874e88f213437f5cf758f9ef26cbfc3f336d889b1e592ee11283bb8d1c7", size = 749198, upload-time = "2025-09-17T00:10:35.797Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4c/8c/44ee01267ec01e26e43ebfdae3f120ec2312aa72fa4c0507ebe41a26739f/cryptography-46.0.1-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:1cd6d50c1a8b79af1a6f703709d8973845f677c8e97b1268f5ff323d38ce8475", size = 7285044, upload-time = "2025-09-17T00:08:36.807Z" }, + { url = "https://files.pythonhosted.org/packages/22/59/9ae689a25047e0601adfcb159ec4f83c0b4149fdb5c3030cc94cd218141d/cryptography-46.0.1-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0ff483716be32690c14636e54a1f6e2e1b7bf8e22ca50b989f88fa1b2d287080", size = 4308182, upload-time = "2025-09-17T00:08:39.388Z" }, + { url = 
"https://files.pythonhosted.org/packages/c4/ee/ca6cc9df7118f2fcd142c76b1da0f14340d77518c05b1ebfbbabca6b9e7d/cryptography-46.0.1-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9873bf7c1f2a6330bdfe8621e7ce64b725784f9f0c3a6a55c3047af5849f920e", size = 4572393, upload-time = "2025-09-17T00:08:41.663Z" }, + { url = "https://files.pythonhosted.org/packages/7f/a3/0f5296f63815d8e985922b05c31f77ce44787b3127a67c0b7f70f115c45f/cryptography-46.0.1-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:0dfb7c88d4462a0cfdd0d87a3c245a7bc3feb59de101f6ff88194f740f72eda6", size = 4308400, upload-time = "2025-09-17T00:08:43.559Z" }, + { url = "https://files.pythonhosted.org/packages/5d/8c/74fcda3e4e01be1d32775d5b4dd841acaac3c1b8fa4d0774c7ac8d52463d/cryptography-46.0.1-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e22801b61613ebdebf7deb18b507919e107547a1d39a3b57f5f855032dd7cfb8", size = 4015786, upload-time = "2025-09-17T00:08:45.758Z" }, + { url = "https://files.pythonhosted.org/packages/dc/b8/85d23287baeef273b0834481a3dd55bbed3a53587e3b8d9f0898235b8f91/cryptography-46.0.1-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:757af4f6341ce7a1e47c326ca2a81f41d236070217e5fbbad61bbfe299d55d28", size = 4982606, upload-time = "2025-09-17T00:08:47.602Z" }, + { url = "https://files.pythonhosted.org/packages/e5/d3/de61ad5b52433b389afca0bc70f02a7a1f074651221f599ce368da0fe437/cryptography-46.0.1-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f7a24ea78de345cfa7f6a8d3bde8b242c7fac27f2bd78fa23474ca38dfaeeab9", size = 4604234, upload-time = "2025-09-17T00:08:49.879Z" }, + { url = "https://files.pythonhosted.org/packages/dc/1f/dbd4d6570d84748439237a7478d124ee0134bf166ad129267b7ed8ea6d22/cryptography-46.0.1-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:9e8776dac9e660c22241b6587fae51a67b4b0147daa4d176b172c3ff768ad736", size = 4307669, upload-time = "2025-09-17T00:08:52.321Z" }, + { url = "https://files.pythonhosted.org/packages/ec/fd/ca0a14ce7f0bfe92fa727aacaf2217eb25eb7e4ed513b14d8e03b26e63ed/cryptography-46.0.1-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:9f40642a140c0c8649987027867242b801486865277cbabc8c6059ddef16dc8b", size = 4947579, upload-time = "2025-09-17T00:08:54.697Z" }, + { url = "https://files.pythonhosted.org/packages/89/6b/09c30543bb93401f6f88fce556b3bdbb21e55ae14912c04b7bf355f5f96c/cryptography-46.0.1-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:449ef2b321bec7d97ef2c944173275ebdab78f3abdd005400cc409e27cd159ab", size = 4603669, upload-time = "2025-09-17T00:08:57.16Z" }, + { url = "https://files.pythonhosted.org/packages/23/9a/38cb01cb09ce0adceda9fc627c9cf98eb890fc8d50cacbe79b011df20f8a/cryptography-46.0.1-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2dd339ba3345b908fa3141ddba4025568fa6fd398eabce3ef72a29ac2d73ad75", size = 4435828, upload-time = "2025-09-17T00:08:59.606Z" }, + { url = "https://files.pythonhosted.org/packages/0f/53/435b5c36a78d06ae0bef96d666209b0ecd8f8181bfe4dda46536705df59e/cryptography-46.0.1-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:7411c910fb2a412053cf33cfad0153ee20d27e256c6c3f14d7d7d1d9fec59fd5", size = 4709553, upload-time = "2025-09-17T00:09:01.832Z" }, + { url = "https://files.pythonhosted.org/packages/f5/c4/0da6e55595d9b9cd3b6eb5dc22f3a07ded7f116a3ea72629cab595abb804/cryptography-46.0.1-cp311-abi3-win32.whl", hash = "sha256:cbb8e769d4cac884bb28e3ff620ef1001b75588a5c83c9c9f1fdc9afbe7f29b0", size = 3058327, upload-time = "2025-09-17T00:09:03.726Z" }, + { url = 
"https://files.pythonhosted.org/packages/95/0f/cd29a35e0d6e78a0ee61793564c8cff0929c38391cb0de27627bdc7525aa/cryptography-46.0.1-cp311-abi3-win_amd64.whl", hash = "sha256:92e8cfe8bd7dd86eac0a677499894862cd5cc2fd74de917daa881d00871ac8e7", size = 3523893, upload-time = "2025-09-17T00:09:06.272Z" }, + { url = "https://files.pythonhosted.org/packages/f2/dd/eea390f3e78432bc3d2f53952375f8b37cb4d37783e626faa6a51e751719/cryptography-46.0.1-cp311-abi3-win_arm64.whl", hash = "sha256:db5597a4c7353b2e5fb05a8e6cb74b56a4658a2b7bf3cb6b1821ae7e7fd6eaa0", size = 2932145, upload-time = "2025-09-17T00:09:08.568Z" }, + { url = "https://files.pythonhosted.org/packages/0a/fb/c73588561afcd5e24b089952bd210b14676c0c5bf1213376350ae111945c/cryptography-46.0.1-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:4c49eda9a23019e11d32a0eb51a27b3e7ddedde91e099c0ac6373e3aacc0d2ee", size = 7193928, upload-time = "2025-09-17T00:09:10.595Z" }, + { url = "https://files.pythonhosted.org/packages/26/34/0ff0bb2d2c79f25a2a63109f3b76b9108a906dd2a2eb5c1d460b9938adbb/cryptography-46.0.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9babb7818fdd71394e576cf26c5452df77a355eac1a27ddfa24096665a27f8fd", size = 4293515, upload-time = "2025-09-17T00:09:12.861Z" }, + { url = "https://files.pythonhosted.org/packages/df/b7/d4f848aee24ecd1be01db6c42c4a270069a4f02a105d9c57e143daf6cf0f/cryptography-46.0.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9f2c4cc63be3ef43c0221861177cee5d14b505cd4d4599a89e2cd273c4d3542a", size = 4545619, upload-time = "2025-09-17T00:09:15.397Z" }, + { url = "https://files.pythonhosted.org/packages/44/a5/42fedefc754fd1901e2d95a69815ea4ec8a9eed31f4c4361fcab80288661/cryptography-46.0.1-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:41c281a74df173876da1dc9a9b6953d387f06e3d3ed9284e3baae3ab3f40883a", size = 4299160, upload-time = "2025-09-17T00:09:17.155Z" }, + { url = "https://files.pythonhosted.org/packages/86/a1/cd21174f56e769c831fbbd6399a1b7519b0ff6280acec1b826d7b072640c/cryptography-46.0.1-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0a17377fa52563d730248ba1f68185461fff36e8bc75d8787a7dd2e20a802b7a", size = 3994491, upload-time = "2025-09-17T00:09:18.971Z" }, + { url = "https://files.pythonhosted.org/packages/8d/2f/a8cbfa1c029987ddc746fd966711d4fa71efc891d37fbe9f030fe5ab4eec/cryptography-46.0.1-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:0d1922d9280e08cde90b518a10cd66831f632960a8d08cb3418922d83fce6f12", size = 4960157, upload-time = "2025-09-17T00:09:20.923Z" }, + { url = "https://files.pythonhosted.org/packages/67/ae/63a84e6789e0d5a2502edf06b552bcb0fa9ff16147265d5c44a211942abe/cryptography-46.0.1-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:af84e8e99f1a82cea149e253014ea9dc89f75b82c87bb6c7242203186f465129", size = 4577263, upload-time = "2025-09-17T00:09:23.356Z" }, + { url = "https://files.pythonhosted.org/packages/ef/8f/1b9fa8e92bd9cbcb3b7e1e593a5232f2c1e6f9bd72b919c1a6b37d315f92/cryptography-46.0.1-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:ef648d2c690703501714588b2ba640facd50fd16548133b11b2859e8655a69da", size = 4298703, upload-time = "2025-09-17T00:09:25.566Z" }, + { url = "https://files.pythonhosted.org/packages/c3/af/bb95db070e73fea3fae31d8a69ac1463d89d1c084220f549b00dd01094a8/cryptography-46.0.1-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:e94eb5fa32a8a9f9bf991f424f002913e3dd7c699ef552db9b14ba6a76a6313b", size = 4926363, upload-time = 
"2025-09-17T00:09:27.451Z" }, + { url = "https://files.pythonhosted.org/packages/f5/3b/d8fb17ffeb3a83157a1cc0aa5c60691d062aceecba09c2e5e77ebfc1870c/cryptography-46.0.1-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:534b96c0831855e29fc3b069b085fd185aa5353033631a585d5cd4dd5d40d657", size = 4576958, upload-time = "2025-09-17T00:09:29.924Z" }, + { url = "https://files.pythonhosted.org/packages/d9/46/86bc3a05c10c8aa88c8ae7e953a8b4e407c57823ed201dbcba55c4d655f4/cryptography-46.0.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:f9b55038b5c6c47559aa33626d8ecd092f354e23de3c6975e4bb205df128a2a0", size = 4422507, upload-time = "2025-09-17T00:09:32.222Z" }, + { url = "https://files.pythonhosted.org/packages/a8/4e/387e5a21dfd2b4198e74968a541cfd6128f66f8ec94ed971776e15091ac3/cryptography-46.0.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ec13b7105117dbc9afd023300fb9954d72ca855c274fe563e72428ece10191c0", size = 4683964, upload-time = "2025-09-17T00:09:34.118Z" }, + { url = "https://files.pythonhosted.org/packages/25/a3/f9f5907b166adb8f26762071474b38bbfcf89858a5282f032899075a38a1/cryptography-46.0.1-cp314-cp314t-win32.whl", hash = "sha256:504e464944f2c003a0785b81668fe23c06f3b037e9cb9f68a7c672246319f277", size = 3029705, upload-time = "2025-09-17T00:09:36.381Z" }, + { url = "https://files.pythonhosted.org/packages/12/66/4d3a4f1850db2e71c2b1628d14b70b5e4c1684a1bd462f7fffb93c041c38/cryptography-46.0.1-cp314-cp314t-win_amd64.whl", hash = "sha256:c52fded6383f7e20eaf70a60aeddd796b3677c3ad2922c801be330db62778e05", size = 3502175, upload-time = "2025-09-17T00:09:38.261Z" }, + { url = "https://files.pythonhosted.org/packages/52/c7/9f10ad91435ef7d0d99a0b93c4360bea3df18050ff5b9038c489c31ac2f5/cryptography-46.0.1-cp314-cp314t-win_arm64.whl", hash = "sha256:9495d78f52c804b5ec8878b5b8c7873aa8e63db9cd9ee387ff2db3fffe4df784", size = 2912354, upload-time = "2025-09-17T00:09:40.078Z" }, + { url = "https://files.pythonhosted.org/packages/98/e5/fbd632385542a3311915976f88e0dfcf09e62a3fc0aff86fb6762162a24d/cryptography-46.0.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:d84c40bdb8674c29fa192373498b6cb1e84f882889d21a471b45d1f868d8d44b", size = 7255677, upload-time = "2025-09-17T00:09:42.407Z" }, + { url = "https://files.pythonhosted.org/packages/56/3e/13ce6eab9ad6eba1b15a7bd476f005a4c1b3f299f4c2f32b22408b0edccf/cryptography-46.0.1-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9ed64e5083fa806709e74fc5ea067dfef9090e5b7a2320a49be3c9df3583a2d8", size = 4301110, upload-time = "2025-09-17T00:09:45.614Z" }, + { url = "https://files.pythonhosted.org/packages/a2/67/65dc233c1ddd688073cf7b136b06ff4b84bf517ba5529607c9d79720fc67/cryptography-46.0.1-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:341fb7a26bc9d6093c1b124b9f13acc283d2d51da440b98b55ab3f79f2522ead", size = 4562369, upload-time = "2025-09-17T00:09:47.601Z" }, + { url = "https://files.pythonhosted.org/packages/17/db/d64ae4c6f4e98c3dac5bf35dd4d103f4c7c345703e43560113e5e8e31b2b/cryptography-46.0.1-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6ef1488967e729948d424d09c94753d0167ce59afba8d0f6c07a22b629c557b2", size = 4302126, upload-time = "2025-09-17T00:09:49.335Z" }, + { url = "https://files.pythonhosted.org/packages/3d/19/5f1eea17d4805ebdc2e685b7b02800c4f63f3dd46cfa8d4c18373fea46c8/cryptography-46.0.1-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7823bc7cdf0b747ecfb096d004cc41573c2f5c7e3a29861603a2871b43d3ef32", size = 4009431, upload-time = 
"2025-09-17T00:09:51.239Z" }, + { url = "https://files.pythonhosted.org/packages/81/b5/229ba6088fe7abccbfe4c5edb96c7a5ad547fac5fdd0d40aa6ea540b2985/cryptography-46.0.1-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:f736ab8036796f5a119ff8211deda416f8c15ce03776db704a7a4e17381cb2ef", size = 4980739, upload-time = "2025-09-17T00:09:54.181Z" }, + { url = "https://files.pythonhosted.org/packages/3a/9c/50aa38907b201e74bc43c572f9603fa82b58e831bd13c245613a23cff736/cryptography-46.0.1-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:e46710a240a41d594953012213ea8ca398cd2448fbc5d0f1be8160b5511104a0", size = 4592289, upload-time = "2025-09-17T00:09:56.731Z" }, + { url = "https://files.pythonhosted.org/packages/5a/33/229858f8a5bb22f82468bb285e9f4c44a31978d5f5830bb4ea1cf8a4e454/cryptography-46.0.1-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:84ef1f145de5aee82ea2447224dc23f065ff4cc5791bb3b506615957a6ba8128", size = 4301815, upload-time = "2025-09-17T00:09:58.548Z" }, + { url = "https://files.pythonhosted.org/packages/52/cb/b76b2c87fbd6ed4a231884bea3ce073406ba8e2dae9defad910d33cbf408/cryptography-46.0.1-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:9394c7d5a7565ac5f7d9ba38b2617448eba384d7b107b262d63890079fad77ca", size = 4943251, upload-time = "2025-09-17T00:10:00.475Z" }, + { url = "https://files.pythonhosted.org/packages/94/0f/f66125ecf88e4cb5b8017ff43f3a87ede2d064cb54a1c5893f9da9d65093/cryptography-46.0.1-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:ed957044e368ed295257ae3d212b95456bd9756df490e1ac4538857f67531fcc", size = 4591247, upload-time = "2025-09-17T00:10:02.874Z" }, + { url = "https://files.pythonhosted.org/packages/f6/22/9f3134ae436b63b463cfdf0ff506a0570da6873adb4bf8c19b8a5b4bac64/cryptography-46.0.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:f7de12fa0eee6234de9a9ce0ffcfa6ce97361db7a50b09b65c63ac58e5f22fc7", size = 4428534, upload-time = "2025-09-17T00:10:04.994Z" }, + { url = "https://files.pythonhosted.org/packages/89/39/e6042bcb2638650b0005c752c38ea830cbfbcbb1830e4d64d530000aa8dc/cryptography-46.0.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:7fab1187b6c6b2f11a326f33b036f7168f5b996aedd0c059f9738915e4e8f53a", size = 4699541, upload-time = "2025-09-17T00:10:06.925Z" }, + { url = "https://files.pythonhosted.org/packages/68/46/753d457492d15458c7b5a653fc9a84a1c9c7a83af6ebdc94c3fc373ca6e8/cryptography-46.0.1-cp38-abi3-win32.whl", hash = "sha256:45f790934ac1018adeba46a0f7289b2b8fe76ba774a88c7f1922213a56c98bc1", size = 3043779, upload-time = "2025-09-17T00:10:08.951Z" }, + { url = "https://files.pythonhosted.org/packages/2f/50/b6f3b540c2f6ee712feeb5fa780bb11fad76634e71334718568e7695cb55/cryptography-46.0.1-cp38-abi3-win_amd64.whl", hash = "sha256:7176a5ab56fac98d706921f6416a05e5aff7df0e4b91516f450f8627cda22af3", size = 3517226, upload-time = "2025-09-17T00:10:10.769Z" }, + { url = "https://files.pythonhosted.org/packages/ff/e8/77d17d00981cdd27cc493e81e1749a0b8bbfb843780dbd841e30d7f50743/cryptography-46.0.1-cp38-abi3-win_arm64.whl", hash = "sha256:efc9e51c3e595267ff84adf56e9b357db89ab2279d7e375ffcaf8f678606f3d9", size = 2923149, upload-time = "2025-09-17T00:10:13.236Z" }, + { url = "https://files.pythonhosted.org/packages/14/b9/b260180b31a66859648cfed5c980544ee22b15f8bd20ef82a23f58c0b83e/cryptography-46.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:fd4b5e2ee4e60425711ec65c33add4e7a626adef79d66f62ba0acfd493af282d", size = 3714683, upload-time = "2025-09-17T00:10:15.601Z" }, + { url = 
"https://files.pythonhosted.org/packages/c5/5a/1cd3ef86e5884edcbf8b27c3aa8f9544e9b9fcce5d3ed8b86959741f4f8e/cryptography-46.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:48948940d0ae00483e85e9154bb42997d0b77c21e43a77b7773c8c80de532ac5", size = 3443784, upload-time = "2025-09-17T00:10:18.014Z" }, + { url = "https://files.pythonhosted.org/packages/27/27/077e09fd92075dd1338ea0ffaf5cfee641535545925768350ad90d8c36ca/cryptography-46.0.1-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b9c79af2c3058430d911ff1a5b2b96bbfe8da47d5ed961639ce4681886614e70", size = 3722319, upload-time = "2025-09-17T00:10:20.273Z" }, + { url = "https://files.pythonhosted.org/packages/db/32/6fc7250280920418651640d76cee34d91c1e0601d73acd44364570cf041f/cryptography-46.0.1-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:0ca4be2af48c24df689a150d9cd37404f689e2968e247b6b8ff09bff5bcd786f", size = 4249030, upload-time = "2025-09-17T00:10:22.396Z" }, + { url = "https://files.pythonhosted.org/packages/32/33/8d5398b2da15a15110b2478480ab512609f95b45ead3a105c9a9c76f9980/cryptography-46.0.1-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:13e67c4d3fb8b6bc4ef778a7ccdd8df4cd15b4bcc18f4239c8440891a11245cc", size = 4528009, upload-time = "2025-09-17T00:10:24.418Z" }, + { url = "https://files.pythonhosted.org/packages/fd/1c/4012edad2a8977ab386c36b6e21f5065974d37afa3eade83a9968cba4855/cryptography-46.0.1-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:15b5fd9358803b0d1cc42505a18d8bca81dabb35b5cfbfea1505092e13a9d96d", size = 4248902, upload-time = "2025-09-17T00:10:26.255Z" }, + { url = "https://files.pythonhosted.org/packages/58/a3/257cd5ae677302de8fa066fca9de37128f6729d1e63c04dd6a15555dd450/cryptography-46.0.1-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:e34da95e29daf8a71cb2841fd55df0511539a6cdf33e6f77c1e95e44006b9b46", size = 4527150, upload-time = "2025-09-17T00:10:28.28Z" }, + { url = "https://files.pythonhosted.org/packages/6a/cd/fe6b65e1117ec7631f6be8951d3db076bac3e1b096e3e12710ed071ffc3c/cryptography-46.0.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:34f04b7311174469ab3ac2647469743720f8b6c8b046f238e5cb27905695eb2a", size = 3448210, upload-time = "2025-09-17T00:10:30.145Z" }, +] + +[[package]] +name = "decorator" +version = "5.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/fa/6d96a0978d19e17b68d634497769987b16c8f4cd0a7a05048bec693caa6b/decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360", size = 56711, upload-time = "2025-02-24T04:41:34.073Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4e/8c/f3147f5c4b73e7550fe5f9352eaa956ae838d5c51eb58e7a25b9f3e2643b/decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a", size = 9190, upload-time = "2025-02-24T04:41:32.565Z" }, +] + +[[package]] +name = "dnspython" +version = "2.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b5/4a/263763cb2ba3816dd94b08ad3a33d5fdae34ecb856678773cc40a3605829/dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1", size = 345197, upload-time = "2024-10-05T20:14:59.362Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/68/1b/e0a87d256e40e8c888847551b20a017a6b98139178505dc7ffb96f04e954/dnspython-2.7.0-py3-none-any.whl", hash = 
"sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86", size = 313632, upload-time = "2024-10-05T20:14:57.687Z" }, +] + +[[package]] +name = "docutils" +version = "0.21.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ae/ed/aefcc8cd0ba62a0560c3c18c33925362d46c6075480bfa4df87b28e169a9/docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f", size = 2204444, upload-time = "2024-04-23T18:57:18.24Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8f/d7/9322c609343d929e75e7e5e6255e614fcc67572cfd083959cdef3b7aad79/docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2", size = 587408, upload-time = "2024-04-23T18:57:14.835Z" }, +] + +[[package]] +name = "exceptiongroup" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749, upload-time = "2025-05-10T17:42:51.123Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", size = 16674, upload-time = "2025-05-10T17:42:49.33Z" }, +] + +[[package]] +name = "furo" +version = "2025.9.25" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "accessible-pygments" }, + { name = "beautifulsoup4" }, + { name = "pygments" }, + { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "sphinx-basic-ng" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4e/29/ff3b83a1ffce74676043ab3e7540d398e0b1ce7660917a00d7c4958b93da/furo-2025.9.25.tar.gz", hash = "sha256:3eac05582768fdbbc2bdfa1cdbcdd5d33cfc8b4bd2051729ff4e026a1d7e0a98", size = 1662007, upload-time = "2025-09-25T21:37:19.221Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ba/69/964b55f389c289e16ba2a5dfe587c3c462aac09e24123f09ddf703889584/furo-2025.9.25-py3-none-any.whl", hash = "sha256:2937f68e823b8e37b410c972c371bc2b1d88026709534927158e0cb3fac95afe", size = 340409, upload-time = "2025-09-25T21:37:17.244Z" }, +] + +[[package]] +name = "gevent" +version = "25.5.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "platform_python_implementation == 'CPython' and sys_platform == 'win32'" }, + { name = "greenlet", marker = "platform_python_implementation == 'CPython'" }, + { name = "zope-event" }, + { name = "zope-interface" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f1/58/267e8160aea00ab00acd2de97197eecfe307064a376fb5c892870a8a6159/gevent-25.5.1.tar.gz", hash = "sha256:582c948fa9a23188b890d0bc130734a506d039a2e5ad87dae276a456cc683e61", size = 6388207, upload-time = 
"2025-05-12T12:57:59.833Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/44/a7/438568c37fb255f80e710318bfcad04731b92ce764bc16adee278fdc6b4d/gevent-25.5.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:8e5a0fab5e245b15ec1005b3666b0a2e867c26f411c8fe66ae1afe07174a30e9", size = 2922800, upload-time = "2025-05-12T11:11:46.728Z" }, + { url = "https://files.pythonhosted.org/packages/5d/b3/b44d8b1c4a4d01097a7f82ffbc582d054007365c27b28867f0b2d4241d73/gevent-25.5.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7b80a37f2fb45ee4a8f7e64b77dd8a842d364384046e394227b974a4e9c9a52", size = 1812954, upload-time = "2025-05-12T11:52:27.059Z" }, + { url = "https://files.pythonhosted.org/packages/1e/c6/935b4c973ad827c9ec49c354d68d047da1d23e3018bda63d3723cce43178/gevent-25.5.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29ab729d50ae85077a68e0385f129f5b01052d01a0ae6d7fdc1824f5337905e4", size = 1900169, upload-time = "2025-05-12T11:54:17.797Z" }, + { url = "https://files.pythonhosted.org/packages/38/8a/b745bddfec35fb723cafb036f191e5e0a0013f1698bf0ba4fa2cb8e01879/gevent-25.5.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80d20592aeabcc4e294fd441fd43d45cb537437fd642c374ea9d964622fad229", size = 1849786, upload-time = "2025-05-12T12:00:01.962Z" }, + { url = "https://files.pythonhosted.org/packages/7c/b3/7aa7b09d91207bebe7608699558bbadd34f63e32904351867c29f8be25de/gevent-25.5.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8ba0257542ccbb72a8229dc34d00844ccdfba110417e4b7b34599548d0e20e9", size = 2139021, upload-time = "2025-05-12T11:32:58.961Z" }, + { url = "https://files.pythonhosted.org/packages/74/da/cf52ae0c84361f4164a04f3338508b1234331ce79719db103e50dbc5598c/gevent-25.5.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cad0821dff998c7c60dd238f92cd61380342c47fb9e92e1a8705d9b5ac7c16e8", size = 1830758, upload-time = "2025-05-12T11:59:55.666Z" }, + { url = "https://files.pythonhosted.org/packages/93/93/73a49b896d78eec27f0895ce3008f9825db748a5aacbca47404d1014da4b/gevent-25.5.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:017a7384c0cd1a5907751c991535a0699596e89725468a7fc39228312e10efa1", size = 2199993, upload-time = "2025-05-12T11:40:50.845Z" }, + { url = "https://files.pythonhosted.org/packages/df/c7/34680b7d2a75492fa032fa8ecaacc03c1940767a35125f6740954a0132a3/gevent-25.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:469c86d02fccad7e2a3d82fe22237e47ecb376fbf4710bc18747b49c50716817", size = 1652665, upload-time = "2025-05-12T12:35:58.105Z" }, + { url = "https://files.pythonhosted.org/packages/c6/eb/015e93f16a718e2f836ecebecae9bcd7b4d2a5695d1c8bd5bba2d5d91548/gevent-25.5.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:12380aba5c316e9ff53cc21d8ab80f4a91c0df3ada58f65d4f5eb2cf693db00e", size = 2877441, upload-time = "2025-05-12T11:14:57.735Z" }, + { url = "https://files.pythonhosted.org/packages/7b/86/42d191a6f6672ca59d6d79b4cd9b89d4a15f59c843fbbad42f2b749f8ea9/gevent-25.5.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f0694daab1a041b69a53f53c2141c12994892b2503870515cabe6a5dbd2a928", size = 1774873, upload-time = "2025-05-12T11:52:29.015Z" }, + { url = "https://files.pythonhosted.org/packages/f5/9f/42dd255849c9ca2e814f5cbe180980594007ba19044a132cf674069e38bf/gevent-25.5.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:2797885e9aeffdc98e1846723e5aa212e7ce53007dbef40d6fd2add264235c41", size = 1857911, upload-time = "2025-05-12T11:54:19.523Z" }, + { url = "https://files.pythonhosted.org/packages/3e/fc/8e799a733be48f6114bfc531b94e28812741664d8af89872dd90e117f8a4/gevent-25.5.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cde6aaac36b54332e10ea2a5bc0de6a8aba6c205c92603fe4396e3777c88e05d", size = 1812751, upload-time = "2025-05-12T12:00:03.719Z" }, + { url = "https://files.pythonhosted.org/packages/52/4f/a3f3acd961887da10cb0b49c3d915201973d59ce6bf49e2922eaf2058d5f/gevent-25.5.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24484f80f14befb8822bf29554cfb3a26a26cb69cd1e5a8be9e23b4bd7a96e25", size = 2087115, upload-time = "2025-05-12T11:33:01.128Z" }, + { url = "https://files.pythonhosted.org/packages/b6/27/bb38e005106a53787c13ad1f9f73ed990e403e462108acae6320ab11d442/gevent-25.5.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8fdc7446895fa184890d8ca5ea61e502691114f9db55c9b76adc33f3086c4368", size = 1793549, upload-time = "2025-05-12T11:59:57.854Z" }, + { url = "https://files.pythonhosted.org/packages/ee/56/da817bc69e1f0ae8438f12f2cd150656b09a8c3576c6d12f992dc9ca64ef/gevent-25.5.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5b6106e2414b1797133786258fa1962a5e836480e4d5e861577f9fc63b673a5a", size = 2145899, upload-time = "2025-05-12T11:40:53.275Z" }, + { url = "https://files.pythonhosted.org/packages/b8/42/989403abbdbb1346a1507083c02018bee3fedaef3f9648940c767d8c0958/gevent-25.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:bc899212d90f311784c58938a9c09c59802fb6dc287a35fabdc36d180f57f575", size = 1635771, upload-time = "2025-05-12T12:26:47.644Z" }, + { url = "https://files.pythonhosted.org/packages/58/c5/cf71423666a0b83db3d7e3f85788bc47d573fca5fe62b798fe2c4273de7c/gevent-25.5.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:d87c0a1bd809d8f70f96b9b229779ec6647339830b8888a192beed33ac8d129f", size = 2909333, upload-time = "2025-05-12T11:11:34.883Z" }, + { url = "https://files.pythonhosted.org/packages/26/7e/d2f174ee8bec6eb85d961ca203bc599d059c857b8412e367b8fa206603a5/gevent-25.5.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b87a4b66edb3808d4d07bbdb0deed5a710cf3d3c531e082759afd283758bb649", size = 1788420, upload-time = "2025-05-12T11:52:30.306Z" }, + { url = "https://files.pythonhosted.org/packages/fe/f3/3aba8c147b9108e62ba348c726fe38ae69735a233db425565227336e8ce6/gevent-25.5.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f076779050029a82feb0cb1462021d3404d22f80fa76a181b1a7889cd4d6b519", size = 1868854, upload-time = "2025-05-12T11:54:21.564Z" }, + { url = "https://files.pythonhosted.org/packages/c6/b1/11a5453f8fcebe90a456471fad48bd154c6a62fcb96e3475a5e408d05fc8/gevent-25.5.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bb673eb291c19370f69295f7a881a536451408481e2e3deec3f41dedb7c281ec", size = 1833946, upload-time = "2025-05-12T12:00:05.514Z" }, + { url = "https://files.pythonhosted.org/packages/70/1c/37d4a62303f86e6af67660a8df38c1171b7290df61b358e618c6fea79567/gevent-25.5.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1325ed44225c8309c0dd188bdbbbee79e1df8c11ceccac226b861c7d52e4837", size = 2070583, upload-time = "2025-05-12T11:33:02.803Z" }, + { url = 
"https://files.pythonhosted.org/packages/4b/8f/3b14929ff28263aba1d268ea97bcf104be1a86ba6f6bb4633838e7a1905e/gevent-25.5.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:fcd5bcad3102bde686d0adcc341fade6245186050ce14386d547ccab4bd54310", size = 1808341, upload-time = "2025-05-12T11:59:59.154Z" }, + { url = "https://files.pythonhosted.org/packages/2f/fc/674ec819fb8a96e482e4d21f8baa43d34602dba09dfce7bbdc8700899d1b/gevent-25.5.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1a93062609e8fa67ec97cd5fb9206886774b2a09b24887f40148c9c37e6fb71c", size = 2137974, upload-time = "2025-05-12T11:40:54.78Z" }, + { url = "https://files.pythonhosted.org/packages/05/9a/048b7f5e28c54e4595ad4a8ad3c338fa89560e558db2bbe8273f44f030de/gevent-25.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:2534c23dc32bed62b659ed4fd9e198906179e68b26c9276a897e04163bdde806", size = 1638344, upload-time = "2025-05-12T12:08:31.776Z" }, + { url = "https://files.pythonhosted.org/packages/10/25/2162b38d7b48e08865db6772d632bd1648136ce2bb50e340565e45607cad/gevent-25.5.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:a022a9de9275ce0b390b7315595454258c525dc8287a03f1a6cacc5878ab7cbc", size = 2928044, upload-time = "2025-05-12T11:11:36.33Z" }, + { url = "https://files.pythonhosted.org/packages/1b/e0/dbd597a964ed00176da122ea759bf2a6c1504f1e9f08e185379f92dc355f/gevent-25.5.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3fae8533f9d0ef3348a1f503edcfb531ef7a0236b57da1e24339aceb0ce52922", size = 1788751, upload-time = "2025-05-12T11:52:32.643Z" }, + { url = "https://files.pythonhosted.org/packages/f1/74/960cc4cf4c9c90eafbe0efc238cdf588862e8e278d0b8c0d15a0da4ed480/gevent-25.5.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c7b32d9c3b5294b39ea9060e20c582e49e1ec81edbfeae6cf05f8ad0829cb13d", size = 1869766, upload-time = "2025-05-12T11:54:23.903Z" }, + { url = "https://files.pythonhosted.org/packages/56/78/fa84b1c7db79b156929685db09a7c18c3127361dca18a09e998e98118506/gevent-25.5.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b95815fe44f318ebbfd733b6428b4cb18cc5e68f1c40e8501dd69cc1f42a83d", size = 1835358, upload-time = "2025-05-12T12:00:06.794Z" }, + { url = "https://files.pythonhosted.org/packages/00/5c/bfefe3822bbca5b83bfad256c82251b3f5be13d52d14e17a786847b9b625/gevent-25.5.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d316529b70d325b183b2f3f5cde958911ff7be12eb2b532b5c301f915dbbf1e", size = 2073071, upload-time = "2025-05-12T11:33:04.2Z" }, + { url = "https://files.pythonhosted.org/packages/20/e4/08a77a3839a37db96393dea952e992d5846a881b887986dde62ead6b48a1/gevent-25.5.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f6ba33c13db91ffdbb489a4f3d177a261ea1843923e1d68a5636c53fe98fa5ce", size = 1809805, upload-time = "2025-05-12T12:00:00.537Z" }, + { url = "https://files.pythonhosted.org/packages/2b/ac/28848348f790c1283df74b0fc0a554271d0606676470f848eccf84eae42a/gevent-25.5.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:37ee34b77c7553777c0b8379915f75934c3f9c8cd32f7cd098ea43c9323c2276", size = 2138305, upload-time = "2025-05-12T11:40:56.566Z" }, + { url = "https://files.pythonhosted.org/packages/52/9e/0e9e40facd2d714bfb00f71fc6dacaacc82c24c1c2e097bf6461e00dec9f/gevent-25.5.1-cp313-cp313-win_amd64.whl", hash = "sha256:9fa6aa0da224ed807d3b76cdb4ee8b54d4d4d5e018aed2478098e685baae7896", size = 1637444, upload-time = "2025-05-12T12:17:45.995Z" }, + { url = 
"https://files.pythonhosted.org/packages/60/16/b71171e97ec7b4ded8669542f4369d88d5a289e2704efbbde51e858e062a/gevent-25.5.1-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:0bacf89a65489d26c7087669af89938d5bfd9f7afb12a07b57855b9fad6ccbd0", size = 2937113, upload-time = "2025-05-12T11:12:03.191Z" }, + { url = "https://files.pythonhosted.org/packages/c7/54/e5908beb092c2745aa8390f15b9559cc3ebd77bf1ba71c81c606f7b1fb92/gevent-25.5.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e30169ef9cc0a57930bfd8fe14d86bc9d39fb96d278e3891e85cbe7b46058a97", size = 2147450, upload-time = "2025-05-12T11:33:05.883Z" }, + { url = "https://files.pythonhosted.org/packages/ee/39/206c9da2395a7df11c13e2989f7c7c65a7799babdb8b4b055cccae4d5c14/gevent-25.5.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e72ad5f8d9c92df017fb91a1f6a438cfb63b0eff4b40904ff81b40cb8150078c", size = 2210122, upload-time = "2025-05-12T11:40:58.063Z" }, + { url = "https://files.pythonhosted.org/packages/c0/18/d10ca3841b686143c1973cac816651a72ff77ad9e79a5300cbbbe310fced/gevent-25.5.1-cp39-cp39-win32.whl", hash = "sha256:e5f358e81e27b1a7f2fb2f5219794e13ab5f59ce05571aa3877cfac63adb97db", size = 1548447, upload-time = "2025-05-12T12:48:21.565Z" }, + { url = "https://files.pythonhosted.org/packages/ac/9d/48c01ff8324ce4bfaba0760c0f1db6f4e2c976838655f6b80333cfd47999/gevent-25.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:b83aff2441c7d4ee93e519989713b7c2607d4510abe990cd1d04f641bc6c03af", size = 1659832, upload-time = "2025-05-12T12:45:00.794Z" }, + { url = "https://files.pythonhosted.org/packages/11/81/834da3c1ea5e71e4dc1a78a034a15f2813d9760d135464aae5d1f058a8c6/gevent-25.5.1-pp310-pypy310_pp73-macosx_11_0_universal2.whl", hash = "sha256:60ad4ca9ca2c4cc8201b607c229cd17af749831e371d006d8a91303bb5568eb1", size = 1291540, upload-time = "2025-05-12T11:11:55.456Z" }, +] + +[[package]] +name = "greenlet" +version = "3.2.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c9/92/bb85bd6e80148a4d2e0c59f7c0c2891029f8fd510183afc7d8d2feeed9b6/greenlet-3.2.3.tar.gz", hash = "sha256:8b0dd8ae4c0d6f5e54ee55ba935eeb3d735a9b58a8a1e5b5cbab64e01a39f365", size = 185752, upload-time = "2025-06-05T16:16:09.955Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/92/db/b4c12cff13ebac2786f4f217f06588bccd8b53d260453404ef22b121fc3a/greenlet-3.2.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:1afd685acd5597349ee6d7a88a8bec83ce13c106ac78c196ee9dde7c04fe87be", size = 268977, upload-time = "2025-06-05T16:10:24.001Z" }, + { url = "https://files.pythonhosted.org/packages/52/61/75b4abd8147f13f70986df2801bf93735c1bd87ea780d70e3b3ecda8c165/greenlet-3.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:761917cac215c61e9dc7324b2606107b3b292a8349bdebb31503ab4de3f559ac", size = 627351, upload-time = "2025-06-05T16:38:50.685Z" }, + { url = "https://files.pythonhosted.org/packages/35/aa/6894ae299d059d26254779a5088632874b80ee8cf89a88bca00b0709d22f/greenlet-3.2.3-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:a433dbc54e4a37e4fff90ef34f25a8c00aed99b06856f0119dcf09fbafa16392", size = 638599, upload-time = "2025-06-05T16:41:34.057Z" }, + { url = "https://files.pythonhosted.org/packages/30/64/e01a8261d13c47f3c082519a5e9dbf9e143cc0498ed20c911d04e54d526c/greenlet-3.2.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:72e77ed69312bab0434d7292316d5afd6896192ac4327d44f3d613ecb85b037c", size = 634482, 
upload-time = "2025-06-05T16:48:16.26Z" }, + { url = "https://files.pythonhosted.org/packages/47/48/ff9ca8ba9772d083a4f5221f7b4f0ebe8978131a9ae0909cf202f94cd879/greenlet-3.2.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:68671180e3849b963649254a882cd544a3c75bfcd2c527346ad8bb53494444db", size = 633284, upload-time = "2025-06-05T16:13:01.599Z" }, + { url = "https://files.pythonhosted.org/packages/e9/45/626e974948713bc15775b696adb3eb0bd708bec267d6d2d5c47bb47a6119/greenlet-3.2.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:49c8cfb18fb419b3d08e011228ef8a25882397f3a859b9fe1436946140b6756b", size = 582206, upload-time = "2025-06-05T16:12:48.51Z" }, + { url = "https://files.pythonhosted.org/packages/b1/8e/8b6f42c67d5df7db35b8c55c9a850ea045219741bb14416255616808c690/greenlet-3.2.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:efc6dc8a792243c31f2f5674b670b3a95d46fa1c6a912b8e310d6f542e7b0712", size = 1111412, upload-time = "2025-06-05T16:36:45.479Z" }, + { url = "https://files.pythonhosted.org/packages/05/46/ab58828217349500a7ebb81159d52ca357da747ff1797c29c6023d79d798/greenlet-3.2.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:731e154aba8e757aedd0781d4b240f1225b075b4409f1bb83b05ff410582cf00", size = 1135054, upload-time = "2025-06-05T16:12:36.478Z" }, + { url = "https://files.pythonhosted.org/packages/68/7f/d1b537be5080721c0f0089a8447d4ef72839039cdb743bdd8ffd23046e9a/greenlet-3.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:96c20252c2f792defe9a115d3287e14811036d51e78b3aaddbee23b69b216302", size = 296573, upload-time = "2025-06-05T16:34:26.521Z" }, + { url = "https://files.pythonhosted.org/packages/fc/2e/d4fcb2978f826358b673f779f78fa8a32ee37df11920dc2bb5589cbeecef/greenlet-3.2.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:784ae58bba89fa1fa5733d170d42486580cab9decda3484779f4759345b29822", size = 270219, upload-time = "2025-06-05T16:10:10.414Z" }, + { url = "https://files.pythonhosted.org/packages/16/24/929f853e0202130e4fe163bc1d05a671ce8dcd604f790e14896adac43a52/greenlet-3.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0921ac4ea42a5315d3446120ad48f90c3a6b9bb93dd9b3cf4e4d84a66e42de83", size = 630383, upload-time = "2025-06-05T16:38:51.785Z" }, + { url = "https://files.pythonhosted.org/packages/d1/b2/0320715eb61ae70c25ceca2f1d5ae620477d246692d9cc284c13242ec31c/greenlet-3.2.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:d2971d93bb99e05f8c2c0c2f4aa9484a18d98c4c3bd3c62b65b7e6ae33dfcfaf", size = 642422, upload-time = "2025-06-05T16:41:35.259Z" }, + { url = "https://files.pythonhosted.org/packages/bd/49/445fd1a210f4747fedf77615d941444349c6a3a4a1135bba9701337cd966/greenlet-3.2.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:c667c0bf9d406b77a15c924ef3285e1e05250948001220368e039b6aa5b5034b", size = 638375, upload-time = "2025-06-05T16:48:18.235Z" }, + { url = "https://files.pythonhosted.org/packages/7e/c8/ca19760cf6eae75fa8dc32b487e963d863b3ee04a7637da77b616703bc37/greenlet-3.2.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:592c12fb1165be74592f5de0d70f82bc5ba552ac44800d632214b76089945147", size = 637627, upload-time = "2025-06-05T16:13:02.858Z" }, + { url = "https://files.pythonhosted.org/packages/65/89/77acf9e3da38e9bcfca881e43b02ed467c1dedc387021fc4d9bd9928afb8/greenlet-3.2.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:29e184536ba333003540790ba29829ac14bb645514fbd7e32af331e8202a62a5", size = 585502, upload-time = "2025-06-05T16:12:49.642Z" }, + { url = "https://files.pythonhosted.org/packages/97/c6/ae244d7c95b23b7130136e07a9cc5aadd60d59b5951180dc7dc7e8edaba7/greenlet-3.2.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:93c0bb79844a367782ec4f429d07589417052e621aa39a5ac1fb99c5aa308edc", size = 1114498, upload-time = "2025-06-05T16:36:46.598Z" }, + { url = "https://files.pythonhosted.org/packages/89/5f/b16dec0cbfd3070658e0d744487919740c6d45eb90946f6787689a7efbce/greenlet-3.2.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:751261fc5ad7b6705f5f76726567375bb2104a059454e0226e1eef6c756748ba", size = 1139977, upload-time = "2025-06-05T16:12:38.262Z" }, + { url = "https://files.pythonhosted.org/packages/66/77/d48fb441b5a71125bcac042fc5b1494c806ccb9a1432ecaa421e72157f77/greenlet-3.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:83a8761c75312361aa2b5b903b79da97f13f556164a7dd2d5448655425bd4c34", size = 297017, upload-time = "2025-06-05T16:25:05.225Z" }, + { url = "https://files.pythonhosted.org/packages/f3/94/ad0d435f7c48debe960c53b8f60fb41c2026b1d0fa4a99a1cb17c3461e09/greenlet-3.2.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:25ad29caed5783d4bd7a85c9251c651696164622494c00802a139c00d639242d", size = 271992, upload-time = "2025-06-05T16:11:23.467Z" }, + { url = "https://files.pythonhosted.org/packages/93/5d/7c27cf4d003d6e77749d299c7c8f5fd50b4f251647b5c2e97e1f20da0ab5/greenlet-3.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:88cd97bf37fe24a6710ec6a3a7799f3f81d9cd33317dcf565ff9950c83f55e0b", size = 638820, upload-time = "2025-06-05T16:38:52.882Z" }, + { url = "https://files.pythonhosted.org/packages/c6/7e/807e1e9be07a125bb4c169144937910bf59b9d2f6d931578e57f0bce0ae2/greenlet-3.2.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:baeedccca94880d2f5666b4fa16fc20ef50ba1ee353ee2d7092b383a243b0b0d", size = 653046, upload-time = "2025-06-05T16:41:36.343Z" }, + { url = "https://files.pythonhosted.org/packages/9d/ab/158c1a4ea1068bdbc78dba5a3de57e4c7aeb4e7fa034320ea94c688bfb61/greenlet-3.2.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:be52af4b6292baecfa0f397f3edb3c6092ce071b499dd6fe292c9ac9f2c8f264", size = 647701, upload-time = "2025-06-05T16:48:19.604Z" }, + { url = "https://files.pythonhosted.org/packages/cc/0d/93729068259b550d6a0288da4ff72b86ed05626eaf1eb7c0d3466a2571de/greenlet-3.2.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0cc73378150b8b78b0c9fe2ce56e166695e67478550769536a6742dca3651688", size = 649747, upload-time = "2025-06-05T16:13:04.628Z" }, + { url = "https://files.pythonhosted.org/packages/f6/f6/c82ac1851c60851302d8581680573245c8fc300253fc1ff741ae74a6c24d/greenlet-3.2.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:706d016a03e78df129f68c4c9b4c4f963f7d73534e48a24f5f5a7101ed13dbbb", size = 605461, upload-time = "2025-06-05T16:12:50.792Z" }, + { url = "https://files.pythonhosted.org/packages/98/82/d022cf25ca39cf1200650fc58c52af32c90f80479c25d1cbf57980ec3065/greenlet-3.2.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:419e60f80709510c343c57b4bb5a339d8767bf9aef9b8ce43f4f143240f88b7c", size = 1121190, upload-time = "2025-06-05T16:36:48.59Z" }, + { url = "https://files.pythonhosted.org/packages/f5/e1/25297f70717abe8104c20ecf7af0a5b82d2f5a980eb1ac79f65654799f9f/greenlet-3.2.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:93d48533fade144203816783373f27a97e4193177ebaaf0fc396db19e5d61163", size = 1149055, upload-time = "2025-06-05T16:12:40.457Z" }, + { url = "https://files.pythonhosted.org/packages/1f/8f/8f9e56c5e82eb2c26e8cde787962e66494312dc8cb261c460e1f3a9c88bc/greenlet-3.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:7454d37c740bb27bdeddfc3f358f26956a07d5220818ceb467a483197d84f849", size = 297817, upload-time = "2025-06-05T16:29:49.244Z" }, + { url = "https://files.pythonhosted.org/packages/b1/cf/f5c0b23309070ae93de75c90d29300751a5aacefc0a3ed1b1d8edb28f08b/greenlet-3.2.3-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:500b8689aa9dd1ab26872a34084503aeddefcb438e2e7317b89b11eaea1901ad", size = 270732, upload-time = "2025-06-05T16:10:08.26Z" }, + { url = "https://files.pythonhosted.org/packages/48/ae/91a957ba60482d3fecf9be49bc3948f341d706b52ddb9d83a70d42abd498/greenlet-3.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:a07d3472c2a93117af3b0136f246b2833fdc0b542d4a9799ae5f41c28323faef", size = 639033, upload-time = "2025-06-05T16:38:53.983Z" }, + { url = "https://files.pythonhosted.org/packages/6f/df/20ffa66dd5a7a7beffa6451bdb7400d66251374ab40b99981478c69a67a8/greenlet-3.2.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:8704b3768d2f51150626962f4b9a9e4a17d2e37c8a8d9867bbd9fa4eb938d3b3", size = 652999, upload-time = "2025-06-05T16:41:37.89Z" }, + { url = "https://files.pythonhosted.org/packages/51/b4/ebb2c8cb41e521f1d72bf0465f2f9a2fd803f674a88db228887e6847077e/greenlet-3.2.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:5035d77a27b7c62db6cf41cf786cfe2242644a7a337a0e155c80960598baab95", size = 647368, upload-time = "2025-06-05T16:48:21.467Z" }, + { url = "https://files.pythonhosted.org/packages/8e/6a/1e1b5aa10dced4ae876a322155705257748108b7fd2e4fae3f2a091fe81a/greenlet-3.2.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2d8aa5423cd4a396792f6d4580f88bdc6efcb9205891c9d40d20f6e670992efb", size = 650037, upload-time = "2025-06-05T16:13:06.402Z" }, + { url = "https://files.pythonhosted.org/packages/26/f2/ad51331a157c7015c675702e2d5230c243695c788f8f75feba1af32b3617/greenlet-3.2.3-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2c724620a101f8170065d7dded3f962a2aea7a7dae133a009cada42847e04a7b", size = 608402, upload-time = "2025-06-05T16:12:51.91Z" }, + { url = "https://files.pythonhosted.org/packages/26/bc/862bd2083e6b3aff23300900a956f4ea9a4059de337f5c8734346b9b34fc/greenlet-3.2.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:873abe55f134c48e1f2a6f53f7d1419192a3d1a4e873bace00499a4e45ea6af0", size = 1119577, upload-time = "2025-06-05T16:36:49.787Z" }, + { url = "https://files.pythonhosted.org/packages/86/94/1fc0cc068cfde885170e01de40a619b00eaa8f2916bf3541744730ffb4c3/greenlet-3.2.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:024571bbce5f2c1cfff08bf3fbaa43bbc7444f580ae13b0099e95d0e6e67ed36", size = 1147121, upload-time = "2025-06-05T16:12:42.527Z" }, + { url = "https://files.pythonhosted.org/packages/27/1a/199f9587e8cb08a0658f9c30f3799244307614148ffe8b1e3aa22f324dea/greenlet-3.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:5195fb1e75e592dd04ce79881c8a22becdfa3e6f500e7feb059b1e6fdd54d3e3", size = 297603, upload-time = "2025-06-05T16:20:12.651Z" }, + { url = "https://files.pythonhosted.org/packages/d8/ca/accd7aa5280eb92b70ed9e8f7fd79dc50a2c21d8c73b9a0856f5b564e222/greenlet-3.2.3-cp314-cp314-macosx_11_0_universal2.whl", hash = 
"sha256:3d04332dddb10b4a211b68111dabaee2e1a073663d117dc10247b5b1642bac86", size = 271479, upload-time = "2025-06-05T16:10:47.525Z" }, + { url = "https://files.pythonhosted.org/packages/55/71/01ed9895d9eb49223280ecc98a557585edfa56b3d0e965b9fa9f7f06b6d9/greenlet-3.2.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8186162dffde068a465deab08fc72c767196895c39db26ab1c17c0b77a6d8b97", size = 683952, upload-time = "2025-06-05T16:38:55.125Z" }, + { url = "https://files.pythonhosted.org/packages/ea/61/638c4bdf460c3c678a0a1ef4c200f347dff80719597e53b5edb2fb27ab54/greenlet-3.2.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f4bfbaa6096b1b7a200024784217defedf46a07c2eee1a498e94a1b5f8ec5728", size = 696917, upload-time = "2025-06-05T16:41:38.959Z" }, + { url = "https://files.pythonhosted.org/packages/22/cc/0bd1a7eb759d1f3e3cc2d1bc0f0b487ad3cc9f34d74da4b80f226fde4ec3/greenlet-3.2.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:ed6cfa9200484d234d8394c70f5492f144b20d4533f69262d530a1a082f6ee9a", size = 692443, upload-time = "2025-06-05T16:48:23.113Z" }, + { url = "https://files.pythonhosted.org/packages/67/10/b2a4b63d3f08362662e89c103f7fe28894a51ae0bc890fabf37d1d780e52/greenlet-3.2.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:02b0df6f63cd15012bed5401b47829cfd2e97052dc89da3cfaf2c779124eb892", size = 692995, upload-time = "2025-06-05T16:13:07.972Z" }, + { url = "https://files.pythonhosted.org/packages/5a/c6/ad82f148a4e3ce9564056453a71529732baf5448ad53fc323e37efe34f66/greenlet-3.2.3-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:86c2d68e87107c1792e2e8d5399acec2487a4e993ab76c792408e59394d52141", size = 655320, upload-time = "2025-06-05T16:12:53.453Z" }, + { url = "https://files.pythonhosted.org/packages/5c/4f/aab73ecaa6b3086a4c89863d94cf26fa84cbff63f52ce9bc4342b3087a06/greenlet-3.2.3-cp314-cp314-win_amd64.whl", hash = "sha256:8c47aae8fbbfcf82cc13327ae802ba13c9c36753b67e760023fd116bc124a62a", size = 301236, upload-time = "2025-06-05T16:15:20.111Z" }, + { url = "https://files.pythonhosted.org/packages/3d/d9/a3114df5fba2bf9823e0acc01e9e2abdcd8ea4c5487cf1c3dcd4cc0b48cf/greenlet-3.2.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:42efc522c0bd75ffa11a71e09cd8a399d83fafe36db250a87cf1dacfaa15dc64", size = 267769, upload-time = "2025-06-05T16:10:44.802Z" }, + { url = "https://files.pythonhosted.org/packages/bc/da/47dfc50f6e5673116e66a737dc58d1eca651db9a9aa8797c1d27e940e211/greenlet-3.2.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d760f9bdfe79bff803bad32b4d8ffb2c1d2ce906313fc10a83976ffb73d64ca7", size = 625472, upload-time = "2025-06-05T16:38:56.882Z" }, + { url = "https://files.pythonhosted.org/packages/f5/74/f6ef9f85d981b2fcd665bbee3e69e3c0a10fb962eb4c6a5889ac3b6debfa/greenlet-3.2.3-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:8324319cbd7b35b97990090808fdc99c27fe5338f87db50514959f8059999805", size = 637253, upload-time = "2025-06-05T16:41:40.542Z" }, + { url = "https://files.pythonhosted.org/packages/66/69/4919bb1c9e43bfc16dc886e7a37fe1bc04bfa4101aba177936a10f313cad/greenlet-3.2.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:8c37ef5b3787567d322331d5250e44e42b58c8c713859b8a04c6065f27efbf72", size = 632611, upload-time = "2025-06-05T16:48:24.976Z" }, + { url = 
"https://files.pythonhosted.org/packages/6b/8d/97d988d019f40b6b360b0c71c99e5b4c877a3d92666fe48b081d0e1ea1cd/greenlet-3.2.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ce539fb52fb774d0802175d37fcff5c723e2c7d249c65916257f0a940cee8904", size = 631843, upload-time = "2025-06-05T16:13:09.476Z" }, + { url = "https://files.pythonhosted.org/packages/59/24/d5e1504ec00768755d4ccc2168b76d9f4524e96694a14ad45bd87796e9bb/greenlet-3.2.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:003c930e0e074db83559edc8705f3a2d066d4aa8c2f198aff1e454946efd0f26", size = 580781, upload-time = "2025-06-05T16:12:55.029Z" }, + { url = "https://files.pythonhosted.org/packages/9c/df/d009bcca566dbfd2283b306b4e424f4c0e59bf984868f8b789802fe9e607/greenlet-3.2.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7e70ea4384b81ef9e84192e8a77fb87573138aa5d4feee541d8014e452b434da", size = 1109903, upload-time = "2025-06-05T16:36:51.491Z" }, + { url = "https://files.pythonhosted.org/packages/33/54/5036097197a78388aa6901a5b90b562f3a154a9fbee89c301a26f56f3942/greenlet-3.2.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:22eb5ba839c4b2156f18f76768233fe44b23a31decd9cc0d4cc8141c211fd1b4", size = 1133975, upload-time = "2025-06-05T16:12:43.866Z" }, + { url = "https://files.pythonhosted.org/packages/e2/15/b001456a430805fdd8b600a788d19a790664eee8863739523395f68df752/greenlet-3.2.3-cp39-cp39-win32.whl", hash = "sha256:4532f0d25df67f896d137431b13f4cdce89f7e3d4a96387a41290910df4d3a57", size = 279320, upload-time = "2025-06-05T16:43:34.043Z" }, + { url = "https://files.pythonhosted.org/packages/6c/4c/bf2100cbc1bd07f39bee3b09e7eef39beffe29f5453dc2477a2693737913/greenlet-3.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:aaa7aae1e7f75eaa3ae400ad98f8644bb81e1dc6ba47ce8a93d3f17274e08322", size = 296444, upload-time = "2025-06-05T16:39:22.664Z" }, +] + +[[package]] +name = "h11" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, +] + +[[package]] +name = "httpcore" +version = "1.0.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, +] + +[[package]] +name = "httpx" +version = "0.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "certifi" }, + { name = "httpcore" }, + { name = "idna" }, +] +sdist 
= { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, +] + +[[package]] +name = "idna" +version = "3.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, +] + +[[package]] +name = "imagesize" +version = "1.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a7/84/62473fb57d61e31fef6e36d64a179c8781605429fd927b5dd608c997be31/imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a", size = 1280026, upload-time = "2022-07-01T12:21:05.687Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ff/62/85c4c919272577931d407be5ba5d71c20f0b616d31a0befe0ae45bb79abd/imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b", size = 8769, upload-time = "2022-07-01T12:21:02.467Z" }, +] + +[[package]] +name = "importlib-metadata" +version = "8.7.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "zipp", marker = "python_full_version != '3.14.*'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/76/66/650a33bd90f786193e4de4b3ad86ea60b53c89b669a5c7be931fac31cdb0/importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000", size = 56641, upload-time = "2025-04-27T15:29:01.736Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/b0/36bd937216ec521246249be3bf9855081de4c5e06a0c9b4219dbeda50373/importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd", size = 27656, upload-time = "2025-04-27T15:29:00.214Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" }, +] + +[[package]] +name = "jinja2" +version = "3.1.6" +source = { 
registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, +] + +[[package]] +name = "jmespath" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/00/2a/e867e8531cf3e36b41201936b7fa7ba7b5702dbef42922193f05c8976cd6/jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe", size = 25843, upload-time = "2022-06-17T18:00:12.224Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/31/b4/b9b800c45527aadd64d5b442f9b932b00648617eb5d63d2c7a6587b7cafc/jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", size = 20256, upload-time = "2022-06-17T18:00:10.251Z" }, +] + +[[package]] +name = "markupsafe" +version = "3.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537, upload-time = "2024-10-18T15:21:54.129Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/90/d08277ce111dd22f77149fd1a5d4653eeb3b3eaacbdfcbae5afb2600eebd/MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8", size = 14357, upload-time = "2024-10-18T15:20:51.44Z" }, + { url = "https://files.pythonhosted.org/packages/04/e1/6e2194baeae0bca1fae6629dc0cbbb968d4d941469cbab11a3872edff374/MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158", size = 12393, upload-time = "2024-10-18T15:20:52.426Z" }, + { url = "https://files.pythonhosted.org/packages/1d/69/35fa85a8ece0a437493dc61ce0bb6d459dcba482c34197e3efc829aa357f/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579", size = 21732, upload-time = "2024-10-18T15:20:53.578Z" }, + { url = "https://files.pythonhosted.org/packages/22/35/137da042dfb4720b638d2937c38a9c2df83fe32d20e8c8f3185dbfef05f7/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d", size = 20866, upload-time = "2024-10-18T15:20:55.06Z" }, + { url = "https://files.pythonhosted.org/packages/29/28/6d029a903727a1b62edb51863232152fd335d602def598dade38996887f0/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb", size = 20964, upload-time = "2024-10-18T15:20:55.906Z" }, + { url = 
"https://files.pythonhosted.org/packages/cc/cd/07438f95f83e8bc028279909d9c9bd39e24149b0d60053a97b2bc4f8aa51/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b", size = 21977, upload-time = "2024-10-18T15:20:57.189Z" }, + { url = "https://files.pythonhosted.org/packages/29/01/84b57395b4cc062f9c4c55ce0df7d3108ca32397299d9df00fedd9117d3d/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c", size = 21366, upload-time = "2024-10-18T15:20:58.235Z" }, + { url = "https://files.pythonhosted.org/packages/bd/6e/61ebf08d8940553afff20d1fb1ba7294b6f8d279df9fd0c0db911b4bbcfd/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171", size = 21091, upload-time = "2024-10-18T15:20:59.235Z" }, + { url = "https://files.pythonhosted.org/packages/11/23/ffbf53694e8c94ebd1e7e491de185124277964344733c45481f32ede2499/MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50", size = 15065, upload-time = "2024-10-18T15:21:00.307Z" }, + { url = "https://files.pythonhosted.org/packages/44/06/e7175d06dd6e9172d4a69a72592cb3f7a996a9c396eee29082826449bbc3/MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a", size = 15514, upload-time = "2024-10-18T15:21:01.122Z" }, + { url = "https://files.pythonhosted.org/packages/6b/28/bbf83e3f76936960b850435576dd5e67034e200469571be53f69174a2dfd/MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d", size = 14353, upload-time = "2024-10-18T15:21:02.187Z" }, + { url = "https://files.pythonhosted.org/packages/6c/30/316d194b093cde57d448a4c3209f22e3046c5bb2fb0820b118292b334be7/MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93", size = 12392, upload-time = "2024-10-18T15:21:02.941Z" }, + { url = "https://files.pythonhosted.org/packages/f2/96/9cdafba8445d3a53cae530aaf83c38ec64c4d5427d975c974084af5bc5d2/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832", size = 23984, upload-time = "2024-10-18T15:21:03.953Z" }, + { url = "https://files.pythonhosted.org/packages/f1/a4/aefb044a2cd8d7334c8a47d3fb2c9f328ac48cb349468cc31c20b539305f/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84", size = 23120, upload-time = "2024-10-18T15:21:06.495Z" }, + { url = "https://files.pythonhosted.org/packages/8d/21/5e4851379f88f3fad1de30361db501300d4f07bcad047d3cb0449fc51f8c/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca", size = 23032, upload-time = "2024-10-18T15:21:07.295Z" }, + { url = "https://files.pythonhosted.org/packages/00/7b/e92c64e079b2d0d7ddf69899c98842f3f9a60a1ae72657c89ce2655c999d/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798", size = 24057, upload-time = "2024-10-18T15:21:08.073Z" }, + { url = 
"https://files.pythonhosted.org/packages/f9/ac/46f960ca323037caa0a10662ef97d0a4728e890334fc156b9f9e52bcc4ca/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e", size = 23359, upload-time = "2024-10-18T15:21:09.318Z" }, + { url = "https://files.pythonhosted.org/packages/69/84/83439e16197337b8b14b6a5b9c2105fff81d42c2a7c5b58ac7b62ee2c3b1/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4", size = 23306, upload-time = "2024-10-18T15:21:10.185Z" }, + { url = "https://files.pythonhosted.org/packages/9a/34/a15aa69f01e2181ed8d2b685c0d2f6655d5cca2c4db0ddea775e631918cd/MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d", size = 15094, upload-time = "2024-10-18T15:21:11.005Z" }, + { url = "https://files.pythonhosted.org/packages/da/b8/3a3bd761922d416f3dc5d00bfbed11f66b1ab89a0c2b6e887240a30b0f6b/MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b", size = 15521, upload-time = "2024-10-18T15:21:12.911Z" }, + { url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274, upload-time = "2024-10-18T15:21:13.777Z" }, + { url = "https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348, upload-time = "2024-10-18T15:21:14.822Z" }, + { url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149, upload-time = "2024-10-18T15:21:15.642Z" }, + { url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118, upload-time = "2024-10-18T15:21:17.133Z" }, + { url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993, upload-time = "2024-10-18T15:21:18.064Z" }, + { url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178, upload-time = "2024-10-18T15:21:18.859Z" }, + { url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319, upload-time = "2024-10-18T15:21:19.671Z" }, + { url = 
"https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352, upload-time = "2024-10-18T15:21:20.971Z" }, + { url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097, upload-time = "2024-10-18T15:21:22.646Z" }, + { url = "https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601, upload-time = "2024-10-18T15:21:23.499Z" }, + { url = "https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274, upload-time = "2024-10-18T15:21:24.577Z" }, + { url = "https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352, upload-time = "2024-10-18T15:21:25.382Z" }, + { url = "https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122, upload-time = "2024-10-18T15:21:26.199Z" }, + { url = "https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085, upload-time = "2024-10-18T15:21:27.029Z" }, + { url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978, upload-time = "2024-10-18T15:21:27.846Z" }, + { url = "https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208, upload-time = "2024-10-18T15:21:28.744Z" }, + { url = "https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357, upload-time = "2024-10-18T15:21:29.545Z" }, + { url = "https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344, upload-time = "2024-10-18T15:21:30.366Z" }, + { url = 
"https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101, upload-time = "2024-10-18T15:21:31.207Z" }, + { url = "https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603, upload-time = "2024-10-18T15:21:32.032Z" }, + { url = "https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510, upload-time = "2024-10-18T15:21:33.625Z" }, + { url = "https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486, upload-time = "2024-10-18T15:21:34.611Z" }, + { url = "https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480, upload-time = "2024-10-18T15:21:35.398Z" }, + { url = "https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914, upload-time = "2024-10-18T15:21:36.231Z" }, + { url = "https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796, upload-time = "2024-10-18T15:21:37.073Z" }, + { url = "https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473, upload-time = "2024-10-18T15:21:37.932Z" }, + { url = "https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114, upload-time = "2024-10-18T15:21:39.799Z" }, + { url = "https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098, upload-time = "2024-10-18T15:21:40.813Z" }, + { url = "https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208, upload-time = "2024-10-18T15:21:41.814Z" }, + { url = 
"https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739, upload-time = "2024-10-18T15:21:42.784Z" }, + { url = "https://files.pythonhosted.org/packages/a7/ea/9b1530c3fdeeca613faeb0fb5cbcf2389d816072fab72a71b45749ef6062/MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a", size = 14344, upload-time = "2024-10-18T15:21:43.721Z" }, + { url = "https://files.pythonhosted.org/packages/4b/c2/fbdbfe48848e7112ab05e627e718e854d20192b674952d9042ebd8c9e5de/MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff", size = 12389, upload-time = "2024-10-18T15:21:44.666Z" }, + { url = "https://files.pythonhosted.org/packages/f0/25/7a7c6e4dbd4f867d95d94ca15449e91e52856f6ed1905d58ef1de5e211d0/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13", size = 21607, upload-time = "2024-10-18T15:21:45.452Z" }, + { url = "https://files.pythonhosted.org/packages/53/8f/f339c98a178f3c1e545622206b40986a4c3307fe39f70ccd3d9df9a9e425/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144", size = 20728, upload-time = "2024-10-18T15:21:46.295Z" }, + { url = "https://files.pythonhosted.org/packages/1a/03/8496a1a78308456dbd50b23a385c69b41f2e9661c67ea1329849a598a8f9/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29", size = 20826, upload-time = "2024-10-18T15:21:47.134Z" }, + { url = "https://files.pythonhosted.org/packages/e6/cf/0a490a4bd363048c3022f2f475c8c05582179bb179defcee4766fb3dcc18/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0", size = 21843, upload-time = "2024-10-18T15:21:48.334Z" }, + { url = "https://files.pythonhosted.org/packages/19/a3/34187a78613920dfd3cdf68ef6ce5e99c4f3417f035694074beb8848cd77/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0", size = 21219, upload-time = "2024-10-18T15:21:49.587Z" }, + { url = "https://files.pythonhosted.org/packages/17/d8/5811082f85bb88410ad7e452263af048d685669bbbfb7b595e8689152498/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178", size = 20946, upload-time = "2024-10-18T15:21:50.441Z" }, + { url = "https://files.pythonhosted.org/packages/7c/31/bd635fb5989440d9365c5e3c47556cfea121c7803f5034ac843e8f37c2f2/MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f", size = 15063, upload-time = "2024-10-18T15:21:51.385Z" }, + { url = "https://files.pythonhosted.org/packages/b3/73/085399401383ce949f727afec55ec3abd76648d04b9f22e1c0e99cb4bec3/MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a", size = 15506, upload-time = "2024-10-18T15:21:52.974Z" }, +] + +[[package]] +name = "mockupdb" +version = "1.9.0.dev1" +source 
= { git = "https://github.com/mongodb-labs/mongo-mockup-db?rev=master#317c4e049965f9d99423698a81e52d0ab37b7599" } +dependencies = [ + { name = "pymongo" }, +] + +[[package]] +name = "mypy" +version = "1.18.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mypy-extensions" }, + { name = "pathspec" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c0/77/8f0d0001ffad290cef2f7f216f96c814866248a0b92a722365ed54648e7e/mypy-1.18.2.tar.gz", hash = "sha256:06a398102a5f203d7477b2923dda3634c36727fa5c237d8f859ef90c42a9924b", size = 3448846, upload-time = "2025-09-19T00:11:10.519Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/03/6f/657961a0743cff32e6c0611b63ff1c1970a0b482ace35b069203bf705187/mypy-1.18.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c1eab0cf6294dafe397c261a75f96dc2c31bffe3b944faa24db5def4e2b0f77c", size = 12807973, upload-time = "2025-09-19T00:10:35.282Z" }, + { url = "https://files.pythonhosted.org/packages/10/e9/420822d4f661f13ca8900f5fa239b40ee3be8b62b32f3357df9a3045a08b/mypy-1.18.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7a780ca61fc239e4865968ebc5240bb3bf610ef59ac398de9a7421b54e4a207e", size = 11896527, upload-time = "2025-09-19T00:10:55.791Z" }, + { url = "https://files.pythonhosted.org/packages/aa/73/a05b2bbaa7005f4642fcfe40fb73f2b4fb6bb44229bd585b5878e9a87ef8/mypy-1.18.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:448acd386266989ef11662ce3c8011fd2a7b632e0ec7d61a98edd8e27472225b", size = 12507004, upload-time = "2025-09-19T00:11:05.411Z" }, + { url = "https://files.pythonhosted.org/packages/4f/01/f6e4b9f0d031c11ccbd6f17da26564f3a0f3c4155af344006434b0a05a9d/mypy-1.18.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f9e171c465ad3901dc652643ee4bffa8e9fef4d7d0eece23b428908c77a76a66", size = 13245947, upload-time = "2025-09-19T00:10:46.923Z" }, + { url = "https://files.pythonhosted.org/packages/d7/97/19727e7499bfa1ae0773d06afd30ac66a58ed7437d940c70548634b24185/mypy-1.18.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:592ec214750bc00741af1f80cbf96b5013d81486b7bb24cb052382c19e40b428", size = 13499217, upload-time = "2025-09-19T00:09:39.472Z" }, + { url = "https://files.pythonhosted.org/packages/9f/4f/90dc8c15c1441bf31cf0f9918bb077e452618708199e530f4cbd5cede6ff/mypy-1.18.2-cp310-cp310-win_amd64.whl", hash = "sha256:7fb95f97199ea11769ebe3638c29b550b5221e997c63b14ef93d2e971606ebed", size = 9766753, upload-time = "2025-09-19T00:10:49.161Z" }, + { url = "https://files.pythonhosted.org/packages/88/87/cafd3ae563f88f94eec33f35ff722d043e09832ea8530ef149ec1efbaf08/mypy-1.18.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:807d9315ab9d464125aa9fcf6d84fde6e1dc67da0b6f80e7405506b8ac72bc7f", size = 12731198, upload-time = "2025-09-19T00:09:44.857Z" }, + { url = "https://files.pythonhosted.org/packages/0f/e0/1e96c3d4266a06d4b0197ace5356d67d937d8358e2ee3ffac71faa843724/mypy-1.18.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:776bb00de1778caf4db739c6e83919c1d85a448f71979b6a0edd774ea8399341", size = 11817879, upload-time = "2025-09-19T00:09:47.131Z" }, + { url = "https://files.pythonhosted.org/packages/72/ef/0c9ba89eb03453e76bdac5a78b08260a848c7bfc5d6603634774d9cd9525/mypy-1.18.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:1379451880512ffce14505493bd9fe469e0697543717298242574882cf8cdb8d", size = 12427292, upload-time = "2025-09-19T00:10:22.472Z" }, + { url = "https://files.pythonhosted.org/packages/1a/52/ec4a061dd599eb8179d5411d99775bec2a20542505988f40fc2fee781068/mypy-1.18.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1331eb7fd110d60c24999893320967594ff84c38ac6d19e0a76c5fd809a84c86", size = 13163750, upload-time = "2025-09-19T00:09:51.472Z" }, + { url = "https://files.pythonhosted.org/packages/c4/5f/2cf2ceb3b36372d51568f2208c021870fe7834cf3186b653ac6446511839/mypy-1.18.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3ca30b50a51e7ba93b00422e486cbb124f1c56a535e20eff7b2d6ab72b3b2e37", size = 13351827, upload-time = "2025-09-19T00:09:58.311Z" }, + { url = "https://files.pythonhosted.org/packages/c8/7d/2697b930179e7277529eaaec1513f8de622818696857f689e4a5432e5e27/mypy-1.18.2-cp311-cp311-win_amd64.whl", hash = "sha256:664dc726e67fa54e14536f6e1224bcfce1d9e5ac02426d2326e2bb4e081d1ce8", size = 9757983, upload-time = "2025-09-19T00:10:09.071Z" }, + { url = "https://files.pythonhosted.org/packages/07/06/dfdd2bc60c66611dd8335f463818514733bc763e4760dee289dcc33df709/mypy-1.18.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:33eca32dd124b29400c31d7cf784e795b050ace0e1f91b8dc035672725617e34", size = 12908273, upload-time = "2025-09-19T00:10:58.321Z" }, + { url = "https://files.pythonhosted.org/packages/81/14/6a9de6d13a122d5608e1a04130724caf9170333ac5a924e10f670687d3eb/mypy-1.18.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a3c47adf30d65e89b2dcd2fa32f3aeb5e94ca970d2c15fcb25e297871c8e4764", size = 11920910, upload-time = "2025-09-19T00:10:20.043Z" }, + { url = "https://files.pythonhosted.org/packages/5f/a9/b29de53e42f18e8cc547e38daa9dfa132ffdc64f7250e353f5c8cdd44bee/mypy-1.18.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d6c838e831a062f5f29d11c9057c6009f60cb294fea33a98422688181fe2893", size = 12465585, upload-time = "2025-09-19T00:10:33.005Z" }, + { url = "https://files.pythonhosted.org/packages/77/ae/6c3d2c7c61ff21f2bee938c917616c92ebf852f015fb55917fd6e2811db2/mypy-1.18.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:01199871b6110a2ce984bde85acd481232d17413868c9807e95c1b0739a58914", size = 13348562, upload-time = "2025-09-19T00:10:11.51Z" }, + { url = "https://files.pythonhosted.org/packages/4d/31/aec68ab3b4aebdf8f36d191b0685d99faa899ab990753ca0fee60fb99511/mypy-1.18.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a2afc0fa0b0e91b4599ddfe0f91e2c26c2b5a5ab263737e998d6817874c5f7c8", size = 13533296, upload-time = "2025-09-19T00:10:06.568Z" }, + { url = "https://files.pythonhosted.org/packages/9f/83/abcb3ad9478fca3ebeb6a5358bb0b22c95ea42b43b7789c7fb1297ca44f4/mypy-1.18.2-cp312-cp312-win_amd64.whl", hash = "sha256:d8068d0afe682c7c4897c0f7ce84ea77f6de953262b12d07038f4d296d547074", size = 9828828, upload-time = "2025-09-19T00:10:28.203Z" }, + { url = "https://files.pythonhosted.org/packages/5f/04/7f462e6fbba87a72bc8097b93f6842499c428a6ff0c81dd46948d175afe8/mypy-1.18.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:07b8b0f580ca6d289e69209ec9d3911b4a26e5abfde32228a288eb79df129fcc", size = 12898728, upload-time = "2025-09-19T00:10:01.33Z" }, + { url = "https://files.pythonhosted.org/packages/99/5b/61ed4efb64f1871b41fd0b82d29a64640f3516078f6c7905b68ab1ad8b13/mypy-1.18.2-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:ed4482847168439651d3feee5833ccedbf6657e964572706a2adb1f7fa4dfe2e", size = 11910758, upload-time = "2025-09-19T00:10:42.607Z" }, + { url = "https://files.pythonhosted.org/packages/3c/46/d297d4b683cc89a6e4108c4250a6a6b717f5fa96e1a30a7944a6da44da35/mypy-1.18.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c3ad2afadd1e9fea5cf99a45a822346971ede8685cc581ed9cd4d42eaf940986", size = 12475342, upload-time = "2025-09-19T00:11:00.371Z" }, + { url = "https://files.pythonhosted.org/packages/83/45/4798f4d00df13eae3bfdf726c9244bcb495ab5bd588c0eed93a2f2dd67f3/mypy-1.18.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a431a6f1ef14cf8c144c6b14793a23ec4eae3db28277c358136e79d7d062f62d", size = 13338709, upload-time = "2025-09-19T00:11:03.358Z" }, + { url = "https://files.pythonhosted.org/packages/d7/09/479f7358d9625172521a87a9271ddd2441e1dab16a09708f056e97007207/mypy-1.18.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7ab28cc197f1dd77a67e1c6f35cd1f8e8b73ed2217e4fc005f9e6a504e46e7ba", size = 13529806, upload-time = "2025-09-19T00:10:26.073Z" }, + { url = "https://files.pythonhosted.org/packages/71/cf/ac0f2c7e9d0ea3c75cd99dff7aec1c9df4a1376537cb90e4c882267ee7e9/mypy-1.18.2-cp313-cp313-win_amd64.whl", hash = "sha256:0e2785a84b34a72ba55fb5daf079a1003a34c05b22238da94fcae2bbe46f3544", size = 9833262, upload-time = "2025-09-19T00:10:40.035Z" }, + { url = "https://files.pythonhosted.org/packages/5a/0c/7d5300883da16f0063ae53996358758b2a2df2a09c72a5061fa79a1f5006/mypy-1.18.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:62f0e1e988ad41c2a110edde6c398383a889d95b36b3e60bcf155f5164c4fdce", size = 12893775, upload-time = "2025-09-19T00:10:03.814Z" }, + { url = "https://files.pythonhosted.org/packages/50/df/2cffbf25737bdb236f60c973edf62e3e7b4ee1c25b6878629e88e2cde967/mypy-1.18.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:8795a039bab805ff0c1dfdb8cd3344642c2b99b8e439d057aba30850b8d3423d", size = 11936852, upload-time = "2025-09-19T00:10:51.631Z" }, + { url = "https://files.pythonhosted.org/packages/be/50/34059de13dd269227fb4a03be1faee6e2a4b04a2051c82ac0a0b5a773c9a/mypy-1.18.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6ca1e64b24a700ab5ce10133f7ccd956a04715463d30498e64ea8715236f9c9c", size = 12480242, upload-time = "2025-09-19T00:11:07.955Z" }, + { url = "https://files.pythonhosted.org/packages/5b/11/040983fad5132d85914c874a2836252bbc57832065548885b5bb5b0d4359/mypy-1.18.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d924eef3795cc89fecf6bedc6ed32b33ac13e8321344f6ddbf8ee89f706c05cb", size = 13326683, upload-time = "2025-09-19T00:09:55.572Z" }, + { url = "https://files.pythonhosted.org/packages/e9/ba/89b2901dd77414dd7a8c8729985832a5735053be15b744c18e4586e506ef/mypy-1.18.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:20c02215a080e3a2be3aa50506c67242df1c151eaba0dcbc1e4e557922a26075", size = 13514749, upload-time = "2025-09-19T00:10:44.827Z" }, + { url = "https://files.pythonhosted.org/packages/25/bc/cc98767cffd6b2928ba680f3e5bc969c4152bf7c2d83f92f5a504b92b0eb/mypy-1.18.2-cp314-cp314-win_amd64.whl", hash = "sha256:749b5f83198f1ca64345603118a6f01a4e99ad4bf9d103ddc5a3200cc4614adf", size = 9982959, upload-time = "2025-09-19T00:10:37.344Z" }, + { url = 
"https://files.pythonhosted.org/packages/3f/a6/490ff491d8ecddf8ab91762d4f67635040202f76a44171420bcbe38ceee5/mypy-1.18.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:25a9c8fb67b00599f839cf472713f54249a62efd53a54b565eb61956a7e3296b", size = 12807230, upload-time = "2025-09-19T00:09:49.471Z" }, + { url = "https://files.pythonhosted.org/packages/eb/2e/60076fc829645d167ece9e80db9e8375648d210dab44cc98beb5b322a826/mypy-1.18.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c2b9c7e284ee20e7598d6f42e13ca40b4928e6957ed6813d1ab6348aa3f47133", size = 11895666, upload-time = "2025-09-19T00:10:53.678Z" }, + { url = "https://files.pythonhosted.org/packages/97/4a/1e2880a2a5dda4dc8d9ecd1a7e7606bc0b0e14813637eeda40c38624e037/mypy-1.18.2-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d6985ed057513e344e43a26cc1cd815c7a94602fb6a3130a34798625bc2f07b6", size = 12499608, upload-time = "2025-09-19T00:09:36.204Z" }, + { url = "https://files.pythonhosted.org/packages/00/81/a117f1b73a3015b076b20246b1f341c34a578ebd9662848c6b80ad5c4138/mypy-1.18.2-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:22f27105f1525ec024b5c630c0b9f36d5c1cc4d447d61fe51ff4bd60633f47ac", size = 13244551, upload-time = "2025-09-19T00:10:17.531Z" }, + { url = "https://files.pythonhosted.org/packages/9b/61/b9f48e1714ce87c7bf0358eb93f60663740ebb08f9ea886ffc670cea7933/mypy-1.18.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:030c52d0ea8144e721e49b1f68391e39553d7451f0c3f8a7565b59e19fcb608b", size = 13491552, upload-time = "2025-09-19T00:10:13.753Z" }, + { url = "https://files.pythonhosted.org/packages/c9/66/b2c0af3b684fa80d1b27501a8bdd3d2daa467ea3992a8aa612f5ca17c2db/mypy-1.18.2-cp39-cp39-win_amd64.whl", hash = "sha256:aa5e07ac1a60a253445797e42b8b2963c9675563a94f11291ab40718b016a7a0", size = 9765635, upload-time = "2025-09-19T00:10:30.993Z" }, + { url = "https://files.pythonhosted.org/packages/87/e3/be76d87158ebafa0309946c4a73831974d4d6ab4f4ef40c3b53a385a66fd/mypy-1.18.2-py3-none-any.whl", hash = "sha256:22a1748707dd62b58d2ae53562ffc4d7f8bcc727e8ac7cbc69c053ddc874d47e", size = 2352367, upload-time = "2025-09-19T00:10:15.489Z" }, +] + +[[package]] +name = "mypy-extensions" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = "2025-04-22T14:54:24.164Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" }, +] + +[[package]] +name = "nodeenv" +version = "1.9.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437, upload-time = "2024-06-04T18:44:11.171Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = 
"sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload-time = "2024-06-04T18:44:08.352Z" }, +] + +[[package]] +name = "packaging" +version = "25.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" }, +] + +[[package]] +name = "pip" +version = "25.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/20/16/650289cd3f43d5a2fadfd98c68bd1e1e7f2550a1a5326768cddfbcedb2c5/pip-25.2.tar.gz", hash = "sha256:578283f006390f85bb6282dffb876454593d637f5d1be494b5202ce4877e71f2", size = 1840021, upload-time = "2025-07-30T21:50:15.401Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/3f/945ef7ab14dc4f9d7f40288d2df998d1837ee0888ec3659c813487572faa/pip-25.2-py3-none-any.whl", hash = "sha256:6d67a2b4e7f14d8b31b8b52648866fa717f45a1eb70e83002f4331d07e953717", size = 1752557, upload-time = "2025-07-30T21:50:13.323Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "pyasn1" +version = "0.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/e9/01f1a64245b89f039897cb0130016d79f77d52669aae6ee7b159a6c4c018/pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034", size = 145322, upload-time = "2024-09-10T22:41:42.55Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c8/f1/d6a797abb14f6283c0ddff96bbdd46937f64122b8c925cab503dd37f8214/pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629", size = 
83135, upload-time = "2024-09-11T16:00:36.122Z" }, +] + +[[package]] +name = "pyasn1-modules" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyasn1" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e9/e6/78ebbb10a8c8e4b61a59249394a4a594c1a7af95593dc933a349c8d00964/pyasn1_modules-0.4.2.tar.gz", hash = "sha256:677091de870a80aae844b1ca6134f54652fa2c8c5a52aa396440ac3106e941e6", size = 307892, upload-time = "2025-03-28T02:41:22.17Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/47/8d/d529b5d697919ba8c11ad626e835d4039be708a35b0d22de83a269a6682c/pyasn1_modules-0.4.2-py3-none-any.whl", hash = "sha256:29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a", size = 181259, upload-time = "2025-03-28T02:41:19.028Z" }, +] + +[[package]] +name = "pycparser" +version = "2.22" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736, upload-time = "2024-03-30T13:22:22.564Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552, upload-time = "2024-03-30T13:22:20.476Z" }, +] + +[[package]] +name = "pygments" +version = "2.19.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, +] + +[[package]] +name = "pykerberos" +version = "1.2.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/07/e9/ae44ea7d7605df9e5ca1ed745a2c5672dc838a8398101051dd5f255b130d/pykerberos-1.2.4.tar.gz", hash = "sha256:9d701ebd8fc596c99d3155d5ba45813bd5908d26ef83ba0add250edb622abed4", size = 25046, upload-time = "2022-03-09T03:54:08.546Z" } + +[[package]] +name = "pymongo" +source = { editable = "." 
} +dependencies = [ + { name = "dnspython" }, +] + +[package.optional-dependencies] +aws = [ + { name = "pymongo-auth-aws" }, +] +docs = [ + { name = "furo" }, + { name = "readthedocs-sphinx-search" }, + { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "sphinx-autobuild" }, + { name = "sphinx-rtd-theme" }, + { name = "sphinxcontrib-shellcheck" }, +] +encryption = [ + { name = "certifi", marker = "os_name == 'nt' or sys_platform == 'darwin'" }, + { name = "pymongo-auth-aws" }, + { name = "pymongocrypt" }, +] +gssapi = [ + { name = "pykerberos", marker = "os_name != 'nt'" }, + { name = "winkerberos", marker = "os_name == 'nt'" }, +] +ocsp = [ + { name = "certifi", marker = "os_name == 'nt' or sys_platform == 'darwin'" }, + { name = "cryptography" }, + { name = "pyopenssl" }, + { name = "requests" }, + { name = "service-identity" }, +] +snappy = [ + { name = "python-snappy" }, +] +test = [ + { name = "importlib-metadata", marker = "python_full_version < '3.13'" }, + { name = "pytest" }, + { name = "pytest-asyncio" }, +] +zstd = [ + { name = "zstandard" }, +] + +[package.dev-dependencies] +coverage = [ + { name = "coverage" }, + { name = "pytest-cov" }, +] +gevent = [ + { name = "gevent" }, +] +mockupdb = [ + { name = "mockupdb" }, +] +perf = [ + { name = "simplejson" }, +] +pip = [ + { name = "pip" }, +] +typing = [ + { name = "mypy" }, + { name = "pip" }, + { name = "pyright" }, + { name = "typing-extensions" }, +] + +[package.metadata] +requires-dist = [ + { name = "certifi", marker = "(os_name == 'nt' and extra == 'encryption') or (sys_platform == 'darwin' and extra == 'encryption')", specifier = ">=2023.7.22" }, + { name = "certifi", marker = "(os_name == 'nt' and extra == 'ocsp') or (sys_platform == 'darwin' and extra == 'ocsp')", specifier = ">=2023.7.22" }, + { name = "cryptography", marker = "extra == 'ocsp'", specifier = ">=2.5" }, + { name = "dnspython", specifier = ">=2.6.1,<3.0.0" }, + { name = "furo", marker = "extra == 'docs'", specifier = "==2025.9.25" }, + { name = "importlib-metadata", marker = "python_full_version < '3.13' and extra == 'test'", specifier = ">=7.0" }, + { name = "pykerberos", marker = "os_name != 'nt' and extra == 'gssapi'" }, + { name = "pymongo-auth-aws", marker = "extra == 'aws'", specifier = ">=1.1.0,<2.0.0" }, + { name = "pymongo-auth-aws", marker = "extra == 'encryption'", specifier = ">=1.1.0,<2.0.0" }, + { name = "pymongocrypt", marker = "extra == 'encryption'", specifier = ">=1.13.0,<2.0.0" }, + { name = "pyopenssl", marker = "extra == 'ocsp'", specifier = ">=17.2.0" }, + { name = "pytest", marker = "extra == 'test'", specifier = ">=8.2" }, + { name = "pytest-asyncio", marker = "extra == 'test'", specifier = ">=0.24.0" }, + { name = "python-snappy", marker = "extra == 'snappy'" }, + { name = "readthedocs-sphinx-search", marker = "extra == 'docs'", specifier = "~=0.3" }, + { name = "requests", marker = "extra == 'ocsp'", specifier = "<3.0.0" }, + { name = "service-identity", marker = "extra == 'ocsp'", specifier = ">=18.1.0" }, + { name = "sphinx", marker = "extra == 'docs'", specifier = ">=5.3,<9" }, + { name = "sphinx-autobuild", marker = "extra == 'docs'", specifier = ">=2020.9.1" }, + { 
name = "sphinx-rtd-theme", marker = "extra == 'docs'", specifier = ">=2,<4" }, + { name = "sphinxcontrib-shellcheck", marker = "extra == 'docs'", specifier = ">=1,<2" }, + { name = "winkerberos", marker = "os_name == 'nt' and extra == 'gssapi'", specifier = ">=0.5.0" }, + { name = "zstandard", marker = "extra == 'zstd'" }, +] +provides-extras = ["aws", "docs", "encryption", "gssapi", "ocsp", "snappy", "test", "zstd"] + +[package.metadata.requires-dev] +coverage = [ + { name = "coverage", specifier = ">=5,<=7.10.6" }, + { name = "pytest-cov" }, +] +dev = [] +gevent = [{ name = "gevent", specifier = ">=20.6.0" }] +mockupdb = [{ name = "mockupdb", git = "https://github.com/mongodb-labs/mongo-mockup-db?rev=master" }] +perf = [{ name = "simplejson", specifier = ">=3.17.0" }] +pip = [{ name = "pip" }] +typing = [ + { name = "mypy", specifier = "==1.18.2" }, + { name = "pip" }, + { name = "pyright", specifier = "==1.1.406" }, + { name = "typing-extensions" }, +] + +[[package]] +name = "pymongo-auth-aws" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "boto3" }, + { name = "botocore" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c7/37/ca8d840f322f0047b71afcec7a489b1ea1f59a5f6d29f91ad8004024736f/pymongo_auth_aws-1.3.0.tar.gz", hash = "sha256:d0fa893958dc525ca29f601c34f2ca73c860f66bc6511ec0a7da6eb7ea44e94f", size = 18559, upload-time = "2024-09-11T20:29:17.668Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f4/12/a997fc108416f31fac55748e5406c1c8c4e976a4073f07b5553825641611/pymongo_auth_aws-1.3.0-py3-none-any.whl", hash = "sha256:367f6d853da428a02e9e450422756133715d40f8141f47ae5d98f139a88c0ce5", size = 15470, upload-time = "2024-09-11T20:29:16.637Z" }, +] + +[[package]] +name = "pymongocrypt" +version = "1.16.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi" }, + { name = "cryptography" }, + { name = "httpx" }, + { name = "packaging" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fb/8e/dd9ed710e8fd4eec127dac1db3b3e9156ffcf340a0463a82087a12ae924e/pymongocrypt-1.16.0.tar.gz", hash = "sha256:0db0812055d00e6f5562a8d66711c4cba4b75014c363306c9b298a9fd68fccdd", size = 65354, upload-time = "2025-09-09T18:54:25.531Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fb/8b/dda0f19ce16f7b257e4aa2a8831a1a1307c1ea124a00f571cda83a04adcb/pymongocrypt-1.16.0-py3-none-macosx_11_0_universal2.whl", hash = "sha256:fbd85534880ea8525956b96e583a7021c721abbf3b51a6dbe48a57d7eba8e74a", size = 4721169, upload-time = "2025-09-09T18:54:18.642Z" }, + { url = "https://files.pythonhosted.org/packages/99/48/512a5b597d71407f9b06a14cd8e5ac376e06b780d4d54a4e69726bd48703/pymongocrypt-1.16.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:85df0a78480e91bdd3a5a6da3e4cdc7d9700de8a871aa8168588981c041f1914", size = 4038242, upload-time = "2025-09-09T18:54:20.496Z" }, + { url = "https://files.pythonhosted.org/packages/3f/67/3bdeda347191d6c1ee257eb3da8c85f1278d86dfb493cc9bc26352a41d0a/pymongocrypt-1.16.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8d2ebeb1b5e4f4554bf44f726e8009c59c4d7d0b412beebfece875991714676", size = 3775742, upload-time = "2025-09-09T18:54:22.254Z" }, + { url = "https://files.pythonhosted.org/packages/dc/81/70f6947afbd1ac7be54482b44cb1b99e8e9b9cac41985e6250c4fc279e58/pymongocrypt-1.16.0-py3-none-win_amd64.whl", hash = 
"sha256:c20afcd89ec5fc53305e924c05c4a0321ddc73f1e4e7c8240ee2fd0123e23609", size = 1607917, upload-time = "2025-09-09T18:54:24.182Z" }, +] + +[[package]] +name = "pyopenssl" +version = "25.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cryptography" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/80/be/97b83a464498a79103036bc74d1038df4a7ef0e402cfaf4d5e113fb14759/pyopenssl-25.3.0.tar.gz", hash = "sha256:c981cb0a3fd84e8602d7afc209522773b94c1c2446a3c710a75b06fe1beae329", size = 184073, upload-time = "2025-09-17T00:32:21.037Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/81/ef2b1dfd1862567d573a4fdbc9f969067621764fbb74338496840a1d2977/pyopenssl-25.3.0-py3-none-any.whl", hash = "sha256:1fda6fc034d5e3d179d39e59c1895c9faeaf40a79de5fc4cbbfbe0d36f4a77b6", size = 57268, upload-time = "2025-09-17T00:32:19.474Z" }, +] + +[[package]] +name = "pyright" +version = "1.1.406" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "nodeenv" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f7/16/6b4fbdd1fef59a0292cbb99f790b44983e390321eccbc5921b4d161da5d1/pyright-1.1.406.tar.gz", hash = "sha256:c4872bc58c9643dac09e8a2e74d472c62036910b3bd37a32813989ef7576ea2c", size = 4113151, upload-time = "2025-10-02T01:04:45.488Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f6/a2/e309afbb459f50507103793aaef85ca4348b66814c86bc73908bdeb66d12/pyright-1.1.406-py3-none-any.whl", hash = "sha256:1d81fb43c2407bf566e97e57abb01c811973fdb21b2df8df59f870f688bdca71", size = 5980982, upload-time = "2025-10-02T01:04:43.137Z" }, +] + +[[package]] +name = "pytest" +version = "8.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/08/ba/45911d754e8eba3d5a841a5ce61a65a685ff1798421ac054f85aa8747dfb/pytest-8.4.1.tar.gz", hash = "sha256:7c67fd69174877359ed9371ec3af8a3d2b04741818c51e5e99cc1742251fa93c", size = 1517714, upload-time = "2025-06-18T05:48:06.109Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/29/16/c8a903f4c4dffe7a12843191437d7cd8e32751d5de349d45d3fe69544e87/pytest-8.4.1-py3-none-any.whl", hash = "sha256:539c70ba6fcead8e78eebbf1115e8b589e7565830d7d006a8723f19ac8a0afb7", size = 365474, upload-time = "2025-06-18T05:48:03.955Z" }, +] + +[[package]] +name = "pytest-asyncio" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "backports-asyncio-runner", marker = "python_full_version < '3.11'" }, + { name = "pytest" }, + { name = "typing-extensions", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4e/51/f8794af39eeb870e87a8c8068642fc07bce0c854d6865d7dd0f2a9d338c2/pytest_asyncio-1.1.0.tar.gz", hash = "sha256:796aa822981e01b68c12e4827b8697108f7205020f24b5793b3c41555dab68ea", size = 46652, upload-time = "2025-07-16T04:29:26.393Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/9d/bf86eddabf8c6c9cb1ea9a869d6873b46f105a5d292d3a6f7071f5b07935/pytest_asyncio-1.1.0-py3-none-any.whl", hash = 
"sha256:5fe2d69607b0bd75c656d1211f969cadba035030156745ee09e7d71740e58ecf", size = 15157, upload-time = "2025-07-16T04:29:24.929Z" }, +] + +[[package]] +name = "pytest-cov" +version = "6.2.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "coverage", extra = ["toml"] }, + { name = "pluggy" }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/18/99/668cade231f434aaa59bbfbf49469068d2ddd945000621d3d165d2e7dd7b/pytest_cov-6.2.1.tar.gz", hash = "sha256:25cc6cc0a5358204b8108ecedc51a9b57b34cc6b8c967cc2c01a4e00d8a67da2", size = 69432, upload-time = "2025-06-12T10:47:47.684Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bc/16/4ea354101abb1287856baa4af2732be351c7bee728065aed451b678153fd/pytest_cov-6.2.1-py3-none-any.whl", hash = "sha256:f5bc4c23f42f1cdd23c70b1dab1bbaef4fc505ba950d53e0081d0730dd7e86d5", size = 24644, upload-time = "2025-06-12T10:47:45.932Z" }, +] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, +] + +[[package]] +name = "python-snappy" +version = "0.7.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cramjam" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/39/66/9185fbb6605ba92716d9f77fbb13c97eb671cd13c3ad56bd154016fbf08b/python_snappy-0.7.3.tar.gz", hash = "sha256:40216c1badfb2d38ac781ecb162a1d0ec40f8ee9747e610bcfefdfa79486cee3", size = 9337, upload-time = "2024-08-29T13:16:05.705Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/86/c1/0ee413ddd639aebf22c85d6db39f136ccc10e6a4b4dd275a92b5c839de8d/python_snappy-0.7.3-py3-none-any.whl", hash = "sha256:074c0636cfcd97e7251330f428064050ac81a52c62ed884fc2ddebbb60ed7f50", size = 9155, upload-time = "2024-08-29T13:16:04.773Z" }, +] + +[[package]] +name = "readthedocs-sphinx-search" +version = "0.3.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8f/96/0c51439e3dbc634cf5328ffb173ff759b7fc9abf3276e78bf71d9fc0aa51/readthedocs-sphinx-search-0.3.2.tar.gz", hash = "sha256:277773bfa28566a86694c08e568d5a648cd80f22826545555a764d6d20c365fb", size = 21949, upload-time = "2024-01-15T16:46:22.565Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/3c/41bc9d7d4d936a73e380423f23996bee1691e17598d8a03c062be6aac640/readthedocs_sphinx_search-0.3.2-py3-none-any.whl", hash = "sha256:58716fd21f01581e6e67bf3bc02e79c77e10dc58b5f8e4c7cc1977e013eda173", size = 21379, upload-time = "2024-01-15T16:46:20.552Z" }, +] + +[[package]] +name = "requests" +version = "2.32.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3", version = "1.26.20", source = { registry = "https://pypi.org/simple" }, marker = 
"python_full_version < '3.10'" }, + { name = "urllib3", version = "2.5.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e1/0a/929373653770d8a0d7ea76c37de6e41f11eb07559b103b1c02cafb3f7cf8/requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422", size = 135258, upload-time = "2025-06-09T16:43:07.34Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7c/e4/56027c4a6b4ae70ca9de302488c5ca95ad4a39e190093d6c1a8ace08341b/requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c", size = 64847, upload-time = "2025-06-09T16:43:05.728Z" }, +] + +[[package]] +name = "roman-numerals-py" +version = "3.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/30/76/48fd56d17c5bdbdf65609abbc67288728a98ed4c02919428d4f52d23b24b/roman_numerals_py-3.1.0.tar.gz", hash = "sha256:be4bf804f083a4ce001b5eb7e3c0862479d10f94c936f6c4e5f250aa5ff5bd2d", size = 9017, upload-time = "2025-02-22T07:34:54.333Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/53/97/d2cbbaa10c9b826af0e10fdf836e1bf344d9f0abb873ebc34d1f49642d3f/roman_numerals_py-3.1.0-py3-none-any.whl", hash = "sha256:9da2ad2fb670bcf24e81070ceb3be72f6c11c440d73bd579fbeca1e9f330954c", size = 7742, upload-time = "2025-02-22T07:34:52.422Z" }, +] + +[[package]] +name = "s3transfer" +version = "0.13.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "botocore" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6d/05/d52bf1e65044b4e5e27d4e63e8d1579dbdec54fce685908ae09bc3720030/s3transfer-0.13.1.tar.gz", hash = "sha256:c3fdba22ba1bd367922f27ec8032d6a1cf5f10c934fb5d68cf60fd5a23d936cf", size = 150589, upload-time = "2025-07-18T19:22:42.31Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6d/4f/d073e09df851cfa251ef7840007d04db3293a0482ce607d2b993926089be/s3transfer-0.13.1-py3-none-any.whl", hash = "sha256:a981aa7429be23fe6dfc13e80e4020057cbab622b08c0315288758d67cabc724", size = 85308, upload-time = "2025-07-18T19:22:40.947Z" }, +] + +[[package]] +name = "service-identity" +version = "24.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "cryptography" }, + { name = "pyasn1" }, + { name = "pyasn1-modules" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/07/a5/dfc752b979067947261dbbf2543470c58efe735c3c1301dd870ef27830ee/service_identity-24.2.0.tar.gz", hash = "sha256:b8683ba13f0d39c6cd5d625d2c5f65421d6d707b013b375c355751557cbe8e09", size = 39245, upload-time = "2024-10-26T07:21:57.736Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/08/2c/ca6dd598b384bc1ce581e24aaae0f2bed4ccac57749d5c3befbb5e742081/service_identity-24.2.0-py3-none-any.whl", hash = "sha256:6b047fbd8a84fd0bb0d55ebce4031e400562b9196e1e0d3e0fe2b8a59f6d4a85", size = 11364, upload-time = "2024-10-26T07:21:56.302Z" }, +] + +[[package]] +name = "setuptools" +version = "80.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/18/5d/3bf57dcd21979b887f014ea83c24ae194cfcd12b9e0fda66b957c69d1fca/setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c", size = 1319958, upload-time = "2025-05-27T00:56:51.443Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922", size = 1201486, upload-time = "2025-05-27T00:56:49.664Z" }, +] + +[[package]] +name = "simplejson" +version = "3.20.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/af/92/51b417685abd96b31308b61b9acce7ec50d8e1de8fbc39a7fd4962c60689/simplejson-3.20.1.tar.gz", hash = "sha256:e64139b4ec4f1f24c142ff7dcafe55a22b811a74d86d66560c8815687143037d", size = 85591, upload-time = "2025-02-15T05:18:53.15Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/25/c4/627214fb418cd4a17fb0230ff0b6c3bb4a85cbb48dd69c85dcc3b85df828/simplejson-3.20.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e580aa65d5f6c3bf41b9b4afe74be5d5ddba9576701c107c772d936ea2b5043a", size = 93790, upload-time = "2025-02-15T05:15:32.954Z" }, + { url = "https://files.pythonhosted.org/packages/15/ca/56a6a2a33cbcf330c4d71af3f827c47e4e0ba791e78f2642f3d1ab02ff31/simplejson-3.20.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4a586ce4f78cec11f22fe55c5bee0f067e803aab9bad3441afe2181693b5ebb5", size = 75707, upload-time = "2025-02-15T05:15:34.954Z" }, + { url = "https://files.pythonhosted.org/packages/a9/c8/3d92b67e03a3b6207d97202669f9454ed700b35ade9bd4428265a078fb6c/simplejson-3.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74a1608f9e6e8c27a4008d70a54270868306d80ed48c9df7872f9f4b8ac87808", size = 75700, upload-time = "2025-02-15T05:15:37.144Z" }, + { url = "https://files.pythonhosted.org/packages/74/30/20001219d6fdca4aaa3974c96dfb6955a766b4e2cc950505a5b51fd050b0/simplejson-3.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03db8cb64154189a92a7786209f24e391644f3a3fa335658be2df2af1960b8d8", size = 138672, upload-time = "2025-02-15T05:15:38.547Z" }, + { url = "https://files.pythonhosted.org/packages/21/47/50157810876c2a7ebbd6e6346ec25eda841fe061fecaa02538a7742a3d2a/simplejson-3.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eea7e2b7d858f6fdfbf0fe3cb846d6bd8a45446865bc09960e51f3d473c2271b", size = 146616, upload-time = "2025-02-15T05:15:39.871Z" }, + { url = "https://files.pythonhosted.org/packages/95/60/8c97cdc93096437b0aca2745aca63c880fe2315fd7f6a6ce6edbb344a2ae/simplejson-3.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e66712b17d8425bb7ff8968d4c7c7fd5a2dd7bd63728b28356223c000dd2f91f", size = 134344, upload-time = "2025-02-15T05:15:42.091Z" }, + { url = "https://files.pythonhosted.org/packages/bb/9e/da184f0e9bb3a5d7ffcde713bd41b4fe46cca56b6f24d9bd155fac56805a/simplejson-3.20.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2cc4f6486f9f515b62f5831ff1888886619b84fc837de68f26d919ba7bbdcbc", size = 138017, upload-time = "2025-02-15T05:15:43.542Z" }, + { url = "https://files.pythonhosted.org/packages/31/db/00d1a8d9b036db98f678c8a3c69ed17d2894d1768d7a00576e787ad3e546/simplejson-3.20.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a3c2df555ee4016148fa192e2b9cd9e60bc1d40769366134882685e90aee2a1e", size = 140118, upload-time = "2025-02-15T05:15:45.7Z" }, + { url = "https://files.pythonhosted.org/packages/52/21/57fc47eab8c1c73390b933a5ba9271f08e3e1ec83162c580357f28f5b97c/simplejson-3.20.1-cp310-cp310-musllinux_1_2_i686.whl", 
hash = "sha256:78520f04b7548a5e476b5396c0847e066f1e0a4c0c5e920da1ad65e95f410b11", size = 140314, upload-time = "2025-02-15T05:16:07.949Z" }, + { url = "https://files.pythonhosted.org/packages/ad/cc/7cfd78d1e0fa5e57350b98cfe77353b6dfa13dce21afa4060e1019223852/simplejson-3.20.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:f4bd49ecde87b0fe9f55cc971449a32832bca9910821f7072bbfae1155eaa007", size = 148544, upload-time = "2025-02-15T05:16:09.455Z" }, + { url = "https://files.pythonhosted.org/packages/63/26/1c894a1c2bd95dc8be0cf5a2fa73b0d173105b6ca18c90cb981ff10443d0/simplejson-3.20.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7eaae2b88eb5da53caaffdfa50e2e12022553949b88c0df4f9a9663609373f72", size = 141172, upload-time = "2025-02-15T05:16:10.966Z" }, + { url = "https://files.pythonhosted.org/packages/93/27/0717dccc10cd9988dbf1314def52ab32678a95a95328bb37cafacf499400/simplejson-3.20.1-cp310-cp310-win32.whl", hash = "sha256:e836fb88902799eac8debc2b642300748f4860a197fa3d9ea502112b6bb8e142", size = 74181, upload-time = "2025-02-15T05:16:12.361Z" }, + { url = "https://files.pythonhosted.org/packages/5f/af/593f896573f306519332d4287b1ab8b7b888c239bbd5159f7054d7055c2d/simplejson-3.20.1-cp310-cp310-win_amd64.whl", hash = "sha256:b122a19b552b212fc3b5b96fc5ce92333d4a9ac0a800803e1f17ebb16dac4be5", size = 75738, upload-time = "2025-02-15T05:16:14.438Z" }, + { url = "https://files.pythonhosted.org/packages/76/59/74bc90d1c051bc2432c96b34bd4e8036875ab58b4fcbe4d6a5a76985f853/simplejson-3.20.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:325b8c107253d3217e89d7b50c71015b5b31e2433e6c5bf38967b2f80630a8ca", size = 92132, upload-time = "2025-02-15T05:16:15.743Z" }, + { url = "https://files.pythonhosted.org/packages/71/c7/1970916e0c51794fff89f76da2f632aaf0b259b87753c88a8c409623d3e1/simplejson-3.20.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88a7baa8211089b9e58d78fbc1b0b322103f3f3d459ff16f03a36cece0d0fcf0", size = 74956, upload-time = "2025-02-15T05:16:17.062Z" }, + { url = "https://files.pythonhosted.org/packages/c8/0d/98cc5909180463f1d75fac7180de62d4cdb4e82c4fef276b9e591979372c/simplejson-3.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:299b1007b8101d50d95bc0db1bf5c38dc372e85b504cf77f596462083ee77e3f", size = 74772, upload-time = "2025-02-15T05:16:19.204Z" }, + { url = "https://files.pythonhosted.org/packages/e1/94/a30a5211a90d67725a3e8fcc1c788189f2ae2ed2b96b63ed15d0b7f5d6bb/simplejson-3.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ec618ed65caab48e81e3ed29586236a8e57daef792f1f3bb59504a7e98cd10", size = 143575, upload-time = "2025-02-15T05:16:21.337Z" }, + { url = "https://files.pythonhosted.org/packages/ee/08/cdb6821f1058eb5db46d252de69ff7e6c53f05f1bae6368fe20d5b51d37e/simplejson-3.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd2cdead1d3197f0ff43373cf4730213420523ba48697743e135e26f3d179f38", size = 153241, upload-time = "2025-02-15T05:16:22.859Z" }, + { url = "https://files.pythonhosted.org/packages/4c/2d/ca3caeea0bdc5efc5503d5f57a2dfb56804898fb196dfada121323ee0ccb/simplejson-3.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3466d2839fdc83e1af42e07b90bc8ff361c4e8796cd66722a40ba14e458faddd", size = 141500, upload-time = "2025-02-15T05:16:25.068Z" }, + { url = 
"https://files.pythonhosted.org/packages/e1/33/d3e0779d5c58245e7370c98eb969275af6b7a4a5aec3b97cbf85f09ad328/simplejson-3.20.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d492ed8e92f3a9f9be829205f44b1d0a89af6582f0cf43e0d129fa477b93fe0c", size = 144757, upload-time = "2025-02-15T05:16:28.301Z" }, + { url = "https://files.pythonhosted.org/packages/54/53/2d93128bb55861b2fa36c5944f38da51a0bc6d83e513afc6f7838440dd15/simplejson-3.20.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:f924b485537b640dc69434565463fd6fc0c68c65a8c6e01a823dd26c9983cf79", size = 144409, upload-time = "2025-02-15T05:16:29.687Z" }, + { url = "https://files.pythonhosted.org/packages/99/4c/dac310a98f897ad3435b4bdc836d92e78f09e38c5dbf28211ed21dc59fa2/simplejson-3.20.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9e8eacf6a3491bf76ea91a8d46726368a6be0eb94993f60b8583550baae9439e", size = 146082, upload-time = "2025-02-15T05:16:31.064Z" }, + { url = "https://files.pythonhosted.org/packages/ee/22/d7ba958cfed39827335b82656b1c46f89678faecda9a7677b47e87b48ee6/simplejson-3.20.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:d34d04bf90b4cea7c22d8b19091633908f14a096caa301b24c2f3d85b5068fb8", size = 154339, upload-time = "2025-02-15T05:16:32.719Z" }, + { url = "https://files.pythonhosted.org/packages/b8/c8/b072b741129406a7086a0799c6f5d13096231bf35fdd87a0cffa789687fc/simplejson-3.20.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:69dd28d4ce38390ea4aaf212902712c0fd1093dc4c1ff67e09687c3c3e15a749", size = 147915, upload-time = "2025-02-15T05:16:34.291Z" }, + { url = "https://files.pythonhosted.org/packages/6c/46/8347e61e9cf3db5342a42f7fd30a81b4f5cf85977f916852d7674a540907/simplejson-3.20.1-cp311-cp311-win32.whl", hash = "sha256:dfe7a9da5fd2a3499436cd350f31539e0a6ded5da6b5b3d422df016444d65e43", size = 73972, upload-time = "2025-02-15T05:16:35.712Z" }, + { url = "https://files.pythonhosted.org/packages/01/85/b52f24859237b4e9d523d5655796d911ba3d46e242eb1959c45b6af5aedd/simplejson-3.20.1-cp311-cp311-win_amd64.whl", hash = "sha256:896a6c04d7861d507d800da7642479c3547060bf97419d9ef73d98ced8258766", size = 75595, upload-time = "2025-02-15T05:16:36.957Z" }, + { url = "https://files.pythonhosted.org/packages/8d/eb/34c16a1ac9ba265d024dc977ad84e1659d931c0a700967c3e59a98ed7514/simplejson-3.20.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f31c4a3a7ab18467ee73a27f3e59158255d1520f3aad74315edde7a940f1be23", size = 93100, upload-time = "2025-02-15T05:16:38.801Z" }, + { url = "https://files.pythonhosted.org/packages/41/fc/2c2c007d135894971e6814e7c0806936e5bade28f8db4dd7e2a58b50debd/simplejson-3.20.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:884e6183d16b725e113b83a6fc0230152ab6627d4d36cb05c89c2c5bccfa7bc6", size = 75464, upload-time = "2025-02-15T05:16:40.905Z" }, + { url = "https://files.pythonhosted.org/packages/0f/05/2b5ecb33b776c34bb5cace5de5d7669f9b60e3ca13c113037b2ca86edfbd/simplejson-3.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03d7a426e416fe0d3337115f04164cd9427eb4256e843a6b8751cacf70abc832", size = 75112, upload-time = "2025-02-15T05:16:42.246Z" }, + { url = "https://files.pythonhosted.org/packages/fe/36/1f3609a2792f06cd4b71030485f78e91eb09cfd57bebf3116bf2980a8bac/simplejson-3.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:000602141d0bddfcff60ea6a6e97d5e10c9db6b17fd2d6c66199fa481b6214bb", size = 150182, upload-time = "2025-02-15T05:16:43.557Z" }, + { url = 
"https://files.pythonhosted.org/packages/2f/b0/053fbda38b8b602a77a4f7829def1b4f316cd8deb5440a6d3ee90790d2a4/simplejson-3.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:af8377a8af78226e82e3a4349efdde59ffa421ae88be67e18cef915e4023a595", size = 158363, upload-time = "2025-02-15T05:16:45.748Z" }, + { url = "https://files.pythonhosted.org/packages/d1/4b/2eb84ae867539a80822e92f9be4a7200dffba609275faf99b24141839110/simplejson-3.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:15c7de4c88ab2fbcb8781a3b982ef883696736134e20b1210bca43fb42ff1acf", size = 148415, upload-time = "2025-02-15T05:16:47.861Z" }, + { url = "https://files.pythonhosted.org/packages/e0/bd/400b0bd372a5666addf2540c7358bfc3841b9ce5cdbc5cc4ad2f61627ad8/simplejson-3.20.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:455a882ff3f97d810709f7b620007d4e0aca8da71d06fc5c18ba11daf1c4df49", size = 152213, upload-time = "2025-02-15T05:16:49.25Z" }, + { url = "https://files.pythonhosted.org/packages/50/12/143f447bf6a827ee9472693768dc1a5eb96154f8feb140a88ce6973a3cfa/simplejson-3.20.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:fc0f523ce923e7f38eb67804bc80e0a028c76d7868500aa3f59225574b5d0453", size = 150048, upload-time = "2025-02-15T05:16:51.5Z" }, + { url = "https://files.pythonhosted.org/packages/5e/ea/dd9b3e8e8ed710a66f24a22c16a907c9b539b6f5f45fd8586bd5c231444e/simplejson-3.20.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:76461ec929282dde4a08061071a47281ad939d0202dc4e63cdd135844e162fbc", size = 151668, upload-time = "2025-02-15T05:16:53Z" }, + { url = "https://files.pythonhosted.org/packages/99/af/ee52a8045426a0c5b89d755a5a70cc821815ef3c333b56fbcad33c4435c0/simplejson-3.20.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ab19c2da8c043607bde4d4ef3a6b633e668a7d2e3d56f40a476a74c5ea71949f", size = 158840, upload-time = "2025-02-15T05:16:54.851Z" }, + { url = "https://files.pythonhosted.org/packages/68/db/ab32869acea6b5de7d75fa0dac07a112ded795d41eaa7e66c7813b17be95/simplejson-3.20.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b2578bedaedf6294415197b267d4ef678fea336dd78ee2a6d2f4b028e9d07be3", size = 154212, upload-time = "2025-02-15T05:16:56.318Z" }, + { url = "https://files.pythonhosted.org/packages/fa/7a/e3132d454977d75a3bf9a6d541d730f76462ebf42a96fea2621498166f41/simplejson-3.20.1-cp312-cp312-win32.whl", hash = "sha256:339f407373325a36b7fd744b688ba5bae0666b5d340ec6d98aebc3014bf3d8ea", size = 74101, upload-time = "2025-02-15T05:16:57.746Z" }, + { url = "https://files.pythonhosted.org/packages/bc/5d/4e243e937fa3560107c69f6f7c2eed8589163f5ed14324e864871daa2dd9/simplejson-3.20.1-cp312-cp312-win_amd64.whl", hash = "sha256:627d4486a1ea7edf1f66bb044ace1ce6b4c1698acd1b05353c97ba4864ea2e17", size = 75736, upload-time = "2025-02-15T05:16:59.017Z" }, + { url = "https://files.pythonhosted.org/packages/c4/03/0f453a27877cb5a5fff16a975925f4119102cc8552f52536b9a98ef0431e/simplejson-3.20.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:71e849e7ceb2178344998cbe5ade101f1b329460243c79c27fbfc51c0447a7c3", size = 93109, upload-time = "2025-02-15T05:17:00.377Z" }, + { url = "https://files.pythonhosted.org/packages/74/1f/a729f4026850cabeaff23e134646c3f455e86925d2533463420635ae54de/simplejson-3.20.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b63fdbab29dc3868d6f009a59797cefaba315fd43cd32ddd998ee1da28e50e29", size = 75475, upload-time = 
"2025-02-15T05:17:02.544Z" }, + { url = "https://files.pythonhosted.org/packages/e2/14/50a2713fee8ff1f8d655b1a14f4a0f1c0c7246768a1b3b3d12964a4ed5aa/simplejson-3.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1190f9a3ce644fd50ec277ac4a98c0517f532cfebdcc4bd975c0979a9f05e1fb", size = 75112, upload-time = "2025-02-15T05:17:03.875Z" }, + { url = "https://files.pythonhosted.org/packages/45/86/ea9835abb646755140e2d482edc9bc1e91997ed19a59fd77ae4c6a0facea/simplejson-3.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1336ba7bcb722ad487cd265701ff0583c0bb6de638364ca947bb84ecc0015d1", size = 150245, upload-time = "2025-02-15T05:17:06.899Z" }, + { url = "https://files.pythonhosted.org/packages/12/b4/53084809faede45da829fe571c65fbda8479d2a5b9c633f46b74124d56f5/simplejson-3.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e975aac6a5acd8b510eba58d5591e10a03e3d16c1cf8a8624ca177491f7230f0", size = 158465, upload-time = "2025-02-15T05:17:08.707Z" }, + { url = "https://files.pythonhosted.org/packages/a9/7d/d56579468d1660b3841e1f21c14490d103e33cf911886b22652d6e9683ec/simplejson-3.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6a6dd11ee282937ad749da6f3b8d87952ad585b26e5edfa10da3ae2536c73078", size = 148514, upload-time = "2025-02-15T05:17:11.323Z" }, + { url = "https://files.pythonhosted.org/packages/19/e3/874b1cca3d3897b486d3afdccc475eb3a09815bf1015b01cf7fcb52a55f0/simplejson-3.20.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab980fcc446ab87ea0879edad41a5c28f2d86020014eb035cf5161e8de4474c6", size = 152262, upload-time = "2025-02-15T05:17:13.543Z" }, + { url = "https://files.pythonhosted.org/packages/32/84/f0fdb3625292d945c2bd13a814584603aebdb38cfbe5fe9be6b46fe598c4/simplejson-3.20.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f5aee2a4cb6b146bd17333ac623610f069f34e8f31d2f4f0c1a2186e50c594f0", size = 150164, upload-time = "2025-02-15T05:17:15.021Z" }, + { url = "https://files.pythonhosted.org/packages/95/51/6d625247224f01eaaeabace9aec75ac5603a42f8ebcce02c486fbda8b428/simplejson-3.20.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:652d8eecbb9a3b6461b21ec7cf11fd0acbab144e45e600c817ecf18e4580b99e", size = 151795, upload-time = "2025-02-15T05:17:16.542Z" }, + { url = "https://files.pythonhosted.org/packages/7f/d9/bb921df6b35be8412f519e58e86d1060fddf3ad401b783e4862e0a74c4c1/simplejson-3.20.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:8c09948f1a486a89251ee3a67c9f8c969b379f6ffff1a6064b41fea3bce0a112", size = 159027, upload-time = "2025-02-15T05:17:18.083Z" }, + { url = "https://files.pythonhosted.org/packages/03/c5/5950605e4ad023a6621cf4c931b29fd3d2a9c1f36be937230bfc83d7271d/simplejson-3.20.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:cbbd7b215ad4fc6f058b5dd4c26ee5c59f72e031dfda3ac183d7968a99e4ca3a", size = 154380, upload-time = "2025-02-15T05:17:20.334Z" }, + { url = "https://files.pythonhosted.org/packages/66/ad/b74149557c5ec1e4e4d55758bda426f5d2ec0123cd01a53ae63b8de51fa3/simplejson-3.20.1-cp313-cp313-win32.whl", hash = "sha256:ae81e482476eaa088ef9d0120ae5345de924f23962c0c1e20abbdff597631f87", size = 74102, upload-time = "2025-02-15T05:17:22.475Z" }, + { url = "https://files.pythonhosted.org/packages/db/a9/25282fdd24493e1022f30b7f5cdf804255c007218b2bfaa655bd7ad34b2d/simplejson-3.20.1-cp313-cp313-win_amd64.whl", hash = 
"sha256:1b9fd15853b90aec3b1739f4471efbf1ac05066a2c7041bf8db821bb73cd2ddc", size = 75736, upload-time = "2025-02-15T05:17:24.122Z" }, + { url = "https://files.pythonhosted.org/packages/4c/ba/d32fe890a5edaf4a8518adf043bccf7866b600123f512a6de0988cf36810/simplejson-3.20.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a8011f1dd1d676befcd4d675ebdbfdbbefd3bf350052b956ba8c699fca7d8cef", size = 93773, upload-time = "2025-02-15T05:18:28.231Z" }, + { url = "https://files.pythonhosted.org/packages/48/c7/361e7f6695b56001a04e0a5cc623cd6c82ea2f45e872e61213e405cc8a24/simplejson-3.20.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e91703a4c5fec53e36875ae426ad785f4120bd1d93b65bed4752eeccd1789e0c", size = 75697, upload-time = "2025-02-15T05:18:30.006Z" }, + { url = "https://files.pythonhosted.org/packages/3c/2f/d0ff0b772d4ef092876eb85c99bc591c446b0502715551dad7dfc7f7c2c0/simplejson-3.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e39eaa57c7757daa25bcd21f976c46be443b73dd6c3da47fe5ce7b7048ccefe2", size = 75692, upload-time = "2025-02-15T05:18:31.424Z" }, + { url = "https://files.pythonhosted.org/packages/26/94/cab4db9530b6ca9d62f16a260e8311b04130ccd670dab75e958fcb44590e/simplejson-3.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceab2ce2acdc7fbaa433a93006758db6ba9a659e80c4faa13b80b9d2318e9b17", size = 138106, upload-time = "2025-02-15T05:18:32.907Z" }, + { url = "https://files.pythonhosted.org/packages/40/22/11c0f746bdb44c297cea8a37d8f7ccb75ea6681132aadfb9f820d9a52647/simplejson-3.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6d4f320c33277a5b715db5bf5b10dae10c19076bd6d66c2843e04bd12d1f1ea5", size = 146242, upload-time = "2025-02-15T05:18:35.223Z" }, + { url = "https://files.pythonhosted.org/packages/78/e9/b7c4c26f29b41cc41ba5f0224c47adbfa7f28427418edfd58ab122f3b584/simplejson-3.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b6436c48e64378fa844d8c9e58a5ed0352bbcfd4028369a9b46679b7ab79d2d", size = 133866, upload-time = "2025-02-15T05:18:36.998Z" }, + { url = "https://files.pythonhosted.org/packages/09/68/1e81ed83f38906c8859f2b973afb19302357d6003e724a6105cee0f61ec7/simplejson-3.20.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e18345c8dda5d699be8166b61f9d80aaee4545b709f1363f60813dc032dac53", size = 137444, upload-time = "2025-02-15T05:18:38.763Z" }, + { url = "https://files.pythonhosted.org/packages/9a/6b/8d1e076c543277c1d603230eec24f4dd75ebce46d351c0679526d202981f/simplejson-3.20.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:90b573693d1526bed576f6817e2a492eaaef68f088b57d7a9e83d122bbb49e51", size = 139617, upload-time = "2025-02-15T05:18:40.36Z" }, + { url = "https://files.pythonhosted.org/packages/d1/46/7b74803de10d4157c5cd2e89028897fa733374667bc5520a44b23b6c887a/simplejson-3.20.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:272cc767826e924a6bd369ea3dbf18e166ded29059c7a4d64d21a9a22424b5b5", size = 139725, upload-time = "2025-02-15T05:18:42.012Z" }, + { url = "https://files.pythonhosted.org/packages/4b/8f/9991582665a7b6d95415e439bb4fbaa4faf0f77231666675a0fd1de54107/simplejson-3.20.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:51b41f284d603c4380732d7d619f8b34bd04bc4aa0ed0ed5f4ffd0539b14da44", size = 148010, upload-time = "2025-02-15T05:18:43.749Z" }, + { url = 
"https://files.pythonhosted.org/packages/54/ee/3c6e91989cdf65ec75e75662d9f15cfe167a792b893806169ea5b1da6fd2/simplejson-3.20.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6e6697a3067d281f01de0fe96fc7cba4ea870d96d7deb7bfcf85186d74456503", size = 140624, upload-time = "2025-02-15T05:18:45.498Z" }, + { url = "https://files.pythonhosted.org/packages/9d/bd/05e13ebb7ead81c8b555f4ccc741ea7dfa0ef5c2a0c183d6a7bc50a02bca/simplejson-3.20.1-cp39-cp39-win32.whl", hash = "sha256:6dd3a1d5aca87bf947f3339b0f8e8e329f1badf548bdbff37fac63c17936da8e", size = 74148, upload-time = "2025-02-15T05:18:47.27Z" }, + { url = "https://files.pythonhosted.org/packages/88/c9/d8bf87aaebec5a4c3ccfd5228689578e2fe77027d6114a259255d54969bf/simplejson-3.20.1-cp39-cp39-win_amd64.whl", hash = "sha256:463f1fca8fbf23d088e5850fdd0dd4d5faea8900a9f9680270bd98fd649814ca", size = 75732, upload-time = "2025-02-15T05:18:49.598Z" }, + { url = "https://files.pythonhosted.org/packages/4b/30/00f02a0a921556dd5a6db1ef2926a1bc7a8bbbfb1c49cfed68a275b8ab2b/simplejson-3.20.1-py3-none-any.whl", hash = "sha256:8a6c1bbac39fa4a79f83cbf1df6ccd8ff7069582a9fd8db1e52cea073bc2c697", size = 57121, upload-time = "2025-02-15T05:18:51.243Z" }, +] + +[[package]] +name = "six" +version = "1.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, +] + +[[package]] +name = "snowballstemmer" +version = "3.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/75/a7/9810d872919697c9d01295633f5d574fb416d47e535f258272ca1f01f447/snowballstemmer-3.0.1.tar.gz", hash = "sha256:6d5eeeec8e9f84d4d56b847692bacf79bc2c8e90c7f80ca4444ff8b6f2e52895", size = 105575, upload-time = "2025-05-09T16:34:51.843Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c8/78/3565d011c61f5a43488987ee32b6f3f656e7f107ac2782dd57bdd7d91d9a/snowballstemmer-3.0.1-py3-none-any.whl", hash = "sha256:6cd7b3897da8d6c9ffb968a6781fa6532dce9c3618a4b127d920dab764a19064", size = 103274, upload-time = "2025-05-09T16:34:50.371Z" }, +] + +[[package]] +name = "soupsieve" +version = "2.7" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/3f/f4/4a80cd6ef364b2e8b65b15816a843c0980f7a5a2b4dc701fc574952aa19f/soupsieve-2.7.tar.gz", hash = "sha256:ad282f9b6926286d2ead4750552c8a6142bc4c783fd66b0293547c8fe6ae126a", size = 103418, upload-time = "2025-04-20T18:50:08.518Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e7/9c/0e6afc12c269578be5c0c1c9f4b49a8d32770a080260c333ac04cc1c832d/soupsieve-2.7-py3-none-any.whl", hash = "sha256:6e60cc5c1ffaf1cebcc12e8188320b72071e922c2e897f737cadce79ad5d30c4", size = 36677, upload-time = "2025-04-20T18:50:07.196Z" }, +] + +[[package]] +name = "sphinx" +version = "7.4.7" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.10'", +] +dependencies = [ + { name = "alabaster", version = "0.7.16", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "babel", marker = "python_full_version < '3.10'" }, + { name = "colorama", marker = "python_full_version < '3.10' and sys_platform == 'win32'" }, + { name = "docutils", marker = "python_full_version < '3.10'" }, + { name = "imagesize", marker = "python_full_version < '3.10'" }, + { name = "importlib-metadata", marker = "python_full_version < '3.10'" }, + { name = "jinja2", marker = "python_full_version < '3.10'" }, + { name = "packaging", marker = "python_full_version < '3.10'" }, + { name = "pygments", marker = "python_full_version < '3.10'" }, + { name = "requests", marker = "python_full_version < '3.10'" }, + { name = "snowballstemmer", marker = "python_full_version < '3.10'" }, + { name = "sphinxcontrib-applehelp", marker = "python_full_version < '3.10'" }, + { name = "sphinxcontrib-devhelp", marker = "python_full_version < '3.10'" }, + { name = "sphinxcontrib-htmlhelp", marker = "python_full_version < '3.10'" }, + { name = "sphinxcontrib-jsmath", marker = "python_full_version < '3.10'" }, + { name = "sphinxcontrib-qthelp", marker = "python_full_version < '3.10'" }, + { name = "sphinxcontrib-serializinghtml", marker = "python_full_version < '3.10'" }, + { name = "tomli", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5b/be/50e50cb4f2eff47df05673d361095cafd95521d2a22521b920c67a372dcb/sphinx-7.4.7.tar.gz", hash = "sha256:242f92a7ea7e6c5b406fdc2615413890ba9f699114a9c09192d7dfead2ee9cfe", size = 8067911, upload-time = "2024-07-20T14:46:56.059Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0d/ef/153f6803c5d5f8917dbb7f7fcf6d34a871ede3296fa89c2c703f5f8a6c8e/sphinx-7.4.7-py3-none-any.whl", hash = "sha256:c2419e2135d11f1951cd994d6eb18a1835bd8fdd8429f9ca375dc1f3281bd239", size = 3401624, upload-time = "2024-07-20T14:46:52.142Z" }, +] + +[[package]] +name = "sphinx" +version = "8.1.3" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version == '3.10.*'", +] +dependencies = [ + { name = "alabaster", version = "1.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "babel", marker = "python_full_version == '3.10.*'" }, + { name = "colorama", marker = "python_full_version == '3.10.*' and sys_platform == 'win32'" }, + { name = "docutils", marker = "python_full_version == '3.10.*'" }, + { name = "imagesize", marker = "python_full_version == '3.10.*'" }, + { name = "jinja2", marker = "python_full_version == '3.10.*'" }, + { name = "packaging", marker = "python_full_version == '3.10.*'" }, + { name = "pygments", marker = 
"python_full_version == '3.10.*'" }, + { name = "requests", marker = "python_full_version == '3.10.*'" }, + { name = "snowballstemmer", marker = "python_full_version == '3.10.*'" }, + { name = "sphinxcontrib-applehelp", marker = "python_full_version == '3.10.*'" }, + { name = "sphinxcontrib-devhelp", marker = "python_full_version == '3.10.*'" }, + { name = "sphinxcontrib-htmlhelp", marker = "python_full_version == '3.10.*'" }, + { name = "sphinxcontrib-jsmath", marker = "python_full_version == '3.10.*'" }, + { name = "sphinxcontrib-qthelp", marker = "python_full_version == '3.10.*'" }, + { name = "sphinxcontrib-serializinghtml", marker = "python_full_version == '3.10.*'" }, + { name = "tomli", marker = "python_full_version == '3.10.*'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/be0b61178fe2cdcb67e2a92fc9ebb488e3c51c4f74a36a7824c0adf23425/sphinx-8.1.3.tar.gz", hash = "sha256:43c1911eecb0d3e161ad78611bc905d1ad0e523e4ddc202a58a821773dc4c927", size = 8184611, upload-time = "2024-10-13T20:27:13.93Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/26/60/1ddff83a56d33aaf6f10ec8ce84b4c007d9368b21008876fceda7e7381ef/sphinx-8.1.3-py3-none-any.whl", hash = "sha256:09719015511837b76bf6e03e42eb7595ac8c2e41eeb9c29c5b755c6b677992a2", size = 3487125, upload-time = "2024-10-13T20:27:10.448Z" }, +] + +[[package]] +name = "sphinx" +version = "8.2.3" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version == '3.14.*'", + "python_full_version >= '3.15' or (python_full_version >= '3.11' and python_full_version < '3.14')", +] +dependencies = [ + { name = "alabaster", version = "1.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "babel", marker = "python_full_version >= '3.11'" }, + { name = "colorama", marker = "python_full_version >= '3.11' and sys_platform == 'win32'" }, + { name = "docutils", marker = "python_full_version >= '3.11'" }, + { name = "imagesize", marker = "python_full_version >= '3.11'" }, + { name = "jinja2", marker = "python_full_version >= '3.11'" }, + { name = "packaging", marker = "python_full_version >= '3.11'" }, + { name = "pygments", marker = "python_full_version >= '3.11'" }, + { name = "requests", marker = "python_full_version >= '3.11'" }, + { name = "roman-numerals-py", marker = "python_full_version >= '3.11'" }, + { name = "snowballstemmer", marker = "python_full_version >= '3.11'" }, + { name = "sphinxcontrib-applehelp", marker = "python_full_version >= '3.11'" }, + { name = "sphinxcontrib-devhelp", marker = "python_full_version >= '3.11'" }, + { name = "sphinxcontrib-htmlhelp", marker = "python_full_version >= '3.11'" }, + { name = "sphinxcontrib-jsmath", marker = "python_full_version >= '3.11'" }, + { name = "sphinxcontrib-qthelp", marker = "python_full_version >= '3.11'" }, + { name = "sphinxcontrib-serializinghtml", marker = "python_full_version >= '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/38/ad/4360e50ed56cb483667b8e6dadf2d3fda62359593faabbe749a27c4eaca6/sphinx-8.2.3.tar.gz", hash = "sha256:398ad29dee7f63a75888314e9424d40f52ce5a6a87ae88e7071e80af296ec348", size = 8321876, upload-time = "2025-03-02T22:31:59.658Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/31/53/136e9eca6e0b9dc0e1962e2c908fbea2e5ac000c2a2fbd9a35797958c48b/sphinx-8.2.3-py3-none-any.whl", hash = "sha256:4405915165f13521d875a8c29c8970800a0141c14cc5416a38feca4ea5d9b9c3", size = 3589741, upload-time = 
"2025-03-02T22:31:56.836Z" }, +] + +[[package]] +name = "sphinx-autobuild" +version = "2024.10.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama" }, + { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "starlette" }, + { name = "uvicorn" }, + { name = "watchfiles" }, + { name = "websockets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a5/2c/155e1de2c1ba96a72e5dba152c509a8b41e047ee5c2def9e9f0d812f8be7/sphinx_autobuild-2024.10.3.tar.gz", hash = "sha256:248150f8f333e825107b6d4b86113ab28fa51750e5f9ae63b59dc339be951fb1", size = 14023, upload-time = "2024-10-02T23:15:30.172Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/c0/eba125db38c84d3c74717008fd3cb5000b68cd7e2cbafd1349c6a38c3d3b/sphinx_autobuild-2024.10.3-py3-none-any.whl", hash = "sha256:158e16c36f9d633e613c9aaf81c19b0fc458ca78b112533b20dafcda430d60fa", size = 11908, upload-time = "2024-10-02T23:15:28.739Z" }, +] + +[[package]] +name = "sphinx-basic-ng" +version = "1.0.0b2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/98/0b/a866924ded68efec7a1759587a4e478aec7559d8165fac8b2ad1c0e774d6/sphinx_basic_ng-1.0.0b2.tar.gz", hash = "sha256:9ec55a47c90c8c002b5960c57492ec3021f5193cb26cebc2dc4ea226848651c9", size = 20736, upload-time = "2023-07-08T18:40:54.166Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3c/dd/018ce05c532a22007ac58d4f45232514cd9d6dd0ee1dc374e309db830983/sphinx_basic_ng-1.0.0b2-py3-none-any.whl", hash = "sha256:eb09aedbabfb650607e9b4b68c9d240b90b1e1be221d6ad71d61c52e29f7932b", size = 22496, upload-time = "2023-07-08T18:40:52.659Z" }, +] + +[[package]] +name = "sphinx-rtd-theme" +version = "3.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "docutils" }, + { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "sphinxcontrib-jquery" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/91/44/c97faec644d29a5ceddd3020ae2edffa69e7d00054a8c7a6021e82f20335/sphinx_rtd_theme-3.0.2.tar.gz", hash = "sha256:b7457bc25dda723b20b086a670b9953c859eab60a2a03ee8eb2bb23e176e5f85", size = 7620463, upload-time = "2024-11-13T11:06:04.545Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/85/77/46e3bac77b82b4df5bb5b61f2de98637724f246b4966cfc34bc5895d852a/sphinx_rtd_theme-3.0.2-py2.py3-none-any.whl", 
hash = "sha256:422ccc750c3a3a311de4ae327e82affdaf59eb695ba4936538552f3b00f4ee13", size = 7655561, upload-time = "2024-11-13T11:06:02.094Z" }, +] + +[[package]] +name = "sphinxcontrib-applehelp" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/6e/b837e84a1a704953c62ef8776d45c3e8d759876b4a84fe14eba2859106fe/sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1", size = 20053, upload-time = "2024-07-29T01:09:00.465Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5d/85/9ebeae2f76e9e77b952f4b274c27238156eae7979c5421fba91a28f4970d/sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5", size = 119300, upload-time = "2024-07-29T01:08:58.99Z" }, +] + +[[package]] +name = "sphinxcontrib-devhelp" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f6/d2/5beee64d3e4e747f316bae86b55943f51e82bb86ecd325883ef65741e7da/sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad", size = 12967, upload-time = "2024-07-29T01:09:23.417Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/35/7a/987e583882f985fe4d7323774889ec58049171828b58c2217e7f79cdf44e/sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2", size = 82530, upload-time = "2024-07-29T01:09:21.945Z" }, +] + +[[package]] +name = "sphinxcontrib-htmlhelp" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/93/983afd9aa001e5201eab16b5a444ed5b9b0a7a010541e0ddfbbfd0b2470c/sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9", size = 22617, upload-time = "2024-07-29T01:09:37.889Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0a/7b/18a8c0bcec9182c05a0b3ec2a776bba4ead82750a55ff798e8d406dae604/sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8", size = 98705, upload-time = "2024-07-29T01:09:36.407Z" }, +] + +[[package]] +name = "sphinxcontrib-jquery" +version = "4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/de/f3/aa67467e051df70a6330fe7770894b3e4f09436dea6881ae0b4f3d87cad8/sphinxcontrib-jquery-4.1.tar.gz", hash = "sha256:1620739f04e36a2c779f1a131a2dfd49b2fd07351bf1968ced074365933abc7a", size = 122331, upload-time = "2023-03-14T15:01:01.944Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/85/749bd22d1a68db7291c89e2ebca53f4306c3f205853cf31e9de279034c3c/sphinxcontrib_jquery-4.1-py2.py3-none-any.whl", hash = "sha256:f936030d7d0147dd026a4f2b5a57343d233f1fc7b363f68b3d4f1cb0993878ae", size = 121104, upload-time = "2023-03-14T15:01:00.356Z" }, +] + +[[package]] +name = 
"sphinxcontrib-jsmath" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/e8/9ed3830aeed71f17c026a07a5097edcf44b692850ef215b161b8ad875729/sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8", size = 5787, upload-time = "2019-01-21T16:10:16.347Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/42/4c8646762ee83602e3fb3fbe774c2fac12f317deb0b5dbeeedd2d3ba4b77/sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178", size = 5071, upload-time = "2019-01-21T16:10:14.333Z" }, +] + +[[package]] +name = "sphinxcontrib-qthelp" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/68/bc/9104308fc285eb3e0b31b67688235db556cd5b0ef31d96f30e45f2e51cae/sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab", size = 17165, upload-time = "2024-07-29T01:09:56.435Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/27/83/859ecdd180cacc13b1f7e857abf8582a64552ea7a061057a6c716e790fce/sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb", size = 88743, upload-time = "2024-07-29T01:09:54.885Z" }, +] + +[[package]] +name = "sphinxcontrib-serializinghtml" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3b/44/6716b257b0aa6bfd51a1b31665d1c205fb12cb5ad56de752dfa15657de2f/sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d", size = 16080, upload-time = "2024-07-29T01:10:09.332Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/52/a7/d2782e4e3f77c8450f727ba74a8f12756d5ba823d81b941f1b04da9d033a/sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331", size = 92072, upload-time = "2024-07-29T01:10:08.203Z" }, +] + +[[package]] +name = "sphinxcontrib-shellcheck" +version = "1.1.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "decorator" }, + { name = "docutils" }, + { name = "six" }, + { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ef/2b/20717a5e0c7ee99dfd5fcdf11a8cf0ab02533cf62775f24d344ea5cf48c1/sphinxcontrib-shellcheck-1.1.2.zip", hash = "sha256:475a3ae12a1cfc1bc26cff57f0dd15561213818e3b470b3eacc4bb8be7b129c0", size = 338739, upload-time = "2020-03-30T01:51:39.993Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/06/9c/1ff7fe5174f944fac0fcb53bdaac7b98d73a98dd2ca800d95af6af9edb9a/sphinxcontrib_shellcheck-1.1.2-py35-none-any.whl", hash = "sha256:c0449dc9402521ab1d05a1b9eb8c9099707da64824341686dab4f620dc688514", size = 11532, upload-time = "2020-03-30T01:51:34.913Z" }, + { url = 
"https://files.pythonhosted.org/packages/9f/8c/833388d3127d8dc0d5558bf52225eb20ed024ac46ef8ef4bffe7298ceb3d/sphinxcontrib_shellcheck-1.1.2-py36-none-any.whl", hash = "sha256:bcd8ffd26e6430deff9ffd10705683b502ace3fc8b4d1ba84496b3752f65fe52", size = 11533, upload-time = "2020-03-30T01:51:36.422Z" }, + { url = "https://files.pythonhosted.org/packages/9d/b5/cdc74763bcf0916f47d053830c00114f1de65d97ea2281b66bbf2a587b8a/sphinxcontrib_shellcheck-1.1.2-py37-none-any.whl", hash = "sha256:46d1aba8201bbfc7a2c51e08446cab36bdab318c997223c8fc40733a5eedc71f", size = 11533, upload-time = "2020-03-30T01:51:37.351Z" }, + { url = "https://files.pythonhosted.org/packages/58/ba/cf15480bc238a15e10604ee7f0e3e20ea0bf9a55a4f0b4e50571e8d13e60/sphinxcontrib_shellcheck-1.1.2-py38-none-any.whl", hash = "sha256:4c5f2840418cd1d7d662c0b3f51a07625f1a8f92755b19347ce85e8258e9d847", size = 11532, upload-time = "2020-03-30T01:51:38.858Z" }, +] + +[[package]] +name = "starlette" +version = "0.47.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/04/57/d062573f391d062710d4088fa1369428c38d51460ab6fedff920efef932e/starlette-0.47.2.tar.gz", hash = "sha256:6ae9aa5db235e4846decc1e7b79c4f346adf41e9777aebeb49dfd09bbd7023d8", size = 2583948, upload-time = "2025-07-20T17:31:58.522Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f7/1f/b876b1f83aef204198a42dc101613fefccb32258e5428b5f9259677864b4/starlette-0.47.2-py3-none-any.whl", hash = "sha256:c5847e96134e5c5371ee9fac6fdf1a67336d5815e09eb2a01fdb57a351ef915b", size = 72984, upload-time = "2025-07-20T17:31:56.738Z" }, +] + +[[package]] +name = "tomli" +version = "2.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175, upload-time = "2024-11-27T22:38:36.873Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", size = 131077, upload-time = "2024-11-27T22:37:54.956Z" }, + { url = "https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", size = 123429, upload-time = "2024-11-27T22:37:56.698Z" }, + { url = "https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", size = 226067, upload-time = "2024-11-27T22:37:57.63Z" }, + { url = "https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", size = 236030, upload-time = "2024-11-27T22:37:59.344Z" }, + { url = 
"https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", size = 240898, upload-time = "2024-11-27T22:38:00.429Z" }, + { url = "https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", size = 229894, upload-time = "2024-11-27T22:38:02.094Z" }, + { url = "https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", size = 245319, upload-time = "2024-11-27T22:38:03.206Z" }, + { url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", size = 238273, upload-time = "2024-11-27T22:38:04.217Z" }, + { url = "https://files.pythonhosted.org/packages/72/6f/fa64ef058ac1446a1e51110c375339b3ec6be245af9d14c87c4a6412dd32/tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff", size = 98310, upload-time = "2024-11-27T22:38:05.908Z" }, + { url = "https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b", size = 108309, upload-time = "2024-11-27T22:38:06.812Z" }, + { url = "https://files.pythonhosted.org/packages/52/e1/f8af4c2fcde17500422858155aeb0d7e93477a0d59a98e56cbfe75070fd0/tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", size = 132762, upload-time = "2024-11-27T22:38:07.731Z" }, + { url = "https://files.pythonhosted.org/packages/03/b8/152c68bb84fc00396b83e7bbddd5ec0bd3dd409db4195e2a9b3e398ad2e3/tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", size = 123453, upload-time = "2024-11-27T22:38:09.384Z" }, + { url = "https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", size = 233486, upload-time = "2024-11-27T22:38:10.329Z" }, + { url = "https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", size = 242349, upload-time = "2024-11-27T22:38:11.443Z" }, + { url = "https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", size = 252159, upload-time = "2024-11-27T22:38:13.099Z" }, + { url = 
"https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", size = 237243, upload-time = "2024-11-27T22:38:14.766Z" }, + { url = "https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", size = 259645, upload-time = "2024-11-27T22:38:15.843Z" }, + { url = "https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", size = 244584, upload-time = "2024-11-27T22:38:17.645Z" }, + { url = "https://files.pythonhosted.org/packages/1c/9a/47c0449b98e6e7d1be6cbac02f93dd79003234ddc4aaab6ba07a9a7482e2/tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", size = 98875, upload-time = "2024-11-27T22:38:19.159Z" }, + { url = "https://files.pythonhosted.org/packages/ef/60/9b9638f081c6f1261e2688bd487625cd1e660d0a85bd469e91d8db969734/tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", size = 109418, upload-time = "2024-11-27T22:38:20.064Z" }, + { url = "https://files.pythonhosted.org/packages/04/90/2ee5f2e0362cb8a0b6499dc44f4d7d48f8fff06d28ba46e6f1eaa61a1388/tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7", size = 132708, upload-time = "2024-11-27T22:38:21.659Z" }, + { url = "https://files.pythonhosted.org/packages/c0/ec/46b4108816de6b385141f082ba99e315501ccd0a2ea23db4a100dd3990ea/tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c", size = 123582, upload-time = "2024-11-27T22:38:22.693Z" }, + { url = "https://files.pythonhosted.org/packages/a0/bd/b470466d0137b37b68d24556c38a0cc819e8febe392d5b199dcd7f578365/tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13", size = 232543, upload-time = "2024-11-27T22:38:24.367Z" }, + { url = "https://files.pythonhosted.org/packages/d9/e5/82e80ff3b751373f7cead2815bcbe2d51c895b3c990686741a8e56ec42ab/tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281", size = 241691, upload-time = "2024-11-27T22:38:26.081Z" }, + { url = "https://files.pythonhosted.org/packages/05/7e/2a110bc2713557d6a1bfb06af23dd01e7dde52b6ee7dadc589868f9abfac/tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272", size = 251170, upload-time = "2024-11-27T22:38:27.921Z" }, + { url = "https://files.pythonhosted.org/packages/64/7b/22d713946efe00e0adbcdfd6d1aa119ae03fd0b60ebed51ebb3fa9f5a2e5/tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140", size = 236530, upload-time = "2024-11-27T22:38:29.591Z" }, + { url = 
"https://files.pythonhosted.org/packages/38/31/3a76f67da4b0cf37b742ca76beaf819dca0ebef26d78fc794a576e08accf/tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2", size = 258666, upload-time = "2024-11-27T22:38:30.639Z" }, + { url = "https://files.pythonhosted.org/packages/07/10/5af1293da642aded87e8a988753945d0cf7e00a9452d3911dd3bb354c9e2/tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744", size = 243954, upload-time = "2024-11-27T22:38:31.702Z" }, + { url = "https://files.pythonhosted.org/packages/5b/b9/1ed31d167be802da0fc95020d04cd27b7d7065cc6fbefdd2f9186f60d7bd/tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec", size = 98724, upload-time = "2024-11-27T22:38:32.837Z" }, + { url = "https://files.pythonhosted.org/packages/c7/32/b0963458706accd9afcfeb867c0f9175a741bf7b19cd424230714d722198/tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69", size = 109383, upload-time = "2024-11-27T22:38:34.455Z" }, + { url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257, upload-time = "2024-11-27T22:38:35.385Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.14.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/98/5a/da40306b885cc8c09109dc2e1abd358d5684b1425678151cdaed4731c822/typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36", size = 107673, upload-time = "2025-07-04T13:28:34.16Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b5/00/d631e67a838026495268c2f6884f3711a15a9a2a96cd244fdaea53b823fb/typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76", size = 43906, upload-time = "2025-07-04T13:28:32.743Z" }, +] + +[[package]] +name = "urllib3" +version = "1.26.20" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.10'", +] +sdist = { url = "https://files.pythonhosted.org/packages/e4/e8/6ff5e6bc22095cfc59b6ea711b687e2b7ed4bdb373f7eeec370a97d7392f/urllib3-1.26.20.tar.gz", hash = "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32", size = 307380, upload-time = "2024-08-29T15:43:11.37Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/33/cf/8435d5a7159e2a9c83a95896ed596f68cf798005fe107cc655b5c5c14704/urllib3-1.26.20-py2.py3-none-any.whl", hash = "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e", size = 144225, upload-time = "2024-08-29T15:43:08.921Z" }, +] + +[[package]] +name = "urllib3" +version = "2.5.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version == '3.14.*'", + "python_full_version >= '3.15' or (python_full_version >= '3.11' and python_full_version < '3.14')", + "python_full_version == '3.10.*'", +] +sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time 
= "2025-06-18T14:07:41.644Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" }, +] + +[[package]] +name = "uvicorn" +version = "0.35.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click", version = "8.1.8", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "click", version = "8.2.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "h11" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5e/42/e0e305207bb88c6b8d3061399c6a961ffe5fbb7e2aa63c9234df7259e9cd/uvicorn-0.35.0.tar.gz", hash = "sha256:bc662f087f7cf2ce11a1d7fd70b90c9f98ef2e2831556dd078d131b96cc94a01", size = 78473, upload-time = "2025-06-28T16:15:46.058Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/e2/dc81b1bd1dcfe91735810265e9d26bc8ec5da45b4c0f6237e286819194c3/uvicorn-0.35.0-py3-none-any.whl", hash = "sha256:197535216b25ff9b785e29a0b79199f55222193d47f820816e7da751e9bc8d4a", size = 66406, upload-time = "2025-06-28T16:15:44.816Z" }, +] + +[[package]] +name = "watchfiles" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2a/9a/d451fcc97d029f5812e898fd30a53fd8c15c7bbd058fd75cfc6beb9bd761/watchfiles-1.1.0.tar.gz", hash = "sha256:693ed7ec72cbfcee399e92c895362b6e66d63dac6b91e2c11ae03d10d503e575", size = 94406, upload-time = "2025-06-15T19:06:59.42Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b9/dd/579d1dc57f0f895426a1211c4ef3b0cb37eb9e642bb04bdcd962b5df206a/watchfiles-1.1.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:27f30e14aa1c1e91cb653f03a63445739919aef84c8d2517997a83155e7a2fcc", size = 405757, upload-time = "2025-06-15T19:04:51.058Z" }, + { url = "https://files.pythonhosted.org/packages/1c/a0/7a0318cd874393344d48c34d53b3dd419466adf59a29ba5b51c88dd18b86/watchfiles-1.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3366f56c272232860ab45c77c3ca7b74ee819c8e1f6f35a7125556b198bbc6df", size = 397511, upload-time = "2025-06-15T19:04:52.79Z" }, + { url = "https://files.pythonhosted.org/packages/06/be/503514656d0555ec2195f60d810eca29b938772e9bfb112d5cd5ad6f6a9e/watchfiles-1.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8412eacef34cae2836d891836a7fff7b754d6bcac61f6c12ba5ca9bc7e427b68", size = 450739, upload-time = "2025-06-15T19:04:54.203Z" }, + { url = "https://files.pythonhosted.org/packages/4e/0d/a05dd9e5f136cdc29751816d0890d084ab99f8c17b86f25697288ca09bc7/watchfiles-1.1.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:df670918eb7dd719642e05979fc84704af913d563fd17ed636f7c4783003fdcc", size = 458106, upload-time = "2025-06-15T19:04:55.607Z" }, + { url = "https://files.pythonhosted.org/packages/f1/fa/9cd16e4dfdb831072b7ac39e7bea986e52128526251038eb481effe9f48e/watchfiles-1.1.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d7642b9bc4827b5518ebdb3b82698ada8c14c7661ddec5fe719f3e56ccd13c97", size = 484264, upload-time = "2025-06-15T19:04:57.009Z" }, + { url = 
"https://files.pythonhosted.org/packages/32/04/1da8a637c7e2b70e750a0308e9c8e662ada0cca46211fa9ef24a23937e0b/watchfiles-1.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:199207b2d3eeaeb80ef4411875a6243d9ad8bc35b07fc42daa6b801cc39cc41c", size = 597612, upload-time = "2025-06-15T19:04:58.409Z" }, + { url = "https://files.pythonhosted.org/packages/30/01/109f2762e968d3e58c95731a206e5d7d2a7abaed4299dd8a94597250153c/watchfiles-1.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a479466da6db5c1e8754caee6c262cd373e6e6c363172d74394f4bff3d84d7b5", size = 477242, upload-time = "2025-06-15T19:04:59.786Z" }, + { url = "https://files.pythonhosted.org/packages/b5/b8/46f58cf4969d3b7bc3ca35a98e739fa4085b0657a1540ccc29a1a0bc016f/watchfiles-1.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:935f9edd022ec13e447e5723a7d14456c8af254544cefbc533f6dd276c9aa0d9", size = 453148, upload-time = "2025-06-15T19:05:01.103Z" }, + { url = "https://files.pythonhosted.org/packages/a5/cd/8267594263b1770f1eb76914940d7b2d03ee55eca212302329608208e061/watchfiles-1.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8076a5769d6bdf5f673a19d51da05fc79e2bbf25e9fe755c47595785c06a8c72", size = 626574, upload-time = "2025-06-15T19:05:02.582Z" }, + { url = "https://files.pythonhosted.org/packages/a1/2f/7f2722e85899bed337cba715723e19185e288ef361360718973f891805be/watchfiles-1.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:86b1e28d4c37e89220e924305cd9f82866bb0ace666943a6e4196c5df4d58dcc", size = 624378, upload-time = "2025-06-15T19:05:03.719Z" }, + { url = "https://files.pythonhosted.org/packages/bf/20/64c88ec43d90a568234d021ab4b2a6f42a5230d772b987c3f9c00cc27b8b/watchfiles-1.1.0-cp310-cp310-win32.whl", hash = "sha256:d1caf40c1c657b27858f9774d5c0e232089bca9cb8ee17ce7478c6e9264d2587", size = 279829, upload-time = "2025-06-15T19:05:04.822Z" }, + { url = "https://files.pythonhosted.org/packages/39/5c/a9c1ed33de7af80935e4eac09570de679c6e21c07070aa99f74b4431f4d6/watchfiles-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:a89c75a5b9bc329131115a409d0acc16e8da8dfd5867ba59f1dd66ae7ea8fa82", size = 292192, upload-time = "2025-06-15T19:05:06.348Z" }, + { url = "https://files.pythonhosted.org/packages/8b/78/7401154b78ab484ccaaeef970dc2af0cb88b5ba8a1b415383da444cdd8d3/watchfiles-1.1.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:c9649dfc57cc1f9835551deb17689e8d44666315f2e82d337b9f07bd76ae3aa2", size = 405751, upload-time = "2025-06-15T19:05:07.679Z" }, + { url = "https://files.pythonhosted.org/packages/76/63/e6c3dbc1f78d001589b75e56a288c47723de28c580ad715eb116639152b5/watchfiles-1.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:406520216186b99374cdb58bc48e34bb74535adec160c8459894884c983a149c", size = 397313, upload-time = "2025-06-15T19:05:08.764Z" }, + { url = "https://files.pythonhosted.org/packages/6c/a2/8afa359ff52e99af1632f90cbf359da46184207e893a5f179301b0c8d6df/watchfiles-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb45350fd1dc75cd68d3d72c47f5b513cb0578da716df5fba02fff31c69d5f2d", size = 450792, upload-time = "2025-06-15T19:05:09.869Z" }, + { url = "https://files.pythonhosted.org/packages/1d/bf/7446b401667f5c64972a57a0233be1104157fc3abf72c4ef2666c1bd09b2/watchfiles-1.1.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:11ee4444250fcbeb47459a877e5e80ed994ce8e8d20283857fc128be1715dac7", size = 458196, upload-time = "2025-06-15T19:05:11.91Z" }, + { url = 
"https://files.pythonhosted.org/packages/58/2f/501ddbdfa3fa874ea5597c77eeea3d413579c29af26c1091b08d0c792280/watchfiles-1.1.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bda8136e6a80bdea23e5e74e09df0362744d24ffb8cd59c4a95a6ce3d142f79c", size = 484788, upload-time = "2025-06-15T19:05:13.373Z" }, + { url = "https://files.pythonhosted.org/packages/61/1e/9c18eb2eb5c953c96bc0e5f626f0e53cfef4bd19bd50d71d1a049c63a575/watchfiles-1.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b915daeb2d8c1f5cee4b970f2e2c988ce6514aace3c9296e58dd64dc9aa5d575", size = 597879, upload-time = "2025-06-15T19:05:14.725Z" }, + { url = "https://files.pythonhosted.org/packages/8b/6c/1467402e5185d89388b4486745af1e0325007af0017c3384cc786fff0542/watchfiles-1.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ed8fc66786de8d0376f9f913c09e963c66e90ced9aa11997f93bdb30f7c872a8", size = 477447, upload-time = "2025-06-15T19:05:15.775Z" }, + { url = "https://files.pythonhosted.org/packages/2b/a1/ec0a606bde4853d6c4a578f9391eeb3684a9aea736a8eb217e3e00aa89a1/watchfiles-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe4371595edf78c41ef8ac8df20df3943e13defd0efcb732b2e393b5a8a7a71f", size = 453145, upload-time = "2025-06-15T19:05:17.17Z" }, + { url = "https://files.pythonhosted.org/packages/90/b9/ef6f0c247a6a35d689fc970dc7f6734f9257451aefb30def5d100d6246a5/watchfiles-1.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b7c5f6fe273291f4d414d55b2c80d33c457b8a42677ad14b4b47ff025d0893e4", size = 626539, upload-time = "2025-06-15T19:05:18.557Z" }, + { url = "https://files.pythonhosted.org/packages/34/44/6ffda5537085106ff5aaa762b0d130ac6c75a08015dd1621376f708c94de/watchfiles-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7738027989881e70e3723c75921f1efa45225084228788fc59ea8c6d732eb30d", size = 624472, upload-time = "2025-06-15T19:05:19.588Z" }, + { url = "https://files.pythonhosted.org/packages/c3/e3/71170985c48028fa3f0a50946916a14055e741db11c2e7bc2f3b61f4d0e3/watchfiles-1.1.0-cp311-cp311-win32.whl", hash = "sha256:622d6b2c06be19f6e89b1d951485a232e3b59618def88dbeda575ed8f0d8dbf2", size = 279348, upload-time = "2025-06-15T19:05:20.856Z" }, + { url = "https://files.pythonhosted.org/packages/89/1b/3e39c68b68a7a171070f81fc2561d23ce8d6859659406842a0e4bebf3bba/watchfiles-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:48aa25e5992b61debc908a61ab4d3f216b64f44fdaa71eb082d8b2de846b7d12", size = 292607, upload-time = "2025-06-15T19:05:21.937Z" }, + { url = "https://files.pythonhosted.org/packages/61/9f/2973b7539f2bdb6ea86d2c87f70f615a71a1fc2dba2911795cea25968aea/watchfiles-1.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:00645eb79a3faa70d9cb15c8d4187bb72970b2470e938670240c7998dad9f13a", size = 285056, upload-time = "2025-06-15T19:05:23.12Z" }, + { url = "https://files.pythonhosted.org/packages/f6/b8/858957045a38a4079203a33aaa7d23ea9269ca7761c8a074af3524fbb240/watchfiles-1.1.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9dc001c3e10de4725c749d4c2f2bdc6ae24de5a88a339c4bce32300a31ede179", size = 402339, upload-time = "2025-06-15T19:05:24.516Z" }, + { url = "https://files.pythonhosted.org/packages/80/28/98b222cca751ba68e88521fabd79a4fab64005fc5976ea49b53fa205d1fa/watchfiles-1.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d9ba68ec283153dead62cbe81872d28e053745f12335d037de9cbd14bd1877f5", size = 394409, upload-time = "2025-06-15T19:05:25.469Z" }, + { url = 
"https://files.pythonhosted.org/packages/86/50/dee79968566c03190677c26f7f47960aff738d32087087bdf63a5473e7df/watchfiles-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:130fc497b8ee68dce163e4254d9b0356411d1490e868bd8790028bc46c5cc297", size = 450939, upload-time = "2025-06-15T19:05:26.494Z" }, + { url = "https://files.pythonhosted.org/packages/40/45/a7b56fb129700f3cfe2594a01aa38d033b92a33dddce86c8dfdfc1247b72/watchfiles-1.1.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:50a51a90610d0845a5931a780d8e51d7bd7f309ebc25132ba975aca016b576a0", size = 457270, upload-time = "2025-06-15T19:05:27.466Z" }, + { url = "https://files.pythonhosted.org/packages/b5/c8/fa5ef9476b1d02dc6b5e258f515fcaaecf559037edf8b6feffcbc097c4b8/watchfiles-1.1.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc44678a72ac0910bac46fa6a0de6af9ba1355669b3dfaf1ce5f05ca7a74364e", size = 483370, upload-time = "2025-06-15T19:05:28.548Z" }, + { url = "https://files.pythonhosted.org/packages/98/68/42cfcdd6533ec94f0a7aab83f759ec11280f70b11bfba0b0f885e298f9bd/watchfiles-1.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a543492513a93b001975ae283a51f4b67973662a375a403ae82f420d2c7205ee", size = 598654, upload-time = "2025-06-15T19:05:29.997Z" }, + { url = "https://files.pythonhosted.org/packages/d3/74/b2a1544224118cc28df7e59008a929e711f9c68ce7d554e171b2dc531352/watchfiles-1.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ac164e20d17cc285f2b94dc31c384bc3aa3dd5e7490473b3db043dd70fbccfd", size = 478667, upload-time = "2025-06-15T19:05:31.172Z" }, + { url = "https://files.pythonhosted.org/packages/8c/77/e3362fe308358dc9f8588102481e599c83e1b91c2ae843780a7ded939a35/watchfiles-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7590d5a455321e53857892ab8879dce62d1f4b04748769f5adf2e707afb9d4f", size = 452213, upload-time = "2025-06-15T19:05:32.299Z" }, + { url = "https://files.pythonhosted.org/packages/6e/17/c8f1a36540c9a1558d4faf08e909399e8133599fa359bf52ec8fcee5be6f/watchfiles-1.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:37d3d3f7defb13f62ece99e9be912afe9dd8a0077b7c45ee5a57c74811d581a4", size = 626718, upload-time = "2025-06-15T19:05:33.415Z" }, + { url = "https://files.pythonhosted.org/packages/26/45/fb599be38b4bd38032643783d7496a26a6f9ae05dea1a42e58229a20ac13/watchfiles-1.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:7080c4bb3efd70a07b1cc2df99a7aa51d98685be56be6038c3169199d0a1c69f", size = 623098, upload-time = "2025-06-15T19:05:34.534Z" }, + { url = "https://files.pythonhosted.org/packages/a1/e7/fdf40e038475498e160cd167333c946e45d8563ae4dd65caf757e9ffe6b4/watchfiles-1.1.0-cp312-cp312-win32.whl", hash = "sha256:cbcf8630ef4afb05dc30107bfa17f16c0896bb30ee48fc24bf64c1f970f3b1fd", size = 279209, upload-time = "2025-06-15T19:05:35.577Z" }, + { url = "https://files.pythonhosted.org/packages/3f/d3/3ae9d5124ec75143bdf088d436cba39812122edc47709cd2caafeac3266f/watchfiles-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:cbd949bdd87567b0ad183d7676feb98136cde5bb9025403794a4c0db28ed3a47", size = 292786, upload-time = "2025-06-15T19:05:36.559Z" }, + { url = "https://files.pythonhosted.org/packages/26/2f/7dd4fc8b5f2b34b545e19629b4a018bfb1de23b3a496766a2c1165ca890d/watchfiles-1.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:0a7d40b77f07be87c6faa93d0951a0fcd8cbca1ddff60a1b65d741bac6f3a9f6", size = 284343, upload-time = "2025-06-15T19:05:37.5Z" }, + { url 
= "https://files.pythonhosted.org/packages/d3/42/fae874df96595556a9089ade83be34a2e04f0f11eb53a8dbf8a8a5e562b4/watchfiles-1.1.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:5007f860c7f1f8df471e4e04aaa8c43673429047d63205d1630880f7637bca30", size = 402004, upload-time = "2025-06-15T19:05:38.499Z" }, + { url = "https://files.pythonhosted.org/packages/fa/55/a77e533e59c3003d9803c09c44c3651224067cbe7fb5d574ddbaa31e11ca/watchfiles-1.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:20ecc8abbd957046f1fe9562757903f5eaf57c3bce70929fda6c7711bb58074a", size = 393671, upload-time = "2025-06-15T19:05:39.52Z" }, + { url = "https://files.pythonhosted.org/packages/05/68/b0afb3f79c8e832e6571022611adbdc36e35a44e14f129ba09709aa4bb7a/watchfiles-1.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2f0498b7d2a3c072766dba3274fe22a183dbea1f99d188f1c6c72209a1063dc", size = 449772, upload-time = "2025-06-15T19:05:40.897Z" }, + { url = "https://files.pythonhosted.org/packages/ff/05/46dd1f6879bc40e1e74c6c39a1b9ab9e790bf1f5a2fe6c08b463d9a807f4/watchfiles-1.1.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:239736577e848678e13b201bba14e89718f5c2133dfd6b1f7846fa1b58a8532b", size = 456789, upload-time = "2025-06-15T19:05:42.045Z" }, + { url = "https://files.pythonhosted.org/packages/8b/ca/0eeb2c06227ca7f12e50a47a3679df0cd1ba487ea19cf844a905920f8e95/watchfiles-1.1.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eff4b8d89f444f7e49136dc695599a591ff769300734446c0a86cba2eb2f9895", size = 482551, upload-time = "2025-06-15T19:05:43.781Z" }, + { url = "https://files.pythonhosted.org/packages/31/47/2cecbd8694095647406645f822781008cc524320466ea393f55fe70eed3b/watchfiles-1.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12b0a02a91762c08f7264e2e79542f76870c3040bbc847fb67410ab81474932a", size = 597420, upload-time = "2025-06-15T19:05:45.244Z" }, + { url = "https://files.pythonhosted.org/packages/d9/7e/82abc4240e0806846548559d70f0b1a6dfdca75c1b4f9fa62b504ae9b083/watchfiles-1.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:29e7bc2eee15cbb339c68445959108803dc14ee0c7b4eea556400131a8de462b", size = 477950, upload-time = "2025-06-15T19:05:46.332Z" }, + { url = "https://files.pythonhosted.org/packages/25/0d/4d564798a49bf5482a4fa9416dea6b6c0733a3b5700cb8a5a503c4b15853/watchfiles-1.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9481174d3ed982e269c090f780122fb59cee6c3796f74efe74e70f7780ed94c", size = 451706, upload-time = "2025-06-15T19:05:47.459Z" }, + { url = "https://files.pythonhosted.org/packages/81/b5/5516cf46b033192d544102ea07c65b6f770f10ed1d0a6d388f5d3874f6e4/watchfiles-1.1.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:80f811146831c8c86ab17b640801c25dc0a88c630e855e2bef3568f30434d52b", size = 625814, upload-time = "2025-06-15T19:05:48.654Z" }, + { url = "https://files.pythonhosted.org/packages/0c/dd/7c1331f902f30669ac3e754680b6edb9a0dd06dea5438e61128111fadd2c/watchfiles-1.1.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:60022527e71d1d1fda67a33150ee42869042bce3d0fcc9cc49be009a9cded3fb", size = 622820, upload-time = "2025-06-15T19:05:50.088Z" }, + { url = "https://files.pythonhosted.org/packages/1b/14/36d7a8e27cd128d7b1009e7715a7c02f6c131be9d4ce1e5c3b73d0e342d8/watchfiles-1.1.0-cp313-cp313-win32.whl", hash = "sha256:32d6d4e583593cb8576e129879ea0991660b935177c0f93c6681359b3654bfa9", size = 279194, upload-time = 
"2025-06-15T19:05:51.186Z" }, + { url = "https://files.pythonhosted.org/packages/25/41/2dd88054b849aa546dbeef5696019c58f8e0774f4d1c42123273304cdb2e/watchfiles-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:f21af781a4a6fbad54f03c598ab620e3a77032c5878f3d780448421a6e1818c7", size = 292349, upload-time = "2025-06-15T19:05:52.201Z" }, + { url = "https://files.pythonhosted.org/packages/c8/cf/421d659de88285eb13941cf11a81f875c176f76a6d99342599be88e08d03/watchfiles-1.1.0-cp313-cp313-win_arm64.whl", hash = "sha256:5366164391873ed76bfdf618818c82084c9db7fac82b64a20c44d335eec9ced5", size = 283836, upload-time = "2025-06-15T19:05:53.265Z" }, + { url = "https://files.pythonhosted.org/packages/45/10/6faf6858d527e3599cc50ec9fcae73590fbddc1420bd4fdccfebffeedbc6/watchfiles-1.1.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:17ab167cca6339c2b830b744eaf10803d2a5b6683be4d79d8475d88b4a8a4be1", size = 400343, upload-time = "2025-06-15T19:05:54.252Z" }, + { url = "https://files.pythonhosted.org/packages/03/20/5cb7d3966f5e8c718006d0e97dfe379a82f16fecd3caa7810f634412047a/watchfiles-1.1.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:328dbc9bff7205c215a7807da7c18dce37da7da718e798356212d22696404339", size = 392916, upload-time = "2025-06-15T19:05:55.264Z" }, + { url = "https://files.pythonhosted.org/packages/8c/07/d8f1176328fa9e9581b6f120b017e286d2a2d22ae3f554efd9515c8e1b49/watchfiles-1.1.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7208ab6e009c627b7557ce55c465c98967e8caa8b11833531fdf95799372633", size = 449582, upload-time = "2025-06-15T19:05:56.317Z" }, + { url = "https://files.pythonhosted.org/packages/66/e8/80a14a453cf6038e81d072a86c05276692a1826471fef91df7537dba8b46/watchfiles-1.1.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a8f6f72974a19efead54195bc9bed4d850fc047bb7aa971268fd9a8387c89011", size = 456752, upload-time = "2025-06-15T19:05:57.359Z" }, + { url = "https://files.pythonhosted.org/packages/5a/25/0853b3fe0e3c2f5af9ea60eb2e781eade939760239a72c2d38fc4cc335f6/watchfiles-1.1.0-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d181ef50923c29cf0450c3cd47e2f0557b62218c50b2ab8ce2ecaa02bd97e670", size = 481436, upload-time = "2025-06-15T19:05:58.447Z" }, + { url = "https://files.pythonhosted.org/packages/fe/9e/4af0056c258b861fbb29dcb36258de1e2b857be4a9509e6298abcf31e5c9/watchfiles-1.1.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:adb4167043d3a78280d5d05ce0ba22055c266cf8655ce942f2fb881262ff3cdf", size = 596016, upload-time = "2025-06-15T19:05:59.59Z" }, + { url = "https://files.pythonhosted.org/packages/c5/fa/95d604b58aa375e781daf350897aaaa089cff59d84147e9ccff2447c8294/watchfiles-1.1.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8c5701dc474b041e2934a26d31d39f90fac8a3dee2322b39f7729867f932b1d4", size = 476727, upload-time = "2025-06-15T19:06:01.086Z" }, + { url = "https://files.pythonhosted.org/packages/65/95/fe479b2664f19be4cf5ceeb21be05afd491d95f142e72d26a42f41b7c4f8/watchfiles-1.1.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b067915e3c3936966a8607f6fe5487df0c9c4afb85226613b520890049deea20", size = 451864, upload-time = "2025-06-15T19:06:02.144Z" }, + { url = "https://files.pythonhosted.org/packages/d3/8a/3c4af14b93a15ce55901cd7a92e1a4701910f1768c78fb30f61d2b79785b/watchfiles-1.1.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:9c733cda03b6d636b4219625a4acb5c6ffb10803338e437fb614fef9516825ef", size 
= 625626, upload-time = "2025-06-15T19:06:03.578Z" }, + { url = "https://files.pythonhosted.org/packages/da/f5/cf6aa047d4d9e128f4b7cde615236a915673775ef171ff85971d698f3c2c/watchfiles-1.1.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:cc08ef8b90d78bfac66f0def80240b0197008e4852c9f285907377b2947ffdcb", size = 622744, upload-time = "2025-06-15T19:06:05.066Z" }, + { url = "https://files.pythonhosted.org/packages/2c/00/70f75c47f05dea6fd30df90f047765f6fc2d6eb8b5a3921379b0b04defa2/watchfiles-1.1.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:9974d2f7dc561cce3bb88dfa8eb309dab64c729de85fba32e98d75cf24b66297", size = 402114, upload-time = "2025-06-15T19:06:06.186Z" }, + { url = "https://files.pythonhosted.org/packages/53/03/acd69c48db4a1ed1de26b349d94077cca2238ff98fd64393f3e97484cae6/watchfiles-1.1.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c68e9f1fcb4d43798ad8814c4c1b61547b014b667216cb754e606bfade587018", size = 393879, upload-time = "2025-06-15T19:06:07.369Z" }, + { url = "https://files.pythonhosted.org/packages/2f/c8/a9a2a6f9c8baa4eceae5887fecd421e1b7ce86802bcfc8b6a942e2add834/watchfiles-1.1.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95ab1594377effac17110e1352989bdd7bdfca9ff0e5eeccd8c69c5389b826d0", size = 450026, upload-time = "2025-06-15T19:06:08.476Z" }, + { url = "https://files.pythonhosted.org/packages/fe/51/d572260d98388e6e2b967425c985e07d47ee6f62e6455cefb46a6e06eda5/watchfiles-1.1.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fba9b62da882c1be1280a7584ec4515d0a6006a94d6e5819730ec2eab60ffe12", size = 457917, upload-time = "2025-06-15T19:06:09.988Z" }, + { url = "https://files.pythonhosted.org/packages/c6/2d/4258e52917bf9f12909b6ec314ff9636276f3542f9d3807d143f27309104/watchfiles-1.1.0-cp314-cp314-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3434e401f3ce0ed6b42569128b3d1e3af773d7ec18751b918b89cd49c14eaafb", size = 483602, upload-time = "2025-06-15T19:06:11.088Z" }, + { url = "https://files.pythonhosted.org/packages/84/99/bee17a5f341a4345fe7b7972a475809af9e528deba056f8963d61ea49f75/watchfiles-1.1.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fa257a4d0d21fcbca5b5fcba9dca5a78011cb93c0323fb8855c6d2dfbc76eb77", size = 596758, upload-time = "2025-06-15T19:06:12.197Z" }, + { url = "https://files.pythonhosted.org/packages/40/76/e4bec1d59b25b89d2b0716b41b461ed655a9a53c60dc78ad5771fda5b3e6/watchfiles-1.1.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7fd1b3879a578a8ec2076c7961076df540b9af317123f84569f5a9ddee64ce92", size = 477601, upload-time = "2025-06-15T19:06:13.391Z" }, + { url = "https://files.pythonhosted.org/packages/1f/fa/a514292956f4a9ce3c567ec0c13cce427c158e9f272062685a8a727d08fc/watchfiles-1.1.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:62cc7a30eeb0e20ecc5f4bd113cd69dcdb745a07c68c0370cea919f373f65d9e", size = 451936, upload-time = "2025-06-15T19:06:14.656Z" }, + { url = "https://files.pythonhosted.org/packages/32/5d/c3bf927ec3bbeb4566984eba8dd7a8eb69569400f5509904545576741f88/watchfiles-1.1.0-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:891c69e027748b4a73847335d208e374ce54ca3c335907d381fde4e41661b13b", size = 626243, upload-time = "2025-06-15T19:06:16.232Z" }, + { url = "https://files.pythonhosted.org/packages/e6/65/6e12c042f1a68c556802a84d54bb06d35577c81e29fba14019562479159c/watchfiles-1.1.0-cp314-cp314-musllinux_1_1_x86_64.whl", hash = 
"sha256:12fe8eaffaf0faa7906895b4f8bb88264035b3f0243275e0bf24af0436b27259", size = 623073, upload-time = "2025-06-15T19:06:17.457Z" }, + { url = "https://files.pythonhosted.org/packages/89/ab/7f79d9bf57329e7cbb0a6fd4c7bd7d0cee1e4a8ef0041459f5409da3506c/watchfiles-1.1.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:bfe3c517c283e484843cb2e357dd57ba009cff351edf45fb455b5fbd1f45b15f", size = 400872, upload-time = "2025-06-15T19:06:18.57Z" }, + { url = "https://files.pythonhosted.org/packages/df/d5/3f7bf9912798e9e6c516094db6b8932df53b223660c781ee37607030b6d3/watchfiles-1.1.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a9ccbf1f129480ed3044f540c0fdbc4ee556f7175e5ab40fe077ff6baf286d4e", size = 392877, upload-time = "2025-06-15T19:06:19.55Z" }, + { url = "https://files.pythonhosted.org/packages/0d/c5/54ec7601a2798604e01c75294770dbee8150e81c6e471445d7601610b495/watchfiles-1.1.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba0e3255b0396cac3cc7bbace76404dd72b5438bf0d8e7cefa2f79a7f3649caa", size = 449645, upload-time = "2025-06-15T19:06:20.66Z" }, + { url = "https://files.pythonhosted.org/packages/0a/04/c2f44afc3b2fce21ca0b7802cbd37ed90a29874f96069ed30a36dfe57c2b/watchfiles-1.1.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4281cd9fce9fc0a9dbf0fc1217f39bf9cf2b4d315d9626ef1d4e87b84699e7e8", size = 457424, upload-time = "2025-06-15T19:06:21.712Z" }, + { url = "https://files.pythonhosted.org/packages/9f/b0/eec32cb6c14d248095261a04f290636da3df3119d4040ef91a4a50b29fa5/watchfiles-1.1.0-cp314-cp314t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6d2404af8db1329f9a3c9b79ff63e0ae7131986446901582067d9304ae8aaf7f", size = 481584, upload-time = "2025-06-15T19:06:22.777Z" }, + { url = "https://files.pythonhosted.org/packages/d1/e2/ca4bb71c68a937d7145aa25709e4f5d68eb7698a25ce266e84b55d591bbd/watchfiles-1.1.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e78b6ed8165996013165eeabd875c5dfc19d41b54f94b40e9fff0eb3193e5e8e", size = 596675, upload-time = "2025-06-15T19:06:24.226Z" }, + { url = "https://files.pythonhosted.org/packages/a1/dd/b0e4b7fb5acf783816bc950180a6cd7c6c1d2cf7e9372c0ea634e722712b/watchfiles-1.1.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:249590eb75ccc117f488e2fabd1bfa33c580e24b96f00658ad88e38844a040bb", size = 477363, upload-time = "2025-06-15T19:06:25.42Z" }, + { url = "https://files.pythonhosted.org/packages/69/c4/088825b75489cb5b6a761a4542645718893d395d8c530b38734f19da44d2/watchfiles-1.1.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d05686b5487cfa2e2c28ff1aa370ea3e6c5accfe6435944ddea1e10d93872147", size = 452240, upload-time = "2025-06-15T19:06:26.552Z" }, + { url = "https://files.pythonhosted.org/packages/10/8c/22b074814970eeef43b7c44df98c3e9667c1f7bf5b83e0ff0201b0bd43f9/watchfiles-1.1.0-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:d0e10e6f8f6dc5762adee7dece33b722282e1f59aa6a55da5d493a97282fedd8", size = 625607, upload-time = "2025-06-15T19:06:27.606Z" }, + { url = "https://files.pythonhosted.org/packages/32/fa/a4f5c2046385492b2273213ef815bf71a0d4c1943b784fb904e184e30201/watchfiles-1.1.0-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:af06c863f152005c7592df1d6a7009c836a247c9d8adb78fef8575a5a98699db", size = 623315, upload-time = "2025-06-15T19:06:29.076Z" }, + { url = 
"https://files.pythonhosted.org/packages/47/8a/a45db804b9f0740f8408626ab2bca89c3136432e57c4673b50180bf85dd9/watchfiles-1.1.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:865c8e95713744cf5ae261f3067861e9da5f1370ba91fc536431e29b418676fa", size = 406400, upload-time = "2025-06-15T19:06:30.233Z" }, + { url = "https://files.pythonhosted.org/packages/64/06/a08684f628fb41addd451845aceedc2407dc3d843b4b060a7c4350ddee0c/watchfiles-1.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:42f92befc848bb7a19658f21f3e7bae80d7d005d13891c62c2cd4d4d0abb3433", size = 397920, upload-time = "2025-06-15T19:06:31.315Z" }, + { url = "https://files.pythonhosted.org/packages/79/e6/e10d5675af653b1b07d4156906858041149ca222edaf8995877f2605ba9e/watchfiles-1.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa0cc8365ab29487eb4f9979fd41b22549853389e22d5de3f134a6796e1b05a4", size = 451196, upload-time = "2025-06-15T19:06:32.435Z" }, + { url = "https://files.pythonhosted.org/packages/f6/8a/facd6988100cd0f39e89f6c550af80edb28e3a529e1ee662e750663e6b36/watchfiles-1.1.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:90ebb429e933645f3da534c89b29b665e285048973b4d2b6946526888c3eb2c7", size = 458218, upload-time = "2025-06-15T19:06:33.503Z" }, + { url = "https://files.pythonhosted.org/packages/90/26/34cbcbc4d0f2f8f9cc243007e65d741ae039f7a11ef8ec6e9cd25bee08d1/watchfiles-1.1.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c588c45da9b08ab3da81d08d7987dae6d2a3badd63acdb3e206a42dbfa7cb76f", size = 484851, upload-time = "2025-06-15T19:06:34.541Z" }, + { url = "https://files.pythonhosted.org/packages/d7/1f/f59faa9fc4b0e36dbcdd28a18c430416443b309d295d8b82e18192d120ad/watchfiles-1.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7c55b0f9f68590115c25272b06e63f0824f03d4fc7d6deed43d8ad5660cabdbf", size = 599520, upload-time = "2025-06-15T19:06:35.785Z" }, + { url = "https://files.pythonhosted.org/packages/83/72/3637abecb3bf590529f5154ca000924003e5f4bbb9619744feeaf6f0b70b/watchfiles-1.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd17a1e489f02ce9117b0de3c0b1fab1c3e2eedc82311b299ee6b6faf6c23a29", size = 477956, upload-time = "2025-06-15T19:06:36.965Z" }, + { url = "https://files.pythonhosted.org/packages/f7/f3/d14ffd9acc0c1bd4790378995e320981423263a5d70bd3929e2e0dc87fff/watchfiles-1.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da71945c9ace018d8634822f16cbc2a78323ef6c876b1d34bbf5d5222fd6a72e", size = 453196, upload-time = "2025-06-15T19:06:38.024Z" }, + { url = "https://files.pythonhosted.org/packages/7f/38/78ad77bd99e20c0fdc82262be571ef114fc0beef9b43db52adb939768c38/watchfiles-1.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:51556d5004887045dba3acdd1fdf61dddea2be0a7e18048b5e853dcd37149b86", size = 627479, upload-time = "2025-06-15T19:06:39.442Z" }, + { url = "https://files.pythonhosted.org/packages/e6/cf/549d50a22fcc83f1017c6427b1c76c053233f91b526f4ad7a45971e70c0b/watchfiles-1.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04e4ed5d1cd3eae68c89bcc1a485a109f39f2fd8de05f705e98af6b5f1861f1f", size = 624414, upload-time = "2025-06-15T19:06:40.859Z" }, + { url = "https://files.pythonhosted.org/packages/72/de/57d6e40dc9140af71c12f3a9fc2d3efc5529d93981cd4d265d484d7c9148/watchfiles-1.1.0-cp39-cp39-win32.whl", hash = "sha256:c600e85f2ffd9f1035222b1a312aff85fd11ea39baff1d705b9b047aad2ce267", size = 280020, upload-time = "2025-06-15T19:06:41.89Z" }, + { url = 
"https://files.pythonhosted.org/packages/88/bb/7d287fc2a762396b128a0fca2dbae29386e0a242b81d1046daf389641db3/watchfiles-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:3aba215958d88182e8d2acba0fdaf687745180974946609119953c0e112397dc", size = 292758, upload-time = "2025-06-15T19:06:43.251Z" }, + { url = "https://files.pythonhosted.org/packages/be/7c/a3d7c55cfa377c2f62c4ae3c6502b997186bc5e38156bafcb9b653de9a6d/watchfiles-1.1.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3a6fd40bbb50d24976eb275ccb55cd1951dfb63dbc27cae3066a6ca5f4beabd5", size = 406748, upload-time = "2025-06-15T19:06:44.2Z" }, + { url = "https://files.pythonhosted.org/packages/38/d0/c46f1b2c0ca47f3667b144de6f0515f6d1c670d72f2ca29861cac78abaa1/watchfiles-1.1.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9f811079d2f9795b5d48b55a37aa7773680a5659afe34b54cc1d86590a51507d", size = 398801, upload-time = "2025-06-15T19:06:45.774Z" }, + { url = "https://files.pythonhosted.org/packages/70/9c/9a6a42e97f92eeed77c3485a43ea96723900aefa3ac739a8c73f4bff2cd7/watchfiles-1.1.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a2726d7bfd9f76158c84c10a409b77a320426540df8c35be172444394b17f7ea", size = 451528, upload-time = "2025-06-15T19:06:46.791Z" }, + { url = "https://files.pythonhosted.org/packages/51/7b/98c7f4f7ce7ff03023cf971cd84a3ee3b790021ae7584ffffa0eb2554b96/watchfiles-1.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df32d59cb9780f66d165a9a7a26f19df2c7d24e3bd58713108b41d0ff4f929c6", size = 454095, upload-time = "2025-06-15T19:06:48.211Z" }, + { url = "https://files.pythonhosted.org/packages/8c/6b/686dcf5d3525ad17b384fd94708e95193529b460a1b7bf40851f1328ec6e/watchfiles-1.1.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:0ece16b563b17ab26eaa2d52230c9a7ae46cf01759621f4fbbca280e438267b3", size = 406910, upload-time = "2025-06-15T19:06:49.335Z" }, + { url = "https://files.pythonhosted.org/packages/f3/d3/71c2dcf81dc1edcf8af9f4d8d63b1316fb0a2dd90cbfd427e8d9dd584a90/watchfiles-1.1.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:51b81e55d40c4b4aa8658427a3ee7ea847c591ae9e8b81ef94a90b668999353c", size = 398816, upload-time = "2025-06-15T19:06:50.433Z" }, + { url = "https://files.pythonhosted.org/packages/b8/fa/12269467b2fc006f8fce4cd6c3acfa77491dd0777d2a747415f28ccc8c60/watchfiles-1.1.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2bcdc54ea267fe72bfc7d83c041e4eb58d7d8dc6f578dfddb52f037ce62f432", size = 451584, upload-time = "2025-06-15T19:06:51.834Z" }, + { url = "https://files.pythonhosted.org/packages/bd/d3/254cea30f918f489db09d6a8435a7de7047f8cb68584477a515f160541d6/watchfiles-1.1.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:923fec6e5461c42bd7e3fd5ec37492c6f3468be0499bc0707b4bbbc16ac21792", size = 454009, upload-time = "2025-06-15T19:06:52.896Z" }, + { url = "https://files.pythonhosted.org/packages/48/93/5c96bdb65e7f88f7da40645f34c0a3c317a2931ed82161e93c91e8eddd27/watchfiles-1.1.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7b3443f4ec3ba5aa00b0e9fa90cf31d98321cbff8b925a7c7b84161619870bc9", size = 406640, upload-time = "2025-06-15T19:06:54.868Z" }, + { url = "https://files.pythonhosted.org/packages/e3/25/09204836e93e1b99cce88802ce87264a1d20610c7a8f6de24def27ad95b1/watchfiles-1.1.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7049e52167fc75fc3cc418fc13d39a8e520cbb60ca08b47f6cedb85e181d2f2a", size = 398543, upload-time 
= "2025-06-15T19:06:55.95Z" }, + { url = "https://files.pythonhosted.org/packages/5e/dc/6f324a6f32c5ab73b54311b5f393a79df34c1584b8d2404cf7e6d780aa5d/watchfiles-1.1.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54062ef956807ba806559b3c3d52105ae1827a0d4ab47b621b31132b6b7e2866", size = 451787, upload-time = "2025-06-15T19:06:56.998Z" }, + { url = "https://files.pythonhosted.org/packages/45/5d/1d02ef4caa4ec02389e72d5594cdf9c67f1800a7c380baa55063c30c6598/watchfiles-1.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a7bd57a1bb02f9d5c398c0c1675384e7ab1dd39da0ca50b7f09af45fa435277", size = 454272, upload-time = "2025-06-15T19:06:58.055Z" }, +] + +[[package]] +name = "websockets" +version = "15.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/21/e6/26d09fab466b7ca9c7737474c52be4f76a40301b08362eb2dbc19dcc16c1/websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee", size = 177016, upload-time = "2025-03-05T20:03:41.606Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/da/6462a9f510c0c49837bbc9345aca92d767a56c1fb2939e1579df1e1cdcf7/websockets-15.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d63efaa0cd96cf0c5fe4d581521d9fa87744540d4bc999ae6e08595a1014b45b", size = 175423, upload-time = "2025-03-05T20:01:35.363Z" }, + { url = "https://files.pythonhosted.org/packages/1c/9f/9d11c1a4eb046a9e106483b9ff69bce7ac880443f00e5ce64261b47b07e7/websockets-15.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ac60e3b188ec7574cb761b08d50fcedf9d77f1530352db4eef1707fe9dee7205", size = 173080, upload-time = "2025-03-05T20:01:37.304Z" }, + { url = "https://files.pythonhosted.org/packages/d5/4f/b462242432d93ea45f297b6179c7333dd0402b855a912a04e7fc61c0d71f/websockets-15.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5756779642579d902eed757b21b0164cd6fe338506a8083eb58af5c372e39d9a", size = 173329, upload-time = "2025-03-05T20:01:39.668Z" }, + { url = "https://files.pythonhosted.org/packages/6e/0c/6afa1f4644d7ed50284ac59cc70ef8abd44ccf7d45850d989ea7310538d0/websockets-15.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fdfe3e2a29e4db3659dbd5bbf04560cea53dd9610273917799f1cde46aa725e", size = 182312, upload-time = "2025-03-05T20:01:41.815Z" }, + { url = "https://files.pythonhosted.org/packages/dd/d4/ffc8bd1350b229ca7a4db2a3e1c482cf87cea1baccd0ef3e72bc720caeec/websockets-15.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c2529b320eb9e35af0fa3016c187dffb84a3ecc572bcee7c3ce302bfeba52bf", size = 181319, upload-time = "2025-03-05T20:01:43.967Z" }, + { url = "https://files.pythonhosted.org/packages/97/3a/5323a6bb94917af13bbb34009fac01e55c51dfde354f63692bf2533ffbc2/websockets-15.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac1e5c9054fe23226fb11e05a6e630837f074174c4c2f0fe442996112a6de4fb", size = 181631, upload-time = "2025-03-05T20:01:46.104Z" }, + { url = "https://files.pythonhosted.org/packages/a6/cc/1aeb0f7cee59ef065724041bb7ed667b6ab1eeffe5141696cccec2687b66/websockets-15.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5df592cd503496351d6dc14f7cdad49f268d8e618f80dce0cd5a36b93c3fc08d", size = 182016, upload-time = "2025-03-05T20:01:47.603Z" }, + { url = 
"https://files.pythonhosted.org/packages/79/f9/c86f8f7af208e4161a7f7e02774e9d0a81c632ae76db2ff22549e1718a51/websockets-15.0.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0a34631031a8f05657e8e90903e656959234f3a04552259458aac0b0f9ae6fd9", size = 181426, upload-time = "2025-03-05T20:01:48.949Z" }, + { url = "https://files.pythonhosted.org/packages/c7/b9/828b0bc6753db905b91df6ae477c0b14a141090df64fb17f8a9d7e3516cf/websockets-15.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3d00075aa65772e7ce9e990cab3ff1de702aa09be3940d1dc88d5abf1ab8a09c", size = 181360, upload-time = "2025-03-05T20:01:50.938Z" }, + { url = "https://files.pythonhosted.org/packages/89/fb/250f5533ec468ba6327055b7d98b9df056fb1ce623b8b6aaafb30b55d02e/websockets-15.0.1-cp310-cp310-win32.whl", hash = "sha256:1234d4ef35db82f5446dca8e35a7da7964d02c127b095e172e54397fb6a6c256", size = 176388, upload-time = "2025-03-05T20:01:52.213Z" }, + { url = "https://files.pythonhosted.org/packages/1c/46/aca7082012768bb98e5608f01658ff3ac8437e563eca41cf068bd5849a5e/websockets-15.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:39c1fec2c11dc8d89bba6b2bf1556af381611a173ac2b511cf7231622058af41", size = 176830, upload-time = "2025-03-05T20:01:53.922Z" }, + { url = "https://files.pythonhosted.org/packages/9f/32/18fcd5919c293a398db67443acd33fde142f283853076049824fc58e6f75/websockets-15.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:823c248b690b2fd9303ba00c4f66cd5e2d8c3ba4aa968b2779be9532a4dad431", size = 175423, upload-time = "2025-03-05T20:01:56.276Z" }, + { url = "https://files.pythonhosted.org/packages/76/70/ba1ad96b07869275ef42e2ce21f07a5b0148936688c2baf7e4a1f60d5058/websockets-15.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678999709e68425ae2593acf2e3ebcbcf2e69885a5ee78f9eb80e6e371f1bf57", size = 173082, upload-time = "2025-03-05T20:01:57.563Z" }, + { url = "https://files.pythonhosted.org/packages/86/f2/10b55821dd40eb696ce4704a87d57774696f9451108cff0d2824c97e0f97/websockets-15.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d50fd1ee42388dcfb2b3676132c78116490976f1300da28eb629272d5d93e905", size = 173330, upload-time = "2025-03-05T20:01:59.063Z" }, + { url = "https://files.pythonhosted.org/packages/a5/90/1c37ae8b8a113d3daf1065222b6af61cc44102da95388ac0018fcb7d93d9/websockets-15.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d99e5546bf73dbad5bf3547174cd6cb8ba7273062a23808ffea025ecb1cf8562", size = 182878, upload-time = "2025-03-05T20:02:00.305Z" }, + { url = "https://files.pythonhosted.org/packages/8e/8d/96e8e288b2a41dffafb78e8904ea7367ee4f891dafc2ab8d87e2124cb3d3/websockets-15.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66dd88c918e3287efc22409d426c8f729688d89a0c587c88971a0faa2c2f3792", size = 181883, upload-time = "2025-03-05T20:02:03.148Z" }, + { url = "https://files.pythonhosted.org/packages/93/1f/5d6dbf551766308f6f50f8baf8e9860be6182911e8106da7a7f73785f4c4/websockets-15.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dd8327c795b3e3f219760fa603dcae1dcc148172290a8ab15158cf85a953413", size = 182252, upload-time = "2025-03-05T20:02:05.29Z" }, + { url = "https://files.pythonhosted.org/packages/d4/78/2d4fed9123e6620cbf1706c0de8a1632e1a28e7774d94346d7de1bba2ca3/websockets-15.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8fdc51055e6ff4adeb88d58a11042ec9a5eae317a0a53d12c062c8a8865909e8", size = 182521, upload-time = 
"2025-03-05T20:02:07.458Z" }, + { url = "https://files.pythonhosted.org/packages/e7/3b/66d4c1b444dd1a9823c4a81f50231b921bab54eee2f69e70319b4e21f1ca/websockets-15.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:693f0192126df6c2327cce3baa7c06f2a117575e32ab2308f7f8216c29d9e2e3", size = 181958, upload-time = "2025-03-05T20:02:09.842Z" }, + { url = "https://files.pythonhosted.org/packages/08/ff/e9eed2ee5fed6f76fdd6032ca5cd38c57ca9661430bb3d5fb2872dc8703c/websockets-15.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:54479983bd5fb469c38f2f5c7e3a24f9a4e70594cd68cd1fa6b9340dadaff7cf", size = 181918, upload-time = "2025-03-05T20:02:11.968Z" }, + { url = "https://files.pythonhosted.org/packages/d8/75/994634a49b7e12532be6a42103597b71098fd25900f7437d6055ed39930a/websockets-15.0.1-cp311-cp311-win32.whl", hash = "sha256:16b6c1b3e57799b9d38427dda63edcbe4926352c47cf88588c0be4ace18dac85", size = 176388, upload-time = "2025-03-05T20:02:13.32Z" }, + { url = "https://files.pythonhosted.org/packages/98/93/e36c73f78400a65f5e236cd376713c34182e6663f6889cd45a4a04d8f203/websockets-15.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:27ccee0071a0e75d22cb35849b1db43f2ecd3e161041ac1ee9d2352ddf72f065", size = 176828, upload-time = "2025-03-05T20:02:14.585Z" }, + { url = "https://files.pythonhosted.org/packages/51/6b/4545a0d843594f5d0771e86463606a3988b5a09ca5123136f8a76580dd63/websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3", size = 175437, upload-time = "2025-03-05T20:02:16.706Z" }, + { url = "https://files.pythonhosted.org/packages/f4/71/809a0f5f6a06522af902e0f2ea2757f71ead94610010cf570ab5c98e99ed/websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665", size = 173096, upload-time = "2025-03-05T20:02:18.832Z" }, + { url = "https://files.pythonhosted.org/packages/3d/69/1a681dd6f02180916f116894181eab8b2e25b31e484c5d0eae637ec01f7c/websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2", size = 173332, upload-time = "2025-03-05T20:02:20.187Z" }, + { url = "https://files.pythonhosted.org/packages/a6/02/0073b3952f5bce97eafbb35757f8d0d54812b6174ed8dd952aa08429bcc3/websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215", size = 183152, upload-time = "2025-03-05T20:02:22.286Z" }, + { url = "https://files.pythonhosted.org/packages/74/45/c205c8480eafd114b428284840da0b1be9ffd0e4f87338dc95dc6ff961a1/websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5", size = 182096, upload-time = "2025-03-05T20:02:24.368Z" }, + { url = "https://files.pythonhosted.org/packages/14/8f/aa61f528fba38578ec553c145857a181384c72b98156f858ca5c8e82d9d3/websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65", size = 182523, upload-time = "2025-03-05T20:02:25.669Z" }, + { url = "https://files.pythonhosted.org/packages/ec/6d/0267396610add5bc0d0d3e77f546d4cd287200804fe02323797de77dbce9/websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe", size = 182790, upload-time = "2025-03-05T20:02:26.99Z" }, + { url = "https://files.pythonhosted.org/packages/02/05/c68c5adbf679cf610ae2f74a9b871ae84564462955d991178f95a1ddb7dd/websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4", size = 182165, upload-time = "2025-03-05T20:02:30.291Z" }, + { url = "https://files.pythonhosted.org/packages/29/93/bb672df7b2f5faac89761cb5fa34f5cec45a4026c383a4b5761c6cea5c16/websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597", size = 182160, upload-time = "2025-03-05T20:02:31.634Z" }, + { url = "https://files.pythonhosted.org/packages/ff/83/de1f7709376dc3ca9b7eeb4b9a07b4526b14876b6d372a4dc62312bebee0/websockets-15.0.1-cp312-cp312-win32.whl", hash = "sha256:c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9", size = 176395, upload-time = "2025-03-05T20:02:33.017Z" }, + { url = "https://files.pythonhosted.org/packages/7d/71/abf2ebc3bbfa40f391ce1428c7168fb20582d0ff57019b69ea20fa698043/websockets-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7", size = 176841, upload-time = "2025-03-05T20:02:34.498Z" }, + { url = "https://files.pythonhosted.org/packages/cb/9f/51f0cf64471a9d2b4d0fc6c534f323b664e7095640c34562f5182e5a7195/websockets-15.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee443ef070bb3b6ed74514f5efaa37a252af57c90eb33b956d35c8e9c10a1931", size = 175440, upload-time = "2025-03-05T20:02:36.695Z" }, + { url = "https://files.pythonhosted.org/packages/8a/05/aa116ec9943c718905997412c5989f7ed671bc0188ee2ba89520e8765d7b/websockets-15.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5a939de6b7b4e18ca683218320fc67ea886038265fd1ed30173f5ce3f8e85675", size = 173098, upload-time = "2025-03-05T20:02:37.985Z" }, + { url = "https://files.pythonhosted.org/packages/ff/0b/33cef55ff24f2d92924923c99926dcce78e7bd922d649467f0eda8368923/websockets-15.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:746ee8dba912cd6fc889a8147168991d50ed70447bf18bcda7039f7d2e3d9151", size = 173329, upload-time = "2025-03-05T20:02:39.298Z" }, + { url = "https://files.pythonhosted.org/packages/31/1d/063b25dcc01faa8fada1469bdf769de3768b7044eac9d41f734fd7b6ad6d/websockets-15.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:595b6c3969023ecf9041b2936ac3827e4623bfa3ccf007575f04c5a6aa318c22", size = 183111, upload-time = "2025-03-05T20:02:40.595Z" }, + { url = "https://files.pythonhosted.org/packages/93/53/9a87ee494a51bf63e4ec9241c1ccc4f7c2f45fff85d5bde2ff74fcb68b9e/websockets-15.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c714d2fc58b5ca3e285461a4cc0c9a66bd0e24c5da9911e30158286c9b5be7f", size = 182054, upload-time = "2025-03-05T20:02:41.926Z" }, + { url = "https://files.pythonhosted.org/packages/ff/b2/83a6ddf56cdcbad4e3d841fcc55d6ba7d19aeb89c50f24dd7e859ec0805f/websockets-15.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f3c1e2ab208db911594ae5b4f79addeb3501604a165019dd221c0bdcabe4db8", size = 182496, upload-time = "2025-03-05T20:02:43.304Z" }, + { url = 
"https://files.pythonhosted.org/packages/98/41/e7038944ed0abf34c45aa4635ba28136f06052e08fc2168520bb8b25149f/websockets-15.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:229cf1d3ca6c1804400b0a9790dc66528e08a6a1feec0d5040e8b9eb14422375", size = 182829, upload-time = "2025-03-05T20:02:48.812Z" }, + { url = "https://files.pythonhosted.org/packages/e0/17/de15b6158680c7623c6ef0db361da965ab25d813ae54fcfeae2e5b9ef910/websockets-15.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:756c56e867a90fb00177d530dca4b097dd753cde348448a1012ed6c5131f8b7d", size = 182217, upload-time = "2025-03-05T20:02:50.14Z" }, + { url = "https://files.pythonhosted.org/packages/33/2b/1f168cb6041853eef0362fb9554c3824367c5560cbdaad89ac40f8c2edfc/websockets-15.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:558d023b3df0bffe50a04e710bc87742de35060580a293c2a984299ed83bc4e4", size = 182195, upload-time = "2025-03-05T20:02:51.561Z" }, + { url = "https://files.pythonhosted.org/packages/86/eb/20b6cdf273913d0ad05a6a14aed4b9a85591c18a987a3d47f20fa13dcc47/websockets-15.0.1-cp313-cp313-win32.whl", hash = "sha256:ba9e56e8ceeeedb2e080147ba85ffcd5cd0711b89576b83784d8605a7df455fa", size = 176393, upload-time = "2025-03-05T20:02:53.814Z" }, + { url = "https://files.pythonhosted.org/packages/1b/6c/c65773d6cab416a64d191d6ee8a8b1c68a09970ea6909d16965d26bfed1e/websockets-15.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:e09473f095a819042ecb2ab9465aee615bd9c2028e4ef7d933600a8401c79561", size = 176837, upload-time = "2025-03-05T20:02:55.237Z" }, + { url = "https://files.pythonhosted.org/packages/36/db/3fff0bcbe339a6fa6a3b9e3fbc2bfb321ec2f4cd233692272c5a8d6cf801/websockets-15.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5f4c04ead5aed67c8a1a20491d54cdfba5884507a48dd798ecaf13c74c4489f5", size = 175424, upload-time = "2025-03-05T20:02:56.505Z" }, + { url = "https://files.pythonhosted.org/packages/46/e6/519054c2f477def4165b0ec060ad664ed174e140b0d1cbb9fafa4a54f6db/websockets-15.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abdc0c6c8c648b4805c5eacd131910d2a7f6455dfd3becab248ef108e89ab16a", size = 173077, upload-time = "2025-03-05T20:02:58.37Z" }, + { url = "https://files.pythonhosted.org/packages/1a/21/c0712e382df64c93a0d16449ecbf87b647163485ca1cc3f6cbadb36d2b03/websockets-15.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a625e06551975f4b7ea7102bc43895b90742746797e2e14b70ed61c43a90f09b", size = 173324, upload-time = "2025-03-05T20:02:59.773Z" }, + { url = "https://files.pythonhosted.org/packages/1c/cb/51ba82e59b3a664df54beed8ad95517c1b4dc1a913730e7a7db778f21291/websockets-15.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d591f8de75824cbb7acad4e05d2d710484f15f29d4a915092675ad3456f11770", size = 182094, upload-time = "2025-03-05T20:03:01.827Z" }, + { url = "https://files.pythonhosted.org/packages/fb/0f/bf3788c03fec679bcdaef787518dbe60d12fe5615a544a6d4cf82f045193/websockets-15.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47819cea040f31d670cc8d324bb6435c6f133b8c7a19ec3d61634e62f8d8f9eb", size = 181094, upload-time = "2025-03-05T20:03:03.123Z" }, + { url = "https://files.pythonhosted.org/packages/5e/da/9fb8c21edbc719b66763a571afbaf206cb6d3736d28255a46fc2fe20f902/websockets-15.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac017dd64572e5c3bd01939121e4d16cf30e5d7e110a119399cf3133b63ad054", size = 181397, upload-time = 
"2025-03-05T20:03:04.443Z" }, + { url = "https://files.pythonhosted.org/packages/2e/65/65f379525a2719e91d9d90c38fe8b8bc62bd3c702ac651b7278609b696c4/websockets-15.0.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4a9fac8e469d04ce6c25bb2610dc535235bd4aa14996b4e6dbebf5e007eba5ee", size = 181794, upload-time = "2025-03-05T20:03:06.708Z" }, + { url = "https://files.pythonhosted.org/packages/d9/26/31ac2d08f8e9304d81a1a7ed2851c0300f636019a57cbaa91342015c72cc/websockets-15.0.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363c6f671b761efcb30608d24925a382497c12c506b51661883c3e22337265ed", size = 181194, upload-time = "2025-03-05T20:03:08.844Z" }, + { url = "https://files.pythonhosted.org/packages/98/72/1090de20d6c91994cd4b357c3f75a4f25ee231b63e03adea89671cc12a3f/websockets-15.0.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2034693ad3097d5355bfdacfffcbd3ef5694f9718ab7f29c29689a9eae841880", size = 181164, upload-time = "2025-03-05T20:03:10.242Z" }, + { url = "https://files.pythonhosted.org/packages/2d/37/098f2e1c103ae8ed79b0e77f08d83b0ec0b241cf4b7f2f10edd0126472e1/websockets-15.0.1-cp39-cp39-win32.whl", hash = "sha256:3b1ac0d3e594bf121308112697cf4b32be538fb1444468fb0a6ae4feebc83411", size = 176381, upload-time = "2025-03-05T20:03:12.77Z" }, + { url = "https://files.pythonhosted.org/packages/75/8b/a32978a3ab42cebb2ebdd5b05df0696a09f4d436ce69def11893afa301f0/websockets-15.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b7643a03db5c95c799b89b31c036d5f27eeb4d259c798e878d6937d71832b1e4", size = 176841, upload-time = "2025-03-05T20:03:14.367Z" }, + { url = "https://files.pythonhosted.org/packages/02/9e/d40f779fa16f74d3468357197af8d6ad07e7c5a27ea1ca74ceb38986f77a/websockets-15.0.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0c9e74d766f2818bb95f84c25be4dea09841ac0f734d1966f415e4edfc4ef1c3", size = 173109, upload-time = "2025-03-05T20:03:17.769Z" }, + { url = "https://files.pythonhosted.org/packages/bc/cd/5b887b8585a593073fd92f7c23ecd3985cd2c3175025a91b0d69b0551372/websockets-15.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1009ee0c7739c08a0cd59de430d6de452a55e42d6b522de7aa15e6f67db0b8e1", size = 173343, upload-time = "2025-03-05T20:03:19.094Z" }, + { url = "https://files.pythonhosted.org/packages/fe/ae/d34f7556890341e900a95acf4886833646306269f899d58ad62f588bf410/websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76d1f20b1c7a2fa82367e04982e708723ba0e7b8d43aa643d3dcd404d74f1475", size = 174599, upload-time = "2025-03-05T20:03:21.1Z" }, + { url = "https://files.pythonhosted.org/packages/71/e6/5fd43993a87db364ec60fc1d608273a1a465c0caba69176dd160e197ce42/websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f29d80eb9a9263b8d109135351caf568cc3f80b9928bccde535c235de55c22d9", size = 174207, upload-time = "2025-03-05T20:03:23.221Z" }, + { url = "https://files.pythonhosted.org/packages/2b/fb/c492d6daa5ec067c2988ac80c61359ace5c4c674c532985ac5a123436cec/websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b359ed09954d7c18bbc1680f380c7301f92c60bf924171629c5db97febb12f04", size = 174155, upload-time = "2025-03-05T20:03:25.321Z" }, + { url = "https://files.pythonhosted.org/packages/68/a1/dcb68430b1d00b698ae7a7e0194433bce4f07ded185f0ee5fb21e2a2e91e/websockets-15.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = 
"sha256:cad21560da69f4ce7658ca2cb83138fb4cf695a2ba3e475e0559e05991aa8122", size = 176884, upload-time = "2025-03-05T20:03:27.934Z" }, + { url = "https://files.pythonhosted.org/packages/b7/48/4b67623bac4d79beb3a6bb27b803ba75c1bdedc06bd827e465803690a4b2/websockets-15.0.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7f493881579c90fc262d9cdbaa05a6b54b3811c2f300766748db79f098db9940", size = 173106, upload-time = "2025-03-05T20:03:29.404Z" }, + { url = "https://files.pythonhosted.org/packages/ed/f0/adb07514a49fe5728192764e04295be78859e4a537ab8fcc518a3dbb3281/websockets-15.0.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:47b099e1f4fbc95b701b6e85768e1fcdaf1630f3cbe4765fa216596f12310e2e", size = 173339, upload-time = "2025-03-05T20:03:30.755Z" }, + { url = "https://files.pythonhosted.org/packages/87/28/bd23c6344b18fb43df40d0700f6d3fffcd7cef14a6995b4f976978b52e62/websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67f2b6de947f8c757db2db9c71527933ad0019737ec374a8a6be9a956786aaf9", size = 174597, upload-time = "2025-03-05T20:03:32.247Z" }, + { url = "https://files.pythonhosted.org/packages/6d/79/ca288495863d0f23a60f546f0905ae8f3ed467ad87f8b6aceb65f4c013e4/websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d08eb4c2b7d6c41da6ca0600c077e93f5adcfd979cd777d747e9ee624556da4b", size = 174205, upload-time = "2025-03-05T20:03:33.731Z" }, + { url = "https://files.pythonhosted.org/packages/04/e4/120ff3180b0872b1fe6637f6f995bcb009fb5c87d597c1fc21456f50c848/websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b826973a4a2ae47ba357e4e82fa44a463b8f168e1ca775ac64521442b19e87f", size = 174150, upload-time = "2025-03-05T20:03:35.757Z" }, + { url = "https://files.pythonhosted.org/packages/cb/c3/30e2f9c539b8da8b1d76f64012f3b19253271a63413b2d3adb94b143407f/websockets-15.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:21c1fa28a6a7e3cbdc171c694398b6df4744613ce9b36b1a498e816787e28123", size = 176877, upload-time = "2025-03-05T20:03:37.199Z" }, + { url = "https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f", size = 169743, upload-time = "2025-03-05T20:03:39.41Z" }, +] + +[[package]] +name = "winkerberos" +version = "0.12.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2d/75/86d470935167eb1c40d53498993e14cc021d9611a539d61c9b4202c291ab/winkerberos-0.12.2.tar.gz", hash = "sha256:ff91daed04727a0362892802ee093d8da11f08536393526bdf3bc64e04079faa", size = 35672, upload-time = "2025-04-02T14:41:48.274Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/59/ac/c6ce495af45371ffd85a6a3d24c2ced679b8dbcf3b8c6beca093706b1620/winkerberos-0.12.2-cp310-cp310-win32.whl", hash = "sha256:f8b751bd5a28e6a9146f154bed395c30ce4f245448addc763f98cb8843879027", size = 25331, upload-time = "2025-04-02T14:41:36.398Z" }, + { url = "https://files.pythonhosted.org/packages/cb/7b/ad32174c3ed4710cd2ad8f20171f5061cb13603f091d714d5aa6b30d51f0/winkerberos-0.12.2-cp310-cp310-win_amd64.whl", hash = "sha256:4be3b0de548b80f52a6544dff9d571da6cdfde590176a01477358b3808b12dfa", size = 27670, upload-time = "2025-04-02T14:41:37.68Z" }, + { url = 
"https://files.pythonhosted.org/packages/91/12/23b29d359dee9f7a8243cb0040ea1834acd1af8cbc38cfe1c7ca82ab4ec0/winkerberos-0.12.2-cp311-cp311-win32.whl", hash = "sha256:ff2b2ec9b9246bbc05f0d4e6fe5f3f3563237357b9b35eaa58ec1a9ddf349ab8", size = 25332, upload-time = "2025-04-02T14:41:38.671Z" }, + { url = "https://files.pythonhosted.org/packages/23/d2/2bfa1dcdb4a47b7f989a9e758c892bd7393a156b0e1f0df63eca8304e892/winkerberos-0.12.2-cp311-cp311-win_amd64.whl", hash = "sha256:e6ac2b2cc329a68502821905f6ffe48e109d54a46aba7414ea231a30c75bb2d9", size = 27671, upload-time = "2025-04-02T14:41:40.104Z" }, + { url = "https://files.pythonhosted.org/packages/4f/01/26c5b1435654596c07b314653183ffe42b64ea07041c328f0fd4c68fe9f9/winkerberos-0.12.2-cp312-cp312-win32.whl", hash = "sha256:46dac1300e20738cbaf6c17c2e4832062ed7faee346c7a96f0e57f8bbe279c25", size = 25396, upload-time = "2025-04-02T14:41:41.6Z" }, + { url = "https://files.pythonhosted.org/packages/64/b1/6c4a1e4e50553798eb44dbb0d71ba6af48e2a62a0eb01bd0d4e2b41914e3/winkerberos-0.12.2-cp312-cp312-win_amd64.whl", hash = "sha256:2c5c7a70c0d4a43546b20d5654e7e7e5e5e96f42084a7f293864f7ad0fb1e953", size = 27710, upload-time = "2025-04-02T14:41:42.656Z" }, + { url = "https://files.pythonhosted.org/packages/5f/91/cff6750c7c3b2a9f35e12cd7c4df901251fc3be985edef707a3458c43e9a/winkerberos-0.12.2-cp313-cp313-win32.whl", hash = "sha256:482a72500b7822cc8f941d0c6eed668a24c030ac145c97732e175b51441bebbf", size = 25391, upload-time = "2025-04-02T14:41:43.938Z" }, + { url = "https://files.pythonhosted.org/packages/2a/98/defb037ad127c4006c4e992dd55ce0df92059626d3df5f5f4c5fc8502c26/winkerberos-0.12.2-cp313-cp313-win_amd64.whl", hash = "sha256:efd65ba54534512070916cb9c91ef9798a0f9fb0b04e12732c9631e71553fd69", size = 27704, upload-time = "2025-04-02T14:41:45.203Z" }, + { url = "https://files.pythonhosted.org/packages/be/17/b16e72e0b896cdf05666994cbc402a66f5911d56ea28d4e858714328b698/winkerberos-0.12.2-cp39-cp39-win32.whl", hash = "sha256:0c80eed53472a38d7f1dd015e27d93705b22a2acd2557bad13d8b5d688037b29", size = 25326, upload-time = "2025-04-02T14:41:46.216Z" }, + { url = "https://files.pythonhosted.org/packages/65/04/ae42e839e8d836fde613f94f30395953292a7b9be388247237196d1e5caa/winkerberos-0.12.2-cp39-cp39-win_amd64.whl", hash = "sha256:4b908aab5ab42e98bee44eca67dfebe4733d210bccf021e42b669bf4af2005a4", size = 27663, upload-time = "2025-04-02T14:41:47.294Z" }, +] + +[[package]] +name = "zipp" +version = "3.23.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = "2025-06-08T17:06:38.034Z" }, +] + +[[package]] +name = "zope-event" +version = "5.1.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "setuptools" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5a/9f/c443569a68d3844c044d9fa9711e08adb33649b527b4d432433f4c2a6a02/zope_event-5.1.1.tar.gz", hash = "sha256:c1ac931abf57efba71a2a313c5f4d57768a19b15c37e3f02f50eb1536be12d4e", size = 18811, upload-time = "2025-07-22T07:04:00.924Z" } +wheels = [ 
+ { url = "https://files.pythonhosted.org/packages/e9/04/fd55695f6448abd22295fc68b2d3a135389558f0f49a24b0dffe019d0ecb/zope_event-5.1.1-py3-none-any.whl", hash = "sha256:8d5ea7b992c42ce73a6fa9c2ba99a004c52cd9f05d87f3220768ef0329b92df7", size = 7014, upload-time = "2025-07-22T07:03:59.9Z" }, +] + +[[package]] +name = "zope-interface" +version = "7.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "setuptools" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/30/93/9210e7606be57a2dfc6277ac97dcc864fd8d39f142ca194fdc186d596fda/zope.interface-7.2.tar.gz", hash = "sha256:8b49f1a3d1ee4cdaf5b32d2e738362c7f5e40ac8b46dd7d1a65e82a4872728fe", size = 252960, upload-time = "2024-11-28T08:45:39.224Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/71/e6177f390e8daa7e75378505c5ab974e0bf59c1d3b19155638c7afbf4b2d/zope.interface-7.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ce290e62229964715f1011c3dbeab7a4a1e4971fd6f31324c4519464473ef9f2", size = 208243, upload-time = "2024-11-28T08:47:29.781Z" }, + { url = "https://files.pythonhosted.org/packages/52/db/7e5f4226bef540f6d55acfd95cd105782bc6ee044d9b5587ce2c95558a5e/zope.interface-7.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:05b910a5afe03256b58ab2ba6288960a2892dfeef01336dc4be6f1b9ed02ab0a", size = 208759, upload-time = "2024-11-28T08:47:31.908Z" }, + { url = "https://files.pythonhosted.org/packages/28/ea/fdd9813c1eafd333ad92464d57a4e3a82b37ae57c19497bcffa42df673e4/zope.interface-7.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:550f1c6588ecc368c9ce13c44a49b8d6b6f3ca7588873c679bd8fd88a1b557b6", size = 254922, upload-time = "2024-11-28T09:18:11.795Z" }, + { url = "https://files.pythonhosted.org/packages/3b/d3/0000a4d497ef9fbf4f66bb6828b8d0a235e690d57c333be877bec763722f/zope.interface-7.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0ef9e2f865721553c6f22a9ff97da0f0216c074bd02b25cf0d3af60ea4d6931d", size = 249367, upload-time = "2024-11-28T08:48:24.238Z" }, + { url = "https://files.pythonhosted.org/packages/3e/e5/0b359e99084f033d413419eff23ee9c2bd33bca2ca9f4e83d11856f22d10/zope.interface-7.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27f926f0dcb058211a3bb3e0e501c69759613b17a553788b2caeb991bed3b61d", size = 254488, upload-time = "2024-11-28T08:48:28.816Z" }, + { url = "https://files.pythonhosted.org/packages/7b/90/12d50b95f40e3b2fc0ba7f7782104093b9fd62806b13b98ef4e580f2ca61/zope.interface-7.2-cp310-cp310-win_amd64.whl", hash = "sha256:144964649eba4c5e4410bb0ee290d338e78f179cdbfd15813de1a664e7649b3b", size = 211947, upload-time = "2024-11-28T08:48:18.831Z" }, + { url = "https://files.pythonhosted.org/packages/98/7d/2e8daf0abea7798d16a58f2f3a2bf7588872eee54ac119f99393fdd47b65/zope.interface-7.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1909f52a00c8c3dcab6c4fad5d13de2285a4b3c7be063b239b8dc15ddfb73bd2", size = 208776, upload-time = "2024-11-28T08:47:53.009Z" }, + { url = "https://files.pythonhosted.org/packages/a0/2a/0c03c7170fe61d0d371e4c7ea5b62b8cb79b095b3d630ca16719bf8b7b18/zope.interface-7.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:80ecf2451596f19fd607bb09953f426588fc1e79e93f5968ecf3367550396b22", size = 209296, upload-time = "2024-11-28T08:47:57.993Z" }, + { url = 
"https://files.pythonhosted.org/packages/49/b4/451f19448772b4a1159519033a5f72672221e623b0a1bd2b896b653943d8/zope.interface-7.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:033b3923b63474800b04cba480b70f6e6243a62208071fc148354f3f89cc01b7", size = 260997, upload-time = "2024-11-28T09:18:13.935Z" }, + { url = "https://files.pythonhosted.org/packages/65/94/5aa4461c10718062c8f8711161faf3249d6d3679c24a0b81dd6fc8ba1dd3/zope.interface-7.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a102424e28c6b47c67923a1f337ede4a4c2bba3965b01cf707978a801fc7442c", size = 255038, upload-time = "2024-11-28T08:48:26.381Z" }, + { url = "https://files.pythonhosted.org/packages/9f/aa/1a28c02815fe1ca282b54f6705b9ddba20328fabdc37b8cf73fc06b172f0/zope.interface-7.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:25e6a61dcb184453bb00eafa733169ab6d903e46f5c2ace4ad275386f9ab327a", size = 259806, upload-time = "2024-11-28T08:48:30.78Z" }, + { url = "https://files.pythonhosted.org/packages/a7/2c/82028f121d27c7e68632347fe04f4a6e0466e77bb36e104c8b074f3d7d7b/zope.interface-7.2-cp311-cp311-win_amd64.whl", hash = "sha256:3f6771d1647b1fc543d37640b45c06b34832a943c80d1db214a37c31161a93f1", size = 212305, upload-time = "2024-11-28T08:49:14.525Z" }, + { url = "https://files.pythonhosted.org/packages/68/0b/c7516bc3bad144c2496f355e35bd699443b82e9437aa02d9867653203b4a/zope.interface-7.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:086ee2f51eaef1e4a52bd7d3111a0404081dadae87f84c0ad4ce2649d4f708b7", size = 208959, upload-time = "2024-11-28T08:47:47.788Z" }, + { url = "https://files.pythonhosted.org/packages/a2/e9/1463036df1f78ff8c45a02642a7bf6931ae4a38a4acd6a8e07c128e387a7/zope.interface-7.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:21328fcc9d5b80768bf051faa35ab98fb979080c18e6f84ab3f27ce703bce465", size = 209357, upload-time = "2024-11-28T08:47:50.897Z" }, + { url = "https://files.pythonhosted.org/packages/07/a8/106ca4c2add440728e382f1b16c7d886563602487bdd90004788d45eb310/zope.interface-7.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6dd02ec01f4468da0f234da9d9c8545c5412fef80bc590cc51d8dd084138a89", size = 264235, upload-time = "2024-11-28T09:18:15.56Z" }, + { url = "https://files.pythonhosted.org/packages/fc/ca/57286866285f4b8a4634c12ca1957c24bdac06eae28fd4a3a578e30cf906/zope.interface-7.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8e7da17f53e25d1a3bde5da4601e026adc9e8071f9f6f936d0fe3fe84ace6d54", size = 259253, upload-time = "2024-11-28T08:48:29.025Z" }, + { url = "https://files.pythonhosted.org/packages/96/08/2103587ebc989b455cf05e858e7fbdfeedfc3373358320e9c513428290b1/zope.interface-7.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cab15ff4832580aa440dc9790b8a6128abd0b88b7ee4dd56abacbc52f212209d", size = 264702, upload-time = "2024-11-28T08:48:37.363Z" }, + { url = "https://files.pythonhosted.org/packages/5f/c7/3c67562e03b3752ba4ab6b23355f15a58ac2d023a6ef763caaca430f91f2/zope.interface-7.2-cp312-cp312-win_amd64.whl", hash = "sha256:29caad142a2355ce7cfea48725aa8bcf0067e2b5cc63fcf5cd9f97ad12d6afb5", size = 212466, upload-time = "2024-11-28T08:49:14.397Z" }, + { url = 
"https://files.pythonhosted.org/packages/c6/3b/e309d731712c1a1866d61b5356a069dd44e5b01e394b6cb49848fa2efbff/zope.interface-7.2-cp313-cp313-macosx_10_9_x86_64.whl", hash = "sha256:3e0350b51e88658d5ad126c6a57502b19d5f559f6cb0a628e3dc90442b53dd98", size = 208961, upload-time = "2024-11-28T08:48:29.865Z" }, + { url = "https://files.pythonhosted.org/packages/49/65/78e7cebca6be07c8fc4032bfbb123e500d60efdf7b86727bb8a071992108/zope.interface-7.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:15398c000c094b8855d7d74f4fdc9e73aa02d4d0d5c775acdef98cdb1119768d", size = 209356, upload-time = "2024-11-28T08:48:33.297Z" }, + { url = "https://files.pythonhosted.org/packages/11/b1/627384b745310d082d29e3695db5f5a9188186676912c14b61a78bbc6afe/zope.interface-7.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:802176a9f99bd8cc276dcd3b8512808716492f6f557c11196d42e26c01a69a4c", size = 264196, upload-time = "2024-11-28T09:18:17.584Z" }, + { url = "https://files.pythonhosted.org/packages/b8/f6/54548df6dc73e30ac6c8a7ff1da73ac9007ba38f866397091d5a82237bd3/zope.interface-7.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb23f58a446a7f09db85eda09521a498e109f137b85fb278edb2e34841055398", size = 259237, upload-time = "2024-11-28T08:48:31.71Z" }, + { url = "https://files.pythonhosted.org/packages/b6/66/ac05b741c2129fdf668b85631d2268421c5cd1a9ff99be1674371139d665/zope.interface-7.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a71a5b541078d0ebe373a81a3b7e71432c61d12e660f1d67896ca62d9628045b", size = 264696, upload-time = "2024-11-28T08:48:41.161Z" }, + { url = "https://files.pythonhosted.org/packages/0a/2f/1bccc6f4cc882662162a1158cda1a7f616add2ffe322b28c99cb031b4ffc/zope.interface-7.2-cp313-cp313-win_amd64.whl", hash = "sha256:4893395d5dd2ba655c38ceb13014fd65667740f09fa5bb01caa1e6284e48c0cd", size = 212472, upload-time = "2024-11-28T08:49:56.587Z" }, + { url = "https://files.pythonhosted.org/packages/8c/2c/1f49dc8b4843c4f0848d8e43191aed312bad946a1563d1bf9e46cf2816ee/zope.interface-7.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7bd449c306ba006c65799ea7912adbbfed071089461a19091a228998b82b1fdb", size = 208349, upload-time = "2024-11-28T08:49:28.872Z" }, + { url = "https://files.pythonhosted.org/packages/ed/7d/83ddbfc8424c69579a90fc8edc2b797223da2a8083a94d8dfa0e374c5ed4/zope.interface-7.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a19a6cc9c6ce4b1e7e3d319a473cf0ee989cbbe2b39201d7c19e214d2dfb80c7", size = 208799, upload-time = "2024-11-28T08:49:30.616Z" }, + { url = "https://files.pythonhosted.org/packages/36/22/b1abd91854c1be03f5542fe092e6a745096d2eca7704d69432e119100583/zope.interface-7.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:72cd1790b48c16db85d51fbbd12d20949d7339ad84fd971427cf00d990c1f137", size = 254267, upload-time = "2024-11-28T09:18:21.059Z" }, + { url = "https://files.pythonhosted.org/packages/2a/dd/fcd313ee216ad0739ae00e6126bc22a0af62a74f76a9ca668d16cd276222/zope.interface-7.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52e446f9955195440e787596dccd1411f543743c359eeb26e9b2c02b077b0519", size = 248614, upload-time = "2024-11-28T08:48:41.953Z" }, + { url = 
"https://files.pythonhosted.org/packages/88/d4/4ba1569b856870527cec4bf22b91fe704b81a3c1a451b2ccf234e9e0666f/zope.interface-7.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ad9913fd858274db8dd867012ebe544ef18d218f6f7d1e3c3e6d98000f14b75", size = 253800, upload-time = "2024-11-28T08:48:46.637Z" }, + { url = "https://files.pythonhosted.org/packages/69/da/c9cfb384c18bd3a26d9fc6a9b5f32ccea49ae09444f097eaa5ca9814aff9/zope.interface-7.2-cp39-cp39-win_amd64.whl", hash = "sha256:1090c60116b3da3bfdd0c03406e2f14a1ff53e5771aebe33fec1edc0a350175d", size = 211980, upload-time = "2024-11-28T08:50:35.681Z" }, +] + +[[package]] +name = "zstandard" +version = "0.23.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "platform_python_implementation == 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ed/f6/2ac0287b442160a89d726b17a9184a4c615bb5237db763791a7fd16d9df1/zstandard-0.23.0.tar.gz", hash = "sha256:b2d8c62d08e7255f68f7a740bae85b3c9b8e5466baa9cbf7f57f1cde0ac6bc09", size = 681701, upload-time = "2024-07-15T00:18:06.141Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/55/bd0487e86679db1823fc9ee0d8c9c78ae2413d34c0b461193b5f4c31d22f/zstandard-0.23.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bf0a05b6059c0528477fba9054d09179beb63744355cab9f38059548fedd46a9", size = 788701, upload-time = "2024-07-15T00:13:27.351Z" }, + { url = "https://files.pythonhosted.org/packages/e1/8a/ccb516b684f3ad987dfee27570d635822e3038645b1a950c5e8022df1145/zstandard-0.23.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fc9ca1c9718cb3b06634c7c8dec57d24e9438b2aa9a0f02b8bb36bf478538880", size = 633678, upload-time = "2024-07-15T00:13:30.24Z" }, + { url = "https://files.pythonhosted.org/packages/12/89/75e633d0611c028e0d9af6df199423bf43f54bea5007e6718ab7132e234c/zstandard-0.23.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77da4c6bfa20dd5ea25cbf12c76f181a8e8cd7ea231c673828d0386b1740b8dc", size = 4941098, upload-time = "2024-07-15T00:13:32.526Z" }, + { url = "https://files.pythonhosted.org/packages/4a/7a/bd7f6a21802de358b63f1ee636ab823711c25ce043a3e9f043b4fcb5ba32/zstandard-0.23.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2170c7e0367dde86a2647ed5b6f57394ea7f53545746104c6b09fc1f4223573", size = 5308798, upload-time = "2024-07-15T00:13:34.925Z" }, + { url = "https://files.pythonhosted.org/packages/79/3b/775f851a4a65013e88ca559c8ae42ac1352db6fcd96b028d0df4d7d1d7b4/zstandard-0.23.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c16842b846a8d2a145223f520b7e18b57c8f476924bda92aeee3a88d11cfc391", size = 5341840, upload-time = "2024-07-15T00:13:37.376Z" }, + { url = "https://files.pythonhosted.org/packages/09/4f/0cc49570141dd72d4d95dd6fcf09328d1b702c47a6ec12fbed3b8aed18a5/zstandard-0.23.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:157e89ceb4054029a289fb504c98c6a9fe8010f1680de0201b3eb5dc20aa6d9e", size = 5440337, upload-time = "2024-07-15T00:13:39.772Z" }, + { url = "https://files.pythonhosted.org/packages/e7/7c/aaa7cd27148bae2dc095191529c0570d16058c54c4597a7d118de4b21676/zstandard-0.23.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:203d236f4c94cd8379d1ea61db2fce20730b4c38d7f1c34506a31b34edc87bdd", size = 4861182, upload-time = "2024-07-15T00:13:42.495Z" }, + { url = 
"https://files.pythonhosted.org/packages/ac/eb/4b58b5c071d177f7dc027129d20bd2a44161faca6592a67f8fcb0b88b3ae/zstandard-0.23.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dc5d1a49d3f8262be192589a4b72f0d03b72dcf46c51ad5852a4fdc67be7b9e4", size = 4932936, upload-time = "2024-07-15T00:13:44.234Z" }, + { url = "https://files.pythonhosted.org/packages/44/f9/21a5fb9bb7c9a274b05ad700a82ad22ce82f7ef0f485980a1e98ed6e8c5f/zstandard-0.23.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:752bf8a74412b9892f4e5b58f2f890a039f57037f52c89a740757ebd807f33ea", size = 5464705, upload-time = "2024-07-15T00:13:46.822Z" }, + { url = "https://files.pythonhosted.org/packages/49/74/b7b3e61db3f88632776b78b1db597af3f44c91ce17d533e14a25ce6a2816/zstandard-0.23.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:80080816b4f52a9d886e67f1f96912891074903238fe54f2de8b786f86baded2", size = 4857882, upload-time = "2024-07-15T00:13:49.297Z" }, + { url = "https://files.pythonhosted.org/packages/4a/7f/d8eb1cb123d8e4c541d4465167080bec88481ab54cd0b31eb4013ba04b95/zstandard-0.23.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:84433dddea68571a6d6bd4fbf8ff398236031149116a7fff6f777ff95cad3df9", size = 4697672, upload-time = "2024-07-15T00:13:51.447Z" }, + { url = "https://files.pythonhosted.org/packages/5e/05/f7dccdf3d121309b60342da454d3e706453a31073e2c4dac8e1581861e44/zstandard-0.23.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ab19a2d91963ed9e42b4e8d77cd847ae8381576585bad79dbd0a8837a9f6620a", size = 5206043, upload-time = "2024-07-15T00:13:53.587Z" }, + { url = "https://files.pythonhosted.org/packages/86/9d/3677a02e172dccd8dd3a941307621c0cbd7691d77cb435ac3c75ab6a3105/zstandard-0.23.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:59556bf80a7094d0cfb9f5e50bb2db27fefb75d5138bb16fb052b61b0e0eeeb0", size = 5667390, upload-time = "2024-07-15T00:13:56.137Z" }, + { url = "https://files.pythonhosted.org/packages/41/7e/0012a02458e74a7ba122cd9cafe491facc602c9a17f590367da369929498/zstandard-0.23.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:27d3ef2252d2e62476389ca8f9b0cf2bbafb082a3b6bfe9d90cbcbb5529ecf7c", size = 5198901, upload-time = "2024-07-15T00:13:58.584Z" }, + { url = "https://files.pythonhosted.org/packages/65/3a/8f715b97bd7bcfc7342d8adcd99a026cb2fb550e44866a3b6c348e1b0f02/zstandard-0.23.0-cp310-cp310-win32.whl", hash = "sha256:5d41d5e025f1e0bccae4928981e71b2334c60f580bdc8345f824e7c0a4c2a813", size = 430596, upload-time = "2024-07-15T00:14:00.693Z" }, + { url = "https://files.pythonhosted.org/packages/19/b7/b2b9eca5e5a01111e4fe8a8ffb56bdcdf56b12448a24effe6cfe4a252034/zstandard-0.23.0-cp310-cp310-win_amd64.whl", hash = "sha256:519fbf169dfac1222a76ba8861ef4ac7f0530c35dd79ba5727014613f91613d4", size = 495498, upload-time = "2024-07-15T00:14:02.741Z" }, + { url = "https://files.pythonhosted.org/packages/9e/40/f67e7d2c25a0e2dc1744dd781110b0b60306657f8696cafb7ad7579469bd/zstandard-0.23.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:34895a41273ad33347b2fc70e1bff4240556de3c46c6ea430a7ed91f9042aa4e", size = 788699, upload-time = "2024-07-15T00:14:04.909Z" }, + { url = "https://files.pythonhosted.org/packages/e8/46/66d5b55f4d737dd6ab75851b224abf0afe5774976fe511a54d2eb9063a41/zstandard-0.23.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:77ea385f7dd5b5676d7fd943292ffa18fbf5c72ba98f7d09fc1fb9e819b34c23", size = 633681, upload-time = "2024-07-15T00:14:13.99Z" }, + { url = 
"https://files.pythonhosted.org/packages/63/b6/677e65c095d8e12b66b8f862b069bcf1f1d781b9c9c6f12eb55000d57583/zstandard-0.23.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:983b6efd649723474f29ed42e1467f90a35a74793437d0bc64a5bf482bedfa0a", size = 4944328, upload-time = "2024-07-15T00:14:16.588Z" }, + { url = "https://files.pythonhosted.org/packages/59/cc/e76acb4c42afa05a9d20827116d1f9287e9c32b7ad58cc3af0721ce2b481/zstandard-0.23.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80a539906390591dd39ebb8d773771dc4db82ace6372c4d41e2d293f8e32b8db", size = 5311955, upload-time = "2024-07-15T00:14:19.389Z" }, + { url = "https://files.pythonhosted.org/packages/78/e4/644b8075f18fc7f632130c32e8f36f6dc1b93065bf2dd87f03223b187f26/zstandard-0.23.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:445e4cb5048b04e90ce96a79b4b63140e3f4ab5f662321975679b5f6360b90e2", size = 5344944, upload-time = "2024-07-15T00:14:22.173Z" }, + { url = "https://files.pythonhosted.org/packages/76/3f/dbafccf19cfeca25bbabf6f2dd81796b7218f768ec400f043edc767015a6/zstandard-0.23.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd30d9c67d13d891f2360b2a120186729c111238ac63b43dbd37a5a40670b8ca", size = 5442927, upload-time = "2024-07-15T00:14:24.825Z" }, + { url = "https://files.pythonhosted.org/packages/0c/c3/d24a01a19b6733b9f218e94d1a87c477d523237e07f94899e1c10f6fd06c/zstandard-0.23.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d20fd853fbb5807c8e84c136c278827b6167ded66c72ec6f9a14b863d809211c", size = 4864910, upload-time = "2024-07-15T00:14:26.982Z" }, + { url = "https://files.pythonhosted.org/packages/1c/a9/cf8f78ead4597264f7618d0875be01f9bc23c9d1d11afb6d225b867cb423/zstandard-0.23.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ed1708dbf4d2e3a1c5c69110ba2b4eb6678262028afd6c6fbcc5a8dac9cda68e", size = 4935544, upload-time = "2024-07-15T00:14:29.582Z" }, + { url = "https://files.pythonhosted.org/packages/2c/96/8af1e3731b67965fb995a940c04a2c20997a7b3b14826b9d1301cf160879/zstandard-0.23.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:be9b5b8659dff1f913039c2feee1aca499cfbc19e98fa12bc85e037c17ec6ca5", size = 5467094, upload-time = "2024-07-15T00:14:40.126Z" }, + { url = "https://files.pythonhosted.org/packages/ff/57/43ea9df642c636cb79f88a13ab07d92d88d3bfe3e550b55a25a07a26d878/zstandard-0.23.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:65308f4b4890aa12d9b6ad9f2844b7ee42c7f7a4fd3390425b242ffc57498f48", size = 4860440, upload-time = "2024-07-15T00:14:42.786Z" }, + { url = "https://files.pythonhosted.org/packages/46/37/edb78f33c7f44f806525f27baa300341918fd4c4af9472fbc2c3094be2e8/zstandard-0.23.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:98da17ce9cbf3bfe4617e836d561e433f871129e3a7ac16d6ef4c680f13a839c", size = 4700091, upload-time = "2024-07-15T00:14:45.184Z" }, + { url = "https://files.pythonhosted.org/packages/c1/f1/454ac3962671a754f3cb49242472df5c2cced4eb959ae203a377b45b1a3c/zstandard-0.23.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:8ed7d27cb56b3e058d3cf684d7200703bcae623e1dcc06ed1e18ecda39fee003", size = 5208682, upload-time = "2024-07-15T00:14:47.407Z" }, + { url = "https://files.pythonhosted.org/packages/85/b2/1734b0fff1634390b1b887202d557d2dd542de84a4c155c258cf75da4773/zstandard-0.23.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:b69bb4f51daf461b15e7b3db033160937d3ff88303a7bc808c67bbc1eaf98c78", size 
= 5669707, upload-time = "2024-07-15T00:15:03.529Z" }, + { url = "https://files.pythonhosted.org/packages/52/5a/87d6971f0997c4b9b09c495bf92189fb63de86a83cadc4977dc19735f652/zstandard-0.23.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:034b88913ecc1b097f528e42b539453fa82c3557e414b3de9d5632c80439a473", size = 5201792, upload-time = "2024-07-15T00:15:28.372Z" }, + { url = "https://files.pythonhosted.org/packages/79/02/6f6a42cc84459d399bd1a4e1adfc78d4dfe45e56d05b072008d10040e13b/zstandard-0.23.0-cp311-cp311-win32.whl", hash = "sha256:f2d4380bf5f62daabd7b751ea2339c1a21d1c9463f1feb7fc2bdcea2c29c3160", size = 430586, upload-time = "2024-07-15T00:15:32.26Z" }, + { url = "https://files.pythonhosted.org/packages/be/a2/4272175d47c623ff78196f3c10e9dc7045c1b9caf3735bf041e65271eca4/zstandard-0.23.0-cp311-cp311-win_amd64.whl", hash = "sha256:62136da96a973bd2557f06ddd4e8e807f9e13cbb0bfb9cc06cfe6d98ea90dfe0", size = 495420, upload-time = "2024-07-15T00:15:34.004Z" }, + { url = "https://files.pythonhosted.org/packages/7b/83/f23338c963bd9de687d47bf32efe9fd30164e722ba27fb59df33e6b1719b/zstandard-0.23.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b4567955a6bc1b20e9c31612e615af6b53733491aeaa19a6b3b37f3b65477094", size = 788713, upload-time = "2024-07-15T00:15:35.815Z" }, + { url = "https://files.pythonhosted.org/packages/5b/b3/1a028f6750fd9227ee0b937a278a434ab7f7fdc3066c3173f64366fe2466/zstandard-0.23.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e172f57cd78c20f13a3415cc8dfe24bf388614324d25539146594c16d78fcc8", size = 633459, upload-time = "2024-07-15T00:15:37.995Z" }, + { url = "https://files.pythonhosted.org/packages/26/af/36d89aae0c1f95a0a98e50711bc5d92c144939efc1f81a2fcd3e78d7f4c1/zstandard-0.23.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0e166f698c5a3e914947388c162be2583e0c638a4703fc6a543e23a88dea3c1", size = 4945707, upload-time = "2024-07-15T00:15:39.872Z" }, + { url = "https://files.pythonhosted.org/packages/cd/2e/2051f5c772f4dfc0aae3741d5fc72c3dcfe3aaeb461cc231668a4db1ce14/zstandard-0.23.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12a289832e520c6bd4dcaad68e944b86da3bad0d339ef7989fb7e88f92e96072", size = 5306545, upload-time = "2024-07-15T00:15:41.75Z" }, + { url = "https://files.pythonhosted.org/packages/0a/9e/a11c97b087f89cab030fa71206963090d2fecd8eb83e67bb8f3ffb84c024/zstandard-0.23.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d50d31bfedd53a928fed6707b15a8dbeef011bb6366297cc435accc888b27c20", size = 5337533, upload-time = "2024-07-15T00:15:44.114Z" }, + { url = "https://files.pythonhosted.org/packages/fc/79/edeb217c57fe1bf16d890aa91a1c2c96b28c07b46afed54a5dcf310c3f6f/zstandard-0.23.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72c68dda124a1a138340fb62fa21b9bf4848437d9ca60bd35db36f2d3345f373", size = 5436510, upload-time = "2024-07-15T00:15:46.509Z" }, + { url = "https://files.pythonhosted.org/packages/81/4f/c21383d97cb7a422ddf1ae824b53ce4b51063d0eeb2afa757eb40804a8ef/zstandard-0.23.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53dd9d5e3d29f95acd5de6802e909ada8d8d8cfa37a3ac64836f3bc4bc5512db", size = 4859973, upload-time = "2024-07-15T00:15:49.939Z" }, + { url = "https://files.pythonhosted.org/packages/ab/15/08d22e87753304405ccac8be2493a495f529edd81d39a0870621462276ef/zstandard-0.23.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:6a41c120c3dbc0d81a8e8adc73312d668cd34acd7725f036992b1b72d22c1772", size = 4936968, upload-time = "2024-07-15T00:15:52.025Z" }, + { url = "https://files.pythonhosted.org/packages/eb/fa/f3670a597949fe7dcf38119a39f7da49a8a84a6f0b1a2e46b2f71a0ab83f/zstandard-0.23.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:40b33d93c6eddf02d2c19f5773196068d875c41ca25730e8288e9b672897c105", size = 5467179, upload-time = "2024-07-15T00:15:54.971Z" }, + { url = "https://files.pythonhosted.org/packages/4e/a9/dad2ab22020211e380adc477a1dbf9f109b1f8d94c614944843e20dc2a99/zstandard-0.23.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9206649ec587e6b02bd124fb7799b86cddec350f6f6c14bc82a2b70183e708ba", size = 4848577, upload-time = "2024-07-15T00:15:57.634Z" }, + { url = "https://files.pythonhosted.org/packages/08/03/dd28b4484b0770f1e23478413e01bee476ae8227bbc81561f9c329e12564/zstandard-0.23.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:76e79bc28a65f467e0409098fa2c4376931fd3207fbeb6b956c7c476d53746dd", size = 4693899, upload-time = "2024-07-15T00:16:00.811Z" }, + { url = "https://files.pythonhosted.org/packages/2b/64/3da7497eb635d025841e958bcd66a86117ae320c3b14b0ae86e9e8627518/zstandard-0.23.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:66b689c107857eceabf2cf3d3fc699c3c0fe8ccd18df2219d978c0283e4c508a", size = 5199964, upload-time = "2024-07-15T00:16:03.669Z" }, + { url = "https://files.pythonhosted.org/packages/43/a4/d82decbab158a0e8a6ebb7fc98bc4d903266bce85b6e9aaedea1d288338c/zstandard-0.23.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9c236e635582742fee16603042553d276cca506e824fa2e6489db04039521e90", size = 5655398, upload-time = "2024-07-15T00:16:06.694Z" }, + { url = "https://files.pythonhosted.org/packages/f2/61/ac78a1263bc83a5cf29e7458b77a568eda5a8f81980691bbc6eb6a0d45cc/zstandard-0.23.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a8fffdbd9d1408006baaf02f1068d7dd1f016c6bcb7538682622c556e7b68e35", size = 5191313, upload-time = "2024-07-15T00:16:09.758Z" }, + { url = "https://files.pythonhosted.org/packages/e7/54/967c478314e16af5baf849b6ee9d6ea724ae5b100eb506011f045d3d4e16/zstandard-0.23.0-cp312-cp312-win32.whl", hash = "sha256:dc1d33abb8a0d754ea4763bad944fd965d3d95b5baef6b121c0c9013eaf1907d", size = 430877, upload-time = "2024-07-15T00:16:11.758Z" }, + { url = "https://files.pythonhosted.org/packages/75/37/872d74bd7739639c4553bf94c84af7d54d8211b626b352bc57f0fd8d1e3f/zstandard-0.23.0-cp312-cp312-win_amd64.whl", hash = "sha256:64585e1dba664dc67c7cdabd56c1e5685233fbb1fc1966cfba2a340ec0dfff7b", size = 495595, upload-time = "2024-07-15T00:16:13.731Z" }, + { url = "https://files.pythonhosted.org/packages/80/f1/8386f3f7c10261fe85fbc2c012fdb3d4db793b921c9abcc995d8da1b7a80/zstandard-0.23.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:576856e8594e6649aee06ddbfc738fec6a834f7c85bf7cadd1c53d4a58186ef9", size = 788975, upload-time = "2024-07-15T00:16:16.005Z" }, + { url = "https://files.pythonhosted.org/packages/16/e8/cbf01077550b3e5dc86089035ff8f6fbbb312bc0983757c2d1117ebba242/zstandard-0.23.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:38302b78a850ff82656beaddeb0bb989a0322a8bbb1bf1ab10c17506681d772a", size = 633448, upload-time = "2024-07-15T00:16:17.897Z" }, + { url = "https://files.pythonhosted.org/packages/06/27/4a1b4c267c29a464a161aeb2589aff212b4db653a1d96bffe3598f3f0d22/zstandard-0.23.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2240ddc86b74966c34554c49d00eaafa8200a18d3a5b6ffbf7da63b11d74ee2", size = 
4945269, upload-time = "2024-07-15T00:16:20.136Z" }, + { url = "https://files.pythonhosted.org/packages/7c/64/d99261cc57afd9ae65b707e38045ed8269fbdae73544fd2e4a4d50d0ed83/zstandard-0.23.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ef230a8fd217a2015bc91b74f6b3b7d6522ba48be29ad4ea0ca3a3775bf7dd5", size = 5306228, upload-time = "2024-07-15T00:16:23.398Z" }, + { url = "https://files.pythonhosted.org/packages/7a/cf/27b74c6f22541f0263016a0fd6369b1b7818941de639215c84e4e94b2a1c/zstandard-0.23.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:774d45b1fac1461f48698a9d4b5fa19a69d47ece02fa469825b442263f04021f", size = 5336891, upload-time = "2024-07-15T00:16:26.391Z" }, + { url = "https://files.pythonhosted.org/packages/fa/18/89ac62eac46b69948bf35fcd90d37103f38722968e2981f752d69081ec4d/zstandard-0.23.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f77fa49079891a4aab203d0b1744acc85577ed16d767b52fc089d83faf8d8ed", size = 5436310, upload-time = "2024-07-15T00:16:29.018Z" }, + { url = "https://files.pythonhosted.org/packages/a8/a8/5ca5328ee568a873f5118d5b5f70d1f36c6387716efe2e369010289a5738/zstandard-0.23.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ac184f87ff521f4840e6ea0b10c0ec90c6b1dcd0bad2f1e4a9a1b4fa177982ea", size = 4859912, upload-time = "2024-07-15T00:16:31.871Z" }, + { url = "https://files.pythonhosted.org/packages/ea/ca/3781059c95fd0868658b1cf0440edd832b942f84ae60685d0cfdb808bca1/zstandard-0.23.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c363b53e257246a954ebc7c488304b5592b9c53fbe74d03bc1c64dda153fb847", size = 4936946, upload-time = "2024-07-15T00:16:34.593Z" }, + { url = "https://files.pythonhosted.org/packages/ce/11/41a58986f809532742c2b832c53b74ba0e0a5dae7e8ab4642bf5876f35de/zstandard-0.23.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e7792606d606c8df5277c32ccb58f29b9b8603bf83b48639b7aedf6df4fe8171", size = 5466994, upload-time = "2024-07-15T00:16:36.887Z" }, + { url = "https://files.pythonhosted.org/packages/83/e3/97d84fe95edd38d7053af05159465d298c8b20cebe9ccb3d26783faa9094/zstandard-0.23.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a0817825b900fcd43ac5d05b8b3079937073d2b1ff9cf89427590718b70dd840", size = 4848681, upload-time = "2024-07-15T00:16:39.709Z" }, + { url = "https://files.pythonhosted.org/packages/6e/99/cb1e63e931de15c88af26085e3f2d9af9ce53ccafac73b6e48418fd5a6e6/zstandard-0.23.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:9da6bc32faac9a293ddfdcb9108d4b20416219461e4ec64dfea8383cac186690", size = 4694239, upload-time = "2024-07-15T00:16:41.83Z" }, + { url = "https://files.pythonhosted.org/packages/ab/50/b1e703016eebbc6501fc92f34db7b1c68e54e567ef39e6e59cf5fb6f2ec0/zstandard-0.23.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fd7699e8fd9969f455ef2926221e0233f81a2542921471382e77a9e2f2b57f4b", size = 5200149, upload-time = "2024-07-15T00:16:44.287Z" }, + { url = "https://files.pythonhosted.org/packages/aa/e0/932388630aaba70197c78bdb10cce2c91fae01a7e553b76ce85471aec690/zstandard-0.23.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:d477ed829077cd945b01fc3115edd132c47e6540ddcd96ca169facff28173057", size = 5655392, upload-time = "2024-07-15T00:16:46.423Z" }, + { url = "https://files.pythonhosted.org/packages/02/90/2633473864f67a15526324b007a9f96c96f56d5f32ef2a56cc12f9548723/zstandard-0.23.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:fa6ce8b52c5987b3e34d5674b0ab529a4602b632ebab0a93b07bfb4dfc8f8a33", size = 5191299, upload-time = "2024-07-15T00:16:49.053Z" }, + { url = "https://files.pythonhosted.org/packages/b0/4c/315ca5c32da7e2dc3455f3b2caee5c8c2246074a61aac6ec3378a97b7136/zstandard-0.23.0-cp313-cp313-win32.whl", hash = "sha256:a9b07268d0c3ca5c170a385a0ab9fb7fdd9f5fd866be004c4ea39e44edce47dd", size = 430862, upload-time = "2024-07-15T00:16:51.003Z" }, + { url = "https://files.pythonhosted.org/packages/a2/bf/c6aaba098e2d04781e8f4f7c0ba3c7aa73d00e4c436bcc0cf059a66691d1/zstandard-0.23.0-cp313-cp313-win_amd64.whl", hash = "sha256:f3513916e8c645d0610815c257cbfd3242adfd5c4cfa78be514e5a3ebb42a41b", size = 495578, upload-time = "2024-07-15T00:16:53.135Z" }, + { url = "https://files.pythonhosted.org/packages/fb/96/4fcafeb7e013a2386d22f974b5b97a0b9a65004ed58c87ae001599bfbd48/zstandard-0.23.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3aa014d55c3af933c1315eb4bb06dd0459661cc0b15cd61077afa6489bec63bb", size = 788697, upload-time = "2024-07-15T00:17:31.236Z" }, + { url = "https://files.pythonhosted.org/packages/83/ff/a52ce725be69b86a2967ecba0497a8184540cc284c0991125515449e54e2/zstandard-0.23.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a7f0804bb3799414af278e9ad51be25edf67f78f916e08afdb983e74161b916", size = 633679, upload-time = "2024-07-15T00:17:32.911Z" }, + { url = "https://files.pythonhosted.org/packages/34/0f/3dc62db122f6a9c481c335fff6fc9f4e88d8f6e2d47321ee3937328addb4/zstandard-0.23.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb2b1ecfef1e67897d336de3a0e3f52478182d6a47eda86cbd42504c5cbd009a", size = 4940416, upload-time = "2024-07-15T00:17:34.849Z" }, + { url = "https://files.pythonhosted.org/packages/1d/e5/9fe0dd8c85fdc2f635e6660d07872a5dc4b366db566630161e39f9f804e1/zstandard-0.23.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:837bb6764be6919963ef41235fd56a6486b132ea64afe5fafb4cb279ac44f259", size = 5307693, upload-time = "2024-07-15T00:17:37.355Z" }, + { url = "https://files.pythonhosted.org/packages/73/bf/fe62c0cd865c171ee8ed5bc83174b5382a2cb729c8d6162edfb99a83158b/zstandard-0.23.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1516c8c37d3a053b01c1c15b182f3b5f5eef19ced9b930b684a73bad121addf4", size = 5341236, upload-time = "2024-07-15T00:17:40.213Z" }, + { url = "https://files.pythonhosted.org/packages/39/86/4fe79b30c794286110802a6cd44a73b6a314ac8196b9338c0fbd78c2407d/zstandard-0.23.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48ef6a43b1846f6025dde6ed9fee0c24e1149c1c25f7fb0a0585572b2f3adc58", size = 5439101, upload-time = "2024-07-15T00:17:42.284Z" }, + { url = "https://files.pythonhosted.org/packages/72/ed/cacec235c581ebf8c608c7fb3d4b6b70d1b490d0e5128ea6996f809ecaef/zstandard-0.23.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11e3bf3c924853a2d5835b24f03eeba7fc9b07d8ca499e247e06ff5676461a15", size = 4860320, upload-time = "2024-07-15T00:17:44.21Z" }, + { url = "https://files.pythonhosted.org/packages/f6/1e/2c589a2930f93946b132fc852c574a19d5edc23fad2b9e566f431050c7ec/zstandard-0.23.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2fb4535137de7e244c230e24f9d1ec194f61721c86ebea04e1581d9d06ea1269", size = 4931933, upload-time = "2024-07-15T00:17:46.455Z" }, + { url = 
"https://files.pythonhosted.org/packages/8e/f5/30eadde3686d902b5d4692bb5f286977cbc4adc082145eb3f49d834b2eae/zstandard-0.23.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8c24f21fa2af4bb9f2c492a86fe0c34e6d2c63812a839590edaf177b7398f700", size = 5463878, upload-time = "2024-07-15T00:17:48.866Z" }, + { url = "https://files.pythonhosted.org/packages/e0/c8/8aed1f0ab9854ef48e5ad4431367fcb23ce73f0304f7b72335a8edc66556/zstandard-0.23.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a8c86881813a78a6f4508ef9daf9d4995b8ac2d147dcb1a450448941398091c9", size = 4857192, upload-time = "2024-07-15T00:17:51.558Z" }, + { url = "https://files.pythonhosted.org/packages/a8/c6/55e666cfbcd032b9e271865e8578fec56e5594d4faeac379d371526514f5/zstandard-0.23.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:fe3b385d996ee0822fd46528d9f0443b880d4d05528fd26a9119a54ec3f91c69", size = 4696513, upload-time = "2024-07-15T00:17:53.924Z" }, + { url = "https://files.pythonhosted.org/packages/dc/bd/720b65bea63ec9de0ac7414c33b9baf271c8de8996e5ff324dc93fc90ff1/zstandard-0.23.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:82d17e94d735c99621bf8ebf9995f870a6b3e6d14543b99e201ae046dfe7de70", size = 5204823, upload-time = "2024-07-15T00:17:55.948Z" }, + { url = "https://files.pythonhosted.org/packages/d8/40/d678db1556e3941d330cd4e95623a63ef235b18547da98fa184cbc028ecf/zstandard-0.23.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:c7c517d74bea1a6afd39aa612fa025e6b8011982a0897768a2f7c8ab4ebb78a2", size = 5666490, upload-time = "2024-07-15T00:17:58.327Z" }, + { url = "https://files.pythonhosted.org/packages/ed/cc/c89329723d7515898a1fc7ef5d251264078548c505719d13e9511800a103/zstandard-0.23.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1fd7e0f1cfb70eb2f95a19b472ee7ad6d9a0a992ec0ae53286870c104ca939e5", size = 5196622, upload-time = "2024-07-15T00:18:00.404Z" }, + { url = "https://files.pythonhosted.org/packages/78/4c/634289d41e094327a94500dfc919e58841b10ea3a9efdfafbac614797ec2/zstandard-0.23.0-cp39-cp39-win32.whl", hash = "sha256:43da0f0092281bf501f9c5f6f3b4c975a8a0ea82de49ba3f7100e64d422a1274", size = 430620, upload-time = "2024-07-15T00:18:02.613Z" }, + { url = "https://files.pythonhosted.org/packages/a2/e2/0b0c5a0f4f7699fecd92c1ba6278ef9b01f2b0b0dd46f62bfc6729c05659/zstandard-0.23.0-cp39-cp39-win_amd64.whl", hash = "sha256:f8346bfa098532bc1fb6c7ef06783e969d87a99dd1d2a5a18a892c1d7a643c58", size = 495528, upload-time = "2024-07-15T00:18:04.452Z" }, +]