diff --git a/.deepsource.toml b/.deepsource.toml new file mode 100644 index 00000000000..72aefc7b07a --- /dev/null +++ b/.deepsource.toml @@ -0,0 +1,9 @@ +version = 1 + +[[analyzers]] +name = "javascript" +enabled = true + + [analyzers.meta] + environment = ["nodejs"] + dialect = "typescript" diff --git a/.github/ISSUE_TEMPLATE/BUG-REPORT.yml b/.github/ISSUE_TEMPLATE/BUG-REPORT.yml new file mode 100644 index 00000000000..2e8db33a0c6 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/BUG-REPORT.yml @@ -0,0 +1,39 @@ +name: Bug Report +description: Tell us about something that isn't working as expected +labels: [Bug] +body: + - type: textarea + id: description + attributes: + label: Description + description: Please enter a detailed description of your issue. If possible, please provide example code to reproduce the issue. + validations: + required: true + - type: input + id: node-js-version + attributes: + label: Node.js Version + description: Please enter your Node.js version `node --version` + - type: input + id: redis-server-version + attributes: + label: Redis Server Version + description: Please enter your Redis server version ([`INFO server`](https://redis.io/commands/info/)) + - type: input + id: node-redis-version + attributes: + label: Node Redis Version + description: Please enter your node redis version `npm ls redis` + - type: input + id: platform + attributes: + label: Platform + description: Please enter the platform you are using e.g. Linux, macOS, Windows + - type: textarea + id: logs + attributes: + label: Logs + description: Please copy and paste any relevant log output. This will be automatically formatted into code, so no need for backticks. 
+ render: bash + validations: + required: false diff --git a/.github/ISSUE_TEMPLATE/DOCUMENTATION.yml b/.github/ISSUE_TEMPLATE/DOCUMENTATION.yml new file mode 100644 index 00000000000..b5ece5aeca2 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/DOCUMENTATION.yml @@ -0,0 +1,11 @@ +name: Documentation +description: Any questions or issues relating to the project documentation. +labels: [Documentation] +body: + - type: textarea + id: description + attributes: + label: Description + description: Ask your question or describe your issue here. + validations: + required: true diff --git a/.github/ISSUE_TEMPLATE/FEATURE-REQUEST.yml b/.github/ISSUE_TEMPLATE/FEATURE-REQUEST.yml new file mode 100644 index 00000000000..ae10cbd7b7a --- /dev/null +++ b/.github/ISSUE_TEMPLATE/FEATURE-REQUEST.yml @@ -0,0 +1,19 @@ +name: Feature Request +description: Suggest an idea for this project +labels: [Feature] +body: + - type: textarea + id: motivation + attributes: + label: Motivation + description: How would Node Redis users benefit from this feature? + validations: + required: true + - type: textarea + id: basic-code-example + attributes: + label: Basic Code Example + description: Provide examples of how you imagine the API for this feature might be implemented. This will be automatically formatted into code, so no need for backticks. + render: JavaScript + validations: + required: false diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 00000000000..d4f8b8f2d9b --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,20 @@ +### Description + + + + + +> Describe your pull request here + +--- + +### Checklist + + + +- [ ] Does `npm test` pass with this change (including linting)? +- [ ] Is the new or changed code fully tested? +- [ ] Is a documentation update included (if this change modifies existing APIs, or introduces new ones)? 
+ + diff --git a/.github/release-drafter-base.yml b/.github/release-drafter-base.yml new file mode 100644 index 00000000000..ea259fc0d2d --- /dev/null +++ b/.github/release-drafter-base.yml @@ -0,0 +1,50 @@ +name-template: 'json@$NEXT_PATCH_VERSION' # NOTE(review): "base" config reuses the json name/tag templates and packages/json paths — confirm this is intended +tag-template: 'json@$NEXT_PATCH_VERSION' +autolabeler: + - label: 'chore' + files: + - '*.md' + - '.github/*' + - label: 'bug' + branch: + - '/bug-.+' + - label: 'chore' + branch: + - '/chore-.+' + - label: 'feature' + branch: + - '/feature-.+' +categories: + - title: 'Breaking Changes' + labels: + - 'breakingchange' + - title: '🚀 New Features' + labels: + - 'feature' + - 'enhancement' + - title: '🐛 Bug Fixes' + labels: + - 'fix' + - 'bugfix' + - 'bug' + - title: '🧰 Maintenance' + labels: + - 'chore' + - 'maintenance' + - 'documentation' + - 'docs' + +change-template: '- $TITLE (#$NUMBER)' +include-paths: + - 'packages/json' +exclude-labels: + - 'skip-changelog' +template: | + ## Changes + + $CHANGES + + ## Contributors + We'd like to thank all the contributors who worked on this release! 
+ + $CONTRIBUTORS diff --git a/.github/release-drafter/bloom-config.yml b/.github/release-drafter/bloom-config.yml new file mode 100644 index 00000000000..7734330b95c --- /dev/null +++ b/.github/release-drafter/bloom-config.yml @@ -0,0 +1,50 @@ +name-template: 'bloom@$NEXT_PATCH_VERSION' +tag-template: 'bloom@$NEXT_PATCH_VERSION' +autolabeler: + - label: 'chore' + files: + - '*.md' + - '.github/*' + - label: 'bug' + branch: + - '/bug-.+' + - label: 'chore' + branch: + - '/chore-.+' + - label: 'feature' + branch: + - '/feature-.+' +categories: + - title: 'Breaking Changes' + labels: + - 'breakingchange' + - title: '🚀 New Features' + labels: + - 'feature' + - 'enhancement' + - title: '🐛 Bug Fixes' + labels: + - 'fix' + - 'bugfix' + - 'bug' + - title: '🧰 Maintenance' + labels: + - 'chore' + - 'maintenance' + - 'documentation' + - 'docs' + +change-template: '- $TITLE (#$NUMBER)' +include-paths: + - 'packages/bloom' +exclude-labels: + - 'skip-changelog' +template: | + ## Changes + + $CHANGES + + ## Contributors + We'd like to thank all the contributors who worked on this release! 
+ + $CONTRIBUTORS diff --git a/.github/release-drafter/entraid-config.yml b/.github/release-drafter/entraid-config.yml new file mode 100644 index 00000000000..d0ddd00773a --- /dev/null +++ b/.github/release-drafter/entraid-config.yml @@ -0,0 +1,50 @@ +name-template: 'entraid@$NEXT_PATCH_VERSION' +tag-template: 'entraid@$NEXT_PATCH_VERSION' +autolabeler: + - label: 'chore' + files: + - '*.md' + - '.github/*' + - label: 'bug' + branch: + - '/bug-.+' + - label: 'chore' + branch: + - '/chore-.+' + - label: 'feature' + branch: + - '/feature-.+' +categories: + - title: 'Breaking Changes' + labels: + - 'breakingchange' + - title: '🚀 New Features' + labels: + - 'feature' + - 'enhancement' + - title: '🐛 Bug Fixes' + labels: + - 'fix' + - 'bugfix' + - 'bug' + - title: '🧰 Maintenance' + labels: + - 'chore' + - 'maintenance' + - 'documentation' + - 'docs' + +change-template: '- $TITLE (#$NUMBER)' +include-paths: + - 'packages/entraid' +exclude-labels: + - 'skip-changelog' +template: | + ## Changes + + $CHANGES + + ## Contributors + We'd like to thank all the contributors who worked on this release! 
+ + $CONTRIBUTORS diff --git a/.github/release-drafter/json-config.yml b/.github/release-drafter/json-config.yml new file mode 100644 index 00000000000..ea259fc0d2d --- /dev/null +++ b/.github/release-drafter/json-config.yml @@ -0,0 +1,50 @@ +name-template: 'json@$NEXT_PATCH_VERSION' +tag-template: 'json@$NEXT_PATCH_VERSION' +autolabeler: + - label: 'chore' + files: + - '*.md' + - '.github/*' + - label: 'bug' + branch: + - '/bug-.+' + - label: 'chore' + branch: + - '/chore-.+' + - label: 'feature' + branch: + - '/feature-.+' +categories: + - title: 'Breaking Changes' + labels: + - 'breakingchange' + - title: '🚀 New Features' + labels: + - 'feature' + - 'enhancement' + - title: '🐛 Bug Fixes' + labels: + - 'fix' + - 'bugfix' + - 'bug' + - title: '🧰 Maintenance' + labels: + - 'chore' + - 'maintenance' + - 'documentation' + - 'docs' + +change-template: '- $TITLE (#$NUMBER)' +include-paths: + - 'packages/json' +exclude-labels: + - 'skip-changelog' +template: | + ## Changes + + $CHANGES + + ## Contributors + We'd like to thank all the contributors who worked on this release! 
+ + $CONTRIBUTORS diff --git a/.github/release-drafter/search-config.yml b/.github/release-drafter/search-config.yml new file mode 100644 index 00000000000..a78070aa59c --- /dev/null +++ b/.github/release-drafter/search-config.yml @@ -0,0 +1,50 @@ +name-template: 'search@$NEXT_PATCH_VERSION' +tag-template: 'search@$NEXT_PATCH_VERSION' +autolabeler: + - label: 'chore' + files: + - '*.md' + - '.github/*' + - label: 'bug' + branch: + - '/bug-.+' + - label: 'chore' + branch: + - '/chore-.+' + - label: 'feature' + branch: + - '/feature-.+' +categories: + - title: 'Breaking Changes' + labels: + - 'breakingchange' + - title: '🚀 New Features' + labels: + - 'feature' + - 'enhancement' + - title: '🐛 Bug Fixes' + labels: + - 'fix' + - 'bugfix' + - 'bug' + - title: '🧰 Maintenance' + labels: + - 'chore' + - 'maintenance' + - 'documentation' + - 'docs' + +change-template: '- $TITLE (#$NUMBER)' +include-paths: + - 'packages/search' +exclude-labels: + - 'skip-changelog' +template: | + ## Changes + + $CHANGES + + ## Contributors + We'd like to thank all the contributors who worked on this release! 
+ + $CONTRIBUTORS diff --git a/.github/release-drafter/time-series-config.yml b/.github/release-drafter/time-series-config.yml new file mode 100644 index 00000000000..29aee0cbc95 --- /dev/null +++ b/.github/release-drafter/time-series-config.yml @@ -0,0 +1,49 @@ +name-template: 'time-series@$NEXT_PATCH_VERSION' +tag-template: 'time-series@$NEXT_PATCH_VERSION' +autolabeler: + - label: 'chore' + files: + - '*.md' + - '.github/*' + - label: 'bug' + branch: + - '/bug-.+' + - label: 'chore' + branch: + - '/chore-.+' + - label: 'feature' + branch: + - '/feature-.+' +categories: + - title: 'Breaking Changes' + labels: + - 'breakingchange' + - title: '🚀 New Features' + labels: + - 'feature' + - 'enhancement' + - title: '🐛 Bug Fixes' + labels: + - 'fix' + - 'bugfix' + - 'bug' + - title: '🧰 Maintenance' + labels: + - 'chore' + - 'maintenance' + - 'documentation' + - 'docs' +change-template: '- $TITLE (#$NUMBER)' +include-paths: + - 'packages/time-series' +exclude-labels: + - 'skip-changelog' +template: | + ## Changes + + $CHANGES + + ## Contributors + We'd like to thank all the contributors who worked on this release! + + $CONTRIBUTORS diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml new file mode 100644 index 00000000000..47c82baea2c --- /dev/null +++ b/.github/workflows/codeql.yml @@ -0,0 +1,74 @@ +# For most projects, this workflow file will not need changing; you simply need +# to commit it to your repository. +# +# You may wish to alter this file to override the set of languages analyzed, +# or to provide custom queries or build logic. +# +# ******** NOTE ******** +# We have attempted to detect the languages in your repository. Please check +# the `language` matrix defined below to confirm you have the correct set of +# supported CodeQL languages. 
+# +name: "CodeQL" + +on: + push: + branches: [ "master" ] + pull_request: + # The branches below must be a subset of the branches above + branches: [ "master" ] + schedule: + - cron: '43 20 * * 1' + +jobs: + analyze: + name: Analyze + runs-on: ubuntu-latest + permissions: + actions: read + contents: read + security-events: write + + strategy: + fail-fast: false + matrix: + language: [ 'TypeScript' ] + # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ] + # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + # Initializes the CodeQL tools for scanning. + - name: Initialize CodeQL + uses: github/codeql-action/init@v2 + with: + languages: ${{ matrix.language }} + # If you wish to specify custom queries, you can do so here or in a config file. + # By default, queries listed here will override any specified in a config file. + # Prefix the list here with "+" to use these queries and those in the config file. + + # Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs + # queries: security-extended,security-and-quality + + + # Autobuild attempts to build any compiled languages (C/C++, C#, Go, or Java). + # If this step fails, then you should remove it and run the build manually (see below) + - name: Autobuild + uses: github/codeql-action/autobuild@v2 + + # ℹ️ Command-line programs to run using the OS shell. + # πŸ“š See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun + + # If the Autobuild fails above, remove it and uncomment the following three lines. + # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance. 
+ + # - run: | + # echo "Run, Build Application using script" + # ./location_of_script_within_repo/buildscript.sh + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v2 + with: + category: "/language:${{matrix.language}}" diff --git a/.github/workflows/documentation.yml b/.github/workflows/documentation.yml new file mode 100644 index 00000000000..a8c22752423 --- /dev/null +++ b/.github/workflows/documentation.yml @@ -0,0 +1,28 @@ +name: Documentation + +on: + push: + branches: + - master + - v4.0 + +jobs: + documentation: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 1 + - name: Use Node.js + uses: actions/setup-node@v3 + - name: Install Packages + run: npm ci + - name: Generate Documentation + run: npm run documentation + - name: Upload + run: | + git remote set-url origin https://git:${GITHUB_TOKEN}@github.com/${GITHUB_REPOSITORY}.git + npm run gh-pages + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + diff --git a/.github/workflows/release-drafter-bloom.yml b/.github/workflows/release-drafter-bloom.yml new file mode 100644 index 00000000000..4ad525c058f --- /dev/null +++ b/.github/workflows/release-drafter-bloom.yml @@ -0,0 +1,24 @@ +name: Release Drafter + +on: + push: + # branches to consider in the event; optional, defaults to all + branches: + - master + +jobs: + + update_release_draft: + + permissions: + contents: write + pull-requests: write + runs-on: ubuntu-latest + steps: + # Drafts your next Release notes as Pull Requests are merged into "master" + - uses: release-drafter/release-drafter@v5 + with: + # (Optional) specify config name to use, relative to .github/. 
Default: release-drafter.yml + config-name: release-drafter/bloom-config.yml + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/release-drafter-entraid.yml b/.github/workflows/release-drafter-entraid.yml new file mode 100644 index 00000000000..d522c6cef6f --- /dev/null +++ b/.github/workflows/release-drafter-entraid.yml @@ -0,0 +1,24 @@ +name: Release Drafter + +on: + push: + # branches to consider in the event; optional, defaults to all + branches: + - master + +jobs: + + update_release_draft: + + permissions: + contents: write + pull-requests: write + runs-on: ubuntu-latest + steps: + # Drafts your next Release notes as Pull Requests are merged into "master" + - uses: release-drafter/release-drafter@v5 + with: + # (Optional) specify config name to use, relative to .github/. Default: release-drafter.yml + config-name: release-drafter/entraid-config.yml + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/release-drafter-json.yml b/.github/workflows/release-drafter-json.yml new file mode 100644 index 00000000000..a8b3ba4d135 --- /dev/null +++ b/.github/workflows/release-drafter-json.yml @@ -0,0 +1,24 @@ +name: Release Drafter + +on: + push: + # branches to consider in the event; optional, defaults to all + branches: + - master + +jobs: + + update_release_draft: + + permissions: + contents: write + pull-requests: write + runs-on: ubuntu-latest + steps: + # Drafts your next Release notes as Pull Requests are merged into "master" + - uses: release-drafter/release-drafter@v5 + with: + # (Optional) specify config name to use, relative to .github/. 
Default: release-drafter.yml + config-name: release-drafter/json-config.yml + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/release-drafter-search.yml b/.github/workflows/release-drafter-search.yml new file mode 100644 index 00000000000..c331430353f --- /dev/null +++ b/.github/workflows/release-drafter-search.yml @@ -0,0 +1,24 @@ +name: Release Drafter + +on: + push: + # branches to consider in the event; optional, defaults to all + branches: + - master + +jobs: + + update_release_draft: + + permissions: + contents: write + pull-requests: write + runs-on: ubuntu-latest + steps: + # Drafts your next Release notes as Pull Requests are merged into "master" + - uses: release-drafter/release-drafter@v5 + with: + # (Optional) specify config name to use, relative to .github/. Default: release-drafter.yml + config-name: release-drafter/search-config.yml + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/release-drafter-time-series.yml b/.github/workflows/release-drafter-time-series.yml new file mode 100644 index 00000000000..71e44a70fd3 --- /dev/null +++ b/.github/workflows/release-drafter-time-series.yml @@ -0,0 +1,24 @@ +name: Release Drafter + +on: + push: + # branches to consider in the event; optional, defaults to all + branches: + - master + +jobs: + + update_release_draft: + + permissions: + contents: write + pull-requests: write + runs-on: ubuntu-latest + steps: + # Drafts your next Release notes as Pull Requests are merged into "master" + - uses: release-drafter/release-drafter@v5 + with: + # (Optional) specify config name to use, relative to .github/. 
Default: release-drafter.yml + config-name: release-drafter/time-series-config.yml + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 00000000000..e7c9d58fe71 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,51 @@ +name: Release + +on: + workflow_dispatch: + inputs: + version: + description: 'Version to release ("major", "minor", "patch", or "pre*" version; or specify version like "5.3.3")' + required: true + type: string + args: + description: 'Additional arguments to pass to release-it (e.g. "--dry-run"). See docs: https://github.com/release-it/release-it/blob/main/docs/git.md#configuration-options' + required: false + type: string + +jobs: + release: + runs-on: ubuntu-latest + permissions: + contents: write + packages: write + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + fetch-depth: 0 + ssh-key: ${{ secrets.RELEASE_KEY }} + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: '22' + registry-url: 'https://registry.npmjs.org' + + - name: Install dependencies + run: npm ci + + - name: Configure Git + run: | + git config --local user.email "action@github.com" + git config --local user.name "GitHub Action" + + # Build all packages + - name: Build packages + run: npm run build + + # Release using the monorepo approach + - name: Release packages + run: npm run release -- --ci -i ${{ github.event.inputs.version }} ${{ github.event.inputs.args }} + env: + NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/stale-issues.yml b/.github/workflows/stale-issues.yml new file mode 100644 index 00000000000..ba0fb5587ed --- /dev/null +++ b/.github/workflows/stale-issues.yml @@ -0,0 +1,95 @@ +name: "Stale Issue Management" +on: + schedule: + # Run daily at midnight UTC + - cron: "0 0 * * *" + workflow_dispatch: # Allow manual 
triggering + +env: + # Default stale policy timeframes + DAYS_BEFORE_STALE: 365 + DAYS_BEFORE_CLOSE: 30 + + # Accelerated timeline for needs-information issues + NEEDS_INFO_DAYS_BEFORE_STALE: 30 + NEEDS_INFO_DAYS_BEFORE_CLOSE: 7 + +jobs: + stale: + runs-on: ubuntu-latest + steps: + # First step: Handle regular issues (excluding needs-information) + - name: Mark regular issues as stale + uses: actions/stale@v9 + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} + + # Default stale policy + days-before-stale: ${{ env.DAYS_BEFORE_STALE }} + days-before-close: ${{ env.DAYS_BEFORE_CLOSE }} + + # Explicit stale label configuration + stale-issue-label: "stale" + stale-pr-label: "stale" + + stale-issue-message: | + This issue has been automatically marked as stale due to inactivity. + It will be closed in 30 days if no further activity occurs. + If you believe this issue is still relevant, please add a comment to keep it open. + + close-issue-message: | + This issue has been automatically closed due to inactivity. + If you believe this issue is still relevant, please reopen it or create a new issue with updated information. + + # Exclude needs-information issues from this step + exempt-issue-labels: 'no-stale,needs-information' + + # Remove stale label when issue/PR becomes active again + remove-stale-when-updated: true + + # Apply to pull requests with same timeline + days-before-pr-stale: ${{ env.DAYS_BEFORE_STALE }} + days-before-pr-close: ${{ env.DAYS_BEFORE_CLOSE }} + + stale-pr-message: | + This pull request has been automatically marked as stale due to inactivity. + It will be closed in 30 days if no further activity occurs. + + close-pr-message: | + This pull request has been automatically closed due to inactivity. + If you would like to continue this work, please reopen the PR or create a new one. 
+ + # Only exclude no-stale PRs (needs-information PRs follow standard timeline) + exempt-pr-labels: 'no-stale' + + # Second step: Handle needs-information issues with accelerated timeline + - name: Mark needs-information issues as stale + uses: actions/stale@v9 + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} + + # Accelerated timeline for needs-information + days-before-stale: ${{ env.NEEDS_INFO_DAYS_BEFORE_STALE }} + days-before-close: ${{ env.NEEDS_INFO_DAYS_BEFORE_CLOSE }} + + # Explicit stale label configuration + stale-issue-label: "stale" + + # Only target ISSUES with needs-information label (not PRs) + only-issue-labels: 'needs-information' + + stale-issue-message: | + This issue has been marked as stale because it requires additional information + that has not been provided for 30 days. It will be closed in 7 days if the + requested information is not provided. + + close-issue-message: | + This issue has been closed because the requested information was not provided within the specified timeframe. + If you can provide the missing information, please reopen this issue or create a new one. 
+ + # Disable PR processing for this step + days-before-pr-stale: -1 + days-before-pr-close: -1 + + # Remove stale label when issue becomes active again + remove-stale-when-updated: true diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml new file mode 100644 index 00000000000..45ada77197f --- /dev/null +++ b/.github/workflows/tests.yml @@ -0,0 +1,52 @@ +name: Tests + +on: + push: + branches: + - master + - v4.0 + - v5 + paths-ignore: + - "**/*.md" + pull_request: + branches: + - master + - v4.0 + - v5 + paths-ignore: + - "**/*.md" +jobs: + tests: + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + node-version: ["18", "20", "22"] + redis-version: ["rs-7.4.0-v1", "8.0.2", "8.2", "8.4-M01-pre"] + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 1 + - name: Use Node.js ${{ matrix.node-version }} + uses: actions/setup-node@v4 + with: + node-version: ${{ matrix.node-version }} + - name: Update npm + run: npm i -g npm + if: ${{ matrix.node-version <= 14 }} + - name: Install Packages + run: npm ci + - name: Build + run: npm run build + - name: Run Tests + run: npm run test -ws --if-present -- --forbid-only --redis-version=${{ matrix.redis-version }} + - name: Upload to Codecov + run: | + curl https://keybase.io/codecovsecurity/pgp_keys.asc | gpg --no-default-keyring --keyring trustedkeys.gpg --import + curl -Os https://uploader.codecov.io/latest/linux/codecov + curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM + curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM.sig + gpgv codecov.SHA256SUM.sig codecov.SHA256SUM + shasum -a 256 -c codecov.SHA256SUM + chmod +x codecov + ./codecov diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000000..ecdef37dffd --- /dev/null +++ b/.gitignore @@ -0,0 +1,10 @@ +.idea/ +.nyc_output/ +.vscode/ +coverage/ +dist/ +node_modules/ +.DS_Store +dump.rdb +documentation/ +tsconfig.tsbuildinfo diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 
100644 index 00000000000..fbc3070381e --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,958 @@ +# Changelog + +## v4.0.2 - 13 Jan, 2022 + +### Fixes + +- Fix v4 commands in legacy mode (#1820) +- Fix `EXISTS` command reply (#1819) +- Fix handler for "__redis__:invalidate" messages (#1798) +- Fix "SEPARATOR" typo in RediSearch (#1823) + +### Enhancements + +- First release of `@node-redis/bloom` +- Add support for `Buffer`s +- Enhance `ASK` and `MOVED` errors handler + +## v4.0.1 - 13 Dec, 2021 + +### Fixes + +- Fix `NOAUTH` error when using authentication & database (#1681) +- Allow to `.quit()` in PubSub mode (#1766) +- Add an option to configure `name` on a client (#1758) +- Lowercase commands (`client.hset`) in `legacyMode` +- Fix PubSub resubscribe (#1764) +- Fix `RedisSocketOptions` type (#1741) + +### Enhancements + +- Add support for `number`s and `Buffer`s in `HSET` (#1738 #1739) +- Export `RedisClientType`, `RedisClusterType` and some more types (#1673) +- First release of `@node-redis/time-series` + +## v4.0.0 - 24 Nov, 2021 + +This version is a major change and refactor, adding modern JavaScript capabilities and multiple breaking changes. See the [migration guide](./docs/v3-to-v4.md) for tips on how to upgrade. 
+ +### Breaking Changes + +- All functions return Promises by default +- Dropped support for Node.js 10.x, the minimum supported Node.js version is now 12.x +- `createClient` takes new and different arguments +- The `prefix`, `rename_commands` configuration options to `createClient` have been removed +- The `enable_offline_queue` configuration option is removed, executing commands on a closed client (without calling `.connect()` or after calling `.disconnect()`) will reject immediately +- Login credentials are no longer saved when using `.auth()` directly + +### Features + +- Added support for Promises +- Added built-in TypeScript declaration files enabling code completion +- Added support for [clustering](./README.md#cluster) +- Added idiomatic arguments and responses to [Redis commands](./README.md#redis-commands) +- Added full support for [Lua Scripts](./README.md#lua-scripts) +- Added support for [SCAN iterators](./README.md#scan-iterator) +- Added the ability to extend Node Redis with Redis Module commands + +## v3.1.2 + +### Fixes + +- Exclude unnecessary files from tarball + +## v3.1.1 + +### Enhancements + +- Upgrade node and dependencies + +### Fixes + +- Fix a potential exponential regex in monitor mode + +## v3.1.0 - 31 Mar, 2021 + +### Enhancements + +- Upgrade node and dependencies and redis-commands to support Redis 6 +- Add support for Redis 6 `auth pass [user]` + +## v3.0.0 - 09 Feb, 2020 + +This version is mainly a release to distribute all the unreleased changes on master since 2017 and additionally removes +a lot of old deprecated features and old internals in preparation for an upcoming modernization refactor (v4). 
+ +### Breaking Changes + +- Dropped support for Node.js < 6 +- Dropped support for `hiredis` (no longer required) +- Removed previously deprecated `drain` event +- Removed previously deprecated `idle` event +- Removed previously deprecated `parser` option +- Removed previously deprecated `max_delay` option +- Removed previously deprecated `max_attempts` option +- Removed previously deprecated `socket_no_delay` option + +### Bug Fixes + +- Removed development files from published package (#1370) +- Duplicate function now allows db param to be passed (#1311) + +### Features + +- Upgraded to latest `redis-commands` package +- Upgraded to latest `redis-parser` package, v3.0.0, which brings performance improvements +- Replaced `double-ended-queue` with `denque`, which brings performance improvements +- Add timestamps to debug traces +- Add `socket_initial_delay` option for `socket.setKeepAlive` (#1396) +- Add support for `rediss` protocol in url (#1282) + +## v2.8.0 - 31 Jul, 2017 + +Features + +- Accept UPPER_CASE commands in send_command +- Add arbitrary commands to the prototype by using `Redis.addCommand(name)` + +Bugfixes + +- Fixed not always copying subscribe unsubscribe arguments +- Fixed emitting internal errors while reconnecting with auth +- Fixed crashing with invalid url option + +## v2.7.1 - 14 Mar, 2017 + +Bugfixes + +- Fixed monitor mode not working in combination with IPv6 (2.6.0 regression) + +## v2.7.0 - 11 Mar, 2017 + +Features + +- All returned errors are from now a subclass of `RedisError`. 
+ +Bugfixes + +- Fixed rename_commands not accepting `null` as value +- Fixed `AbortError`s and `AggregateError`s not showing the error message in the stack trace + +## v2.6.5 - 15 Jan, 2017 + +Bugfixes + +- Fixed parser not being reset in case the redis connection closed ASAP for overcoming of output buffer limits +- Fixed parser reset if (p)message_buffer listener is attached + +## v2.6.4 - 12 Jan, 2017 + +Bugfixes + +- Fixed monitor mode not working in combination with IPv6, sockets or lua scripts (2.6.0 regression) + +## v2.6.3 - 31 Oct, 2016 + +Bugfixes + +- Do not change the tls setting to camel_case +- Fix domain handling in combination with the offline queue (2.5.3 regression) + +## v2.6.2 - 16 Jun, 2016 + +Bugfixes + +- Fixed individual callbacks of a transaction not being called (2.6.0 regression) + +## v2.6.1 - 02 Jun, 2016 + +Bugfixes + +- Fixed invalid function name being exported + +## v2.6.0 - 01 Jun, 2016 + +In addition to the pre-releases the following changes exist in v.2.6.0: + +Features + +- Updated [redis-parser](https://github.com/NodeRedis/node-redis-parser) dependency ([changelog](https://github.com/NodeRedis/node-redis-parser/releases/tag/v.2.0.0)) +- The JS parser is from now on the new default as it is a lot faster than the hiredis parser +- This is no BC as there is no changed behavior for the user at all but just a performance improvement. Explicitly requireing the Hiredis parser is still possible. +- Added name property to all Redis functions (Node.js >= 4.0) +- Improved stack traces in development and debug mode + +Bugfixes + +- Reverted support for `__proto__` (v.2.6.0-2) to prevent and breaking change + +Deprecations + +- The `parser` option is deprecated and should be removed. 
The built-in Javascript parser is a lot faster than the hiredis parser and has more features + +## v2.6.0-2 - 29 Apr, 2016 + +Features + +- Added support for the new [CLIENT REPLY ON|OFF|SKIP](http://redis.io/commands/client-reply) command (Redis v.3.2) +- Added support for camelCase +- The Node.js landscape default is to use camelCase. node_redis is a bit out of the box here + but from now on it is possible to use both, just as you prefer! +- If there's any documented variable missing as camelCased, please open a issue for it +- Improve error handling significantly +- Only emit an error if the error has not already been handled in a callback +- Improved unspecific error messages e.g. "Connection gone from end / close event" +- Added `args` to command errors to improve identification of the error +- Added origin to errors if there's e.g. a connection error +- Added ReplyError class. All Redis errors are from now on going to be of that class +- Added AbortError class. A subclass of AbortError. All unresolved and by node_redis rejected commands are from now on of that class +- Added AggregateError class. If a unresolved and by node_redis rejected command has no callback and + this applies to more than a single command, the errors for the commands without callback are aggregated + to a single error that is emitted in debug_mode in that case. +- Added `message_buffer` / `pmessage_buffer` events. 
That event is always going to emit a buffer +- Listening to the `message` event at the same time is always going to return the same message as string +- Added callback option to the duplicate function +- Added support for `__proto__` and other reserved keywords as hgetall field +- Updated [redis-commands](https://github.com/NodeRedis/redis-commands) dependency ([changelog](https://github.com/NodeRedis/redis-commands/releases/tag/v.1.2.0)) + +Bugfixes + +- Fixed v.2.5.0 auth command regression (under special circumstances a reconnect would not authenticate properly) +- Fixed v.2.6.0-0 pub sub mode and quit command regressions: +- Entering pub sub mode not working if a earlier called and still running command returned an error +- Unsubscribe callback not called if unsubscribing from all channels and resubscribing right away +- Quit command resulting in an error in some cases +- Fixed special handled functions in batch and multi context not working the same as without (e.g. select and info) +- Be aware that not all commands work in combination with transactions but they all work with batch +- Fixed address always set to 127.0.0.1:6379 in case host / port is set in the `tls` options instead of the general options + +## v2.6.0-1 - 01 Apr, 2016 + +A second pre-release with further fixes. This is likely going to be released as 2.6.0 stable without further changes. 
+ +Features + +- Added type validations for client.send_command arguments + +Bugfixes + +- Fixed client.send_command not working properly with every command and every option +- Fixed pub sub mode unsubscribing from all channels in combination with the new `string_numbers` option crashing +- Fixed pub sub mode unsubscribing from all channels not respected while reconnecting +- Fixed pub sub mode events in combination with the `string_numbers` option emitting the number of channels not as number + +## v2.6.0-0 - 27 Mar, 2016 + +This is mainly a very important bug fix release with some smaller features. + +Features + +- Monitor and pub sub mode now work together with the offline queue +- All commands that were sent after a connection loss are now going to be sent after reconnecting +- Activating monitor mode does now work together with arbitrary commands including pub sub mode +- Pub sub mode is completely rewritten and all known issues fixed +- Added `string_numbers` option to get back strings instead of numbers +- Quit command is from now on always going to end the connection properly + +Bugfixes + +- Fixed calling monitor command while other commands are still running +- Fixed monitor and pub sub mode not working together +- Fixed monitor mode not working in combination with the offline queue +- Fixed pub sub mode not working in combination with the offline queue +- Fixed pub sub mode resubscribing not working with non utf8 buffer channels +- Fixed pub sub mode crashing if calling unsubscribe / subscribe in various combinations +- Fixed pub sub mode emitting unsubscribe even if no channels were unsubscribed +- Fixed pub sub mode emitting a message without a message published +- Fixed quit command not ending the connection and resulting in further reconnection if called while reconnecting + +The quit command did not end connections earlier if the connection was down at that time and this could have +led to strange situations, therefore this was fixed to end the 
connection right away in those cases. + +## v2.5.3 - 21 Mar, 2016 + +Bugfixes + +- Revert throwing on invalid data types and print a warning instead + +## v2.5.2 - 16 Mar, 2016 + +Bugfixes + +- Fixed breaking changes against Redis 2.4 introduced in 2.5.0 / 2.5.1 + +## v2.5.1 - 15 Mar, 2016 + +Bugfixes + +- Fixed info command not working anymore with optional section argument + +## v2.5.0 - 15 Mar, 2016 + +Same changelog as the pre-release + +## v2.5.0-1 - 07 Mar, 2016 + +This is a big release with some substantial underlying changes. Therefore this is released as a pre-release and I encourage anyone who's able to, to test this out. + +It took way too long to release this one and the next release cycles will be shorter again. + +This release is also going to deprecate a couple things to prepare for a future v.3 (it'll still take a while to v.3). + +Features + +- The parsers moved into the [redis-parser](https://github.com/NodeRedis/node-redis-parser) module and will be maintained in there from now on +- Improve js parser speed significantly for big SUNION/SINTER/LRANGE/ZRANGE +- Improve redis-url parsing to also accept the database-number and options as query parameters as suggested in [IANA](http://www.iana.org/assignments/uri-schemes/prov/redis) +- Added a `retry_unfulfilled_commands` option +- Setting this to 'true' results in retrying all commands that were not fulfilled on a connection loss after the reconnect. 
Use with caution +- Added a `db` option to select the database while connecting (this is [not recommended](https://groups.google.com/forum/#!topic/redis-db/vS5wX8X4Cjg)) +- Added a `password` option as alias for auth_pass +- The client.server_info is from now on updated while using the info command +- Gracefully handle redis protocol errors from now on +- Added a `warning` emitter that receives node_redis warnings like auth not required and deprecation messages +- Added a `retry_strategy` option that replaces all reconnect options +- The reconnecting event from now on also receives: +- The error message why the reconnect happened (params.error) +- The amount of times the client was connected (params.times_connected) +- The total reconnecting time since the last time connected (params.total_retry_time) +- Always respect the command execution order no matter if the reply could be returned sync or not (former exceptions: [#937](https://github.com/NodeRedis/node_redis/issues/937#issuecomment-167525939)) +- redis.createClient is now checking input values stricter and detects more faulty input +- Started refactoring internals into individual modules +- Pipelining speed improvements + +Bugfixes + +- Fixed explicit undefined as a command callback in a multi context +- Fixed hmset failing to detect the first key as buffer or date if the key is of that type +- Fixed do not run toString on an array argument and throw an "invalid data" error instead +- This is not considered as breaking change, as this is likely an error in your code and if you want to have such a behavior you should handle this beforehand +- The same applies to Map / Set and individual Object types +- Fixed redis url not accepting the protocol being omitted or protocols other than the redis protocol for convenience +- Fixed parsing the db keyspace even if the first database does not begin with a zero +- Fixed handling of errors occurring while receiving pub sub messages +- Fixed huge string 
pipelines crashing NodeJS (Pipeline size above 256mb) +- Fixed rename_commands and prefix option not working together +- Fixed ready being emitted too early in case a slave is still syncing / master down + +Deprecations + +- Using any command with an argument being set to null or undefined is deprecated +- From v.3.0.0 on using a command with such an argument will return an error instead +- If you want to keep the old behavior please use a precheck in your code that converts the arguments to a string. +- Using SET or SETEX with an undefined or null value will from now on also result in converting the value to "null" / "undefined" to have a consistent behavior. This is not considered as breaking change, as it returned an error earlier. +- Using .end(flush) without the flush parameter is deprecated and the flush parameter should explicitly be used +- From v.3.0.0 on using .end without flush will result in an error +- Using .end without flush means that any command that did not yet return is going to silently fail. Therefore this is considered harmful and you should explicitly silence such errors if you are sure you want this +- Depending on the return value of a command to detect the backpressure is deprecated +- From version 3.0.0 on node_redis might not return true / false as a return value anymore. Please rely on client.should_buffer instead +- The `socket_nodelay` option is deprecated and will be removed in v.3.0.0 +- If you want to buffer commands you should use [.batch or .multi](./README.md) instead. This is necessary to reduce the amount of different options and this is very likely reducing your throughput if set to false. +- If you are sure you want to activate the NAGLE algorithm you can still activate it by using client.stream.setNoDelay(false) +- The `max_attempts` option is deprecated and will be removed in v.3.0.0. Please use the `retry_strategy` instead +- The `retry_max_delay` option is deprecated and will be removed in v.3.0.0. 
Please use the `retry_strategy` instead +- The drain event is deprecated and will be removed in v.3.0.0. Please listen to the stream drain event instead +- The idle event is deprecated and will likely be removed in v.3.0.0. If you rely on this feature please open a new ticket in node_redis with your use case +- Redis < v. 2.6 is not officially supported anymore and might not work in all cases. Please update to a newer redis version as it is not possible to test for these old versions +- Removed non documented command syntax (adding the callback to an arguments array instead of passing it as individual argument) + +## v2.4.2 - 27 Nov, 2015 + +Bugfixes + +- Fixed not emitting ready after reconnect with disable_resubscribing ([@maxgalbu](https://github.com/maxgalbu)) + +## v2.4.1 - 25 Nov, 2015 + +Bugfixes + +- Fixed a js parser regression introduced in 2.4.0 ([@BridgeAR](https://github.com/BridgeAR)) + +## v2.4.0 - 25 Nov, 2015 + +Features + +- Added `tls` option to initiate a connection to a redis server behind a TLS proxy. Thanks ([@paddybyers](https://github.com/paddybyers)) +- Added `prefix` option to auto key prefix any command with the provided prefix ([@luin](https://github.com/luin) & [@BridgeAR](https://github.com/BridgeAR)) +- Added `url` option to pass the connection url with the options object ([@BridgeAR](https://github.com/BridgeAR)) +- Added `client.duplicate([options])` to duplicate the current client and return a new one with the same options ([@BridgeAR](https://github.com/BridgeAR)) +- Improve performance by up to 20% on almost all use cases ([@BridgeAR](https://github.com/BridgeAR)) + +Bugfixes + +- Fixed js parser handling big values slow ([@BridgeAR](https://github.com/BridgeAR)) +- The speed is now on par with the hiredis parser. 
+ +## v2.3.1 - 18 Nov, 2015 + +Bugfixes + +- Fixed saving buffers with charsets other than utf-8 while using multi ([@BridgeAR](https://github.com/BridgeAR)) +- Fixed js parser handling big values very slow ([@BridgeAR](https://github.com/BridgeAR)) +- The speed is up to ~500% faster than before but still up to ~50% slower than the hiredis parser. + +## v2.3.0 - 30 Oct, 2015 + +Features + +- Improve speed further for: ([@BridgeAR](https://github.com/BridgeAR)) +- saving big strings (up to +300%) +- using .multi / .batch (up to +50% / on Node.js 0.10.x +300%) +- saving small buffers +- Increased coverage to 99% ([@BridgeAR](https://github.com/BridgeAR)) +- Refactored manual backpressure control ([@BridgeAR](https://github.com/BridgeAR)) +- Removed the high water mark and low water mark. Such a mechanism should be implemented by a user instead +- The `drain` event is from now on only emitted if the stream really had to buffer +- Reduced the default connect_timeout to be one hour instead of 24h ([@BridgeAR](https://github.com/BridgeAR)) +- Added .path to redis.createClient(options); ([@BridgeAR](https://github.com/BridgeAR)) +- Ignore info command, if not available on server ([@ivanB1975](https://github.com/ivanB1975)) + +Bugfixes + +- Fixed a js parser error that could result in a timeout ([@BridgeAR](https://github.com/BridgeAR)) +- Fixed .multi / .batch used with Node.js 0.10.x not working properly after a reconnect ([@BridgeAR](https://github.com/BridgeAR)) +- Fixed fired but not yet returned commands not being rejected after a connection loss ([@BridgeAR](https://github.com/BridgeAR)) +- Fixed connect_timeout not respected if no connection has ever been established ([@gagle](https://github.com/gagle) & [@benjie](https://github.com/benjie)) +- Fixed return_buffers in pub sub mode 
([@komachi](https://github.com/komachi)) + +## v2.2.5 - 18 Oct, 2015 + +Bugfixes + +- Fixed undefined options passed to a new instance not accepted (possible with individual .createClient functions) ([@BridgeAR](https://github.com/BridgeAR)) + +## v2.2.4 - 17 Oct, 2015 + +Bugfixes + +- Fixed unspecific error message for unresolvable commands ([@BridgeAR](https://github.com/BridgeAR)) +- Fixed not allowed command error in pubsub mode not being returned in a provided callback ([@BridgeAR](https://github.com/BridgeAR)) +- Fixed too many commands forbidden in pub sub mode ([@BridgeAR](https://github.com/BridgeAR)) +- Fixed mutation of the arguments array passed to .multi / .batch constructor ([@BridgeAR](https://github.com/BridgeAR)) +- Fixed mutation of the options object passed to createClient ([@BridgeAR](https://github.com/BridgeAR)) +- Fixed error callback in .multi not called if connection in broken mode ([@BridgeAR](https://github.com/BridgeAR)) + +## v2.2.3 - 14 Oct, 2015 + +Bugfixes + +- Fixed multi not being executed on Node 0.10.x if node_redis not yet ready ([@BridgeAR](https://github.com/BridgeAR)) + +## v2.2.2 - 14 Oct, 2015 + +Bugfixes + +- Fixed regular commands not being executed after a .multi until .exec was called ([@BridgeAR](https://github.com/BridgeAR)) + +## v2.2.1 - 12 Oct, 2015 + +No code change + +## v2.2.0 - 12 Oct, 2015 - The peregrino falcon + +The peregrino falcon is the fastest bird on earth and this is what this release is all about: Increased performance for heavy usage by up to **400%** [sic!] and increased overall performance for any command as well. Please check the benchmarks in the [README.md](./README.md) for further details. 
+ +Features + +- Added rename_commands options to handle renamed commands from the redis config ([@digmxl](https://github.com/digmxl) & [@BridgeAR](https://github.com/BridgeAR)) +- Added disable_resubscribing option to prevent a client from resubscribing after reconnecting ([@BridgeAR](https://github.com/BridgeAR)) +- Increased performance ([@BridgeAR](https://github.com/BridgeAR)) +- exchanging built in queue with [@petkaantonov](https://github.com/petkaantonov)'s [double-ended queue](https://github.com/petkaantonov/deque) +- prevent polymorphism +- optimize statements +- Added _.batch_ command, similar to .multi but without transaction ([@BridgeAR](https://github.com/BridgeAR)) +- Improved pipelining to minimize the [RTT](http://redis.io/topics/pipelining) further ([@BridgeAR](https://github.com/BridgeAR)) + +Bugfixes + +- Fixed a javascript parser regression introduced in 2.0 that could result in timeouts on high load. ([@BridgeAR](https://github.com/BridgeAR)) +- I was not able to write a regression test for this, since the error seems to only occur under heavy load with special conditions. So please have a look for timeouts with the js parser, if you use it and report all issues and switch to the hiredis parser in the meanwhile. If you're able to come up with a reproducible test case, this would be even better :) +- Fixed should_buffer boolean for .exec, .select and .auth commands not being returned and fix a couple special conditions ([@BridgeAR](https://github.com/BridgeAR)) + +If you do not rely on transactions but want to reduce the RTT you can use .batch from now on. It'll behave just the same as .multi but it does not have any transaction and therefor won't roll back any failed commands.
+Both .multi and .batch are from now on going to cache the commands and release them while calling .exec. + +Please consider using .batch instead of looping through a lot of commands one by one. This will significantly improve your performance. + +Here are some stats compared to ioredis 1.9.1 (Lenovo T450s i7-5600U): + + simple set + 82,496 op/s » ioredis + 112,617 op/s » node_redis + + simple get + 82,015 op/s » ioredis + 105,701 op/s » node_redis + + simple get with pipeline + 10,233 op/s » ioredis + 26,541 op/s » node_redis (using .batch) + + lrange 100 + 7,321 op/s » ioredis + 26,155 op/s » node_redis + + publish + 90,524 op/s » ioredis + 112,823 op/s » node_redis + + subscribe + 43,783 op/s » ioredis + 61,889 op/s » node_redis + +To conclude: we can proudly say that node_redis is very likely outperforming any other node redis client. + +Known issues + +- The pub sub system has some flaws and those will be addressed in the next minor release + +## v2.1.0 - Oct 02, 2015 + +Features: + +- Added optional flush parameter to `.end`. If set to true, commands fired after using .end are going to be rejected instead of being ignored. (@crispy1989) +- Added: host and port can now be provided in a single options object. E.g. redis.createClient({ host: 'localhost', port: 1337, max_attempts: 5 }); (@BridgeAR) +- Speedup common cases (@BridgeAR) + +Bugfixes: + +- Fix argument mutation while using the array notation with the multi constructor (@BridgeAR) +- Fix multi.hmset key not being type converted if used with an object and key not being a string (@BridgeAR) +- Fix parser errors not being caught properly (@BridgeAR) +- Fix a crash that could occur if a redis server does not return the info command as usual #541 (@BridgeAR) +- Explicitly passing undefined as a callback statement will work again. E.g. 
client.publish('channel', 'message', undefined); (@BridgeAR) + +## v2.0.1 - Sep 24, 2015 + +Bugfixes: + +- Fix argument mutation while using the array notation in combination with keys / callbacks ([#866](.)). (@BridgeAR) + +## v2.0.0 - Sep 21, 2015 + +This is the biggest release that node_redis had since it was released in 2010. A long list of outstanding bugs has been fixed, so we are very happy to present you redis 2.0 and we highly recommend updating as soon as possible. + +# What's new in 2.0 + +- Implemented a "connection is broken" mode if no connection could be established +- node_redis no longer throws under any circumstances, preventing it from terminating applications. +- Multi error handling is now working properly +- Consistent command behavior including multi +- Windows support +- Improved performance +- A lot of code cleanup +- Many bug fixes +- Better user support! + +## Features: + +- Added a "redis connection is broken" mode after reaching max connection attempts / exceeding connection timeout. (@BridgeAR) +- Added NODE_DEBUG=redis env to activate the debug_mode (@BridgeAR) +- Added a default connection timeout of 24h instead of never timing out as a default (@BridgeAR) +- Added: Network errors and other stream errors will from now on include the error code as `err.code` property (@BridgeAR) +- Added: Errors thrown by redis will now include the redis error code as `err.code` property. (@skeggse & @BridgeAR) +- Added: Errors thrown by node_redis will now include a `err.command` property for the command used (@BridgeAR) +- Added new commands and drop support for deprecated _substr_ (@BridgeAR) +- Added new possibilities how to provide the command arguments (@BridgeAR) +- The entries in the keyspace of the server_info is now an object instead of a string. (@SinisterLight & @BridgeAR) +- Small speedup here and there (e.g. 
by not using .toLowerCase() anymore) (@BridgeAR) +- Full windows support (@bcoe) +- Increased coverage by 10% and add a lot of tests to make sure everything works as it should. We now reached 97% :-) (@BridgeAR) +- Remove dead code, clean up and refactor very old chunks (@BridgeAR) +- Don't flush the offline queue if reconnecting (@BridgeAR) +- Emit all errors instead of throwing sometimes and sometimes emitting them (@BridgeAR) +- _auth_pass_ passwords are now checked to be a valid password (@jcppman & @BridgeAR) + +## Bug fixes: + +- Don't kill the app anymore by randomly throwing errors sync instead of emitting them (@BridgeAR) +- Don't catch user errors anymore occurring in callbacks (no try callback anymore & more fixes for the parser) (@BridgeAR) +- Early garbage collection of queued items (@dohse) +- Fix js parser returning errors as strings (@BridgeAR) +- Do not wrap errors into other errors (@BridgeAR) +- Authentication failures are now returned in the callback instead of being emitted (@BridgeAR) +- Fix a memory leak on reconnect (@rahar) +- Using `send_command` directly may now also be called without the args as stated in the [README.md](./README.md) (@BridgeAR) +- Fix the multi.exec error handling (@BridgeAR) +- Fix commands being inconsistent and behaving wrong (@BridgeAR) +- Channel names with spaces are now properly resubscribed after a reconnection (@pbihler) +- Do not try to reconnect after the connection timeout has been exceeded (@BridgeAR) +- Ensure the execution order is observed if using .eval (@BridgeAR) +- Fix commands not being rejected after calling .quit (@BridgeAR) +- Fix .auth calling the callback twice if already connected (@BridgeAR) +- Fix detect_buffers not working in pub sub mode and while monitoring (@BridgeAR) +- Fix channel names always being strings instead of buffers while return_buffers is true (@BridgeAR) +- Don't print any debug statements if not asked for (@BridgeAR) +- Fix a couple small other bugs + +## Breaking changes: 
+ +1. redis.send_command commands have to be lower case from now on. This does only apply if you use `.send_command` directly instead of the convenient methods like `redis.command`. +2. Error messages have changed quite a bit. If you depend on a specific wording please check your application carefully. +3. Errors are from now on always either returned if a callback is present or emitted. They won't be thrown (neither sync, nor async). +4. The Multi error handling has changed a lot! + +- All errors are from now on errors instead of strings (this only applied to the js parser). +- If an error occurs while queueing the commands an EXECABORT error will be returned including the failed commands as `.errors` property instead of an array with errors. +- If an error occurs while executing the commands and that command has a callback it'll return the error as first parameter (`err, undefined` instead of `null, undefined`). +- All the errors occurring while executing the commands will stay in the result value as error instance (if you used the js parser before they would have been strings). Be aware that the transaction won't be aborted if those errors occur! +- If `multi.exec` does not have a callback and an EXECABORT error occurs, it'll emit that error instead. + +5. If redis can't connect to your redis server it'll give up after a certain point of failures (either max connection attempts or connection timeout exceeded). If that is the case it'll emit a CONNECTION_BROKEN error. You'll have to initiate a new client to try again afterwards. +6. The offline queue is not flushed anymore on a reconnect. It'll stay until node_redis gives up trying to reach the server or until you close the connection. +7. Before this release node_redis caught user errors and threw them async back. This is not the case anymore! No user behavior whatsoever will be tracked or caught. +8. The keyspace of `redis.server_info` (db0...) is from now on an object instead of a string. 
+ +NodeRedis also thanks @qdb, @tobek, @cvibhagool, @frewsxcv, @davidbanham, @serv, @vitaliylag, @chrishamant, @GamingCoder and all other contributors that I may have missed for their contributions! + +From now on we'll push new releases more frequently out and fix further long outstanding things and implement new features. + +
+ +## v1.0.0 - Aug 30, 2015 + +- Huge issue and pull-request cleanup. Thanks Blain! (@blainsmith) +- [#658](https://github.com/NodeRedis/node_redis/pull/658) Client now parses URL-format connection strings (e.g., redis://foo:pass@127.0.0.1:8080) (@kuwabarahiroshi) +- [#749](https://github.com/NodeRedis/node_redis/pull/749) Fix reconnection bug when client is in monitoring mode (@danielbprice) +- [#786](https://github.com/NodeRedis/node_redis/pull/786) Refactor createClient. Fixes #651 (@BridgeAR) +- [#793](https://github.com/NodeRedis/node_redis/pull/793) Refactor tests and improve test coverage (@erinspice, @bcoe) +- [#733](https://github.com/NodeRedis/node_redis/pull/733) Fixes detect_buffers functionality in the context of exec. Fixes #732, #263 (@raydog) +- [#785](https://github.com/NodeRedis/node_redis/pull/785) Tiny speedup by using 'use strict' (@BridgeAR) +- Fix extraneous error output due to pubsub tests (Mikael Kohlmyr) + +## v0.12.1 - Aug 10, 2014 + +- Fix IPv6/IPv4 family selection in node 0.11+ (Various) + +## v0.12.0 - Aug 9, 2014 + +- Fix unix socket support (Jack Tang) +- Improve createClient argument handling (Jack Tang) + +## v0.11.0 - Jul 10, 2014 + +- IPv6 Support. (Yann Stephan) +- Revert error emitting and go back to throwing errors. (Bryce Baril) +- Set socket_keepalive to prevent long-lived client timeouts. (mohit) +- Correctly reset retry timer. (ouotuo) +- Domains protection from bad user exit. (Jake Verbaten) +- Fix reconnection socket logic to prevent misqueued entries. (Iain Proctor) + +## v0.10.3 - May 22, 2014 + +- Update command list to match Redis 2.8.9 (Charles Feng) + +## v0.10.2 - May 18, 2014 + +- Better binary key handling for HGETALL. (Nick Apperson) +- Fix test not resetting `error` handler. (CrypticSwarm) +- Fix SELECT error semantics. 
(Bryan English) + +## v0.10.1 - February 17, 2014 + +- Skip plucking redis version from the INFO stream if INFO results weren't provided. (Robert Sköld) + +## v0.10.0 - December 21, 2013 + +- Instead of throwing errors asynchronously, emit errors on client. (Bryce Baril) + +## v0.9.2 - December 15, 2013 + +- Regenerate commands for new 2.8.x Redis commands. (Marek Ventur) +- Correctly time reconnect counts when using 'auth'. (William Hockey) + +## v0.9.1 - November 23, 2013 + +- Allow hmset to accept numeric keys. (Alex Stokes) +- Fix TypeError for multiple MULTI/EXEC errors. (Kwangsu Kim) + +## v0.9.0 - October 17, 2013 + +- Domains support. (Forrest L Norvell) + +## v0.8.6 - October 2, 2013 + +- If error is already an Error, don't wrap it in another Error. (Mathieu M-Gosselin) +- Fix retry delay logic (Ian Babrou) +- Return Errors instead of strings where Errors are expected (Ian Babrou) +- Add experimental `.unref()` method to RedisClient (Bryce Baril / Olivier Lalonde) +- Strengthen checking of reply to prevent conflating "message" or "pmessage" fields with pub_sub replies. (Bryce Baril) + +## v0.8.5 - September 26, 2013 + +- Add `auth_pass` option to connect and immediately authenticate (Henrik Peinar) + +## v0.8.4 - June 24, 2013 + +Many contributed features and fixes, including: + +- Ignore password set if not needed. (jbergknoff) +- Improved compatibility with 0.10.X for tests and client.end() (Bryce Baril) +- Protect connection retries from application exceptions. 
(Amos Barreto) +- Better exception handling for Multi/Exec (Thanasis Polychronakis) +- Renamed pubsub mode to subscriber mode (Luke Plaster) +- Treat SREM like SADD when passed an array (Martin Ciparelli) +- Fix empty unsub/punsub TypeError (Jeff Barczewski) +- Only attempt to run a callback if it one was provided (jifeng) + +## v0.8.3 - April 09, 2013 + +Many contributed features and fixes, including: + +- Fix some tests for Node.js version 0.9.x+ changes (Roman Ivanilov) +- Fix error when commands submitted after idle event handler (roamm) +- Bypass Redis for no-op SET/SETEX commands (jifeng) +- Fix HMGET + detect_buffers (Joffrey F) +- Fix CLIENT LOAD functionality (Jonas Dohse) +- Add percentage outputs to diff_multi_bench_output.js (Bryce Baril) +- Add retry_max_delay option (Tomasz Durka) +- Fix parser off-by-one errors with nested multi-bulk replies (Bryce Baril) +- Prevent parser from sinking application-side exceptions (Bryce Baril) +- Fix parser incorrect buffer skip when parsing multi-bulk errors (Bryce Baril) +- Reverted previous change with throwing on non-string values with HMSET (David Trejo) +- Fix command queue sync issue when using pubsub (Tom Leach) +- Fix compatibility with two-word Redis commands (Jonas Dohse) +- Add EVAL with array syntax (dmoena) +- Fix tests due to Redis reply order changes in 2.6.5+ (Bryce Baril) +- Added a test for the SLOWLOG command (Nitesh Sinha) +- Fix SMEMBERS order dependency in test broken by Redis changes (Garrett Johnson) +- Update commands for new Redis commands (David Trejo) +- Prevent exception from SELECT on subscriber reconnection (roamm) + +## v0.8.2 - November 11, 2012 + +Another version bump because 0.8.1 didn't get applied properly for some mysterious reason. +Sorry about that. + +Changed name of "faster" parser to "javascript". 
+ +## v0.8.1 - September 11, 2012 + +Important bug fix for null responses (Jerry Sievert) + +## v0.8.0 - September 10, 2012 + +Many contributed features and fixes, including: + +- Pure JavaScript reply parser that is usually faster than hiredis (Jerry Sievert) +- Remove hiredis as optionalDependency from package.json. It still works if you want it. +- Restore client state on reconnect, including select, subscribe, and monitor. (Ignacio Burgueño) +- Fix idle event (Trae Robrock) +- Many documentation improvements and bug fixes (David Trejo) + +## v0.7.2 - April 29, 2012 + +Many contributed fixes. Thank you, contributors. + +- [GH-190] - pub/sub mode fix (Brian Noguchi) +- [GH-165] - parser selection fix (TEHEK) +- numerous documentation and examples updates +- auth errors emit Errors instead of Strings (David Trejo) + +## v0.7.1 - November 15, 2011 + +Fix regression in reconnect logic. + +Very much need automated tests for reconnection and queue logic. + +## v0.7.0 - November 14, 2011 + +Many contributed fixes. Thanks everybody. + +- [GH-127] - properly re-initialize parser on reconnect +- [GH-136] - handle passing undefined as callback (Ian Babrou) +- [GH-139] - properly handle exceptions thrown in pub/sub event handlers (Felix Geisendörfer) +- [GH-141] - detect closing state on stream error (Felix Geisendörfer) +- [GH-142] - re-select database on reconnection (Jean-Hugues Pinson) +- [GH-146] - add sort example (Maksim Lin) + +Some more goodies: + +- Fix bugs with node 0.6 +- Performance improvements +- New version of `multi_bench.js` that tests more realistic scenarios +- [GH-140] - support optional callback for subscribe commands +- Properly flush and error out command queue when connection fails +- Initial work on reconnection thresholds + +## v0.6.7 - July 30, 2011 + +(accidentally skipped v0.6.6) + +Fix and test for [GH-123] + +Passing an Array as the last argument should expand as users +expect. 
The old behavior was to coerce the arguments into Strings, +which did surprising things with Arrays. + +## v0.6.5 - July 6, 2011 + +Contributed changes: + +- Support SlowBuffers (Umair Siddique) +- Add Multi to exports (Louis-Philippe Perron) +- Fix for drain event calculation (Vladimir Dronnikov) + +Thanks! + +## v0.6.4 - June 30, 2011 + +Fix bug with optional callbacks for hmset. + +## v0.6.2 - June 30, 2011 + +Bugs fixed: + +- authentication retry while server is loading db (danmaz74) [GH-101] +- command arguments processing issue with arrays + +New features: + +- Auto update of new commands from redis.io (Dave Hoover) +- Performance improvements and backpressure controls. +- Commands now return the true/false value from the underlying socket write(s). +- Implement command_queue high water and low water for better control of queueing. + +See `examples/backpressure_drain.js` for more information. + +## v0.6.1 - June 29, 2011 + +Add support and tests for Redis scripting through EXEC command. + +Bug fix for monitor mode. (forddg) + +Auto update of new commands from redis.io (Dave Hoover) + +## v0.6.0 - April 21, 2011 + +Lots of bugs fixed. + +- connection error did not properly trigger reconnection logic [GH-85] +- client.hmget(key, [val1, val2]) was not expanding properly [GH-66] +- client.quit() while in pub/sub mode would throw an error [GH-87] +- client.multi(['hmset', 'key', {foo: 'bar'}]) fails [GH-92] +- unsubscribe before subscribe would make things very confused [GH-88] +- Add BRPOPLPUSH [GH-79] + +## v0.5.11 - April 7, 2011 + +Added DISCARD + +I originally didn't think DISCARD would do anything here because of the clever MULTI interface, but somebody +pointed out to me that DISCARD can be used to flush the WATCH set. + +## v0.5.10 - April 6, 2011 + +Added HVALS + +## v0.5.9 - March 14, 2011 + +Fix bug with empty Array arguments - Andy Ray + +## v0.5.8 - March 14, 2011 + +Add `MONITOR` command and special monitor command reply parsing. 
+ +## v0.5.7 - February 27, 2011 + +Add magical auth command. + +Authentication is now remembered by the client and will be automatically sent to the server +on every connection, including any reconnections. + +## v0.5.6 - February 22, 2011 + +Fix bug in ready check with `return_buffers` set to `true`. + +Thanks to Dean Mao and Austin Chau. + +## v0.5.5 - February 16, 2011 + +Add probe for server readiness. + +When a Redis server starts up, it might take a while to load the dataset into memory. +During this time, the server will accept connections, but will return errors for all non-INFO +commands. Now node_redis will send an INFO command whenever it connects to a server. +If the info command indicates that the server is not ready, the client will keep trying until +the server is ready. Once it is ready, the client will emit a "ready" event as well as the +"connect" event. The client will queue up all commands sent before the server is ready, just +like it did before. When the server is ready, all offline/non-ready commands will be replayed. +This should be backward compatible with previous versions. + +To disable this ready check behavior, set `options.no_ready_check` when creating the client. + +As a side effect of this change, the key/val params from the info command are available as +`client.server_options`. Further, the version string is decomposed into individual elements +in `client.server_options.versions`. + +## v0.5.4 - February 11, 2011 + +Fix excess memory consumption from Queue backing store. + +Thanks to Gustaf SjΓΆberg. + +## v0.5.3 - February 5, 2011 + +Fix multi/exec error reply callback logic. + +Thanks to Stella Laurenzo. + +## v0.5.2 - January 18, 2011 + +Fix bug where unhandled error replies confuse the parser. + +## v0.5.1 - January 18, 2011 + +Fix bug where subscribe commands would not handle redis-server startup error properly. + +## v0.5.0 - December 29, 2010 + +Some bug fixes: + +- An important bug fix in reconnection logic. 
Previously, reply callbacks would be invoked twice after + a reconnect. +- Changed error callback argument to be an actual Error object. + +New feature: + +- Add friendly syntax for HMSET using an object. + +## v0.4.1 - December 8, 2010 + +Remove warning about missing hiredis. You probably do want it though. + +## v0.4.0 - December 5, 2010 + +Support for multiple response parsers and hiredis C library from Pieter Noordhuis. +Return Strings instead of Buffers by default. +Empty nested mb reply bug fix. + +## v0.3.9 - November 30, 2010 + +Fix parser bug on failed EXECs. + +## v0.3.8 - November 10, 2010 + +Fix for null MULTI response when WATCH condition fails. + +## v0.3.7 - November 9, 2010 + +Add "drain" and "idle" events. + +## v0.3.6 - November 3, 2010 + +Add all known Redis commands from Redis master, even ones that are coming in 2.2 and beyond. + +Send a friendlier "error" event message on stream errors like connection refused / reset. + +## v0.3.5 - October 21, 2010 + +A few bug fixes. + +- Fixed bug with `nil` multi-bulk reply lengths that showed up with `BLPOP` timeouts. +- Only emit `end` once when connection goes away. +- Fixed bug in `test.js` where driver finished before all tests completed. + +## unversioned wasteland + +See the git history for what happened before. diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 00000000000..0243cc19359 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,68 @@ +# Introduction + +First, thank you for considering contributing to Node Redis! It's people like you that make the open source community such a great community! 😊 + +We welcome any type of contribution, not just code. You can help with: + +- **QA**: file bug reports, the more details you can give the better (e.g. 
platform versions, screenshots, SDK versions, logs) +- **Docs**: improve reference coverage, add more examples, fix typos or anything else you can spot +- **Code**: take a look at the open issues and help triage them + +--- + +## Project Guidelines + +As maintainers of this project, we want to ensure that the project lives and continues to grow. Progress should not be blocked by any one person's availability. + +One of the simplest ways of doing this is by encouraging a larger set of contributors. Using this approach we hope to mitigate the challenges of maintaining a project that needs regular updates. + +### Getting Comfortable Contributing + +It is normal for your first pull request to be a potential fix for a problem but moving on from there to helping the project's direction can be difficult. + +We try to help contributors cross that barrier by identifying good first step issues (labelled `good-first-issue`). These issues are considered appropriate for first time contributors. Generally, these should be non-critical issues that are well defined. Established contributors will not work on these, to make space for others. + +New contributors may consider picking up issues labelled `needs-triage` or `help-wanted`. These may not necessarily require code changes but rather help with debugging and finding the cause of the issue whether it's a bug or a user's incorrect setup of the library or project. + +We keep all project discussion inside GitHub issues. This ensures that valuable information can be searched easily. GitHub issues are the go to tool for questions about how to use the library, or how the project is run. + +### Expectations of Contributors + +You shouldn't feel bad for not contributing to open source. We want contributors like yourself to provide ideas, keep the ship shipping and to take some of the load from others. It is non-obligatory; we’re here to get things done in an enjoyable way. 
:trophy: + +We only ask that you follow the conduct guidelines set out in our [Code of Conduct](https://redis.com/community/community-guidelines-code-of-conduct/) throughout your contribution journey. + + +#### Special Thanks + +A huge thank you to the original author of Node Redis, [Matthew Ranney](https://github.com/mranney). + +--- + +## Code Guidelines + +### Testing Code + +Node Redis has a full test suite with coverage setup. + +To run the tests, run `npm install` to install dependencies, then run `npm run build:tests-tools && npm test`. + +Note that the test suite assumes that [`docker`](https://www.docker.com/) is installed in your environment. + +### Submitting Code for Review + +The bigger the pull request, the longer it will take to review and merge. Where possible try to break down large pull requests into smaller chunks that are easier to review and merge. It is also always helpful to have some context for your pull request. What was the purpose? Why does it matter to you? What problem are you trying to solve? Tag in any relevant issues. + +To assist reviewers, we ask that you fill out the pull request template as much as possible. + +> Use a `draft` pull request if your pull request is not complete or ready for review. + +### Code Review Process + +Pull Requests to the protected branches require peer-review approvals and passing status checks to be able to be merged. + +When reviewing a Pull Request please check the following steps as well as the existing automated checks: + +- Does your Pull Request provide or update the docs if docs changes are required? +- Have the tests been updated or new tests been added to test any newly implemented or changed functionality? +- Is the test coverage at the same level as before (preferably more!)? diff --git a/LICENSE b/LICENSE new file mode 100644 index 00000000000..8509ccd678e --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2022-2023, Redis, inc. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/README.md b/README.md index 10b90cfced8..e6332764e4c 100644 --- a/README.md +++ b/README.md @@ -1,597 +1,333 @@ -redis - a node.js redis client -=========================== +# Node-Redis -This is a complete Redis client for node.js. It supports all Redis commands, including many recently added commands like EVAL from -experimental Redis server branches. 
+[![Tests](https://img.shields.io/github/actions/workflow/status/redis/node-redis/tests.yml?branch=master)](https://github.com/redis/node-redis/actions/workflows/tests.yml) +[![Coverage](https://codecov.io/gh/redis/node-redis/branch/master/graph/badge.svg?token=xcfqHhJC37)](https://codecov.io/gh/redis/node-redis) +[![License](https://img.shields.io/github/license/redis/node-redis.svg)](https://github.com/redis/node-redis/blob/master/LICENSE) +[![Discord](https://img.shields.io/discord/697882427875393627.svg?style=social&logo=discord)](https://discord.gg/redis) +[![Twitch](https://img.shields.io/twitch/status/redisinc?style=social)](https://www.twitch.tv/redisinc) +[![YouTube](https://img.shields.io/youtube/channel/views/UCD78lHSwYqMlyetR0_P4Vig?style=social)](https://www.youtube.com/redisinc) +[![Twitter](https://img.shields.io/twitter/follow/redisinc?style=social)](https://twitter.com/redisinc) -Install with: +node-redis is a modern, high performance [Redis](https://redis.io) client for Node.js. - npm install redis - -Pieter Noordhuis has provided a binding to the official `hiredis` C library, which is non-blocking and fast. To use `hiredis`, do: +## How do I Redis? - npm install hiredis redis +[Learn for free at Redis University](https://university.redis.com/) -If `hiredis` is installed, `node_redis` will use it by default. Otherwise, a pure JavaScript parser will be used. +[Build faster with the Redis Launchpad](https://launchpad.redis.com/) -If you use `hiredis`, be sure to rebuild it whenever you upgrade your version of node. There are mysterious failures that can -happen between node and native code modules after a node upgrade. 
+[Try the Redis Cloud](https://redis.com/try-free/) +[Dive in developer tutorials](https://developer.redis.com/) -## Usage - -Simple example, included as `examples/simple.js`: - - var redis = require("redis"), - client = redis.createClient(); - - client.on("error", function (err) { - console.log("Error " + err); - }); - - client.set("string key", "string val", redis.print); - client.hset("hash key", "hashtest 1", "some value", redis.print); - client.hset(["hash key", "hashtest 2", "some other value"], redis.print); - client.hkeys("hash key", function (err, replies) { - console.log(replies.length + " replies:"); - replies.forEach(function (reply, i) { - console.log(" " + i + ": " + reply); - }); - client.quit(); - }); - -This will display: - - mjr:~/work/node_redis (master)$ node example.js - Reply: OK - Reply: 0 - Reply: 0 - 2 replies: - 0: hashtest 1 - 1: hashtest 2 - mjr:~/work/node_redis (master)$ - - -## Performance - -Here are typical results of `multi_bench.js` which is similar to `redis-benchmark` from the Redis distribution. -It uses 50 concurrent connections with no pipelining. - -JavaScript parser: - - PING: 20000 ops 42283.30 ops/sec 0/5/1.182 - SET: 20000 ops 32948.93 ops/sec 1/7/1.515 - GET: 20000 ops 28694.40 ops/sec 0/9/1.740 - INCR: 20000 ops 39370.08 ops/sec 0/8/1.269 - LPUSH: 20000 ops 36429.87 ops/sec 0/8/1.370 - LRANGE (10 elements): 20000 ops 9891.20 ops/sec 1/9/5.048 - LRANGE (100 elements): 20000 ops 1384.56 ops/sec 10/91/36.072 - -hiredis parser: - - PING: 20000 ops 46189.38 ops/sec 1/4/1.082 - SET: 20000 ops 41237.11 ops/sec 0/6/1.210 - GET: 20000 ops 39682.54 ops/sec 1/7/1.257 - INCR: 20000 ops 40080.16 ops/sec 0/8/1.242 - LPUSH: 20000 ops 41152.26 ops/sec 0/3/1.212 - LRANGE (10 elements): 20000 ops 36563.07 ops/sec 1/8/1.363 - LRANGE (100 elements): 20000 ops 21834.06 ops/sec 0/9/2.287 - -The performance of `node_redis` improves dramatically with pipelining, which happens automatically in most normal programs. 
- - -### Sending Commands - -Each Redis command is exposed as a function on the `client` object. -All functions take either take either an `args` Array plus optional `callback` Function or -a variable number of individual arguments followed by an optional callback. -Here is an example of passing an array of arguments and a callback: - - client.mset(["test keys 1", "test val 1", "test keys 2", "test val 2"], function (err, res) {}); - -Here is that same call in the second style: - - client.mset("test keys 1", "test val 1", "test keys 2", "test val 2", function (err, res) {}); - -Note that in either form the `callback` is optional: - - client.set("some key", "some val"); - client.set(["some other key", "some val"]); - -For a list of Redis commands, see [Redis Command Reference](http://redis.io/commands) - -The commands can be specified in uppercase or lowercase for convenience. `client.get()` is the same as `client.GET()`. - -Minimal parsing is done on the replies. Commands that return a single line reply return JavaScript Strings, -integer replies return JavaScript Numbers, "bulk" replies return node Buffers, and "multi bulk" replies return a -JavaScript Array of node Buffers. `HGETALL` returns an Object with Buffers keyed by the hash keys. - -# API - -## Connection Events - -`client` will emit some events about the state of the connection to the Redis server. - -### "ready" - -`client` will emit `ready` a connection is established to the Redis server and the server reports -that it is ready to receive commands. Commands issued before the `ready` event are queued, -then replayed just before this event is emitted. - -### "connect" - -`client` will emit `connect` at the same time as it emits `ready` unless `client.options.no_ready_check` -is set. If this options is set, `connect` will be emitted when the stream is connected, and then -you are free to try to send commands. 
- -### "error" - -`client` will emit `error` when encountering an error connecting to the Redis server. - -Note that "error" is a special event type in node. If there are no listeners for an -"error" event, node will exit. This is usually what you want, but it can lead to some -cryptic error messages like this: - - mjr:~/work/node_redis (master)$ node example.js - - node.js:50 - throw e; - ^ - Error: ECONNREFUSED, Connection refused - at IOWatcher.callback (net:870:22) - at node.js:607:9 - -Not very useful in diagnosing the problem, but if your program isn't ready to handle this, -it is probably the right thing to just exit. - -`client` will also emit `error` if an exception is thrown inside of `node_redis` for whatever reason. -It would be nice to distinguish these two cases. - -### "end" - -`client` will emit `end` when an established Redis server connection has closed. - -### "drain" - -`client` will emit `drain` when the TCP connection to the Redis server has been buffering, but is now -writable. This event can be used to stream commands in to Redis and adapt to backpressure. Right now, -you need to check `client.command_queue.length` to decide when to reduce your send rate. Then you can -resume sending when you get `drain`. - -### "idle" +[Join the Redis community](https://redis.com/community/) -`client` will emit `idle` when there are no outstanding commands that are awaiting a response. +[Work at Redis](https://redis.com/company/careers/jobs/) -## redis.createClient(port, host, options) +## Installation -Create a new client connection. `port` defaults to `6379` and `host` defaults -to `127.0.0.1`. If you have `redis-server` running on the same computer as node, then the defaults for -port and host are probably fine. `options` in an object with the following possible properties: +Start a redis via docker: -* `parser`: which Redis protocol reply parser to use. Defaults to `hiredis` if that module is installed. -This may also be set to `javascript`. 
-* `return_buffers`: defaults to `false`. If set to `true`, then all replies will be sent to callbacks as node Buffer -objects instead of JavaScript Strings. -* `detect_buffers`: default to `false`. If set to `true`, then replies will be sent to callbacks as node Buffer objects -if any of the input arguments to the original command were Buffer objects. -This option lets you switch between Buffers and Strings on a per-command basis, whereas `return_buffers` applies to -every command on a client. -* `socket_nodelay`: defaults to `true`. Whether to call setNoDelay() on the TCP stream, which disables the -Nagle algorithm on the underlying socket. Setting this option to `false` can result in additional throughput at the -cost of more latency. Most applications will want this set to `true`. -* `no_ready_check`: defaults to `false`. When a connection is established to the Redis server, the server might still -be loading the database from disk. While loading, the server not respond to any commands. To work around this, -`node_redis` has a "ready check" which sends the `INFO` command to the server. The response from the `INFO` command -indicates whether the server is ready for more commands. When ready, `node_redis` emits a `ready` event. -Setting `no_ready_check` to `true` will inhibit this check. +```bash +docker run -p 6379:6379 -d redis:8.0-rc1 +``` +To install node-redis, simply: - var redis = require("redis"), - client = redis.createClient(null, null, {detect_buffers: true}); +```bash +npm install redis +``` +> "redis" is the "whole in one" package that includes all the other packages. If you only need a subset of the commands, +> you can install the individual packages. See the list below. 
- client.set("foo_rand000000000000", "OK"); +## Packages - // This will return a JavaScript String - client.get("foo_rand000000000000", function (err, reply) { - console.log(reply.toString()); // Will print `OK` - }); +| Name | Description | +| ---------------------------------------------- | ------------------------------------------------------------------------------------------- | +| [`redis`](https://github.com/redis/node-redis/tree/master/packages/redis) | The client with all the ["redis-stack"](https://github.com/redis-stack/redis-stack) modules | +| [`@redis/client`](https://github.com/redis/node-redis/tree/master/packages/client) | The base clients (i.e `RedisClient`, `RedisCluster`, etc.) | +| [`@redis/bloom`](https://github.com/redis/node-redis/tree/master/packages/bloom) | [Redis Bloom](https://redis.io/docs/data-types/probabilistic/) commands | +| [`@redis/json`](https://github.com/redis/node-redis/tree/master/packages/json) | [Redis JSON](https://redis.io/docs/data-types/json/) commands | +| [`@redis/search`](https://github.com/redis/node-redis/tree/master/packages/search) | [RediSearch](https://redis.io/docs/interact/search-and-query/) commands | +| [`@redis/time-series`](https://github.com/redis/node-redis/tree/master/packages/time-series) | [Redis Time-Series](https://redis.io/docs/data-types/timeseries/) commands | +| [`@redis/entraid`](https://github.com/redis/node-redis/tree/master/packages/entraid) | Secure token-based authentication for Redis clients using Microsoft Entra ID | - // This will return a Buffer since original key is specified as a Buffer - client.get(new Buffer("foo_rand000000000000"), function (err, reply) { - console.log(reply.toString()); // Will print `` - }); - client.end(); +> Looking for a high-level library to handle object mapping? +> See [redis-om-node](https://github.com/redis/redis-om-node)! 
-`createClient()` returns a `RedisClient` object that is named `client` in all of the examples here. -## client.auth(password, callback) - -When connecting to Redis servers that require authentication, the `AUTH` command must be sent as the -first command after connecting. This can be tricky to coordinate with reconnections, the ready check, -etc. To make this easier, `client.auth()` stashes `password` and will send it after each connection, -including reconnections. `callback` is invoked only once, after the response to the very first -`AUTH` command sent. - -## client.end() - -Forcibly close the connection to the Redis server. Note that this does not wait until all replies have been parsed. -If you want to exit cleanly, call `client.quit()` to send the `QUIT` command after you have handled all replies. - -This example closes the connection to the Redis server before the replies have been read. You probably don't -want to do this: - - var redis = require("redis"), - client = redis.createClient(); - - client.set("foo_rand000000000000", "some fantastic value"); - client.get("foo_rand000000000000", function (err, reply) { - console.log(reply.toString()); - }); - client.end(); - -`client.end()` is useful for timeout cases where something is stuck or taking too long and you want -to start over. - -## Friendlier hash commands - -Most Redis commands take a single String or an Array of Strings as arguments, and replies are sent back as a single String or an Array of Strings. -When dealing with hash values, there are a couple of useful exceptions to this. - -### client.hgetall(hash) - -The reply from an HGETALL command will be converted into a JavaScript Object by `node_redis`. That way you can interact -with the responses using JavaScript syntax. 
- -Example: - - client.hmset("hosts", "mjr", "1", "another", "23", "home", "1234"); - client.hgetall("hosts", function (err, obj) { - console.dir(obj); - }); - -Output: - - { mjr: '1', another: '23', home: '1234' } - -### client.hmset(hash, obj, [callback]) - -Multiple values in a hash can be set by supplying an object: - - client.HMSET(key2, { - "0123456789": "abcdefghij", - "some manner of key": "a type of value" - }); - -The properties and values of this Object will be set as keys and values in the Redis hash. - -### client.hmset(hash, key1, val1, ... keyn, valn, [callback]) - -Multiple values may also be set by supplying a list: - - client.HMSET(key1, "0123456789", "abcdefghij", "some manner of key", "a type of value"); - - -## Publish / Subscribe - -Here is a simple example of the API for publish / subscribe. This program opens two -client connections, subscribes to a channel on one of them, and publishes to that -channel on the other: - - var redis = require("redis"), - client1 = redis.createClient(), client2 = redis.createClient(), - msg_count = 0; - - client1.on("subscribe", function (channel, count) { - client2.publish("a nice channel", "I am sending a message."); - client2.publish("a nice channel", "I am sending a second message."); - client2.publish("a nice channel", "I am sending my last message."); - }); - - client1.on("message", function (channel, message) { - console.log("client1 channel " + channel + ": " + message); - msg_count += 1; - if (msg_count === 3) { - client1.unsubscribe(); - client1.end(); - client2.end(); - } - }); - - client1.incr("did a thing"); - client1.subscribe("a nice channel"); - -When a client issues a `SUBSCRIBE` or `PSUBSCRIBE`, that connection is put into "pub/sub" mode. -At that point, only commands that modify the subscription set are valid. When the subscription -set is empty, the connection is put back into regular mode. - -If you need to send regular commands to Redis while in pub/sub mode, just open another connection. 
- -## Pub / Sub Events - -If a client has subscriptions active, it may emit these events: - -### "message" (channel, message) - -Client will emit `message` for every message received that matches an active subscription. -Listeners are passed the channel name as `channel` and the message Buffer as `message`. - -### "pmessage" (pattern, channel, message) - -Client will emit `pmessage` for every message received that matches an active subscription pattern. -Listeners are passed the original pattern used with `PSUBSCRIBE` as `pattern`, the sending channel -name as `channel`, and the message Buffer as `message`. - -### "subscribe" (channel, count) - -Client will emit `subscribe` in response to a `SUBSCRIBE` command. Listeners are passed the -channel name as `channel` and the new count of subscriptions for this client as `count`. - -### "psubscribe" (pattern, count) - -Client will emit `psubscribe` in response to a `PSUBSCRIBE` command. Listeners are passed the -original pattern as `pattern`, and the new count of subscriptions for this client as `count`. - -### "unsubscribe" (channel, count) +## Usage -Client will emit `unsubscribe` in response to a `UNSUBSCRIBE` command. Listeners are passed the -channel name as `channel` and the new count of subscriptions for this client as `count`. When -`count` is 0, this client has left pub/sub mode and no more pub/sub events will be emitted. +### Basic Example -### "punsubscribe" (pattern, count) +```typescript +import { createClient } from "redis"; -Client will emit `punsubscribe` in response to a `PUNSUBSCRIBE` command. Listeners are passed the -channel name as `channel` and the new count of subscriptions for this client as `count`. When -`count` is 0, this client has left pub/sub mode and no more pub/sub events will be emitted. 
+const client = await createClient() + .on("error", (err) => console.log("Redis Client Error", err)) + .connect(); -## client.multi([commands]) +await client.set("key", "value"); +const value = await client.get("key"); +client.destroy(); +``` -`MULTI` commands are queued up until an `EXEC` is issued, and then all commands are run atomically by -Redis. The interface in `node_redis` is to return an individual `Multi` object by calling `client.multi()`. +The above code connects to localhost on port 6379. To connect to a different host or port, use a connection string in +the format `redis[s]://[[username][:password]@][host][:port][/db-number]`: - var redis = require("./index"), - client = redis.createClient(), set_size = 20; +```typescript +createClient({ + url: "redis://alice:foobared@awesome.redis.server:6380", +}); +``` - client.sadd("bigset", "a member"); - client.sadd("bigset", "another member"); +You can also use discrete parameters, UNIX sockets, and even TLS to connect. Details can be found in +the [client configuration guide](https://github.com/redis/node-redis/blob/master/docs/client-configuration.md). - while (set_size > 0) { - client.sadd("bigset", "member " + set_size); - set_size -= 1; - } +To check if the the client is connected and ready to send commands, use `client.isReady` which returns a boolean. +`client.isOpen` is also available. This returns `true` when the client's underlying socket is open, and `false` when it +isn't (for example when the client is still connecting or reconnecting after a network error). 
- // multi chain with an individual callback - client.multi() - .scard("bigset") - .smembers("bigset") - .keys("*", function (err, replies) { - client.mget(replies, redis.print); - }) - .dbsize() - .exec(function (err, replies) { - console.log("MULTI got " + replies.length + " replies"); - replies.forEach(function (reply, index) { - console.log("Reply " + index + ": " + reply.toString()); - }); - }); +### Redis Commands -`client.multi()` is a constructor that returns a `Multi` object. `Multi` objects share all of the -same command methods as `client` objects do. Commands are queued up inside the `Multi` object -until `Multi.exec()` is invoked. +There is built-in support for all of the [out-of-the-box Redis commands](https://redis.io/commands). They are exposed +using the raw Redis command names (`HSET`, `HGETALL`, etc.) and a friendlier camel-cased version (`hSet`, `hGetAll`, +etc.): -You can either chain together `MULTI` commands as in the above example, or you can queue individual -commands while still sending regular client command as in this example: +```typescript +// raw Redis commands +await client.HSET("key", "field", "value"); +await client.HGETALL("key"); - var redis = require("redis"), - client = redis.createClient(), multi; +// friendly JavaScript commands +await client.hSet("key", "field", "value"); +await client.hGetAll("key"); +``` - // start a separate multi command queue - multi = client.multi(); - multi.incr("incr thing", redis.print); - multi.incr("incr other thing", redis.print); +Modifiers to commands are specified using a JavaScript object: - // runs immediately - client.mset("incr thing", 100, "incr other thing", 1, redis.print); +```typescript +await client.set("key", "value", { + EX: 10, + NX: true, +}); +``` - // drains multi queue and runs atomically - multi.exec(function (err, replies) { - console.log(replies); // 101, 2 - }); +Replies will be transformed into useful data structures: - // you can re-run the same transaction if you like - 
multi.exec(function (err, replies) { - console.log(replies); // 102, 3 - client.quit(); - }); +```typescript +await client.hGetAll("key"); // { field1: 'value1', field2: 'value2' } +await client.hVals("key"); // ['value1', 'value2'] +``` -In addition to adding commands to the `MULTI` queue individually, you can also pass an array -of commands and arguments to the constructor: +`Buffer`s are supported as well: - var redis = require("redis"), - client = redis.createClient(), multi; +```typescript +const client = createClient().withTypeMapping({ + [RESP_TYPES.BLOB_STRING]: Buffer +}); - client.multi([ - ["mget", "multifoo", "multibar", redis.print], - ["incr", "multifoo"], - ["incr", "multibar"] - ]).exec(function (err, replies) { - console.log(replies); - }); +await client.hSet("key", "field", Buffer.from("value")); // 'OK' +await client.hGet("key", "field"); // { field: } +``` -## Monitor mode +### Unsupported Redis Commands -Redis supports the `MONITOR` command, which lets you see all commands received by the Redis server -across all client connections, including from other client libraries and other computers. +If you want to run commands and/or use arguments that Node Redis doesn't know about (yet!) use `.sendCommand()`: -After you send the `MONITOR` command, no other commands are valid on that connection. `node_redis` -will emit a `monitor` event for every new monitor message that comes across. The callback for the -`monitor` event takes a timestamp from the Redis server and an array of command arguments. 
+```typescript +await client.sendCommand(["SET", "key", "value", "NX"]); // 'OK' -Here is a simple example: +await client.sendCommand(["HGETALL", "key"]); // ['key1', 'field1', 'key2', 'field2'] +``` - var client = require("redis").createClient(), - util = require("util"); +_Note: the [API is different when using a cluster](https://github.com/redis/node-redis/blob/master/docs/clustering.md#unsupported-redis-commands)._ - client.monitor(function (err, res) { - console.log("Entering monitoring mode."); - }); +### Transactions (Multi/Exec) - client.on("monitor", function (time, args) { - console.log(time + ": " + util.inspect(args)); - }); +Start a [transaction](https://redis.io/topics/transactions) by calling `.multi()`, then chaining your commands. When +you're done, call `.exec()` and you'll get an array back with your results: +```typescript +await client.set("another-key", "another-value"); -# Extras +const [setKeyReply, otherKeyValue] = await client + .multi() + .set("key", "value") + .get("another-key") + .exec(); // ['OK', 'another-value'] +``` -Some other things you might like to know about. +You can also [watch](https://redis.io/topics/transactions#optimistic-locking-using-check-and-set) keys by calling +`.watch()`. Your transaction will abort if any of the watched keys change. -## client.server_info -After the ready probe completes, the results from the INFO command are saved in the `client.server_info` -object. +### Blocking Commands -The `versions` key contains an array of the elements of the version string for easy comparison. +In v4, `RedisClient` had the ability to create a pool of connections using an "Isolation Pool" on top of the "main" +connection. 
However, there was no way to use the pool without a "main" connection: - > client.server_info.redis_version - '2.3.0' - > client.server_info.versions - [ 2, 3, 0 ] +```javascript +const client = await createClient() + .on("error", (err) => console.error(err)) + .connect(); -## redis.print() +await client.ping(client.commandOptions({ isolated: true })); +``` -A handy callback function for displaying return values when testing. Example: +In v5 we've extracted this pool logic into its own class—`RedisClientPool`: - var redis = require("redis"), - client = redis.createClient(); +```javascript +const pool = await createClientPool() + .on("error", (err) => console.error(err)) + .connect(); - client.on("connect", function () { - client.set("foo_rand000000000000", "some fantastic value", redis.print); - client.get("foo_rand000000000000", redis.print); - }); +await pool.ping(); +``` -This will print: - Reply: OK - Reply: some fantastic value +### Pub/Sub -Note that this program will not exit cleanly because the client is still connected. +See the [Pub/Sub overview](https://github.com/redis/node-redis/blob/master/docs/pub-sub.md). -## redis.debug_mode +### Scan Iterator -Boolean to enable debug mode and protocol tracing.
+[`SCAN`](https://redis.io/commands/scan) results can be looped over +using [async iterators](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Symbol/asyncIterator): - var redis = require("redis"), - client = redis.createClient(); +```typescript +for await (const keys of client.scanIterator()) { + console.log(keys, await client.mGet(keys)); +} +``` - redis.debug_mode = true; +This works with `HSCAN`, `SSCAN`, and `ZSCAN` too: - client.on("connect", function () { - client.set("foo_rand000000000000", "some fantastic value"); - }); +```typescript +for await (const { field, value } of client.hScanIterator("hash")) { +} +for await (const member of client.sScanIterator("set")) { +} +for await (const { score, value } of client.zScanIterator("sorted-set")) { +} +``` -This will display: +You can override the default options by providing a configuration object: - mjr:~/work/node_redis (master)$ node ~/example.js - send command: *3 - $3 - SET - $20 - foo_rand000000000000 - $20 - some fantastic value +```typescript +client.scanIterator({ + TYPE: "string", // `SCAN` only + MATCH: "patter*", + COUNT: 100, +}); +``` - on_data: +OK +### Disconnecting -`send command` is data sent into Redis and `on_data` is data received from Redis. +The `QUIT` command has been deprecated in Redis 7.2 and should now also be considered deprecated in Node-Redis. Instead +of sending a `QUIT` command to the server, the client can simply close the network connection. -## client.send_command(command_name, args, callback) +`client.QUIT/quit()` is replaced by `client.close()` and, to avoid confusion, `client.disconnect()` has been renamed to +`client.destroy()`. -Used internally to send commands to Redis. For convenience, nearly all commands that are published on the Redis -Wiki have been added to the `client` object. However, if I missed any, or if new commands are introduced before -this library is updated, you can use `send_command()` to send arbitrary commands to Redis.
+```typescript +client.destroy(); +``` +### Client Side Caching -All commands are sent as multi-bulk commands. `args` can either be an Array of arguments, or individual arguments, -or omitted completely. +Node Redis v5 adds support for [Client Side Caching](https://redis.io/docs/manual/client-side-caching/), which enables clients to cache query results locally. The Redis server will notify the client when cached results are no longer valid. -## client.connected +```typescript +// Enable client side caching with RESP3 +const client = createClient({ + RESP: 3, + clientSideCache: { + ttl: 0, // Time-to-live (0 = no expiration) + maxEntries: 0, // Maximum entries (0 = unlimited) + evictPolicy: "LRU" // Eviction policy: "LRU" or "FIFO" + } +}); +``` -Boolean tracking the state of the connection to the Redis server. +See the [V5 documentation](https://github.com/redis/node-redis/blob/master/docs/v5.md#client-side-caching) for more details and advanced usage. -## client.command_queue.length +### Auto-Pipelining -The number of commands that have been sent to the Redis server but not yet replied to. You can use this to -enforce some kind of maximum queue depth for commands while connected. +Node Redis will automatically pipeline requests that are made during the same "tick". -Don't mess with `client.command_queue` though unless you really know what you are doing. +```typescript +client.set("Tm9kZSBSZWRpcw==", "users:1"); +client.sAdd("users:1:tokens", "Tm9kZSBSZWRpcw=="); +``` -## client.offline_queue.length +Of course, if you don't do something with your Promises you're certain to +get [unhandled Promise exceptions](https://nodejs.org/api/process.html#process_event_unhandledrejection). To take +advantage of auto-pipelining and handle your Promises, use `Promise.all()`. -The number of commands that have been queued up for a future connection. You can use this to enforce -some kind of maximum queue depth for pre-connection commands. 
+```typescript +await Promise.all([ + client.set("Tm9kZSBSZWRpcw==", "users:1"), + client.sAdd("users:1:tokens", "Tm9kZSBSZWRpcw=="), +]); +``` -## client.retry_delay +### Programmability -Current delay in milliseconds before a connection retry will be attempted. This starts at `250`. +See the [Programmability overview](https://github.com/redis/node-redis/blob/master/docs/programmability.md). -## client.retry_backoff +### Clustering -Multiplier for future retry timeouts. This should be larger than 1 to add more time between retries. -Defaults to 1.7. The default initial connection retry is 250, so the second retry will be 425, followed by 723.5, etc. +Check out the [Clustering Guide](https://github.com/redis/node-redis/blob/master/docs/clustering.md) when using Node Redis to connect to a Redis Cluster. +### Events -## TODO +The Node Redis client class is a Node.js EventEmitter and it emits an event each time the network status changes: -Better tests for auth, disconnect/reconnect, and all combinations thereof.
+| Name | When | Listener arguments | +| ----------------------- | ---------------------------------------------------------------------------------- | --------------------------------------------------------- | +| `connect` | Initiating a connection to the server | _No arguments_ | +| `ready` | Client is ready to use | _No arguments_ | +| `end` | Connection has been closed (via `.disconnect()`) | _No arguments_ | +| `error` | An error has occurred—usually a network issue such as "Socket closed unexpectedly" | `(error: Error)` | +| `reconnecting` | Client is trying to reconnect to the server | _No arguments_ | +| `sharded-channel-moved` | See [here](https://github.com/redis/node-redis/blob/master/docs/pub-sub.md#sharded-channel-moved-event) | See [here](https://github.com/redis/node-redis/blob/master/docs/pub-sub.md#sharded-channel-moved-event) | +| `invalidate` | Client Tracking is on with `emitInvalidate` and a key is invalidated | `(key: RedisItem \| null)` | -Stream large set/get values into and out of Redis. Otherwise the entire value must be in node's memory. +> :warning: You **MUST** listen to `error` events. If a client doesn't have at least one `error` listener registered and +> an `error` occurs, that error will be thrown and the Node.js process will exit. See the [`EventEmitter` docs](https://nodejs.org/api/events.html#events_error_events) for more details. -Performance can be better for very large values. +> The client will not emit [any other events](https://github.com/redis/node-redis/blob/master/docs/v3-to-v4.md#all-the-removed-events) beyond those listed above. -I think there are more performance improvements left in there for smaller values, especially for large lists of small values. +## Supported Redis versions -## Contributors +Node Redis is supported with the following versions of Redis: -Some people have have added features and fixed bugs in `node_redis` other than me.
+| Version | Supported | +| ------- | ------------------ | +| 8.2.z | :heavy_check_mark: | +| 8.0.z | :heavy_check_mark: | +| 7.4.z | :heavy_check_mark: | +| 7.2.z | :heavy_check_mark: | +| < 7.2 | :x: | -In order of first contribution, they are: +> Node Redis should work with older versions of Redis, but it is not fully tested and we cannot offer support. -* [Tim Smart](https://github.com/Tim-Smart) -* [TJ Holowaychuk](https://github.com/visionmedia) -* [Rick Olson](https://github.com/technoweenie) -* [Orion Henry](https://github.com/orionz) -* [Hank Sims](https://github.com/hanksims) -* [Aivo Paas](https://github.com/aivopaas) -* [Paul Carey](https://github.com/paulcarey) -* [Pieter Noordhuis](https://github.com/pietern) -* [Vladimir Dronnikov](https://github.com/dvv) -* [Dave Hoover](https://github.com/redsquirrel) +## Migration -Thanks. +- [From V3 to V4](https://github.com/redis/node-redis/blob/master/docs/v3-to-v4.md) +- [From V4 to V5](https://github.com/redis/node-redis/blob/master/docs/v4-to-v5.md) +- [V5](https://github.com/redis/node-redis/blob/master/docs/v5.md) -## LICENSE - "MIT License" +## Contributing -Copyright (c) 2010 Matthew Ranney, http://ranney.com/ +If you'd like to contribute, check out the [contributing guide](https://github.com/redis/node-redis/blob/master/CONTRIBUTING.md). 
-Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, -copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following -conditions: +Thank you to all the people who already contributed to Node Redis! -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. +[![Contributors](https://contrib.rocks/image?repo=redis/node-redis)](https://github.com/redis/node-redis/graphs/contributors) -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. +## License -![spacer](http://ranney.com/1px.gif) +This repository is licensed under the "MIT" license. See [LICENSE](https://github.com/redis/node-redis/blob/master/LICENSE). diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 00000000000..f96aa68dc12 --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,16 @@ +# Security Policy + +## Supported Versions + +Node Redis is generally backwards compatible with very few exceptions, so we recommend users to always use the latest version to experience stability, performance and security. 
+ +| Version | Supported | +|---------|--------------------| +| 4.0.z | :heavy_check_mark: | +| 3.1.z | :heavy_check_mark: | +| < 3.1 | :x: | + +## Reporting a Vulnerability + +If you believe you've discovered a serious vulnerability, please contact the Node Redis core team at redis@redis.io. We will evaluate your report and if necessary issue a fix and an advisory. If the issue was previously undisclosed, +we'll also mention your name in the credits. diff --git a/benchmark/.gitignore b/benchmark/.gitignore new file mode 100644 index 00000000000..e493e278fc7 --- /dev/null +++ b/benchmark/.gitignore @@ -0,0 +1 @@ +*.js.json diff --git a/benchmark/lib/defaults.yml b/benchmark/lib/defaults.yml new file mode 100644 index 00000000000..3fa0f04614c --- /dev/null +++ b/benchmark/lib/defaults.yml @@ -0,0 +1,32 @@ +version: 0.1 + +remote: + - type: oss-standalone + - setup: redis-small + +setups: + - oss-standalone + +spec: + setups: + - name: oss-standalone + type: oss-standalone + redis_topology: + primaries: 1 + replicas: 0 + resources: + requests: + cpus: "1" + memory: "10g" + +exporter: + output_path: "./*.js.json" + redistimeseries: + timemetric: "$.timestamp" + metrics: + - "$.p0" + - "$.p50" + - "$.p95" + - "$.p99" + - "$.p100" + - "$.operationsPerSecond" \ No newline at end of file diff --git a/benchmark/lib/index.js b/benchmark/lib/index.js new file mode 100644 index 00000000000..5576999bfbc --- /dev/null +++ b/benchmark/lib/index.js @@ -0,0 +1,66 @@ +import yargs from 'yargs'; +import { hideBin } from 'yargs/helpers'; +import { promises as fs } from 'node:fs'; +import { fork } from 'node:child_process'; +import { URL, fileURLToPath } from 'node:url'; +import { once } from 'node:events'; +import { extname } from 'node:path'; + +async function getPathChoices() { + const dirents = await fs.readdir(new URL('.', import.meta.url), { + withFileTypes: true + }); + + const choices = []; + for (const dirent of dirents) { + if (!dirent.isDirectory()) continue; + 
choices.push(dirent.name); + } + + return choices; +} + +const argv = hideBin(process.argv); + +async function getName() { + return yargs(argv) + .option('name', { + demandOption: true, + choices: await getPathChoices() + }) + .parseSync().name; +} + +const runnerPath = fileURLToPath(new URL('runner.js', import.meta.url)), + path = new URL(`${await getName()}/`, import.meta.url); + +async function getMetadata() { + try { + return await import(new URL('index.js', path)); + } catch (err) { + if (err.code === 'ERR_MODULE_NOT_FOUND') return; + + throw err; + } +} + +const metadata = await getMetadata(), + timestamp = Date.now(); + +for (const file of await fs.readdir(path)) { + if (file === 'index.js' || extname(file) !== '.js') continue; + + const benchmarkProcess = fork(runnerPath, [ + ...argv, + '--path', + fileURLToPath(path) + file + ]); + + await once(benchmarkProcess, 'message'); + benchmarkProcess.send({ + metadata, + timestamp + }); + await once(benchmarkProcess, 'close'); +} diff --git a/benchmark/lib/ping/ioredis-auto-pipeline.js b/benchmark/lib/ping/ioredis-auto-pipeline.js new file mode 100644 index 00000000000..ee400fe6ca9 --- /dev/null +++ b/benchmark/lib/ping/ioredis-auto-pipeline.js @@ -0,0 +1,20 @@ +import Redis from 'ioredis'; + +export default async (host) => { + const client = new Redis({ + host, + lazyConnect: true, + enableAutoPipelining: true + }); + + await client.connect(); + + return { + benchmark() { + return client.ping(); + }, + teardown() { + return client.disconnect(); + } + } +}; diff --git a/benchmark/lib/ping/ioredis.js b/benchmark/lib/ping/ioredis.js new file mode 100644 index 00000000000..5ed0d1dd76b --- /dev/null +++ b/benchmark/lib/ping/ioredis.js @@ -0,0 +1,19 @@ +import Redis from 'ioredis'; + +export default async (host) => { + const client = new Redis({ + host, + lazyConnect: true + }); + + await client.connect(); + + return { + benchmark() { + return client.ping(); + }, + teardown() { + return client.disconnect(); + } + } +}; 
diff --git a/benchmark/lib/ping/local-resp2.js b/benchmark/lib/ping/local-resp2.js new file mode 100644 index 00000000000..873698a131f --- /dev/null +++ b/benchmark/lib/ping/local-resp2.js @@ -0,0 +1,21 @@ +import { createClient } from 'redis-local'; + +export default async (host) => { + const client = createClient({ + socket: { + host + }, + RESP: 2 + }); + + await client.connect(); + + return { + benchmark() { + return client.ping(); + }, + teardown() { + return client.disconnect(); + } + }; +}; diff --git a/benchmark/lib/ping/local-resp3-buffer-proxy.js b/benchmark/lib/ping/local-resp3-buffer-proxy.js new file mode 100644 index 00000000000..2ded38b21ca --- /dev/null +++ b/benchmark/lib/ping/local-resp3-buffer-proxy.js @@ -0,0 +1,23 @@ +import { createClient, RESP_TYPES } from 'redis-local'; + +export default async (host) => { + const client = createClient({ + socket: { + host + }, + RESP: 3 + }).withTypeMapping({ + [RESP_TYPES.SIMPLE_STRING]: Buffer + }); + + await client.connect(); + + return { + benchmark() { + return client.ping(); + }, + teardown() { + return client.disconnect(); + } + }; +}; diff --git a/benchmark/lib/ping/local-resp3-buffer.js b/benchmark/lib/ping/local-resp3-buffer.js new file mode 100644 index 00000000000..624a524ce06 --- /dev/null +++ b/benchmark/lib/ping/local-resp3-buffer.js @@ -0,0 +1,24 @@ +import { createClient, RESP_TYPES } from 'redis-local'; + +export default async (host) => { + const client = createClient({ + socket: { + host + }, + commandOptions: { + [RESP_TYPES.SIMPLE_STRING]: Buffer + }, + RESP: 3 + }); + + await client.connect(); + + return { + benchmark() { + return client.ping(); + }, + teardown() { + return client.disconnect(); + } + }; +}; diff --git a/benchmark/lib/ping/local-resp3-module-with-flags.js b/benchmark/lib/ping/local-resp3-module-with-flags.js new file mode 100644 index 00000000000..e58856dcb9e --- /dev/null +++ b/benchmark/lib/ping/local-resp3-module-with-flags.js @@ -0,0 +1,27 @@ +import { createClient } 
from 'redis-local'; +import PING from 'redis-local/dist/lib/commands/PING.js'; + +export default async (host) => { + const client = createClient({ + socket: { + host + }, + RESP: 3, + modules: { + module: { + ping: PING.default + } + } + }); + + await client.connect(); + + return { + benchmark() { + return client.withTypeMapping({}).module.ping(); + }, + teardown() { + return client.disconnect(); + } + }; +}; diff --git a/benchmark/lib/ping/local-resp3-module.js b/benchmark/lib/ping/local-resp3-module.js new file mode 100644 index 00000000000..66f6e3ec291 --- /dev/null +++ b/benchmark/lib/ping/local-resp3-module.js @@ -0,0 +1,27 @@ +import { createClient } from 'redis-local'; +import PING from 'redis-local/dist/lib/commands/PING.js'; + +export default async (host) => { + const client = createClient({ + socket: { + host + }, + RESP: 3, + modules: { + module: { + ping: PING.default + } + } + }); + + await client.connect(); + + return { + benchmark() { + return client.module.ping(); + }, + teardown() { + return client.disconnect(); + } + }; +}; diff --git a/benchmark/lib/ping/local-resp3.js b/benchmark/lib/ping/local-resp3.js new file mode 100644 index 00000000000..a4ee4f24a2a --- /dev/null +++ b/benchmark/lib/ping/local-resp3.js @@ -0,0 +1,21 @@ +import { createClient } from 'redis-local'; + +export default async (host) => { + const client = createClient({ + socket: { + host + }, + RESP: 3 + }); + + await client.connect(); + + return { + benchmark() { + return client.ping(); + }, + teardown() { + return client.disconnect(); + } + }; +}; diff --git a/benchmark/lib/ping/ping.yml b/benchmark/lib/ping/ping.yml new file mode 100644 index 00000000000..cfe3c74735a --- /dev/null +++ b/benchmark/lib/ping/ping.yml @@ -0,0 +1,9 @@ +name: "ping" + +clientconfig: + - command: | + npm install -ws + npm run build:tests-tools + cd benchmark + npm install + npm run start -- --name ping --redis-server-host ${server_private_ip} diff --git a/benchmark/lib/ping/v3.js 
b/benchmark/lib/ping/v3.js new file mode 100644 index 00000000000..e7e62d3e15a --- /dev/null +++ b/benchmark/lib/ping/v3.js @@ -0,0 +1,21 @@ +import { createClient } from 'redis-v3'; +import { once } from 'node:events'; +import { promisify } from 'node:util'; + +export default async (host) => { + const client = createClient({ host }), + pingAsync = promisify(client.ping).bind(client), + quitAsync = promisify(client.quit).bind(client); + + await once(client, 'connect'); + + return { + benchmark() { + return pingAsync(); + }, + teardown() { + return quitAsync(); + } + }; + +}; diff --git a/benchmark/lib/ping/v4.js b/benchmark/lib/ping/v4.js new file mode 100644 index 00000000000..c570aa1477f --- /dev/null +++ b/benchmark/lib/ping/v4.js @@ -0,0 +1,20 @@ +import { createClient } from 'redis-v4'; + +export default async (host) => { + const client = createClient({ + socket: { + host + } + }); + + await client.connect(); + + return { + benchmark() { + return client.ping(); + }, + teardown() { + return client.disconnect(); + } + }; +}; diff --git a/benchmark/lib/runner.js b/benchmark/lib/runner.js new file mode 100644 index 00000000000..7d81d3bb8c7 --- /dev/null +++ b/benchmark/lib/runner.js @@ -0,0 +1,86 @@ +import yargs from 'yargs'; +import { hideBin } from 'yargs/helpers'; +import { basename } from 'node:path'; +import { promises as fs } from 'node:fs'; +import * as hdr from 'hdr-histogram-js'; +hdr.initWebAssemblySync(); + +const { path, times, concurrency, 'redis-server-host': host } = yargs(hideBin(process.argv)) + .option('path', { + type: 'string', + demandOption: true + }) + .option('times', { + type: 'number', + default: 1_000_000, + demandOption: true + }) + .option('concurrency', { + type: 'number', + default: 100, + demandOption: true + }) + .option('redis-server-host', { + type: 'string' + }) + .parseSync(); + +const [ { metadata, timestamp }, module ] = await Promise.all([ + new Promise(resolve => { + process.once('message', resolve); + 
process.send('ready'); + }), + import(path) + ]), + { benchmark, teardown } = await module.default(host, metadata); + +async function run(times) { + return new Promise(resolve => { + const histogram = hdr.build({ useWebAssembly: true }); + let num = 0, + inProgress = 0; + + async function run() { + ++inProgress; + ++num; + + const start = process.hrtime.bigint(); + await benchmark(metadata); + histogram.recordValue(Number(process.hrtime.bigint() - start)); + --inProgress; + + if (num < times) { + run(); + } else if (inProgress === 0) { + resolve(histogram); + } + } + + const toInitiate = Math.min(concurrency, times); + for (let i = 0; i < toInitiate; i++) { + run(); + } + }); +} + +// warmup +await run(Math.min(times * 0.1, 10_000)); + +// benchmark +const benchmarkStart = process.hrtime.bigint(), + histogram = await run(times), + benchmarkNanoseconds = process.hrtime.bigint() - benchmarkStart, + json = { + // timestamp, + operationsPerSecond: times / Number(benchmarkNanoseconds) * 1_000_000_000, + p0: histogram.getValueAtPercentile(0), + p50: histogram.getValueAtPercentile(50), + p95: histogram.getValueAtPercentile(95), + p99: histogram.getValueAtPercentile(99), + p100: histogram.getValueAtPercentile(100) + }; +console.log(`[${basename(path)}]:`); +console.table(json); +await fs.writeFile(`${path}.json`, JSON.stringify(json)); + +await teardown(); diff --git a/benchmark/lib/set-get-delete-string/1KB.yml b/benchmark/lib/set-get-delete-string/1KB.yml new file mode 100644 index 00000000000..52fb611a24b --- /dev/null +++ b/benchmark/lib/set-get-delete-string/1KB.yml @@ -0,0 +1,9 @@ +name: "set-get-delete-string-1KB" + +clientconfig: + - command: | + npm install -ws + npm run build:tests-tools + cd benchmark + npm install + npm run start -- --name set-get-delete-string --size 1024 --redis-server-host ${server_private_ip} diff --git a/benchmark/lib/set-get-delete-string/1MB.yml b/benchmark/lib/set-get-delete-string/1MB.yml new file mode 100644 index 
00000000000..f16d7c3696a --- /dev/null +++ b/benchmark/lib/set-get-delete-string/1MB.yml @@ -0,0 +1,9 @@ +name: "set-get-delete-string-1MB" + +clientconfig: + - command: | + npm install -ws + npm run build:tests-tools + cd benchmark + npm install + npm run start -- --name set-get-delete-string --size 1048576 --redis-server-host ${server_private_ip} diff --git a/benchmark/lib/set-get-delete-string/8B.yml b/benchmark/lib/set-get-delete-string/8B.yml new file mode 100644 index 00000000000..f625f74fca1 --- /dev/null +++ b/benchmark/lib/set-get-delete-string/8B.yml @@ -0,0 +1,9 @@ +name: "set-get-delete-string-8B" + +clientconfig: + - command: | + npm install -ws + npm run build:tests-tools + cd benchmark + npm install + npm run start -- --name set-get-delete-string --size 8 --redis-server-host ${server_private_ip} diff --git a/benchmark/lib/set-get-delete-string/index.js b/benchmark/lib/set-get-delete-string/index.js new file mode 100644 index 00000000000..506b222a6cb --- /dev/null +++ b/benchmark/lib/set-get-delete-string/index.js @@ -0,0 +1,13 @@ +import yargs from 'yargs'; +import { hideBin } from 'yargs/helpers'; +import { randomBytes } from 'node:crypto'; + +const { size } = yargs(hideBin(process.argv)) + .option('size', { + type: 'number', + default: 1024, + demandOption: true + }) + .parseSync(); + +export const randomString = randomBytes(size).toString('ascii'); diff --git a/benchmark/lib/set-get-delete-string/ioredis.js b/benchmark/lib/set-get-delete-string/ioredis.js new file mode 100644 index 00000000000..95456233001 --- /dev/null +++ b/benchmark/lib/set-get-delete-string/ioredis.js @@ -0,0 +1,23 @@ +import Redis from 'ioredis'; + +export default async (host, { randomString }) => { + const client = new Redis({ + host, + lazyConnect: true + }); + + await client.connect(); + + return { + benchmark() { + return Promise.all([ + client.set(randomString, randomString), + client.get(randomString), + client.del(randomString) + ]); + }, + teardown() { + return 
client.disconnect(); + } + } +}; diff --git a/benchmark/lib/set-get-delete-string/v3.js b/benchmark/lib/set-get-delete-string/v3.js new file mode 100644 index 00000000000..1e2122a0e49 --- /dev/null +++ b/benchmark/lib/set-get-delete-string/v3.js @@ -0,0 +1,27 @@ +import { createClient } from 'redis-v3'; +import { once } from 'node:events'; +import { promisify } from 'node:util'; + +export default async (host, { randomString }) => { + const client = createClient({ host }), + setAsync = promisify(client.set).bind(client), + getAsync = promisify(client.get).bind(client), + delAsync = promisify(client.del).bind(client), + quitAsync = promisify(client.quit).bind(client); + + await once(client, 'connect'); + + return { + benchmark() { + return Promise.all([ + setAsync(randomString, randomString), + getAsync(randomString), + delAsync(randomString) + ]); + }, + teardown() { + return quitAsync(); + } + }; + +}; diff --git a/benchmark/lib/set-get-delete-string/v4.js b/benchmark/lib/set-get-delete-string/v4.js new file mode 100644 index 00000000000..dd06b1f1036 --- /dev/null +++ b/benchmark/lib/set-get-delete-string/v4.js @@ -0,0 +1,24 @@ +import { createClient } from '@redis/client'; + +export default async (host, { randomString }) => { + const client = createClient({ + socket: { + host + } + }); + + await client.connect(); + + return { + benchmark() { + return Promise.all([ + client.set(randomString, randomString), + client.get(randomString), + client.del(randomString) + ]); + }, + teardown() { + return client.disconnect(); + } + }; +}; diff --git a/benchmark/package-lock.json b/benchmark/package-lock.json new file mode 100644 index 00000000000..30114847134 --- /dev/null +++ b/benchmark/package-lock.json @@ -0,0 +1,446 @@ +{ + "name": "@redis/client-benchmark", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "@redis/client-benchmark", + "dependencies": { + "hdr-histogram-js": "3.0.0", + "ioredis": "5", + "redis-local": 
"file:../packages/client", + "redis-v3": "npm:redis@3", + "redis-v4": "npm:redis@4", + "yargs": "17.7.1" + } + }, + "node_modules/@assemblyscript/loader": { + "version": "0.19.23", + "resolved": "https://registry.npmjs.org/@assemblyscript/loader/-/loader-0.19.23.tgz", + "integrity": "sha512-ulkCYfFbYj01ie1MDOyxv2F6SpRN1TOj7fQxbP07D6HmeR+gr2JLSmINKjga2emB+b1L2KGrFKBTc+e00p54nw==" + }, + "node_modules/@ioredis/commands": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@ioredis/commands/-/commands-1.2.0.tgz", + "integrity": "sha512-Sx1pU8EM64o2BrqNpEO1CNLtKQwyhuXuqyfH7oGKCk+1a33d2r5saW8zNwm3j6BTExtjrv2BxTgzzkMwts6vGg==" + }, + "node_modules/@redis/bloom": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@redis/bloom/-/bloom-1.2.0.tgz", + "integrity": "sha512-HG2DFjYKbpNmVXsa0keLHp/3leGJz1mjh09f2RLGGLQZzSHpkmZWuwJbAvo3QcRY8p80m5+ZdXZdYOSBLlp7Cg==", + "peerDependencies": { + "@redis/client": "^1.0.0" + } + }, + "node_modules/@redis/client": { + "version": "1.5.7", + "resolved": "https://registry.npmjs.org/@redis/client/-/client-1.5.7.tgz", + "integrity": "sha512-gaOBOuJPjK5fGtxSseaKgSvjiZXQCdLlGg9WYQst+/GRUjmXaiB5kVkeQMRtPc7Q2t93XZcJfBMSwzs/XS9UZw==", + "dependencies": { + "cluster-key-slot": "1.1.2", + "generic-pool": "3.9.0", + "yallist": "4.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/@redis/graph": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@redis/graph/-/graph-1.1.0.tgz", + "integrity": "sha512-16yZWngxyXPd+MJxeSr0dqh2AIOi8j9yXKcKCwVaKDbH3HTuETpDVPcLujhFYVPtYrngSco31BUcSa9TH31Gqg==", + "peerDependencies": { + "@redis/client": "^1.0.0" + } + }, + "node_modules/@redis/json": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@redis/json/-/json-1.0.4.tgz", + "integrity": "sha512-LUZE2Gdrhg0Rx7AN+cZkb1e6HjoSKaeeW8rYnt89Tly13GBI5eP4CwDVr+MY8BAYfCg4/N15OUrtLoona9uSgw==", + "peerDependencies": { + "@redis/client": "^1.0.0" + } + }, + "node_modules/@redis/search": { + "version": 
"1.1.2", + "resolved": "https://registry.npmjs.org/@redis/search/-/search-1.1.2.tgz", + "integrity": "sha512-/cMfstG/fOh/SsE+4/BQGeuH/JJloeWuH+qJzM8dbxuWvdWibWAOAHHCZTMPhV3xIlH4/cUEIA8OV5QnYpaVoA==", + "peerDependencies": { + "@redis/client": "^1.0.0" + } + }, + "node_modules/@redis/time-series": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@redis/time-series/-/time-series-1.0.4.tgz", + "integrity": "sha512-ThUIgo2U/g7cCuZavucQTQzA9g9JbDDY2f64u3AbAoz/8vE2lt2U37LamDUVChhaDA3IRT9R6VvJwqnUfTJzng==", + "peerDependencies": { + "@redis/client": "^1.0.0" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/base64-js": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/cliui": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": 
"sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/cluster-key-slot": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/cluster-key-slot/-/cluster-key-slot-1.1.2.tgz", + "integrity": "sha512-RMr0FhtfXemyinomL4hrWcYJxmX6deFdCxpJzhDttxgO1+bcCnkk+9drydLVDmAMG7NE6aN/fl4F7ucU/90gAA==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + }, + "node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/denque": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/denque/-/denque-2.1.0.tgz", + "integrity": "sha512-HVQE3AAb/pxF8fQAoiqpvg9i3evqug3hoiwakOyZAwJm+6vZehbkYXZ0l4JxS+I3QxM97v5aaRNhj8v5oBhekw==", + "engines": { + "node": ">=0.10" + } + }, + "node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": 
"sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + }, + "node_modules/escalade": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", + "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", + "engines": { + "node": ">=6" + } + }, + "node_modules/generic-pool": { + "version": "3.9.0", + "resolved": "https://registry.npmjs.org/generic-pool/-/generic-pool-3.9.0.tgz", + "integrity": "sha512-hymDOu5B53XvN4QT9dBmZxPX4CWhBPPLguTZ9MMFeFa/Kg0xWVfylOVNlJji/E7yTZWFd/q9GO5TxDLq156D7g==", + "engines": { + "node": ">= 4" + } + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/hdr-histogram-js": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/hdr-histogram-js/-/hdr-histogram-js-3.0.0.tgz", + "integrity": "sha512-/EpvQI2/Z98mNFYEnlqJ8Ogful8OpArLG/6Tf2bPnkutBVLIeMVNHjk1ZDfshF2BUweipzbk+dB1hgSB7SIakw==", + "dependencies": { + "@assemblyscript/loader": "^0.19.21", + "base64-js": "^1.2.0", + "pako": "^1.0.3" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/ioredis": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/ioredis/-/ioredis-5.3.2.tgz", + "integrity": "sha512-1DKMMzlIHM02eBBVOFQ1+AolGjs6+xEcM4PDL7NqOS6szq7H9jSaEkIUH6/a5Hl241LzW6JLSiAbNvTQjUupUA==", + "dependencies": { + "@ioredis/commands": "^1.1.1", + "cluster-key-slot": "^1.1.0", + "debug": "^4.3.4", + "denque": "^2.1.0", + "lodash.defaults": "^4.2.0", + "lodash.isarguments": "^3.1.0", + "redis-errors": "^1.2.0", + "redis-parser": "^3.0.0", + "standard-as-callback": "^2.1.0" + }, + "engines": { + "node": ">=12.22.0" + }, + "funding": { + "type": 
"opencollective", + "url": "https://opencollective.com/ioredis" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "engines": { + "node": ">=8" + } + }, + "node_modules/lodash.defaults": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/lodash.defaults/-/lodash.defaults-4.2.0.tgz", + "integrity": "sha512-qjxPLHd3r5DnsdGacqOMU6pb/avJzdh9tFX2ymgoZE27BmjXrNy/y4LoaiTeAb+O3gL8AfpJGtqfX/ae2leYYQ==" + }, + "node_modules/lodash.isarguments": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/lodash.isarguments/-/lodash.isarguments-3.1.0.tgz", + "integrity": "sha512-chi4NHZlZqZD18a0imDHnZPrDeBbTtVN7GXMwuGdRH9qotxAjYs3aVLKc7zNOG9eddR5Ksd8rvFEBc9SsggPpg==" + }, + "node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "node_modules/pako": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/pako/-/pako-1.0.11.tgz", + "integrity": "sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw==" + }, + "node_modules/redis-commands": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/redis-commands/-/redis-commands-1.7.0.tgz", + "integrity": "sha512-nJWqw3bTFy21hX/CPKHth6sfhZbdiHP6bTawSgQBlKOVRG7EZkfHbbHwQJnrE4vsQf0CMNE+3gJ4Fmm16vdVlQ==" + }, + "node_modules/redis-errors": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/redis-errors/-/redis-errors-1.2.0.tgz", + "integrity": "sha512-1qny3OExCf0UvUV/5wpYKf2YwPcOqXzkwKKSmKHiE6ZMQs5heeE/c8eXK+PNllPvmjgAbfnsbpkGZWy8cBpn9w==", + "engines": { + "node": ">=4" + } + }, + "node_modules/redis-local": { + "name": 
"@redis/client", + "version": "1.5.6", + "resolved": "file:../packages/client", + "license": "MIT", + "dependencies": { + "cluster-key-slot": "1.1.2", + "generic-pool": "3.9.0", + "yallist": "4.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/redis-parser": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/redis-parser/-/redis-parser-3.0.0.tgz", + "integrity": "sha512-DJnGAeenTdpMEH6uAJRK/uiyEIH9WVsUmoLwzudwGJUwZPp80PDBWPHXSAGNPwNvIXAbe7MSUB1zQFugFml66A==", + "dependencies": { + "redis-errors": "^1.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/redis-v3": { + "name": "redis", + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/redis/-/redis-3.1.2.tgz", + "integrity": "sha512-grn5KoZLr/qrRQVwoSkmzdbw6pwF+/rwODtrOr6vuBRiR/f3rjSTGupbF90Zpqm2oenix8Do6RV7pYEkGwlKkw==", + "dependencies": { + "denque": "^1.5.0", + "redis-commands": "^1.7.0", + "redis-errors": "^1.2.0", + "redis-parser": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/node-redis" + } + }, + "node_modules/redis-v3/node_modules/denque": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/denque/-/denque-1.5.1.tgz", + "integrity": "sha512-XwE+iZ4D6ZUB7mfYRMb5wByE8L74HCn30FBN7sWnXksWc1LO1bPDl67pBR9o/kC4z/xSNAwkMYcGgqDV3BE3Hw==", + "engines": { + "node": ">=0.10" + } + }, + "node_modules/redis-v4": { + "name": "redis", + "version": "4.6.6", + "resolved": "https://registry.npmjs.org/redis/-/redis-4.6.6.tgz", + "integrity": "sha512-aLs2fuBFV/VJ28oLBqYykfnhGGkFxvx0HdCEBYdJ99FFbSEMZ7c1nVKwR6ZRv+7bb7JnC0mmCzaqu8frgOYhpA==", + "dependencies": { + "@redis/bloom": "1.2.0", + "@redis/client": "1.5.7", + "@redis/graph": "1.1.0", + "@redis/json": "1.0.4", + "@redis/search": "1.1.2", + "@redis/time-series": "1.0.4" + } + }, + "node_modules/require-directory": { + "version": "2.1.1", + "resolved": 
"https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/standard-as-callback": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/standard-as-callback/-/standard-as-callback-2.1.0.tgz", + "integrity": "sha512-qoRRSyROncaz1z0mvYqIE4lCd9p2R90i6GxW3uZv5ucSu8tU7B5HXUP1gG8pVZsYNVaXjk8ClXHPttLyxAL48A==" + }, + "node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "engines": { + "node": ">=10" + } 
+ }, + "node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + }, + "node_modules/yargs": { + "version": "17.7.1", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.1.tgz", + "integrity": "sha512-cwiTb08Xuv5fqF4AovYacTFNxk62th7LKJ6BL9IGUpTJrWoU7/7WdQGTP2SjKf1dUNBGzDd28p/Yfs/GI6JrLw==", + "dependencies": { + "cliui": "^8.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.1.1" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "engines": { + "node": ">=12" + } + } + } +} diff --git a/benchmark/package.json b/benchmark/package.json new file mode 100644 index 00000000000..73acf9d0f1c --- /dev/null +++ b/benchmark/package.json @@ -0,0 +1,17 @@ +{ + "name": "@redis/client-benchmark", + "private": true, + "main": "./lib", + "type": "module", + "scripts": { + "start": "node ." + }, + "dependencies": { + "hdr-histogram-js": "3.0.0", + "ioredis": "5", + "redis-local": "file:../packages/client", + "redis-v3": "npm:redis@3", + "redis-v4": "npm:redis@4", + "yargs": "17.7.1" + } +} diff --git a/benchmark/requirements.txt b/benchmark/requirements.txt new file mode 100644 index 00000000000..abff11d6abb --- /dev/null +++ b/benchmark/requirements.txt @@ -0,0 +1 @@ +redisbench_admin>=0.5.24 diff --git a/changelog.md b/changelog.md deleted file mode 100644 index 63a29bc4b76..00000000000 --- a/changelog.md +++ /dev/null @@ -1,210 +0,0 @@ -Changelog -========= - -## v0.7.1 - November 15, 2011 - -Fix regression in reconnect logic. 
- -Very much need automated tests for reconnection and queue logic. - -## v0.7.0 - November 14, 2011 - -Many contributed fixes. Thanks everybody. - -* [GH-127] - properly re-initialize parser on reconnect -* [GH-136] - handle passing undefined as callback (Ian Babrou) -* [GH-139] - properly handle exceptions thrown in pub/sub event handlers (Felix GeisendΓΆrfer) -* [GH-141] - detect closing state on stream error (Felix GeisendΓΆrfer) -* [GH-142] - re-select database on reconnection (Jean-Hugues Pinson) -* [GH-146] - add sort example (Maksim Lin) - -Some more goodies: - -* Fix bugs with node 0.6 -* Performance improvements -* New version of `multi_bench.js` that tests more realistic scenarios -* [GH-140] - support optional callback for subscribe commands -* Properly flush and error out command queue when connection fails -* Initial work on reconnection thresholds - -## v0.6.7 - July 30, 2011 - -(accidentally skipped v0.6.6) - -Fix and test for [GH-123] - -Passing an Array as as the last argument should expand as users -expect. The old behavior was to coerce the arguments into Strings, -which did surprising things with Arrays. - -## v0.6.5 - July 6, 2011 - -Contributed changes: - -* Support SlowBuffers (Umair Siddique) -* Add Multi to exports (Louis-Philippe Perron) -* Fix for drain event calculation (Vladimir Dronnikov) - -Thanks! - -## v0.6.4 - June 30, 2011 - -Fix bug with optional callbacks for hmset. - -## v0.6.2 - June 30, 2011 - -Bugs fixed: - -* authentication retry while server is loading db (danmaz74) [GH-101] -* command arguments processing issue with arrays - -New features: - -* Auto update of new commands from redis.io (Dave Hoover) -* Performance improvements and backpressure controls. -* Commands now return the true/false value from the underlying socket write(s). -* Implement command_queue high water and low water for more better control of queueing. - -See `examples/backpressure_drain.js` for more information. 
- -## v0.6.1 - June 29, 2011 - -Add support and tests for Redis scripting through EXEC command. - -Bug fix for monitor mode. (forddg) - -Auto update of new commands from redis.io (Dave Hoover) - -## v0.6.0 - April 21, 2011 - -Lots of bugs fixed. - -* connection error did not properly trigger reconnection logic [GH-85] -* client.hmget(key, [val1, val2]) was not expanding properly [GH-66] -* client.quit() while in pub/sub mode would throw an error [GH-87] -* client.multi(['hmset', 'key', {foo: 'bar'}]) fails [GH-92] -* unsubscribe before subscribe would make things very confused [GH-88] -* Add BRPOPLPUSH [GH-79] - -## v0.5.11 - April 7, 2011 - -Added DISCARD - -I originally didn't think DISCARD would do anything here because of the clever MULTI interface, but somebody -pointed out to me that DISCARD can be used to flush the WATCH set. - -## v0.5.10 - April 6, 2011 - -Added HVALS - -## v0.5.9 - March 14, 2011 - -Fix bug with empty Array arguments - Andy Ray - -## v0.5.8 - March 14, 2011 - -Add `MONITOR` command and special monitor command reply parsing. - -## v0.5.7 - February 27, 2011 - -Add magical auth command. - -Authentication is now remembered by the client and will be automatically sent to the server -on every connection, including any reconnections. - -## v0.5.6 - February 22, 2011 - -Fix bug in ready check with `return_buffers` set to `true`. - -Thanks to Dean Mao and Austin Chau. - -## v0.5.5 - February 16, 2011 - -Add probe for server readiness. - -When a Redis server starts up, it might take a while to load the dataset into memory. -During this time, the server will accept connections, but will return errors for all non-INFO -commands. Now node_redis will send an INFO command whenever it connects to a server. -If the info command indicates that the server is not ready, the client will keep trying until -the server is ready. Once it is ready, the client will emit a "ready" event as well as the -"connect" event. 
The client will queue up all commands sent before the server is ready, just -like it did before. When the server is ready, all offline/non-ready commands will be replayed. -This should be backward compatible with previous versions. - -To disable this ready check behavior, set `options.no_ready_check` when creating the client. - -As a side effect of this change, the key/val params from the info command are available as -`client.server_options`. Further, the version string is decomposed into individual elements -in `client.server_options.versions`. - -## v0.5.4 - February 11, 2011 - -Fix excess memory consumption from Queue backing store. - -Thanks to Gustaf SjΓΆberg. - -## v0.5.3 - February 5, 2011 - -Fix multi/exec error reply callback logic. - -Thanks to Stella Laurenzo. - -## v0.5.2 - January 18, 2011 - -Fix bug where unhandled error replies confuse the parser. - -## v0.5.1 - January 18, 2011 - -Fix bug where subscribe commands would not handle redis-server startup error properly. - -## v0.5.0 - December 29, 2010 - -Some bug fixes: - -* An important bug fix in reconnection logic. Previously, reply callbacks would be invoked twice after - a reconnect. -* Changed error callback argument to be an actual Error object. - -New feature: - -* Add friendly syntax for HMSET using an object. - -## v0.4.1 - December 8, 2010 - -Remove warning about missing hiredis. You probably do want it though. - -## v0.4.0 - December 5, 2010 - -Support for multiple response parsers and hiredis C library from Pieter Noordhuis. -Return Strings instead of Buffers by default. -Empty nested mb reply bug fix. - -## v0.3.9 - November 30, 2010 - -Fix parser bug on failed EXECs. - -## v0.3.8 - November 10, 2010 - -Fix for null MULTI response when WATCH condition fails. - -## v0.3.7 - November 9, 2010 - -Add "drain" and "idle" events. - -## v0.3.6 - November 3, 2010 - -Add all known Redis commands from Redis master, even ones that are coming in 2.2 and beyond. 
- -Send a friendlier "error" event message on stream errors like connection refused / reset. - -## v0.3.5 - October 21, 2010 - -A few bug fixes. - -* Fixed bug with `nil` multi-bulk reply lengths that showed up with `BLPOP` timeouts. -* Only emit `end` once when connection goes away. -* Fixed bug in `test.js` where driver finished before all tests completed. - -## unversioned wasteland - -See the git history for what happened before. diff --git a/docs/FAQ.md b/docs/FAQ.md new file mode 100644 index 00000000000..5774213da5b --- /dev/null +++ b/docs/FAQ.md @@ -0,0 +1,27 @@ +# F.A.Q. + +Nobody has *actually* asked these questions. But, we needed somewhere to put all the important bits and bobs that didn't fit anywhere else. So, here you go! + +## What happens when the network goes down? + +When a socket closes unexpectedly, all the commands that were already sent will reject as they might have been executed on the server. The rest will remain queued in memory until a new socket is established. If the client is closed—either by returning an error from [`reconnectStrategy`](./client-configuration.md#reconnect-strategy) or by manually calling `.disconnect()`—they will be rejected. + +If you don't want to queue commands in memory until a new socket is established, set the `disableOfflineQueue` option to `true` in the [client configuration](./client-configuration.md). This will result in those commands being rejected. + +## How are commands batched? + +Commands are pipelined using [`setImmediate`](https://nodejs.org/api/timers.html#setimmediatecallback-args). + +If `socket.write()` returns `false`—meaning that ["all or part of the data was queued in user memory"](https://nodejs.org/api/net.html#net_socket_write_data_encoding_callback:~:text=all%20or%20part%20of%20the%20data%20was%20queued%20in%20user%20memory)—the commands will stack in memory until the [`drain`](https://nodejs.org/api/net.html#net_event_drain) event is fired.
+ +## `RedisClientType` + +Redis has support for [modules](https://redis.io/modules) and running [Lua scripts](../README.md#lua-scripts) within the Redis context. To take advantage of typing within these scenarios, `RedisClient` and `RedisCluster` should be used with [typeof](https://www.typescriptlang.org/docs/handbook/2/typeof-types.html), rather than the base types `RedisClientType` and `RedisClusterType`. + +```typescript +import { createClient } from '@redis/client'; + +export const client = createClient(); + +export type RedisClientType = typeof client; +``` \ No newline at end of file diff --git a/docs/RESP.md b/docs/RESP.md new file mode 100644 index 00000000000..f8c2388226b --- /dev/null +++ b/docs/RESP.md @@ -0,0 +1,46 @@ +# Mapping RESP types + +RESP, which stands for **RE**dis **S**erialization **P**rotocol, is the protocol used by Redis to communicate with clients. This document shows how RESP types can be mapped to JavaScript types. You can learn more about RESP itself in the [official documentation](https://redis.io/docs/reference/protocol-spec/). + +By default, each type is mapped to the first option in the lists below. To change this, configure a [`typeMapping`](.).
+ +## RESP2 + +- Integer (`:`) => `number` +- Simple String (`+`) => `string | Buffer` +- Blob String (`$`) => `string | Buffer` +- Simple Error (`-`) => `ErrorReply` +- Array (`*`) => `Array` + +> NOTE: the first type is the default type + +## RESP3 + +- Null (`_`) => `null` +- Boolean (`#`) => `boolean` +- Number (`:`) => `number | string` +- Big Number (`(`) => `BigInt | string` +- Double (`,`) => `number | string` +- Simple String (`+`) => `string | Buffer` +- Blob String (`$`) => `string | Buffer` +- Verbatim String (`=`) => `string | Buffer | VerbatimString` +- Simple Error (`-`) => `ErrorReply` +- Blob Error (`!`) => `ErrorReply` +- Array (`*`) => `Array` +- Set (`~`) => `Array | Set` +- Map (`%`) => `object | Map | Array` +- Push (`>`) => `Array` => PubSub push/`'push'` event + +> NOTE: the first type is the default type + +### Map keys and Set members + +When decoding a Map to `Map | object` or a Set to `Set`, keys and members of type "Simple String" or "Blob String" will be decoded as `string`s which enables lookups by value, ignoring type mapping. If you want them as `Buffer`s, decode them as `Array`s instead. 
+ +### Not Implemented + +These parts of RESP3 are not yet implemented in Redis itself (at the time of writing), so are not yet implemented in the Node-Redis client either: + +- [Attribute type](https://github.com/redis/redis-specifications/blob/master/protocol/RESP3.md#attribute-type) +- [Streamed strings](https://github.com/redis/redis-specifications/blob/master/protocol/RESP3.md#streamed-strings) +- [Streamed aggregated data types](https://github.com/redis/redis-specifications/blob/master/protocol/RESP3.md#streamed-aggregated-data-types) diff --git a/docs/client-configuration.md b/docs/client-configuration.md new file mode 100644 index 00000000000..57af626bf71 --- /dev/null +++ b/docs/client-configuration.md @@ -0,0 +1,96 @@ +# `createClient` configuration + +| Property | Default | Description | +|------------------------------|------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| url | | `redis[s]://[[username][:password]@][host][:port][/db-number]` (see [`redis`](https://www.iana.org/assignments/uri-schemes/prov/redis) and [`rediss`](https://www.iana.org/assignments/uri-schemes/prov/rediss) IANA registration for more details) | +| socket | | Socket connection properties. 
Unlisted [`net.connect`](https://nodejs.org/api/net.html#socketconnectoptions-connectlistener) properties (and [`tls.connect`](https://nodejs.org/api/tls.html#tlsconnectoptions-callback)) are also supported | +| socket.port | `6379` | Redis server port | +| socket.host | `'localhost'` | Redis server hostname | +| socket.family | `0` | IP Stack version (one of `4 \| 6 \| 0`) | +| socket.path | | Path to the UNIX Socket | +| socket.connectTimeout | `5000` | Connection timeout (in milliseconds) | +| socket.socketTimeout | | The maximum duration (in milliseconds) that the socket can remain idle (i.e., with no data sent or received) before being automatically closed | +| socket.noDelay | `true` | Toggle [`Nagle's algorithm`](https://nodejs.org/api/net.html#net_socket_setnodelay_nodelay) | +| socket.keepAlive | `true` | Toggle [`keep-alive`](https://nodejs.org/api/net.html#socketsetkeepaliveenable-initialdelay) functionality | +| socket.keepAliveInitialDelay | `5000` | If set to a positive number, it sets the initial delay before the first keepalive probe is sent on an idle socket | +| socket.tls | | See explanation and examples [below](#TLS) | +| socket.reconnectStrategy | Exponential backoff with a maximum of 2000 ms; plus 0-200 ms random jitter. 
| A function containing the [Reconnect Strategy](#reconnect-strategy) logic | +| username | | ACL username ([see ACL guide](https://redis.io/topics/acl)) | +| password | | ACL password or the old "--requirepass" password | +| name | | Client name ([see `CLIENT SETNAME`](https://redis.io/commands/client-setname)) | +| database | | Redis database number (see [`SELECT`](https://redis.io/commands/select) command) | +| modules | | Included [Redis Modules](../README.md#packages) | +| scripts | | Script definitions (see [Lua Scripts](../README.md#lua-scripts)) | +| functions | | Function definitions (see [Functions](../README.md#functions)) | +| commandsQueueMaxLength | | Maximum length of the client's internal command queue | +| disableOfflineQueue | `false` | Disables offline queuing, see [FAQ](./FAQ.md#what-happens-when-the-network-goes-down) | +| readonly | `false` | Connect in [`READONLY`](https://redis.io/commands/readonly) mode | +| legacyMode | `false` | Maintain some backwards compatibility (see the [Migration Guide](./v3-to-v4.md)) | +| isolationPoolOptions | | An object that configures a pool of isolated connections, If you frequently need isolated connections, consider using [createClientPool](https://github.com/redis/node-redis/blob/master/docs/pool.md#creating-a-pool) instead | +| pingInterval | | Send `PING` command at interval (in ms). Useful with ["Azure Cache for Redis"](https://learn.microsoft.com/en-us/azure/azure-cache-for-redis/cache-best-practices-connection#idle-timeout) | +| disableClientInfo | `false` | Disables `CLIENT SETINFO LIB-NAME node-redis` and `CLIENT SETINFO LIB-VER X.X.X` commands | + +## Reconnect Strategy + +When the socket closes unexpectedly (without calling `.quit()`/`.disconnect()`), the client uses `reconnectStrategy` to decide what to do. The following values are supported: +1. `false` -> do not reconnect, close the client and flush the command queue. +2. `number` -> wait for `X` milliseconds before reconnecting. 
+3. `(retries: number, cause: Error) => false | number | Error` -> `number` is the same as configuring a `number` directly, `Error` is the same as `false`, but with a custom error. + +By default the strategy uses exponential backoff, but it can be overwritten like so: + +```javascript +createClient({ + socket: { + reconnectStrategy: (retries, cause) => { + // By default, do not reconnect on socket timeout. + if (cause instanceof SocketTimeoutError) { + return false; + } + + // Generate a random jitter between 0 – 200 ms: + const jitter = Math.floor(Math.random() * 200); + // Delay is an exponential back off, (times^2) * 50 ms, with a maximum value of 2000 ms: + const delay = Math.min(Math.pow(2, retries) * 50, 2000); + + return delay + jitter; + } + } +}); +``` + +## TLS + +To enable TLS, set `socket.tls` to `true`. Below are some basic examples. + +> For configuration options see [tls.connect](https://nodejs.org/api/tls.html#tlsconnectoptions-callback) and [tls.createSecureContext](https://nodejs.org/api/tls.html#tlscreatesecurecontextoptions), as those are the underlying functions used by this library. + +### Create a SSL client + +```javascript +createClient({ + socket: { + tls: true, + ca: '...', + cert: '...' + } +}); +``` + +### Create a SSL client using a self-signed certificate + +```javascript +createClient({ + socket: { + tls: true, + rejectUnauthorized: false, + cert: '...' + } +}); +``` +## Connection Pooling + +In most cases, a single Redis connection is sufficient, as the node-redis client efficiently handles commands using an underlying socket. Unlike traditional databases, Redis does not require connection pooling for optimal performance. + +However, if your use case requires exclusive connections see [RedisClientPool](https://github.com/redis/node-redis/blob/master/docs/pool.md), which allows you to create and manage multiple dedicated connections. 
+ diff --git a/docs/clustering.md b/docs/clustering.md new file mode 100644 index 00000000000..4afd95afd23 --- /dev/null +++ b/docs/clustering.md @@ -0,0 +1,154 @@ +# Clustering + +## Basic Example + +Connecting to a cluster is a bit different. Create the client by specifying some (or all) of the nodes in your cluster and then use it like a regular client instance: + +```javascript +import { createCluster } from 'redis'; + +const cluster = await createCluster({ + rootNodes: [{ + url: 'redis://10.0.0.1:30001' + }, { + url: 'redis://10.0.0.2:30002' + }] + }) + .on('error', err => console.log('Redis Cluster Error', err)) + .connect(); + +await cluster.set('key', 'value'); +const value = await cluster.get('key'); +await cluster.close(); +``` + +## `createCluster` configuration + +> See the [client configuration](./client-configuration.md) page for the `rootNodes` and `defaults` configuration schemas. + +| Property | Default | Description | +|------------------------|---------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| rootNodes | | An array of root nodes that are part of the cluster, which will be used to get the cluster topology. Each element in the array is a client configuration object. There is no need to specify every node in the cluster: 3 should be enough to reliably connect and obtain the cluster configuration from the server | +| defaults | | The default configuration values for every client in the cluster. Use this for example when specifying an ACL user to connect with | +| useReplicas | `false` | When `true`, distribute load by executing readonly commands (such as `GET`, `GEOSEARCH`, etc.) across all cluster nodes. 
When `false`, only use master nodes | +| minimizeConnections | `false` | When `true`, `.connect()` will only discover the cluster topology, without actually connecting to all the nodes. Useful for short-term or Pub/Sub-only connections. | +| maxCommandRedirections | `16` | The maximum number of times a command will be redirected due to `MOVED` or `ASK` errors | +| nodeAddressMap | | Defines the [node address mapping](#node-address-map) | +| modules | | Included [Redis Modules](../README.md#packages) | +| scripts | | Script definitions (see [Lua Scripts](./programmability.md#lua-scripts)) | +| functions | | Function definitions (see [Functions](./programmability.md#functions)) | + +## Usage + +Most redis commands are the same as with individual clients. + +### Unsupported Redis Commands + +If you want to run commands and/or use arguments that Node Redis doesn't know about (yet!) use `.sendCommand()`. + +When clustering, `sendCommand` takes 3 arguments to help with routing to the correct redis node: +* `firstKey`: the key that is being operated on, or `undefined` to route to a random node. +* `isReadOnly`: determines if the command needs to go to the master or may go to a replica. +* `args`: the command and all arguments (including the key), as an array of strings. + +```javascript +await cluster.sendCommand("key", false, ["SET", "key", "value", "NX"]); // 'OK' + +await cluster.sendCommand("key", true, ["HGETALL", "key"]); // ['key1', 'field1', 'key2', 'field2'] +``` + +## Auth with password and username + +Specifying the password in the URL or a root node will only affect the connection to that specific node. In case you want to set the password for all the connections being created from a cluster instance, use the `defaults` option. 
+ +```javascript +createCluster({ + rootNodes: [{ + url: 'redis://10.0.0.1:30001' + }, { + url: 'redis://10.0.0.2:30002' + }], + defaults: { + username: 'username', + password: 'password' + } +}); +``` + +## Node Address Map + +A mapping between the addresses in the cluster (see `CLUSTER SHARDS`) and the addresses the client should connect to. +Useful when the cluster is running on a different network to the client. + +```javascript +const rootNodes = [{ + url: 'external-host-1.io:30001' +}, { + url: 'external-host-2.io:30002' +}]; + +// Use either a static mapping: +createCluster({ + rootNodes, + nodeAddressMap: { + '10.0.0.1:30001': { + host: 'external-host.io', + port: 30001 + }, + '10.0.0.2:30002': { + host: 'external-host.io', + port: 30002 + } + } +}); + +// or create the mapping dynamically, as a function: +createCluster({ + rootNodes, + nodeAddressMap(address) { + const indexOfDash = address.lastIndexOf('-'), + indexOfDot = address.indexOf('.', indexOfDash), + indexOfColons = address.indexOf(':', indexOfDot); + + return { + host: `external-host-${address.substring(indexOfDash + 1, indexOfDot)}.io`, + port: Number(address.substring(indexOfColons + 1)) + }; + } +}); +``` + +> This is a common problem when using ElastiCache. See [Accessing ElastiCache from outside AWS](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/accessing-elasticache.html) for more information on that. 
+ +### Events + +The Node Redis Cluster class extends Node.js’s EventEmitter and emits the following events: + +| Name | When | Listener arguments | +| ----------------------- | ---------------------------------------------------------------------------------- | --------------------------------------------------------- | +| `connect` | The cluster has successfully connected and is ready to use | _No arguments_ | +| `disconnect` | The cluster has disconnected | _No arguments_ | +| `error` | The cluster has errored | `(error: Error)` | +| `node-ready` | A cluster node is ready to establish a connection | `(node: { host: string, port: number })` | +| `node-connect` | A cluster node has connected | `(node: { host: string, port: number })` | +| `node-reconnecting` | A cluster node is attempting to reconnect after an error | `(node: { host: string, port: number })` | +| `node-disconnect` | A cluster node has disconnected | `(node: { host: string, port: number })` | +| `node-error` | A cluster node has errored (usually during TCP connection) | `(error: Error, node: { host: string, port: number })` | + +> :warning: You **MUST** listen to `error` events. If a cluster doesn't have at least one `error` listener registered and +> an `error` occurs, that error will be thrown and the Node.js process will exit. See the [ > `EventEmitter` docs](https://nodejs.org/api/events.html#events_error_events) for more details. + +## Command Routing + +### Commands that operate on Redis Keys + +Commands such as `GET`, `SET`, etc. are routed by the first key specified. For example `MGET 1 2 3` will be routed by the key `1`. + +### [Server Commands](https://redis.io/commands#server) + +Admin commands such as `MEMORY STATS`, `FLUSHALL`, etc. are not attached to the cluster, and must be executed on a specific node via `.getSlotMaster()`. + +### "Forwarded Commands" + +Certain commands (e.g. `PUBLISH`) are forwarded to other cluster nodes by the Redis server.
The client sends these commands to a random node in order to spread the load across the cluster.

+
diff --git a/docs/command-options.md b/docs/command-options.md
new file mode 100644
index 00000000000..8583eae135b
--- /dev/null
+++ b/docs/command-options.md
@@ -0,0 +1,81 @@
+# Command Options

+

+> :warning: The command options API in v5 has breaking changes from the previous version. For more details, refer to the [v4-to-v5 guide](./v4-to-v5.md#command-options).

+

+Command Options are used to create "proxy clients" that change the behavior of executed commands. See the sections below for details.

+

+## Type Mapping

+

+Some [RESP types](./RESP.md) can be mapped to more than one JavaScript type. For example, "Blob String" can be mapped to `string` or `Buffer`. You can override the default type mapping using the `withTypeMapping` function:

+

+```javascript

+await client.get('key'); // `string | null`

+

+const proxyClient = client.withTypeMapping({

+  [TYPES.BLOB_STRING]: Buffer

+});

+

+await proxyClient.get('key'); // `Buffer | null`

+```

+

+See [RESP](./RESP.md) for a full list of types.

+

+## Abort Signal

+

+The client [batches commands](./FAQ.md#how-are-commands-batched) before sending them to Redis. Commands that haven't been written to the socket yet can be aborted using the [`AbortSignal`](https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal) API:

+

+```javascript

+const controller = new AbortController(),

+  client = client.withAbortSignal(controller.signal);

+

+try {

+  const promise = client.get('key');

+  controller.abort();

+  await promise;

+} catch (err) {

+  // AbortError

+}

+```

+

+

+## Timeout

+

+This option is similar to the Abort Signal one, but provides an easier way to set a timeout for commands. Again, this applies to commands that haven't been written to the socket yet. 
+
+```javascript

+const client = createClient({

+  commandOptions: {

+    timeout: 1000

+  }

+})

+```

+

+## ASAP

+

+Commands that are executed in the "asap" mode are added to the beginning of the "to send" queue.

+

+```javascript

+const asapClient = client.asap();

+await asapClient.ping();

+```

+

+## `withCommandOptions`

+

+You can set all of the above command options in a single call with the `withCommandOptions` function:

+

+```javascript

+client.withCommandOptions({

+  typeMapping: ...,

+  abortSignal: ...,

+  asap: ...

+});

+```

+

+If any of the above options are omitted, the default value will be used. For example, the following client would **not** be in ASAP mode:

+

+```javascript

+client.asap().withCommandOptions({

+  typeMapping: ...,

+  abortSignal: ...

+});

+```
diff --git a/docs/pool.md b/docs/pool.md
new file mode 100644
index 00000000000..7121e601d73
--- /dev/null
+++ b/docs/pool.md
@@ -0,0 +1,74 @@
+# `RedisClientPool`

+

+Sometimes you want to run your commands on an exclusive connection. There are a few reasons to do this:

+

+- You want to run a blocking command that will take over the connection, such as `BLPOP` or `BLMOVE`.

+- You're using [transactions](https://redis.io/docs/interact/transactions/) and need to `WATCH` a key or keys for changes.

+- Some more...

+

+For those use cases you'll need to create a connection pool. 
+
+## Creating a pool

+

+You can create a pool using the `createClientPool` function:

+

+```javascript

+import { createClientPool } from 'redis';

+

+const pool = await createClientPool()

+  .on('error', err => console.error('Redis Client Pool Error', err));

+```

+

+The function accepts two arguments, the client configuration (see [here](./client-configuration.md) for more details), and the pool configuration:

+

+| Property | Default | Description |

+|----------------|---------|--------------------------------------------------------------------------------------------------------------------------------|

+| minimum | 1 | The minimum number of clients the pool should hold on to. The pool won't close clients if the pool size is less than the minimum. |

+| maximum | 100 | The maximum number of clients the pool will have at once. The pool won't create any more clients and will queue requests in memory. |

+| acquireTimeout | 3000 | The maximum time (in ms) a task can wait in the queue. The pool will reject the task with `TimeoutError` in case of a timeout. |

+| cleanupDelay | 3000 | The time to wait before cleaning up unused clients. |

+

+You can also create a pool from a client (reusing its configuration):

+```javascript

+const pool = await client.createPool()

+  .on('error', err => console.error('Redis Client Pool Error', err));

+```

+

+## The Simple Scenario

+

+All the client APIs are exposed on the pool instance directly, and will execute the commands using one of the available clients.

+

+```javascript

+await pool.sendCommand(['PING']); // 'PONG'

+await pool.ping(); // 'PONG'

+await pool.withTypeMapping({

+  [RESP_TYPES.SIMPLE_STRING]: Buffer

+}).ping(); // Buffer

+```

+

+## Transactions

+

+Things get a little more complex with transactions. Here we are `.watch()`ing some keys. 
If the keys change during the transaction, a `WatchError` is thrown when `.exec()` is called: + +```javascript +try { + await pool.execute(async client => { + await client.watch('key'); + + const multi = client.multi() + .ping() + .get('key'); + + if (Math.random() > 0.5) { + await client.watch('another-key'); + multi.set('another-key', await client.get('another-key') / 2); + } + + return multi.exec(); + }); +} catch (err) { + if (err instanceof WatchError) { + // the transaction aborted + } +} +``` diff --git a/docs/programmability.md b/docs/programmability.md new file mode 100644 index 00000000000..c5917c2387d --- /dev/null +++ b/docs/programmability.md @@ -0,0 +1,85 @@ +# [Programmability](https://redis.io/docs/manual/programmability/) + +Redis provides a programming interface allowing code execution on the redis server. + +## [Functions](https://redis.io/docs/manual/programmability/functions-intro/) + +The following example retrieves a key in redis, returning the value of the key, incremented by an integer. For example, if your key _foo_ has the value _17_ and we run `add('foo', 25)`, it returns the answer to Life, the Universe and Everything. + +```lua +#!lua name=library + +redis.register_function { + function_name = 'add', + callback = function(keys, args) return redis.call('GET', keys[1]) + args[1] end, + flags = { 'no-writes' } +} +``` + +Here is the same example, but in a format that can be pasted into the `redis-cli`. + +``` +FUNCTION LOAD "#!lua name=library\nredis.register_function{function_name='add', callback=function(keys, args) return redis.call('GET', keys[1])+args[1] end, flags={'no-writes'}}" +``` + +Load the prior redis function on the _redis server_ before running the example below. 
+ +```typescript +import { CommandParser, createClient, RedisArgument } from '@redis/client'; +import { NumberReply } from '@redis/client/dist/lib/RESP/types.js'; + +const client = createClient({ + functions: { + library: { + add: { + NUMBER_OF_KEYS: 1, + parseCommand( + parser: CommandParser, + key: RedisArgument, + toAdd: RedisArgument + ) { + parser.pushKey(key) + parser.push(toAdd) + }, + transformReply: undefined as unknown as () => NumberReply + } + } + } +}); + +await client.connect(); +await client.set('key', '1'); +await client.library.add('key', '2'); // 3 +``` + +## [Lua Scripts](https://redis.io/docs/manual/programmability/eval-intro/) + +The following is an end-to-end example of the prior concept. + +```typescript +import { CommandParser, createClient, defineScript, RedisArgument } from '@redis/client'; +import { NumberReply } from '@redis/client/dist/lib/RESP/types.js'; + +const client = createClient({ + scripts: { + add: defineScript({ + SCRIPT: 'return redis.call("GET", KEYS[1]) + ARGV[1];', + NUMBER_OF_KEYS: 1, + FIRST_KEY_INDEX: 1, + parseCommand( + parser: CommandParser, + key: RedisArgument, + toAdd: RedisArgument + ) { + parser.pushKey(key) + parser.push(toAdd) + }, + transformReply: undefined as unknown as () => NumberReply + }) + } +}); + +await client.connect(); +await client.set('key', '1'); +await client.add('key', '2'); // 3 +``` diff --git a/docs/pub-sub.md b/docs/pub-sub.md new file mode 100644 index 00000000000..7bbb0733c18 --- /dev/null +++ b/docs/pub-sub.md @@ -0,0 +1,92 @@ +# Pub/Sub + +The Pub/Sub API is implemented by `RedisClient`, `RedisCluster`, and `RedisSentinel`. + +## Pub/Sub with `RedisClient` + +### RESP2 + +Using RESP2, Pub/Sub "takes over" the connection (a client with subscriptions will not execute commands), therefore it requires a dedicated connection. 
You can easily get one by `.duplicate()`ing an existing `RedisClient`: + +```javascript +const subscriber = client.duplicate(); +subscriber.on('error', err => console.error(err)); +await subscriber.connect(); +``` + +> When working with either `RedisCluster` or `RedisSentinel`, this is handled automatically for you. + +### `sharded-channel-moved` event + +`RedisClient` emits the `sharded-channel-moved` event when the ["cluster slot"](https://redis.io/docs/reference/cluster-spec/#key-distribution-model) of a subscribed [Sharded Pub/Sub](https://redis.io/docs/manual/pubsub/#sharded-pubsub) channel has been moved to another shard. + +The event listener signature is as follows: +```typescript +( + channel: string, + listeners: { + buffers: Set; + strings: Set; + } +) +``` + +> When working with `RedisCluster`, this is handled automatically for you. + +## Subscribing + +```javascript +const listener = (message, channel) => console.log(message, channel); +await client.subscribe('channel', listener); +await client.pSubscribe('channe*', listener); +// Use sSubscribe for sharded Pub/Sub: +await client.sSubscribe('channel', listener); +``` + +> ⚠️ Subscribing to the same channel more than once will create multiple listeners, each of which will be called when a message is received. + +## Publishing + +```javascript +await client.publish('channel', 'message'); +// Use sPublish for sharded Pub/Sub: +await client.sPublish('channel', 'message'); +``` + +## Unsubscribing + +The code below unsubscribes all listeners from all channels. 
+ +```javascript +await client.unsubscribe(); +await client.pUnsubscribe(); +// Use sUnsubscribe for sharded Pub/Sub: +await client.sUnsubscribe(); +``` + +To unsubscribe from specific channels: + +```javascript +await client.unsubscribe('channel'); +await client.unsubscribe(['1', '2']); +``` + +To unsubscribe a specific listener: + +```javascript +await client.unsubscribe('channel', listener); +``` + +## Buffers + +Publishing and subscribing using `Buffer`s is also supported: + +```javascript +await subscriber.subscribe('channel', message => { + console.log(message); // +}, true); // true = subscribe in `Buffer` mode. + +await subscriber.publish(Buffer.from('channel'), Buffer.from('message')); +``` + +> NOTE: Buffers and strings are supported both for the channel name and the message. You can mix and match these as desired. diff --git a/docs/scan-iterators.md b/docs/scan-iterators.md new file mode 100644 index 00000000000..47c4d6c0567 --- /dev/null +++ b/docs/scan-iterators.md @@ -0,0 +1,30 @@ +# Scan Iterators + +> :warning: The scan iterators API in v5 has breaking changes from the previous version. For more details, refer to the [v4-to-v5 guide](./v4-to-v5.md#scan-iterators). 
+ +[`SCAN`](https://redis.io/commands/scan) results can be looped over using [async iterators](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Symbol/asyncIterator): + +```javascript +for await (const keys of client.scanIterator()) { + const values = await client.mGet(keys); +} +``` + +This works with `HSCAN`, `SSCAN`, and `ZSCAN` too: + +```javascript +for await (const entries of client.hScanIterator('hash')) {} +for await (const members of client.sScanIterator('set')) {} +for await (const membersWithScores of client.zScanIterator('sorted-set')) {} +``` + +You can override the default options by providing a configuration object: + +```javascript +client.scanIterator({ + cursor: '0', // optional, defaults to '0' + TYPE: 'string', // `SCAN` only + MATCH: 'patter*', + COUNT: 100 +}); +``` diff --git a/docs/sentinel.md b/docs/sentinel.md new file mode 100644 index 00000000000..f10b2953df5 --- /dev/null +++ b/docs/sentinel.md @@ -0,0 +1,103 @@ +# Redis Sentinel + +The [Redis Sentinel](https://redis.io/docs/management/sentinel/) object of node-redis provides a high level object that provides access to a high availability redis installation managed by Redis Sentinel to provide enumeration of master and replica nodes belonging to an installation as well as reconfigure itself on demand for failover and topology changes. 
+ +## Basic Example + +```javascript +import { createSentinel } from 'redis'; + +const sentinel = await createSentinel({ + name: 'sentinel-db', + sentinelRootNodes: [{ + host: 'example', + port: 1234 + }] + }) + .on('error', err => console.error('Redis Sentinel Error', err)) + .connect(); + +await sentinel.set('key', 'value'); +const value = await sentinel.get('key'); +await sentinel.close(); +``` + +In the above example, we configure the sentinel object to fetch the configuration for the database Redis Sentinel is monitoring as "sentinel-db" with one of the sentinels being located at `example:1234`, then using it like a regular Redis client. + +## `createSentinel` configuration + +| Property | Default | Description | +|----------------------------|-----------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| name | | The sentinel identifier for a particular database cluster | +| sentinelRootNodes | | An array of root nodes that are part of the sentinel cluster, which will be used to get the topology. Each element in the array is a client configuration object. There is no need to specify every node in the cluster: 3 should be enough to reliably connect and obtain the sentinel configuration from the server | +| maxCommandRediscovers | `16` | The maximum number of times a command will retry due to topology changes. | +| nodeClientOptions | | The configuration values for every node in the cluster. Use this for example when specifying an ACL user to connect with | +| sentinelClientOptions | | The configuration values for every sentinel in the cluster. 
Use this for example when specifying an ACL user to connect with |

+| masterPoolSize             | `1`       | The number of clients connected to the master node |

+| replicaPoolSize            | `0`       | The number of clients connected to each replica node. When greater than 0, the client will distribute the load by executing read-only commands (such as `GET`, `GEOSEARCH`, etc.) across all the cluster nodes. |

+| scanInterval               | `10000`   | Interval in milliseconds to periodically scan for changes in the sentinel topology. The client will query the sentinel for changes at this interval. |

+| passthroughClientErrorEvents | `false` | When `true`, error events from client instances inside the sentinel will be propagated to the sentinel instance. This allows handling all client errors through a single error handler on the sentinel instance. |

+| reserveClient              | `false`   | When `true`, one client will be reserved for the sentinel object. When `false`, the sentinel object will wait for the first available client from the pool. |

+

+## PubSub

+

+It supports PubSub via the normal mechanisms, including migrating the listeners if the node they are connected to goes down.

+

+```javascript

+await sentinel.subscribe('channel', message => {

+  // ...

+});

+await sentinel.unsubscribe('channel');

+```

+

+See [the PubSub guide](./pub-sub.md) for more details.

+

+## Sentinel as a pool

+

+The sentinel object provides the ability to manage a pool of clients for the master node:

+

+```javascript

+createSentinel({

+  // ...

+  masterPoolSize: 10

+});

+```

+

+In addition, it also provides the ability to have a pool of clients connected to the replica nodes, and to direct all read-only commands to them:

+

+```javascript

+createSentinel({

+  // ...

+  replicaPoolSize: 10

+});

+```

+

+## Master client lease

+

+Sometimes multiple commands need to run on an exclusive client (for example, using `WATCH/MULTI/EXEC`). 
+ +There are 2 ways to get a client lease: + +`.use()` +```javascript +const result = await sentinel.use(async client => { + await client.watch('key'); + return client.multi() + .get('key') + .exec(); +}); +``` + +`.acquire()` +```javascript +const clientLease = await sentinel.acquire(); + +try { + await clientLease.watch('key'); + const resp = await clientLease.multi() + .get('key') + .exec(); +} finally { + clientLease.release(); +} +``` diff --git a/docs/todo.md b/docs/todo.md new file mode 100644 index 00000000000..49163444986 --- /dev/null +++ b/docs/todo.md @@ -0,0 +1,6 @@ +- "Isolation Pool" -> pool +- Cluster request response policies (either implement, or block "server" commands in cluster) + +Docs: +- [Command Options](./command-options.md) +- [RESP](./RESP.md) diff --git a/docs/transactions.md b/docs/transactions.md new file mode 100644 index 00000000000..6331fef4be5 --- /dev/null +++ b/docs/transactions.md @@ -0,0 +1,53 @@ +# [Transactions](https://redis.io/docs/interact/transactions/) ([`MULTI`](https://redis.io/commands/multi/)/[`EXEC`](https://redis.io/commands/exec/)) + +Start a [transaction](https://redis.io/docs/interact/transactions/) by calling `.multi()`, then chaining your commands. 
When you're done, call `.exec()` and you'll get an array back with your results:

+

+```javascript

+const [setReply, getReply] = await client.multi()

+  .set('key', 'value')

+  .get('another-key')

+  .exec();

+```

+

+## `exec<'typed'>()`/`execTyped()`

+

+A transaction invoked with `.exec<'typed'>`/`execTyped()` will return types appropriate to the commands in the transaction:

+

+```javascript

+const multi = client.multi().ping();

+await multi.exec(); // Array

+await multi.exec<'typed'>(); // [string]

+await multi.execTyped(); // [string]

+```

+

+> :warning: this only works when all the commands are invoked in a single "call chain"

+

+## [`WATCH`](https://redis.io/commands/watch/)

+

+You can also [watch](https://redis.io/docs/interact/transactions/#optimistic-locking-using-check-and-set) keys by calling `.watch()`. Your transaction will abort if any of the watched keys change or if the client reconnects between the `watch` and `exec` calls.

+

+The `WATCH` state is stored on the connection (by the server). In case you need to run multiple `WATCH` & `MULTI` in parallel you'll need to use a [pool](./pool.md).

+

+## `execAsPipeline`

+

+`execAsPipeline` will execute the commands without "wrapping" it with `MULTI` & `EXEC` (and lose the transactional semantics).

+

+```javascript

+await client.multi()

+  .get('a')

+  .get('b')

+  .execAsPipeline();

+```

+

+The difference between the above pipeline and `Promise.all`:

+

+```javascript

+await Promise.all([

+  client.get('a'),

+  client.get('b')

+]);

+```

+

+is that if the socket disconnects during the pipeline, any unwritten commands will be discarded. i.e. 
if the socket disconnects after `GET a` is written to the socket, but before `GET b` is: +- using `Promise.all` - the client will try to execute `GET b` when the socket reconnects +- using `execAsPipeline` - `GET b` promise will be rejected as well diff --git a/docs/v3-to-v4.md b/docs/v3-to-v4.md new file mode 100644 index 00000000000..0bf0d269b90 --- /dev/null +++ b/docs/v3-to-v4.md @@ -0,0 +1,87 @@ +# v3 to v4 Migration Guide + +Version 4 of Node Redis is a major refactor. While we have tried to maintain backwards compatibility where possible, several interfaces have changed. Read this guide to understand the differences and how to implement version 4 in your application. + +## All of the Breaking Changes + +See the [Change Log](../CHANGELOG.md). + +### Promises + +Node Redis now uses native [Promises](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise) by default for all functions. + +### `createClient` + +The configuration object passed to `createClient` has changed significantly with this release. See the [client configuration guide](./client-configuration.md) for details. + +### No Auto Connect + +In V4, the client does not automatically connect to the server. Instead you need to run `.connect()` after creating the client or you will receive an error: `ClientClosedError: The client is closed`. + +```typescript +import { createClient } from 'redis'; + +const client = createClient(); + +await client.connect(); + +await client.ping(); +``` + +### All the removed events + +The following events that existed in V3 were removed in V4: + +1. `warning` +2. `subscribe` +3. `psubscribe` +4. `unsubscribe` +5. `message` +6. `message_buffer` +7. `messageBuffer` +8. `pmessage` +9. `pmessage_buffer` +10. `pmessageBuffer` +11. `monitor` + +#### No `message`-like event + +In V4, you don't need to add a listener to the `message`-like events (items 5 to 10 of the above list), you can get the message directly in `subscribe`-like commands. 
+ +The second argument of these commands is a callback, which will be triggered every time there is a message published to the channel. + +The third argument to these commands is a boolean to set `bufferMode` (default `false`). If it's set to `true` you will receive a buffer instead of a string. + +The `subscribe`-like commands return a promise. If the command is executed successfully the promise will be fulfilled, otherwise the promise will be rejected. + +```typescript +import { createClient } from 'redis'; + +const subscriber = createClient(); + +await subscriber.connect(); + +await subscriber.subscribe('channel_name', (message, channelName) => { + console.info(message, channelName); +}); +``` + +## Legacy Mode + +Use legacy mode to preserve the backwards compatibility of commands while still getting access to the updated experience: + +```typescript +const client = createClient({ + legacyMode: true +}); + +// legacy mode +client.set('key', 'value', 'NX', (err, reply) => { + // ... +}); + +// version 4 interface is still accessible +await client.v4.set('key', 'value', { + NX: true +}); +``` diff --git a/docs/v4-to-v5.md b/docs/v4-to-v5.md new file mode 100644 index 00000000000..fbe02e7c4d6 --- /dev/null +++ b/docs/v4-to-v5.md @@ -0,0 +1,245 @@ +# v4 to v5 migration guide + +## Client Configuration + +### Keep Alive + +To better align with Node.js build-in [`net`](https://nodejs.org/api/net.html) and [`tls`](https://nodejs.org/api/tls.html) modules, the `keepAlive` option has been split into 2 options: `keepAlive` (`boolean`) and `keepAliveInitialDelay` (`number`). The defaults remain `true` and `5000`. + +### Legacy Mode + +In the previous version, you could access "legacy" mode by creating a client and passing in `{ legacyMode: true }`. Now, you can create one off of an existing client by calling the `.legacy()` function. This allows easier access to both APIs and enables better TypeScript support. 
+ +```javascript +// use `client` for the current API +const client = createClient(); +await client.set('key', 'value'); + +// use `legacyClient` for the "legacy" API +const legacyClient = client.legacy(); +legacyClient.set('key', 'value', (err, reply) => { + // ... +}); +``` + +## Command Options + +In v4, command options are passed as a first optional argument: + +```javascript +await client.get('key'); // `string | null` +await client.get(client.commandOptions({ returnBuffers: true }), 'key'); // `Buffer | null` +``` + +This has a couple of flaws: +1. The argument types are checked in runtime, which is a performance hit. +2. Code suggestions are less readable/usable, due to "function overloading". +3. Overall, "user code" is not as readable as it could be. + +### The new API for v5 + +With the new API, instead of passing the options directly to the commands we use a "proxy client" to store them: + +```javascript +await client.get('key'); // `string | null` + +const proxyClient = client.withCommandOptions({ + typeMapping: { + [TYPES.BLOB_STRING]: Buffer + } +}); + +await proxyClient.get('key'); // `Buffer | null` +``` + +for more information, see the [Command Options guide](./command-options.md). + +## Quit VS Disconnect + +The `QUIT` command has been deprecated in Redis 7.2 and should now also be considered deprecated in Node-Redis. Instead of sending a `QUIT` command to the server, the client can simply close the network connection. + +`client.QUIT/quit()` is replaced by `client.close()`. and, to avoid confusion, `client.disconnect()` has been renamed to `client.destroy()`. + +## Scan Iterators + +Iterator commands like `SCAN`, `HSCAN`, `SSCAN`, and `ZSCAN` return collections of elements (depending on the data type). 
However, v4 iterators loop over these collections and yield individual items: + +```javascript +for await (const key of client.scanIterator()) { + console.log(key, await client.get(key)); +} +``` + +This mismatch can be awkward and makes "multi-key" commands like `MGET`, `UNLINK`, etc. pointless. So, in v5 the iterators now yield a collection instead of an element: + +```javascript +for await (const keys of client.scanIterator()) { + // we can now meaningfully utilize "multi-key" commands + console.log(keys, await client.mGet(keys)); +} +``` + +for more information, see the [Scan Iterators guide](./scan-iterators.md). + +## Isolation Pool + +In v4, `RedisClient` had the ability to create a pool of connections using an "Isolation Pool" on top of the "main" connection. However, there was no way to use the pool without a "main" connection: +```javascript +const client = await createClient() + .on('error', err => console.error(err)) + .connect(); + +await client.ping( + client.commandOptions({ isolated: true }) +); +``` + +In v5 we've extracted this pool logic into its own classβ€”`RedisClientPool`: + +```javascript +const pool = await createClientPool() + .on('error', err => console.error(err)) + .connect(); + +await pool.ping(); +``` + +See the [pool guide](./pool.md) for more information. + +## Cluster `MULTI` + +In v4, `cluster.multi()` did not support executing commands on replicas, even if they were readonly. 
+ +```javascript +// this might execute on a replica, depending on configuration +await cluster.sendCommand('key', true, ['GET', 'key']); + +// this always executes on a master +await cluster.multi() + .addCommand('key', ['GET', 'key']) + .exec(); +``` + +To support executing commands on replicas, `cluster.multi().addCommand` now requires `isReadonly` as the second argument, which matches the signature of `cluster.sendCommand`: + +```javascript +await cluster.multi() + .addCommand('key', true, ['GET', 'key']) + .exec(); +``` + +## `MULTI.execAsPipeline()` + +```javascript +await client.multi() + .set('a', 'a') + .set('b', 'b') + .execAsPipeline(); +``` + +In older versions, if the socket disconnects during the pipeline execution, i.e. after writing `SET a a` and before `SET b b`, the returned promise is rejected, but `SET b b` will still be executed on the server. + +In v5, any unwritten commands (in the same pipeline) will be discarded. + +- `RedisFlushModes` -> `REDIS_FLUSH_MODES` [^enum-to-constants] + +## Commands + +### Redis + +- `ACL GETUSER`: `selectors` +- `COPY`: `destinationDb` -> `DB`, `replace` -> `REPLACE`, `boolean` -> `number` [^boolean-to-number] +- `CLIENT KILL`: `enum ClientKillFilters` -> `const CLIENT_KILL_FILTERS` [^enum-to-constants] +- `CLUSTER FAILOVER`: `enum FailoverModes` -> `const FAILOVER_MODES` [^enum-to-constants] +- `CLIENT TRACKINGINFO`: `flags` in RESP2 - `Set` -> `Array` (to match RESP3 default type mapping) +- `CLUSTER INFO`: +- `CLUSTER SETSLOT`: `ClusterSlotStates` -> `CLUSTER_SLOT_STATES` [^enum-to-constants] +- `CLUSTER RESET`: the second argument is `{ mode: string; }` instead of `string` [^future-proofing] +- `CLUSTER FAILOVER`: `enum FailoverModes` -> `const FAILOVER_MODES` [^enum-to-constants], the second argument is `{ mode: string; }` instead of `string` [^future-proofing] +- `CLUSTER LINKS`: `createTime` -> `create-time`, `sendBufferAllocated` -> `send-buffer-allocated`, `sendBufferUsed` -> `send-buffer-used` 
[^map-keys] +- `CLUSTER NODES`, `CLUSTER REPLICAS`, `CLUSTER INFO`: returning the raw `VerbatimStringReply` +- `EXPIRE`: `boolean` -> `number` [^boolean-to-number] +- `EXPIREAT`: `boolean` -> `number` [^boolean-to-number] +- `HSCAN`: `tuples` has been renamed to `entries` +- `HEXISTS`: `boolean` -> `number` [^boolean-to-number] +- `HRANDFIELD_COUNT_WITHVALUES`: `Record` -> `Array<{ field: BlobString; value: BlobString; }>` (it can return duplicates). +- `HSETNX`: `boolean` -> `number` [^boolean-to-number] +- `INFO`: +- `LCS IDX`: `length` has been changed to `len`, `matches` has been changed from `Array<{ key1: RangeReply; key2: RangeReply; }>` to `Array<[key1: RangeReply, key2: RangeReply]>` + + +- `ZINTER`: instead of `client.ZINTER('key', { WEIGHTS: [1] })` use `client.ZINTER({ key: 'key', weight: 1 }])` +- `ZINTER_WITHSCORES`: instead of `client.ZINTER_WITHSCORES('key', { WEIGHTS: [1] })` use `client.ZINTER_WITHSCORES({ key: 'key', weight: 1 }])` +- `ZUNION`: instead of `client.ZUNION('key', { WEIGHTS: [1] })` use `client.ZUNION({ key: 'key', weight: 1 }])` +- `ZUNION_WITHSCORES`: instead of `client.ZUNION_WITHSCORES('key', { WEIGHTS: [1] })` use `client.ZUNION_WITHSCORES({ key: 'key', weight: 1 }])` +- `ZMPOP`: `{ elements: Array<{ member: string; score: number; }>; }` -> `{ members: Array<{ value: string; score: number; }>; }` to match other sorted set commands (e.g. `ZRANGE`, `ZSCAN`) + +- `MOVE`: `boolean` -> `number` [^boolean-to-number] +- `PEXPIRE`: `boolean` -> `number` [^boolean-to-number] +- `PEXPIREAT`: `boolean` -> `number` [^boolean-to-number] +- `PFADD`: `boolean` -> `number` [^boolean-to-number] + +- `RENAMENX`: `boolean` -> `number` [^boolean-to-number] +- `SETNX`: `boolean` -> `number` [^boolean-to-number] +- `SCAN`, `HSCAN`, `SSCAN`, and `ZSCAN`: `reply.cursor` will not be converted to number to avoid issues when the number is bigger than `Number.MAX_SAFE_INTEGER`. See [here](https://github.com/redis/node-redis/issues/2561). 
+- `SCRIPT EXISTS`: `Array` -> `Array` [^boolean-to-number] +- `SISMEMBER`: `boolean` -> `number` [^boolean-to-number] +- `SMISMEMBER`: `Array` -> `Array` [^boolean-to-number] +- `SMOVE`: `boolean` -> `number` [^boolean-to-number] + +- `GEOSEARCH_WITH`/`GEORADIUS_WITH`: `GeoReplyWith` -> `GEO_REPLY_WITH` [^enum-to-constants] +- `GEORADIUSSTORE` -> `GEORADIUS_STORE` +- `GEORADIUSBYMEMBERSTORE` -> `GEORADIUSBYMEMBER_STORE` +- `XACK`: `boolean` -> `number` [^boolean-to-number] +- `XADD`: the `INCR` option has been removed, use `XADD_INCR` instead +- `LASTSAVE`: `Date` -> `number` (unix timestamp) +- `HELLO`: `protover` moved from the options object to it's own argument, `auth` -> `AUTH`, `clientName` -> `SETNAME` +- `MODULE LIST`: `version` -> `ver` [^map-keys] +- `MEMORY STATS`: [^map-keys] +- `FUNCTION RESTORE`: the second argument is `{ mode: string; }` instead of `string` [^future-proofing] +- `FUNCTION STATS`: `runningScript` -> `running_script`, `durationMs` -> `duration_ms`, `librariesCount` -> `libraries_count`, `functionsCount` -> `functions_count` [^map-keys] + +- `TIME`: `Date` -> `[unixTimestamp: string, microseconds: string]` + +- `XGROUP_CREATECONSUMER`: [^boolean-to-number] +- `XGROUP_DESTROY`: [^boolean-to-number] +- `XINFO GROUPS`: `lastDeliveredId` -> `last-delivered-id` [^map-keys] +- `XINFO STREAM`: `radixTreeKeys` -> `radix-tree-keys`, `radixTreeNodes` -> `radix-tree-nodes`, `lastGeneratedId` -> `last-generated-id`, `maxDeletedEntryId` -> `max-deleted-entry-id`, `entriesAdded` -> `entries-added`, `recordedFirstEntryId` -> `recorded-first-entry-id`, `firstEntry` -> `first-entry`, `lastEntry` -> `last-entry` +- `XAUTOCLAIM`, `XCLAIM`, `XRANGE`, `XREVRANGE`: `Array<{ name: string; messages: Array<{ id: string; message: Record }>; }>` -> `Record }>>` + +- `COMMAND LIST`: `enum FilterBy` -> `const COMMAND_LIST_FILTER_BY` [^enum-to-constants], the filter argument has been moved from a "top level argument" into ` { FILTERBY: { type: ; value: } }` + +### 
Bloom + +- `TOPK.QUERY`: `Array` -> `Array` + +### JSON + +- `JSON.ARRINDEX`: `start` and `end` arguments moved to `{ range: { start: number; end: number; }; }` [^future-proofing] +- `JSON.ARRPOP`: `path` and `index` arguments moved to `{ path: string; index: number; }` [^future-proofing] +- `JSON.ARRLEN`, `JSON.CLEAR`, `JSON.DEBUG MEMORY`, `JSON.DEL`, `JSON.FORGET`, `JSON.OBJKEYS`, `JSON.OBJLEN`, `JSON.STRAPPEND`, `JSON.STRLEN`, `JSON.TYPE`: `path` argument moved to `{ path: string; }` [^future-proofing] + +### Search + +- `FT.SUGDEL`: [^boolean-to-number] +- `FT.CURSOR READ`: `cursor` type changed from `number` to `string` (in and out) to avoid issues when the number is bigger than `Number.MAX_SAFE_INTEGER`. See [here](https://github.com/redis/node-redis/issues/2561). +- `AggregateGroupByReducers` -> `FT_AGGREGATE_GROUP_BY_REDUCERS` [^enum-to-constants] +- `AggregateSteps` -> `FT_AGGREGATE_STEPS` [^enum-to-constants] +- `RedisSearchLanguages` -> `REDISEARCH_LANGUAGE` [^enum-to-constants] +- `SchemaFieldTypes` -> `SCHEMA_FIELD_TYPE` [^enum-to-constants] +- `SchemaTextFieldPhonetics` -> `SCHEMA_TEXT_FIELD_PHONETIC` [^enum-to-constants] +- `SearchOptions` -> `FtSearchOptions` +- `VectorAlgorithms` -> `SCHEMA_VECTOR_FIELD_ALGORITHM` [^enum-to-constants] + +### Time Series + +- `TS.ADD`: `boolean` -> `number` [^boolean-to-number] +- `TS.[M][REV]RANGE`: the `ALIGN` argument has been moved into `AGGREGATION` +- `TS.SYNUPDATE`: `Array>` -> `Record>` +- `TimeSeriesDuplicatePolicies` -> `TIME_SERIES_DUPLICATE_POLICIES` [^enum-to-constants] +- `TimeSeriesEncoding` -> `TIME_SERIES_ENCODING` [^enum-to-constants] +- `TimeSeriesAggregationType` -> `TIME_SERIES_AGGREGATION_TYPE` [^enum-to-constants] +- `TimeSeriesReducers` -> `TIME_SERIES_REDUCERS` [^enum-to-constants] +- `TimeSeriesBucketTimestamp` -> `TIME_SERIES_BUCKET_TIMESTAMP` [^enum-to-constants] + +[^map-keys]: To avoid unnecessary transformations and confusion, map keys will not be transformed to "js 
friendly" names (i.e. `number-of-keys` will not be renamed to `numberOfKeys`). See [here](https://github.com/redis/node-redis/discussions/2506). diff --git a/docs/v5.md b/docs/v5.md new file mode 100644 index 00000000000..15ef67c14ee --- /dev/null +++ b/docs/v5.md @@ -0,0 +1,188 @@ +# RESP3 Support + +Node Redis v5 adds support for [RESP3](https://github.com/redis/redis-specifications/blob/master/protocol/RESP3.md), the new Redis serialization protocol. RESP3 offers richer data types and improved type handling compared to RESP2. + +To use RESP3, specify it when creating your client: + +```javascript +import { createClient } from 'redis'; + +const client = createClient({ + RESP: 3 +}); +``` + +## Type Mapping + +With RESP3, you can leverage the protocol's richer type system. You can customize how different Redis types are represented in JavaScript using type mapping: + +```javascript +import { createClient, RESP_TYPES } from 'redis'; + +// By default +await client.hGetAll('key'); // Record + +// Use Map instead of plain object +await client.withTypeMapping({ + [RESP_TYPES.MAP]: Map +}).hGetAll('key'); // Map + +// Use both Map and Buffer +await client.withTypeMapping({ + [RESP_TYPES.MAP]: Map, + [RESP_TYPES.BLOB_STRING]: Buffer +}).hGetAll('key'); // Map +``` + +This replaces the previous approach of using `commandOptions({ returnBuffers: true })` in v4. + +## PubSub in RESP3 + +RESP3 uses a different mechanism for handling Pub/Sub messages. Instead of modifying the `onReply` handler as in RESP2, RESP3 provides a dedicated `onPush` handler. When using RESP3, the client automatically uses this more efficient push notification system. + +## Known Limitations + +### Unstable Commands + +Some Redis commands have unstable RESP3 transformations. 
These commands will throw an error when used with RESP3 unless you explicitly opt in to using them by setting `unstableResp3: true` in your client configuration: + +```javascript +const client = createClient({ + RESP: 3, + unstableResp3: true +}); +``` + +The following commands have unstable RESP3 implementations: + +1. **Stream Commands**: + - `XREAD` and `XREADGROUP` - The response format differs between RESP2 and RESP3 + +2. **Search Commands (RediSearch)**: + - `FT.AGGREGATE` + - `FT.AGGREGATE_WITHCURSOR` + - `FT.CURSOR_READ` + - `FT.INFO` + - `FT.PROFILE_AGGREGATE` + - `FT.PROFILE_SEARCH` + - `FT.SEARCH` + - `FT.SEARCH_NOCONTENT` + - `FT.SPELLCHECK` + +3. **Time Series Commands**: + - `TS.INFO` + - `TS.INFO_DEBUG` + +If you need to use these commands with RESP3, be aware that the response format might change in future versions. + +# Sentinel Support + +[Sentinel](./sentinel.md) + +# `multi.exec<'typed'>` / `multi.execTyped` + +We have introduced the ability to perform a "typed" `MULTI`/`EXEC` transaction. Rather than returning `Array`, a transaction invoked with `.exec<'typed'>` will return types appropriate to the commands in the transaction where possible: + +```javascript +const multi = client.multi().ping(); +await multi.exec(); // Array +await multi.exec<'typed'>(); // [string] +await multi.execTyped(); // [string] +``` + +# Client Side Caching + +Node Redis v5 adds support for [Client Side Caching](https://redis.io/docs/manual/client-side-caching/), which enables clients to cache query results locally. The server will notify the client when cached results are no longer valid. + +Client Side Caching is only supported with RESP3. 
+ +## Usage + +There are two ways to implement client side caching: + +### Anonymous Cache + +```javascript +const client = createClient({ + RESP: 3, + clientSideCache: { + ttl: 0, // Time-to-live in milliseconds (0 = no expiration) + maxEntries: 0, // Maximum entries to store (0 = unlimited) + evictPolicy: "LRU" // Eviction policy: "LRU" or "FIFO" + } +}); +``` + +In this instance, the cache is managed internally by the client. + +### Controllable Cache + +```javascript +import { BasicClientSideCache } from 'redis'; + +const cache = new BasicClientSideCache({ + ttl: 0, + maxEntries: 0, + evictPolicy: "LRU" +}); + +const client = createClient({ + RESP: 3, + clientSideCache: cache +}); +``` + +With this approach, you have direct access to the cache object for more control: + +```javascript +// Manually invalidate keys +cache.invalidate(key); + +// Clear the entire cache +cache.clear(); + +// Get cache metrics +// `cache.stats()` returns a `CacheStats` object with comprehensive statistics. +const statistics = cache.stats(); + +// Key metrics: +const hits = statistics.hitCount; // Number of cache hits +const misses = statistics.missCount; // Number of cache misses +const hitRate = statistics.hitRate(); // Cache hit rate (0.0 to 1.0) + +// Many other metrics are available on the `statistics` object, e.g.: +// statistics.missRate(), statistics.loadSuccessCount, +// statistics.averageLoadPenalty(), statistics.requestCount() +``` + +## Pooled Caching + +Client side caching also works with client pools. 
For pooled clients, the cache is shared across all clients in the pool: + +```javascript +const client = createClientPool({RESP: 3}, { + clientSideCache: { + ttl: 0, + maxEntries: 0, + evictPolicy: "LRU" + }, + minimum: 5 +}); +``` + +For a controllable pooled cache: + +```javascript +import { BasicPooledClientSideCache } from 'redis'; + +const cache = new BasicPooledClientSideCache({ + ttl: 0, + maxEntries: 0, + evictPolicy: "LRU" +}); + +const client = createClientPool({RESP: 3}, { + clientSideCache: cache, + minimum: 5 +}); +``` diff --git a/doctests/README.md b/doctests/README.md new file mode 100644 index 00000000000..59d1cb0364c --- /dev/null +++ b/doctests/README.md @@ -0,0 +1,32 @@ +# Command examples for redis.io + +## Setup + +To set up the examples folder so that you can run an example / develop one of your own: + +``` +$ git clone https://github.com/redis/node-redis.git +$ cd node-redis +$ npm install -ws && npm run build +$ cd doctests +$ npm install +``` + +## How to add examples + +Create regular node file in the current folder with meaningful name. It makes sense prefix example files with +command category (e.g. string, set, list, hash, etc) to make navigation in the folder easier. + +### Special markup + +See https://github.com/redis-stack/redis-stack-website#readme for more details. + +## How to test the examples + +Just include necessary assertions in the example file and run +```bash +sh doctests/run_examples.sh +``` +to test all examples in the current folder. + +See `tests.js` for more details. 
diff --git a/doctests/cmds-cnxmgmt.js b/doctests/cmds-cnxmgmt.js new file mode 100644 index 00000000000..8b616b6f105 --- /dev/null +++ b/doctests/cmds-cnxmgmt.js @@ -0,0 +1,49 @@ +// EXAMPLE: cmds_cnxmgmt +// REMOVE_START +import assert from "node:assert"; +// REMOVE_END + +// HIDE_START +import { createClient } from 'redis'; + +const client = createClient(); +await client.connect().catch(console.error); +// HIDE_END + +// STEP_START auth1 +// REMOVE_START +await client.sendCommand(['CONFIG', 'SET', 'requirepass', 'temp_pass']); +// REMOVE_END +const res1 = await client.auth({ password: 'temp_pass' }); +console.log(res1); // OK + +const res2 = await client.auth({ username: 'default', password: 'temp_pass' }); +console.log(res2); // OK + +// REMOVE_START +assert.equal(res1, "OK"); +assert.equal(res2, "OK"); +await client.sendCommand(['CONFIG', 'SET', 'requirepass', '']); +// REMOVE_END +// STEP_END + +// STEP_START auth2 +// REMOVE_START +await client.sendCommand([ + 'ACL', 'SETUSER', 'test-user', + 'on', '>strong_password', '+acl' +]); +// REMOVE_END +const res3 = await client.auth({ username: 'test-user', password: 'strong_password' }); +console.log(res3); // OK + +// REMOVE_START +assert.equal(res3, "OK"); +await client.auth({ username: 'default', password: '' }) +await client.sendCommand(['ACL', 'DELUSER', 'test-user']); +// REMOVE_END +// STEP_END + +// HIDE_START +await client.close(); +// HIDE_END diff --git a/doctests/cmds-generic.js b/doctests/cmds-generic.js new file mode 100644 index 00000000000..50329ede460 --- /dev/null +++ b/doctests/cmds-generic.js @@ -0,0 +1,195 @@ +// EXAMPLE: cmds_generic +// REMOVE_START +import assert from "node:assert"; +// REMOVE_END + +// HIDE_START +import { createClient } from 'redis'; + +const client = createClient(); +await client.connect().catch(console.error); +// HIDE_END + +// STEP_START del +const delRes1 = await client.set('key1', 'Hello'); +console.log(delRes1); // OK + +const delRes2 = await client.set('key2', 
'World'); +console.log(delRes2); // OK + +const delRes3 = await client.del(['key1', 'key2', 'key3']); +console.log(delRes3); // 2 +// REMOVE_START +assert.equal(delRes3, 2); +// REMOVE_END +// STEP_END + +// STEP_START expire +const expireRes1 = await client.set('mykey', 'Hello'); +console.log(expireRes1); // OK + +const expireRes2 = await client.expire('mykey', 10); +console.log(expireRes2); // 1 + +const expireRes3 = await client.ttl('mykey'); +console.log(expireRes3); // 10 +// REMOVE_START +assert.equal(expireRes3, 10); +// REMOVE_END + +const expireRes4 = await client.set('mykey', 'Hello World'); +console.log(expireRes4); // OK + +const expireRes5 = await client.ttl('mykey'); +console.log(expireRes5); // -1 +// REMOVE_START +assert.equal(expireRes5, -1); +// REMOVE_END + +const expireRes6 = await client.expire('mykey', 10, "XX"); +console.log(expireRes6); // 0 +// REMOVE_START +assert.equal(expireRes6, 0) +// REMOVE_END + +const expireRes7 = await client.ttl('mykey'); +console.log(expireRes7); // -1 +// REMOVE_START +assert.equal(expireRes7, -1); +// REMOVE_END + +const expireRes8 = await client.expire('mykey', 10, "NX"); +console.log(expireRes8); // 1 +// REMOVE_START +assert.equal(expireRes8, 1); +// REMOVE_END + +const expireRes9 = await client.ttl('mykey'); +console.log(expireRes9); // 10 +// REMOVE_START +assert.equal(expireRes9, 10); +await client.del('mykey'); +// REMOVE_END +// STEP_END + +// STEP_START ttl +const ttlRes1 = await client.set('mykey', 'Hello'); +console.log(ttlRes1); // OK + +const ttlRes2 = await client.expire('mykey', 10); +console.log(ttlRes2); // 1 + +const ttlRes3 = await client.ttl('mykey'); +console.log(ttlRes3); // 10 +// REMOVE_START +assert.equal(ttlRes3, 10); +await client.del('mykey'); +// REMOVE_END +// STEP_END + +// STEP_START scan1 +const scan1Res1 = await client.sAdd('myset', ['1', '2', '3', 'foo', 'foobar', 'feelsgood']); +console.log(scan1Res1); // 6 + +let scan1Res2 = []; +for await (const values of 
client.sScanIterator('myset', { MATCH: 'f*' })) { + scan1Res2 = scan1Res2.concat(values); +} +console.log(scan1Res2); // ['foo', 'foobar', 'feelsgood'] +// REMOVE_START +console.assert(scan1Res2.sort().toString() === ['foo', 'foobar', 'feelsgood'].sort().toString()); +await client.del('myset'); +// REMOVE_END +// STEP_END + +// STEP_START scan2 +// REMOVE_START +for (let i = 1; i <= 1000; i++) { + await client.set(`key:${i}`, i); +} +// REMOVE_END +let cursor = '0'; +let scanResult; + +scanResult = await client.scan(cursor, { MATCH: '*11*' }); +console.log(scanResult.cursor, scanResult.keys); + +scanResult = await client.scan(scanResult.cursor, { MATCH: '*11*' }); +console.log(scanResult.cursor, scanResult.keys); + +scanResult = await client.scan(scanResult.cursor, { MATCH: '*11*' }); +console.log(scanResult.cursor, scanResult.keys); + +scanResult = await client.scan(scanResult.cursor, { MATCH: '*11*' }); +console.log(scanResult.cursor, scanResult.keys); + +scanResult = await client.scan(scanResult.cursor, { MATCH: '*11*', COUNT: 1000 }); +console.log(scanResult.cursor, scanResult.keys); +// REMOVE_START +console.assert(scanResult.keys.length === 18); +cursor = '0'; +const prefix = 'key:*'; +do { + scanResult = await client.scan(cursor, { MATCH: prefix, COUNT: 1000 }); + console.log(scanResult.cursor, scanResult.keys); + cursor = scanResult.cursor; + const keys = scanResult.keys; + if (keys.length) { + await client.del(keys); + } +} while (cursor !== '0'); +// REMOVE_END +// STEP_END + +// STEP_START scan3 +const scan3Res1 = await client.geoAdd('geokey', { longitude: 0, latitude: 0, member: 'value' }); +console.log(scan3Res1); // 1 + +const scan3Res2 = await client.zAdd('zkey', [{ score: 1000, value: 'value' }]); +console.log(scan3Res2); // 1 + +const scan3Res3 = await client.type('geokey'); +console.log(scan3Res3); // zset +// REMOVE_START +console.assert(scan3Res3 === 'zset'); +// REMOVE_END + +const scan3Res4 = await client.type('zkey'); +console.log(scan3Res4); 
// zset +// REMOVE_START +console.assert(scan3Res4 === 'zset'); +// REMOVE_END + +const scan3Res5 = await client.scan('0', { TYPE: 'zset' }); +console.log(scan3Res5.keys); // ['zkey', 'geokey'] +// REMOVE_START +console.assert(scan3Res5.keys.sort().toString() === ['zkey', 'geokey'].sort().toString()); +await client.del(['geokey', 'zkey']); +// REMOVE_END +// STEP_END + +// STEP_START scan4 +const scan4Res1 = await client.hSet('myhash', { a: 1, b: 2 }); +console.log(scan4Res1); // 2 + +const scan4Res2 = await client.hScan('myhash', '0'); +console.log(scan4Res2.entries); // [{field: 'a', value: '1'}, {field: 'b', value: '2'}] +// REMOVE_START +assert.deepEqual(scan4Res2.entries, [ + { field: 'a', value: '1' }, + { field: 'b', value: '2' } +]); +// REMOVE_END + +const scan4Res3 = await client.hScan('myhash', '0', { COUNT: 10 }); +const items = scan4Res3.entries.map((item) => item.field) +console.log(items); // ['a', 'b'] +// REMOVE_START +assert.deepEqual(items, ['a', 'b']) +await client.del('myhash'); +// REMOVE_END +// STEP_END + +// HIDE_START +await client.close(); +// HIDE_END diff --git a/doctests/cmds-hash.js b/doctests/cmds-hash.js new file mode 100644 index 00000000000..8ce29785763 --- /dev/null +++ b/doctests/cmds-hash.js @@ -0,0 +1,109 @@ +// EXAMPLE: cmds_hash +// HIDE_START +import assert from 'node:assert'; +import { createClient } from 'redis'; + +const client = createClient(); +await client.connect().catch(console.error); +// HIDE_END + +// STEP_START hset +const res1 = await client.hSet('myhash', 'field1', 'Hello') +console.log(res1) // 1 + +const res2 = await client.hGet('myhash', 'field1') +console.log(res2) // Hello + +const res3 = await client.hSet( + 'myhash', + { + 'field2': 'Hi', + 'field3': 'World' + } +) +console.log(res3) // 2 + +const res4 = await client.hGet('myhash', 'field2') +console.log(res4) // Hi + +const res5 = await client.hGet('myhash', 'field3') +console.log(res5) // World + +const res6 = await client.hGetAll('myhash') 
+console.log(res6) + +// REMOVE_START +assert.equal(res1, 1); +assert.equal(res2, 'Hello'); +assert.equal(res3, 2); +assert.equal(res4, 'Hi'); +assert.equal(res5, 'World'); +assert.deepEqual(res6, { + field1: 'Hello', + field2: 'Hi', + field3: 'World' +}); +await client.del('myhash') +// REMOVE_END +// STEP_END + +// STEP_START hget +const res7 = await client.hSet('myhash', 'field1', 'foo') +console.log(res7) // 1 + +const res8 = await client.hGet('myhash', 'field1') +console.log(res8) // foo + +const res9 = await client.hGet('myhash', 'field2') +console.log(res9) // null + +// REMOVE_START +assert.equal(res7, 1); +assert.equal(res8, 'foo'); +assert.equal(res9, null); +await client.del('myhash') +// REMOVE_END +// STEP_END + +// STEP_START hgetall +const res10 = await client.hSet( + 'myhash', + { + 'field1': 'Hello', + 'field2': 'World' + } +) + +const res11 = await client.hGetAll('myhash') +console.log(res11) // [Object: null prototype] { field1: 'Hello', field2: 'World' } + +// REMOVE_START +assert.deepEqual(res11, { + field1: 'Hello', + field2: 'World' +}); +await client.del('myhash') +// REMOVE_END +// STEP_END + +// STEP_START hvals +const res12 = await client.hSet( + 'myhash', + { + 'field1': 'Hello', + 'field2': 'World' + } +) + +const res13 = await client.hVals('myhash') +console.log(res13) // [ 'Hello', 'World' ] + +// REMOVE_START +assert.deepEqual(res13, [ 'Hello', 'World' ]); +await client.del('myhash') +// REMOVE_END +// STEP_END + +// HIDE_START +await client.close(); +// HIDE_END diff --git a/doctests/cmds-list.js b/doctests/cmds-list.js new file mode 100644 index 00000000000..9d1b4154dfe --- /dev/null +++ b/doctests/cmds-list.js @@ -0,0 +1,129 @@ +// EXAMPLE: cmds_list +// HIDE_START +import assert from 'node:assert'; +import { createClient } from 'redis'; + +const client = createClient(); +await client.connect().catch(console.error); +// HIDE_END + +// STEP_START lpush +const res1 = await client.lPush('mylist', 'world'); +console.log(res1); // 1 + 
+const res2 = await client.lPush('mylist', 'hello'); +console.log(res2); // 2 + +const res3 = await client.lRange('mylist', 0, -1); +console.log(res3); // [ 'hello', 'world' ] + +// REMOVE_START +assert.deepEqual(res3, [ 'hello', 'world' ]); +await client.del('mylist'); +// REMOVE_END +// STEP_END + +// STEP_START lrange +const res4 = await client.rPush('mylist', 'one'); +console.log(res4); // 1 + +const res5 = await client.rPush('mylist', 'two'); +console.log(res5); // 2 + +const res6 = await client.rPush('mylist', 'three'); +console.log(res6); // 3 + +const res7 = await client.lRange('mylist', 0, 0); +console.log(res7); // [ 'one' ] + +const res8 = await client.lRange('mylist', -3, 2); +console.log(res8); // [ 'one', 'two', 'three' ] + +const res9 = await client.lRange('mylist', -100, 100); +console.log(res9); // [ 'one', 'two', 'three' ] + +const res10 = await client.lRange('mylist', 5, 10); +console.log(res10); // [] + +// REMOVE_START +assert.deepEqual(res7, [ 'one' ]); +assert.deepEqual(res8, [ 'one', 'two', 'three' ]); +assert.deepEqual(res9, [ 'one', 'two', 'three' ]); +assert.deepEqual(res10, []); +await client.del('mylist'); +// REMOVE_END +// STEP_END + +// STEP_START llen +const res11 = await client.lPush('mylist', 'World'); +console.log(res11); // 1 + +const res12 = await client.lPush('mylist', 'Hello'); +console.log(res12); // 2 + +const res13 = await client.lLen('mylist'); +console.log(res13); // 2 + +// REMOVE_START +assert.equal(res13, 2); +await client.del('mylist'); +// REMOVE_END +// STEP_END + +// STEP_START rpush +const res14 = await client.rPush('mylist', 'hello'); +console.log(res14); // 1 + +const res15 = await client.rPush('mylist', 'world'); +console.log(res15); // 2 + +const res16 = await client.lRange('mylist', 0, -1); +console.log(res16); // [ 'hello', 'world' ] + +// REMOVE_START +assert.deepEqual(res16, [ 'hello', 'world' ]); +await client.del('mylist'); +// REMOVE_END +// STEP_END + +// STEP_START lpop +const res17 = await 
client.rPush('mylist', ["one", "two", "three", "four", "five"]); +console.log(res17); // 5 + +const res18 = await client.lPop('mylist'); +console.log(res18); // 'one' + +const res19 = await client.lPopCount('mylist', 2); +console.log(res19); // [ 'two', 'three' ] + +const res20 = await client.lRange('mylist', 0, -1); +console.log(res20); // [ 'four', 'five' ] + +// REMOVE_START +assert.deepEqual(res20, [ 'four', 'five' ]); +await client.del('mylist'); +// REMOVE_END +// STEP_END + +// STEP_START rpop +const res21 = await client.rPush('mylist', ["one", "two", "three", "four", "five"]); +console.log(res21); // 5 + +const res22 = await client.rPop('mylist'); +console.log(res22); // 'five' + +const res23 = await client.rPopCount('mylist', 2); +console.log(res23); // [ 'four', 'three' ] + +const res24 = await client.lRange('mylist', 0, -1); +console.log(res24); // [ 'one', 'two' ] + +// REMOVE_START +assert.deepEqual(res24, [ 'one', 'two' ]); +await client.del('mylist'); +// REMOVE_END +// STEP_END + +// HIDE_START +await client.close(); +// HIDE_END diff --git a/doctests/cmds-servermgmt.js b/doctests/cmds-servermgmt.js new file mode 100644 index 00000000000..0a53e05919d --- /dev/null +++ b/doctests/cmds-servermgmt.js @@ -0,0 +1,45 @@ +// EXAMPLE: cmds_servermgmt +// REMOVE_START +import assert from 'node:assert'; +// REMOVE_END + +// HIDE_START +import { createClient } from 'redis'; + +const client = createClient(); +await client.connect().catch(console.error); +// HIDE_END + +// STEP_START flushall +// REMOVE_START +await client.set('foo', '1'); +await client.set('bar', '2'); +await client.set('baz', '3'); +// REMOVE_END +const res1 = await client.flushAll('SYNC'); // or ASYNC +console.log(res1); // OK + +const res2 = await client.keys('*'); +console.log(res2); // [] + +// REMOVE_START +assert.equal(res1, 'OK'); +assert.deepEqual(res2, []); +// REMOVE_END +// STEP_END + +// STEP_START info +const res3 = await client.info(); +console.log(res3) +// # Server +// 
redis_version:7.4.0 +// redis_git_sha1:c9d29f6a +// redis_git_dirty:0 +// redis_build_id:4c367a16e3f9616 +// redis_mode:standalone +// ... +// STEP_END + +// HIDE_START +await client.close(); +// HIDE_END diff --git a/doctests/cmds-set.js b/doctests/cmds-set.js new file mode 100644 index 00000000000..8a9a3837036 --- /dev/null +++ b/doctests/cmds-set.js @@ -0,0 +1,44 @@ +// EXAMPLE: cmds_set +// REMOVE_START +import assert from 'node:assert'; +// REMOVE_END + +// HIDE_START +import { createClient } from 'redis'; + +const client = createClient(); +await client.connect().catch(console.error); +// HIDE_END + +// STEP_START sadd +const res1 = await client.sAdd('myset', ['Hello', 'World']); +console.log(res1); // 2 + +const res2 = await client.sAdd('myset', ['World']); +console.log(res2); // 0 + +const res3 = await client.sMembers('myset') +console.log(res3); // ['Hello', 'World'] + +// REMOVE_START +assert.deepEqual(res3, ['Hello', 'World']); +await client.del('myset'); +// REMOVE_END +// STEP_END + +// STEP_START smembers +const res4 = await client.sAdd('myset', ['Hello', 'World']); +console.log(res4); // 2 + +const res5 = await client.sMembers('myset') +console.log(res5); // ['Hello', 'World'] + +// REMOVE_START +assert.deepEqual(res5, ['Hello', 'World']); +await client.del('myset'); +// REMOVE_END +// STEP_END + +// HIDE_START +await client.close(); +// HIDE_END diff --git a/doctests/cmds-sorted-set.js b/doctests/cmds-sorted-set.js new file mode 100644 index 00000000000..b718938cc2b --- /dev/null +++ b/doctests/cmds-sorted-set.js @@ -0,0 +1,115 @@ +// EXAMPLE: cmds_sorted_set +// REMOVE_START +import assert from "node:assert"; +// REMOVE_END + +// HIDE_START +import { createClient } from 'redis'; + +const client = createClient(); +client.on('error', err => console.log('Redis Client Error', err)); +await client.connect().catch(console.error); +// HIDE_END + +// STEP_START zadd +const val1 = await client.zAdd("myzset", [{ value: 'one', score: 1 }]); +console.log(val1); 
+// returns 1 + +const val2 = await client.zAdd("myzset", [{ value: 'uno', score: 1 }]); +console.log(val2); +// returns 1 + +const val3 = await client.zAdd("myzset", [{ value: 'two', score: 2 }, { value: 'three', score: 3 }]); +console.log(val3); +// returns 2 + +const val4 = await client.zRangeWithScores("myzset", 0, -1); +console.log(val4); +// returns [{value: 'one', score: 1}, {value: 'uno', score: 1}, {value: 'two', score: 2}, {value: 'three', score: 3} ] + +// REMOVE_START +assert.equal(val1, 1); +assert.equal(val2, 1); +assert.equal(val3, 2); +assert.deepEqual(val4, [ + { value: 'one', score: 1 }, + { value: 'uno', score: 1 }, + { value: 'two', score: 2 }, + { value: 'three', score: 3 } +]); +await client.del('myzset'); +// REMOVE_END +// STEP_END + +// STEP_START zrange1 +const val5 = await client.zAdd("myzset", [ + { value: 'one', score: 1 }, + { value: 'two', score: 2 }, + { value: 'three', score: 3 } +]); +console.log(val5); +// returns 3 + +const val6 = await client.zRange('myzset', 0, -1); +console.log(val6); +// returns ['one', 'two', 'three'] +// REMOVE_START +console.assert(JSON.stringify(val6) === JSON.stringify(['one', 'two', 'three'])); +// REMOVE_END + +const val7 = await client.zRange('myzset', 2, 3); +console.log(val7); +// returns ['three'] +// REMOVE_START +console.assert(JSON.stringify(val7) === JSON.stringify(['three'])); +// REMOVE_END + +const val8 = await client.zRange('myzset', -2, -1); +console.log(val8); +// returns ['two', 'three'] +// REMOVE_START +console.assert(JSON.stringify(val8) === JSON.stringify(['two', 'three'])); +await client.del('myzset'); +// REMOVE_END +// STEP_END + +// STEP_START zrange2 +const val9 = await client.zAdd("myzset", [ + { value: 'one', score: 1 }, + { value: 'two', score: 2 }, + { value: 'three', score: 3 } +]); +console.log(val9); +// returns 3 + +const val10 = await client.zRangeWithScores('myzset', 0, 1); +console.log(val10); +// returns [{value: 'one', score: 1}, {value: 'two', score: 2}] +// 
REMOVE_START +console.assert(JSON.stringify(val10) === JSON.stringify([{value: 'one', score: 1}, {value: 'two', score: 2}])); +await client.del('myzset'); +// REMOVE_END +// STEP_END + +// STEP_START zrange3 +const val11 = await client.zAdd("myzset", [ + { value: 'one', score: 1 }, + { value: 'two', score: 2 }, + { value: 'three', score: 3 } +]); +console.log(val11); +// returns 3 + +const val12 = await client.zRange('myzset', 2, 3, { BY: 'SCORE', LIMIT: { offset: 1, count: 1 } }); +console.log(val12); +// >>> ['three'] +// REMOVE_START +console.assert(JSON.stringify(val12) === JSON.stringify(['three'])); +await client.del('myzset'); +// REMOVE_END +// STEP_END + +// HIDE_START +await client.close(); +// HIDE_END diff --git a/doctests/cmds-string.js b/doctests/cmds-string.js new file mode 100644 index 00000000000..00b3f738fa3 --- /dev/null +++ b/doctests/cmds-string.js @@ -0,0 +1,27 @@ +// EXAMPLE: cmds_string +// REMOVE_START +import assert from "node:assert"; +// REMOVE_END + +// HIDE_START +import { createClient } from 'redis'; + +const client = createClient(); +client.on('error', err => console.log('Redis Client Error', err)); +await client.connect().catch(console.error); +// HIDE_END + +// STEP_START incr +await client.set("mykey", "10"); +const value1 = await client.incr("mykey"); +console.log(value1); +// returns 11 +// REMOVE_START +assert.equal(value1, 11); +await client.del('mykey'); +// REMOVE_END +// STEP_END + +// HIDE_START +await client.close(); +// HIDE_END diff --git a/doctests/data/query_em.json b/doctests/data/query_em.json new file mode 100644 index 00000000000..bcf908aaf85 --- /dev/null +++ b/doctests/data/query_em.json @@ -0,0 +1,92 @@ +[ + { + "pickup_zone": "POLYGON((-74.0610 40.7578, -73.9510 40.7578, -73.9510 40.6678, -74.0610 40.6678, -74.0610 40.7578))", + "store_location": "-74.0060,40.7128", + "brand": "Velorim", + "model": "Jigger", + "price": 270, + "description": "Small and powerful, the Jigger is the best ride for the smallest of 
tikes! This is the tiniest kids’ pedal bike on the market available without a coaster brake, the Jigger is the vehicle of choice for the rare tenacious little rider raring to go.", + "condition": "new" + }, + { + "pickup_zone": "POLYGON((-118.2887 34.0972, -118.1987 34.0972, -118.1987 33.9872, -118.2887 33.9872, -118.2887 34.0972))", + "store_location": "-118.2437,34.0522", + "brand": "Bicyk", + "model": "Hillcraft", + "price": 1200, + "description": "Kids want to ride with as little weight as possible. Especially on an incline! They may be at the age when a 27.5\" wheel bike is just too clumsy coming off a 24\" bike. The Hillcraft 26 is just the solution they need!", + "condition": "used" + }, + { + "pickup_zone": "POLYGON((-87.6848 41.9331, -87.5748 41.9331, -87.5748 41.8231, -87.6848 41.8231, -87.6848 41.9331))", + "store_location": "-87.6298,41.8781", + "brand": "Nord", + "model": "Chook air 5", + "price": 815, + "description": "The Chook Air 5 gives kids aged six years and older a durable and uberlight mountain bike for their first experience on tracks and easy cruising through forests and fields. The lower top tube makes it easy to mount and dismount in any situation, giving your kids greater safety on the trails.", + "condition": "used" + }, + { + "pickup_zone": "POLYGON((-80.2433 25.8067, -80.1333 25.8067, -80.1333 25.6967, -80.2433 25.6967, -80.2433 25.8067))", + "store_location": "-80.1918,25.7617", + "brand": "Eva", + "model": "Eva 291", + "price": 3400, + "description": "The sister company to Nord, Eva launched in 2005 as the first and only women-dedicated bicycle brand. Designed by women for women, allEva bikes are optimized for the feminine physique using analytics from a body metrics database. If you like 29ers, try the Eva 291. It’s a brand new bike for 2022.. This full-suspension, cross-country ride has been designed for velocity. The 291 has 100mm of front and rear travel, a superlight aluminum frame and fast-rolling 29-inch wheels. 
Yippee!", + "condition": "used" + }, + { + "pickup_zone": "POLYGON((-122.4644 37.8199, -122.3544 37.8199, -122.3544 37.7099, -122.4644 37.7099, -122.4644 37.8199))", + "store_location": "-122.4194,37.7749", + "brand": "Noka Bikes", + "model": "Kahuna", + "price": 3200, + "description": "Whether you want to try your hand at XC racing or are looking for a lively trail bike that's just as inspiring on the climbs as it is over rougher ground, the Wilder is one heck of a bike built specifically for short women. Both the frames and components have been tweaked to include a women’s saddle, different bars and unique colourway.", + "condition": "used" + }, + { + "pickup_zone": "POLYGON((-0.1778 51.5524, 0.0822 51.5524, 0.0822 51.4024, -0.1778 51.4024, -0.1778 51.5524))", + "store_location": "-0.1278,51.5074", + "brand": "Breakout", + "model": "XBN 2.1 Alloy", + "price": 810, + "description": "The XBN 2.1 Alloy is our entry-level road bike – but that’s not to say that it’s a basic machine. With an internal weld aluminium frame, a full carbon fork, and the slick-shifting Claris gears from Shimano’s, this is a bike which doesn’t break the bank and delivers craved performance.", + "condition": "new" + }, + { + "pickup_zone": "POLYGON((2.1767 48.9016, 2.5267 48.9016, 2.5267 48.5516, 2.1767 48.5516, 2.1767 48.9016))", + "store_location": "2.3522,48.8566", + "brand": "ScramBikes", + "model": "WattBike", + "price": 2300, + "description": "The WattBike is the best e-bike for people who still feel young at heart. It has a Bafang 1000W mid-drive system and a 48V 17.5AH Samsung Lithium-Ion battery, allowing you to ride for more than 60 miles on one charge. It’s great for tackling hilly terrain or if you just fancy a more leisurely ride. 
With three working modes, you can choose between E-bike, assisted bicycle, and normal bike modes.", + "condition": "new" + }, + { + "pickup_zone": "POLYGON((13.3260 52.5700, 13.6550 52.5700, 13.6550 52.2700, 13.3260 52.2700, 13.3260 52.5700))", + "store_location": "13.4050,52.5200", + "brand": "Peaknetic", + "model": "Secto", + "price": 430, + "description": "If you struggle with stiff fingers or a kinked neck or back after a few minutes on the road, this lightweight, aluminum bike alleviates those issues and allows you to enjoy the ride. From the ergonomic grips to the lumbar-supporting seat position, the Roll Low-Entry offers incredible comfort. The rear-inclined seat tube facilitates stability by allowing you to put a foot on the ground to balance at a stop, and the low step-over frame makes it accessible for all ability and mobility levels. The saddle is very soft, with a wide back to support your hip joints and a cutout in the center to redistribute that pressure. Rim brakes deliver satisfactory braking control, and the wide tires provide a smooth, stable ride on paved roads and gravel. Rack and fender mounts facilitate setting up the Roll Low-Entry as your preferred commuter, and the BMX-like handlebar offers space for mounting a flashlight, bell, or phone holder.", + "condition": "new" + }, + { + "pickup_zone": "POLYGON((1.9450 41.4301, 2.4018 41.4301, 2.4018 41.1987, 1.9450 41.1987, 1.9450 41.4301))", + "store_location": "2.1734, 41.3851", + "brand": "nHill", + "model": "Summit", + "price": 1200, + "description": "This budget mountain bike from nHill performs well both on bike paths and on the trail. The fork with 100mm of travel absorbs rough terrain. Fat Kenda Booster tires give you grip in corners and on wet trails. The Shimano Tourney drivetrain offered enough gears for finding a comfortable pace to ride uphill, and the Tektro hydraulic disc brakes break smoothly. 
Whether you want an affordable bike that you can take to work, but also take trail in mountains on the weekends or you’re just after a stable, comfortable ride for the bike path, the Summit gives a good value for money.", + "condition": "new" + }, + { + "pickup_zone": "POLYGON((12.4464 42.1028, 12.5464 42.1028, 12.5464 41.7028, 12.4464 41.7028, 12.4464 42.1028))", + "store_location": "12.4964,41.9028", + "model": "ThrillCycle", + "brand": "BikeShind", + "price": 815, + "description": "An artsy, retro-inspired bicycle that’s as functional as it is pretty: The ThrillCycle steel frame offers a smooth ride. A 9-speed drivetrain has enough gears for coasting in the city, but we wouldn’t suggest taking it to the mountains. Fenders protect you from mud, and a rear basket lets you transport groceries, flowers and books. The ThrillCycle comes with a limited lifetime warranty, so this little guy will last you long past graduation.", + "condition": "refurbished" + } +] diff --git a/doctests/data/query_vector.json b/doctests/data/query_vector.json new file mode 100644 index 00000000000..625479e111b --- /dev/null +++ b/doctests/data/query_vector.json @@ -0,0 +1,3952 @@ +[ + { + "brand": "Velorim", + "condition": "new", + "description": "Small and powerful, the Jigger is the best ride for the smallest of tikes! 
This is the tiniest kids\u2019 pedal bike on the market available without a coaster brake, the Jigger is the vehicle of choice for the rare tenacious little rider raring to go.", + "description_embeddings": [ + -0.026918452233076096, + 0.07200391590595245, + 0.019199736416339874, + -0.024749649688601494, + -0.09264523535966873, + 0.017702950164675713, + 0.11252444237470627, + 0.09377790987491608, + 0.005099582951515913, + 0.07054618746042252, + 0.0025260779075324535, + -0.04007257893681526, + 0.013598357327282429, + 0.03940897434949875, + -0.0069704861380159855, + 0.057934146374464035, + 0.15386416018009186, + 0.04337097704410553, + 0.07119690626859665, + -0.048173222690820694, + -0.09069827198982239, + -0.016886970028281212, + -0.04425429925322533, + -0.019464140757918358, + -0.027470778673887253, + 0.005336642265319824, + -0.09170512855052948, + 0.03556380048394203, + 0.023559197783470154, + -0.03628315031528473, + -0.04448218643665314, + 0.011364061385393143, + 0.009603296406567097, + -0.04861818626523018, + -0.03343017399311066, + -0.01483147218823433, + 0.06086479872465134, + 0.02109363302588463, + -0.025959225371479988, + 0.014000430703163147, + -0.00846729427576065, + 0.07305800914764404, + 0.02457829751074314, + -0.12663941085338593, + 0.010544337332248688, + 0.013315590098500252, + 0.07280771434307098, + -0.08232685923576355, + 0.0040486594662070274, + -0.026350753381848335, + 0.06408613175153732, + -0.01415738184005022, + 0.04628903791308403, + -0.02050374448299408, + 0.04177685081958771, + -0.09207800775766373, + -0.005421833600848913, + -0.005136478692293167, + -0.024564260616898537, + 0.026354163885116577, + -0.05851329490542412, + 0.03147275745868683, + -0.02183554694056511, + 0.03346295654773712, + -0.02240697667002678, + -0.09603817760944366, + -0.02274233102798462, + -0.039677977561950684, + 0.007695100735872984, + 0.039304088801145554, + -0.017668871209025383, + 0.022897064685821533, + -0.039273541420698166, + 0.08864572644233704, + 
-0.04432578384876251, + -0.06769558042287827, + 0.06696884334087372, + 0.07118263095617294, + -0.024863263592123985, + 0.01151553075760603, + -0.11591693758964539, + -0.025131937116384506, + 0.052269868552684784, + -0.03035089746117592, + 0.00906881783157587, + 0.04585501551628113, + 0.038361817598342896, + 0.03026638925075531, + 0.015340524725615978, + -0.006911538541316986, + 0.022395918145775795, + 0.13969141244888306, + 0.047686025500297546, + 0.05438247323036194, + 0.02779674343764782, + -0.04191797226667404, + -0.021741557866334915, + 0.003305602353066206, + -0.11100355535745621, + 0.016258426010608673, + 0.06977421790361404, + 0.08189859241247177, + 0.0966871827840805, + 0.03519754856824875, + 0.05674563720822334, + 0.034512776881456375, + 0.07052291929721832, + -0.06342307478189468, + 0.051868196576833725, + -0.013776403851807117, + -0.007541927974671125, + -0.043183840811252594, + 0.021481933072209358, + 0.0380198135972023, + -0.07870583236217499, + -0.10873759537935257, + -0.08491706103086472, + 0.03155837208032608, + -0.03790571913123131, + 0.041968367993831635, + -0.00593406381085515, + 0.036538854241371155, + -0.004705581348389387, + 0.004229994490742683, + -0.00729013979434967, + -0.019296232610940933, + -0.014331319369375706, + -4.401502220942261e-34, + -0.0067550260573625565, + 0.07402423769235611, + -0.012888211756944656, + -0.055266380310058594, + 0.04810081049799919, + 0.005175809375941753, + -0.004325157031416893, + -0.10392399877309799, + -0.03650582954287529, + 0.07477248460054398, + 0.0022102247457951307, + -0.05040738359093666, + -0.003033560933545232, + 0.060498371720314026, + 0.08619660884141922, + -0.04577762633562088, + -0.10468175262212753, + -0.07177772372961044, + -0.05756700038909912, + -0.02839704230427742, + -0.028650879859924316, + -0.010213681496679783, + 0.008074316196143627, + 0.03448071703314781, + -0.025478240102529526, + -0.029753824695944786, + 0.05397271364927292, + -0.0006062929751351476, + -0.03292117267847061, + 
0.040799956768751144, + -0.0933983102440834, + -0.026921836659312248, + 0.00327915046364069, + -0.025635670870542526, + -0.057946983724832535, + -0.06664302200078964, + 0.04280361905694008, + -0.027111517265439034, + -0.08359260112047195, + 0.03483080118894577, + 0.023318039253354073, + -0.08598511666059494, + -0.08378149569034576, + 0.054067932069301605, + -0.014853178523480892, + -0.05498708039522171, + 0.0711284726858139, + 0.12539872527122498, + -0.04355800896883011, + -0.027553806081414223, + -0.009111037477850914, + -0.058482203632593155, + 0.07053986191749573, + -0.009027705527842045, + -0.017481567338109016, + 0.011404255405068398, + 0.06084389239549637, + -0.028110956773161888, + -0.08594057708978653, + 0.05215488001704216, + -0.07651112973690033, + -0.027076181024312973, + -0.01357623003423214, + -0.01263132132589817, + -0.0193876251578331, + 0.013576658442616463, + 0.038156066089868546, + -0.04309772700071335, + -0.04051121696829796, + -0.025885144248604774, + -0.003073574509471655, + -0.0003303807170595974, + 0.08043289929628372, + -0.039484549313783646, + 0.10091833025217056, + -0.04735022410750389, + 0.027813943102955818, + -0.0038837436586618423, + -0.05234759673476219, + -0.00716474698856473, + 0.016360750421881676, + -0.025806615129113197, + -0.03212691470980644, + -0.08456144481897354, + -0.019326699897646904, + -0.03228791803121567, + 0.07633069902658463, + -0.07643644511699677, + -0.03988131135702133, + 0.02396279387176037, + -0.055901359766721725, + 0.009231535717844963, + 0.0344320572912693, + 0.07486359030008316, + -0.03505600243806839, + -2.324423447670953e-34, + -0.04453577473759651, + 0.06512241810560226, + 0.03920532390475273, + 0.062222111970186234, + -0.015745285898447037, + -0.017774563282728195, + 0.08228200674057007, + -0.05798694118857384, + -0.042758435010910034, + -0.018822822719812393, + -0.07607664912939072, + -0.02666221559047699, + 0.036936040967702866, + -0.034714240580797195, + 0.06992944329977036, + 0.00530517753213644, + 
-0.0005260169273242354, + -0.03961028903722763, + 0.08799499273300171, + -0.04191635549068451, + 0.07468635588884354, + -0.010930310003459454, + -0.0611649826169014, + -0.04100184887647629, + 0.07131826132535934, + 0.03241356834769249, + -0.0545443594455719, + -0.005295638460665941, + -0.04712966829538345, + 0.032524388283491135, + -0.05130890756845474, + -0.01299980841577053, + 0.06523969769477844, + -0.011433755978941917, + 0.018730396404862404, + 0.047184932976961136, + -0.043041545897722244, + -0.03231072053313255, + -0.015864262357354164, + 0.03991076350212097, + -0.017617924138903618, + -0.03504975512623787, + 0.027346905320882797, + 0.05564267560839653, + 0.01610865257680416, + 0.0470576174557209, + -0.010647954419255257, + 0.13047614693641663, + -0.011055804789066315, + 0.011903814040124416, + -0.01350466813892126, + -0.0019897734746336937, + 0.053073540329933167, + 0.0717632919549942, + 0.007322370074689388, + -0.0206251572817564, + 0.061210062354803085, + 0.03184640407562256, + -0.035093698650598526, + -0.0026315131690353155, + -0.03291690722107887, + -0.04229205846786499, + -0.04241437837481499, + 0.1091129332780838, + 0.02229561097919941, + 0.02223002351820469, + 0.03949614241719246, + 0.031568314880132675, + -0.07121116667985916, + -0.07664268463850021, + -0.04235681891441345, + -0.011173299513757229, + 0.1190338209271431, + -0.09825095534324646, + -0.0375107042491436, + 0.007167852018028498, + 0.047537703067064285, + 0.044423725455999374, + 0.022106878459453583, + -0.02811007760465145, + 0.033864255994558334, + 0.0643145889043808, + 0.03725901246070862, + -0.0497952364385128, + -0.021733446046710014, + 0.023898839950561523, + -0.11254694312810898, + -0.06519465893507004, + 0.04424642026424408, + 0.09124527126550674, + 0.006083414424210787, + 0.09144245833158493, + 0.02653978019952774, + -0.01318738516420126, + 0.0480327382683754, + -3.0391063887691416e-08, + 0.051376331597566605, + -0.0002709411783143878, + -0.03103259764611721, + 
-0.018394535407423973, + 0.05002995952963829, + 0.05086217448115349, + -0.07317503541707993, + -0.0172730665653944, + -0.08379635214805603, + 0.1180257499217987, + 0.08445936441421509, + -0.025030585005879402, + -0.01965731382369995, + 0.046042654663324356, + 0.03724817931652069, + 0.028524605557322502, + 0.061249297112226486, + -0.027382537722587585, + -0.0011134583037346601, + -0.001871297718025744, + -0.04395337030291557, + 0.002261978341266513, + 0.06950556486845016, + -0.024213269352912903, + -0.0782783254981041, + -0.10320401936769485, + -0.022083906456828117, + -0.04333319142460823, + -0.0334695503115654, + 0.007842703722417355, + -0.03523677587509155, + 0.08107997477054596, + 0.00924254022538662, + -0.013395791873335838, + -0.019067300483584404, + -0.008446489460766315, + -0.1053837463259697, + 0.06697141379117966, + 0.06984667479991913, + 0.007155571132898331, + -0.038544610142707825, + 0.0132181691005826, + -0.004773592576384544, + -0.022143904119729996, + -0.09064015746116638, + -0.07600560784339905, + -0.042070601135492325, + -0.08189931511878967, + 0.03302472084760666, + 0.043238356709480286, + -0.01407547201961279, + -0.03778013586997986, + 0.030578600242733955, + 0.021573437377810478, + 0.04664295166730881, + 0.056082408875226974, + -0.07687672227621078, + -0.0018553169211372733, + -0.051700614392757416, + 0.043752558529376984, + -0.02636834792792797, + 0.05589277669787407, + 0.05282546952366829, + -0.016411008313298225 + ], + "model": "Jigger", + "pickup_zone": "POLYGON((-74.0610 40.7578, -73.9510 40.7578, -73.9510 40.6678, -74.0610 40.6678, -74.0610 40.7578))", + "price": 270, + "store_location": "-74.0060,40.7128" + }, + { + "brand": "Bicyk", + "condition": "used", + "description": "Kids want to ride with as little weight as possible. Especially on an incline! They may be at the age when a 27.5\" wheel bike is just too clumsy coming off a 24\" bike. 
The Hillcraft 26 is just the solution they need!", + "description_embeddings": [ + -0.004883771762251854, + 0.08099519461393356, + -0.022444017231464386, + 0.05437565594911575, + -0.07422323524951935, + 0.0066548739559948444, + -0.06268022209405899, + 0.042389899492263794, + -0.03745086118578911, + 0.058961447328329086, + 0.025613723322749138, + -0.04209878668189049, + 0.06861244142055511, + 0.01983577199280262, + 0.026353053748607635, + 0.045618414878845215, + 0.040685027837753296, + 0.09574265778064728, + 0.005801026243716478, + -0.027659950777888298, + -0.0223013274371624, + 0.040641166269779205, + 0.06608107686042786, + 0.0691058486700058, + -0.03629102557897568, + 0.035505786538124084, + -0.09211395680904388, + -0.011358118616044521, + -0.025078972801566124, + -0.017709167674183846, + 0.07587391883134842, + 0.08128049969673157, + 0.060521550476551056, + -0.0845090001821518, + -0.03779749944806099, + 0.030346086248755455, + 0.017926080152392387, + 0.003489845432341099, + -0.05622200667858124, + -0.06886664777994156, + -0.051538050174713135, + 0.029196197167038918, + -0.0028146395925432444, + 0.012419342994689941, + -0.06346380710601807, + -0.011617675423622131, + 0.04980290308594704, + -0.0799335315823555, + 0.016635078936815262, + 0.07064730674028397, + 0.04530491679906845, + -0.04372858256101608, + 0.07056037336587906, + -0.05052798613905907, + -0.01064316462725401, + -0.04754374921321869, + -0.08878123015165329, + 0.005363269243389368, + 0.032587066292762756, + -0.05610528588294983, + -0.0012875061947852373, + -0.03215320408344269, + -0.0045777312479913235, + -0.026692084968090057, + -0.09758491814136505, + -0.046251099556684494, + 0.03897765651345253, + -0.06587375700473785, + -0.013586618937551975, + -0.020807752385735512, + 0.023367363959550858, + 0.011167124845087528, + 0.003386110533028841, + -0.024887114763259888, + -0.029615335166454315, + -0.02571641281247139, + -0.03150812163949013, + -0.0395360104739666, + -0.049686528742313385, + 
-0.023117102682590485, + -0.07580453157424927, + 0.020851964130997658, + 0.0917917937040329, + -0.038357049226760864, + 0.05106140300631523, + -0.03367459401488304, + 0.05801103636622429, + 0.0628814697265625, + 0.024997225031256676, + 0.015594316646456718, + -0.01490987278521061, + 0.07070998847484589, + -0.039144083857536316, + 0.0657331570982933, + -0.053744420409202576, + -0.01675831526517868, + -0.008745769970119, + -0.07664742320775986, + -0.06751038879156113, + 0.0023392336443066597, + 0.018902592360973358, + 0.06754770874977112, + 0.07430258393287659, + 0.0806465670466423, + -0.031056180596351624, + 0.06557579338550568, + 0.06529161334037781, + -0.03742394223809242, + 0.0007822285988368094, + 0.10523669421672821, + 0.0038901956286281347, + -0.014934191480278969, + 0.0647430568933487, + 0.03438747301697731, + -0.046527378261089325, + 0.014247977174818516, + -0.020184241235256195, + 0.016480082646012306, + -0.05491460859775543, + 0.07232335209846497, + -0.016330908983945847, + 0.011368552222847939, + -0.001964043825864792, + 0.0009170984267257154, + 0.019140562042593956, + -0.002758787479251623, + -0.05629577115178108, + 1.7957766384702998e-33, + -0.10579885542392731, + 0.08271613717079163, + 0.03821941837668419, + 0.06078806892037392, + 0.017647448927164078, + -0.07404755055904388, + 0.06083450838923454, + -0.07097837328910828, + -0.01949647255241871, + -0.005204185377806425, + 0.0160058680921793, + -0.03624944016337395, + 0.065463587641716, + -0.04834574833512306, + 0.09314870834350586, + -0.022509299218654633, + -0.047614336013793945, + -0.042122796177864075, + 0.0014064351562410593, + 0.08215921372175217, + 0.0144058121368289, + -0.08526691794395447, + -0.01885370910167694, + 0.020506983622908592, + -0.0041589876636862755, + -0.0928102508187294, + 0.0965222716331482, + 0.05469893291592598, + 0.002785224001854658, + 0.006347258575260639, + -0.09394793212413788, + -0.08587668836116791, + 0.00999284815043211, + -0.015109539031982422, + 0.035454027354717255, 
+ -0.08842843770980835, + -0.015698572620749474, + 0.05549640208482742, + -0.011119373142719269, + 0.012295924127101898, + 0.007523554377257824, + -0.03497130423784256, + -0.05309790000319481, + -0.021819932386279106, + 0.011010204441845417, + 0.0778549313545227, + 0.122015580534935, + 0.0451776348054409, + -0.0894458070397377, + 0.0031173918396234512, + -0.003828236600384116, + 0.0010151821188628674, + 0.0007775757694616914, + -0.0007406148943118751, + 0.0005911831394769251, + 0.029611686244606972, + -0.010095393285155296, + -0.015750357881188393, + -0.08871200680732727, + 0.06563369184732437, + 0.052563928067684174, + -0.02150006778538227, + 0.032858334481716156, + -0.039781685918569565, + -0.02986454777419567, + -0.017254218459129333, + 0.013349524699151516, + 0.04903600737452507, + -0.102760910987854, + 0.027411801740527153, + -0.007306735496968031, + 0.03547230362892151, + 0.03793823719024658, + -0.014224819839000702, + 0.004229242447763681, + -0.04914051666855812, + -0.05566011741757393, + -0.08426816016435623, + 0.0378078892827034, + -0.02177048847079277, + 0.037800222635269165, + 0.01145790982991457, + -0.03493969514966011, + -0.06417357921600342, + -0.04812582954764366, + -0.030254419893026352, + -0.12552082538604736, + 0.0017056012293323874, + -0.053679559379816055, + 0.019939688965678215, + -0.04766315221786499, + -0.143480584025383, + -0.024615854024887085, + 0.06507551670074463, + 0.01710103265941143, + -2.55080180279124e-33, + -0.01073896698653698, + 0.08023330569267273, + 0.028500312939286232, + -0.033364687114953995, + 0.018465891480445862, + -0.018969086930155754, + 0.116150863468647, + -0.04905116185545921, + 0.0067994301207363605, + -0.051097989082336426, + -0.047208935022354126, + 0.003005147911608219, + -0.006951641291379929, + 0.0299075860530138, + 0.023957515135407448, + 0.005555577110499144, + -0.020836569368839264, + 0.013542957603931427, + 0.09286782890558243, + -0.04009733721613884, + 0.05567550286650658, + 0.01991306059062481, + 
-0.16575683653354645, + -0.003300475887954235, + 0.11635366082191467, + 0.008300523273646832, + -0.1112738847732544, + 0.05307481065392494, + 0.009467027150094509, + 0.11263766884803772, + -0.04102758690714836, + -0.0505208782851696, + 0.1890914887189865, + -0.01593983918428421, + 0.011381726711988449, + 0.01095605455338955, + -0.08038999140262604, + -0.012621873058378696, + -0.005316049326211214, + 0.017261112108826637, + 0.03283751755952835, + -0.04533768445253372, + 0.03397509828209877, + 0.04211656376719475, + 0.024692395702004433, + -0.02541458234190941, + 0.02313675545156002, + 0.02338019199669361, + -0.011879520490765572, + -0.05438990890979767, + 0.03806900233030319, + 0.01261812262237072, + 0.02512892708182335, + -0.0028746703173965216, + -0.016077643260359764, + -0.032072994858026505, + -0.006427581422030926, + 0.01777057908475399, + 0.02934812381863594, + -0.05759994685649872, + -2.871774631785229e-05, + -0.03137838840484619, + -0.06273766607046127, + 0.04409930482506752, + -0.05993351340293884, + 0.007546861190348864, + 0.0053585791029036045, + 0.042325496673583984, + -0.007369876839220524, + 0.04489513114094734, + -0.12103329598903656, + 0.017391694709658623, + 0.0304956566542387, + -0.034047987312078476, + 0.02484256401658058, + -0.06834809482097626, + 0.06508748978376389, + 0.08324999362230301, + -0.020252887159585953, + -0.014722783118486404, + 0.02126440405845642, + -0.05160334333777428, + 0.045947108417749405, + 0.022960059344768524, + 0.023375188931822777, + -0.060902271419763565, + -0.05150751397013664, + -0.1094929575920105, + -0.04899677261710167, + 0.09132419526576996, + 0.051848214119672775, + -0.0077659315429627895, + 0.0012422297149896622, + 0.058530740439891815, + 0.040777210146188736, + -3.354356081786136e-08, + 0.025891084223985672, + -0.04088461399078369, + -0.06885679066181183, + 0.01951301097869873, + 0.047974348068237305, + 0.04472370818257332, + 0.004657004959881306, + 0.001041706302203238, + -0.02763887122273445, + 
0.03814717009663582, + 0.03166148066520691, + 0.0063626947812736034, + 0.09577886760234833, + 0.06234167888760567, + 0.0010398400481790304, + 0.010609040968120098, + 0.020408503711223602, + 0.05596008151769638, + 0.00923844799399376, + 0.011290326714515686, + 0.02393697388470173, + -0.03378620743751526, + 0.010788901709020138, + 0.0072112190537154675, + -0.03552679717540741, + -0.10475718975067139, + 0.003995304461568594, + -0.002284976886585355, + -0.014504319056868553, + -0.06887608021497726, + 0.03398992121219635, + -0.005206231493502855, + 0.049566611647605896, + 0.00902023445814848, + 0.06874160468578339, + 0.014804325066506863, + -0.07230424880981445, + 0.0428827665746212, + 0.013657039031386375, + 0.027973631396889687, + -0.035619381815195084, + 0.06485525518655777, + -0.06238642707467079, + -0.012459578923881054, + 0.020500177517533302, + -0.0715484470129013, + -0.16523504257202148, + 0.013638298027217388, + 0.07008316367864609, + 0.026970835402607918, + 0.004871702753007412, + -0.0012540861498564482, + -0.028708957135677338, + 0.05812879279255867, + 0.12611250579357147, + 0.09877888858318329, + -0.04118988662958145, + -0.02214396744966507, + -0.10328112542629242, + 0.029945021495223045, + 0.004513312131166458, + 0.011272193863987923, + 0.03294430673122406, + -0.042709026485681534 + ], + "model": "Hillcraft", + "pickup_zone": "POLYGON((-118.2887 34.0972, -118.1987 34.0972, -118.1987 33.9872, -118.2887 33.9872, -118.2887 34.0972))", + "price": 1200, + "store_location": "-118.2437,34.0522" + }, + { + "brand": "Nord", + "condition": "used", + "description": "The Chook Air 5 gives kids aged six years and older a durable and uberlight mountain bike for their first experience on tracks and easy cruising through forests and fields. 
The lower top tube makes it easy to mount and dismount in any situation, giving your kids greater safety on the trails.", + "description_embeddings": [ + -0.0018494834657758474, + 0.057690851390361786, + 0.038153987377882004, + 0.06570218503475189, + 0.028856752440333366, + -0.062333013862371445, + -0.014953209087252617, + 0.046022992581129074, + -0.08184631168842316, + 0.03648914024233818, + 0.03869181126356125, + 0.010564669035375118, + -0.020310621708631516, + -0.04062078520655632, + -0.0125962495803833, + 0.14169928431510925, + 0.03418859466910362, + -0.06641822308301926, + 0.005633663386106491, + -0.09943458437919617, + 0.023237863555550575, + -0.04369724169373512, + 0.016574522480368614, + 0.07258585095405579, + 0.018674470484256744, + -0.05764088034629822, + -0.0795072391629219, + 0.04034125804901123, + -0.036483921110630035, + -0.033106740564107895, + 0.02980787307024002, + -0.0028512384742498398, + 0.00786224752664566, + -0.03016488254070282, + -0.12349128723144531, + 0.031072411686182022, + 0.08362030982971191, + 0.025227056816220284, + -0.030982907861471176, + -0.006486377213150263, + -0.023590318858623505, + -0.03374557942152023, + -0.04145599156618118, + -0.09421771764755249, + -0.0013142612297087908, + -0.003397064981982112, + -0.0031338112894445658, + -0.11464792490005493, + 0.040542472153902054, + 0.02896481193602085, + 0.007327641360461712, + -0.06064218282699585, + 0.049546848982572556, + -0.05917377769947052, + -0.01963184028863907, + -0.002139812568202615, + -0.14361988008022308, + -0.05401389300823212, + 0.12506668269634247, + -0.07141950726509094, + -0.0032961040269583464, + 0.015251584351062775, + -0.05507654324173927, + -0.009836667217314243, + -0.02802908606827259, + -0.01053905300796032, + -0.03239851072430611, + -0.10646941512823105, + 0.03140658512711525, + 0.028125958517193794, + -0.004000179003924131, + -0.0018343725241720676, + 0.01727917790412903, + -0.013935663737356663, + -0.02435036562383175, + 0.04411163926124573, + 
-0.009158330969512463, + -0.023309389129281044, + 0.01229795441031456, + -0.04689493402838707, + -0.02138197235763073, + 0.013063939288258553, + 0.02832808345556259, + 0.031972818076610565, + 0.020882662385702133, + -0.015083174221217632, + 0.002903456799685955, + -0.047304242849349976, + -0.10658963769674301, + -0.06274145841598511, + -0.030370736494660378, + 0.0539257749915123, + 0.00848578754812479, + 0.02172423154115677, + -0.007691903971135616, + -0.10581931471824646, + 0.06078812852501869, + 0.007988635450601578, + -0.116583913564682, + 0.05237157270312309, + 0.024445366114377975, + -0.02832716703414917, + 0.04004029557108879, + 0.02844531089067459, + -0.0455174520611763, + -0.07379734516143799, + 0.09453199058771133, + -0.011599121615290642, + -0.0027194281574338675, + 0.014355232007801533, + -0.059690698981285095, + -0.012937567196786404, + 0.034315045922994614, + -0.047598548233509064, + -0.03261064738035202, + 0.05839747563004494, + -0.12691465020179749, + 0.03778312727808952, + 0.006131120957434177, + 0.04806787148118019, + -0.03441976010799408, + 0.08042734116315842, + 0.008934197947382927, + 0.027216315269470215, + 0.0016972541343420744, + -0.10113930702209473, + -0.0003218930505681783, + 9.75532108829862e-34, + -0.0537416972219944, + 0.06875170767307281, + -0.01566525548696518, + 0.01952524110674858, + -0.0005404680850915611, + -0.08984242379665375, + 0.04537447541952133, + -0.1295408457517624, + 7.603532867506146e-05, + 0.040753066539764404, + 0.016371281817555428, + 0.029906686395406723, + -0.005372706335037947, + -0.06687828153371811, + 0.11607439070940018, + 0.016209086403250694, + -0.11238335072994232, + -0.057236358523368835, + -0.09619198739528656, + 0.027146028354763985, + -0.09542766213417053, + -0.0360424630343914, + -0.09913153201341629, + 0.04242968559265137, + 0.05494842678308487, + -0.025294257327914238, + 0.06307625770568848, + 0.007745219860225916, + -0.019641151651740074, + 0.07056662440299988, + -0.05425839126110077, + 
0.012385660782456398, + -0.006104010157287121, + -0.07186716794967651, + -0.10919132828712463, + -0.0017968777101486921, + 0.010471112094819546, + -0.011221444234251976, + -0.035078633576631546, + 0.009300827980041504, + 0.0802159234881401, + -0.10042990744113922, + -0.01718892529606819, + 0.05525779724121094, + -0.019847391173243523, + 0.08405174314975739, + 0.06403975188732147, + 0.0627448782324791, + -0.07871688157320023, + 0.0002676364383660257, + -0.05110163986682892, + 0.006078454665839672, + -0.019803548231720924, + -0.014253707602620125, + 0.039836447685956955, + 0.021812820807099342, + 0.00014793995069339871, + -0.006739508826285601, + -0.023166682571172714, + 0.05429920181632042, + 0.1337796151638031, + -0.01773720420897007, + 0.024304436519742012, + -0.016411837190389633, + -0.07777299731969833, + 0.042669400572776794, + 0.06226645037531853, + -0.023803485557436943, + 0.00396164134144783, + -0.049458179622888565, + -0.004774407483637333, + 0.036529600620269775, + 0.02002328634262085, + -0.02465248480439186, + -0.024076614528894424, + -0.03887653350830078, + 0.057176534086465836, + -0.03888818621635437, + -0.027698175981640816, + 0.001469603506848216, + 0.04755152389407158, + -0.0938708707690239, + 0.013991769403219223, + -0.026987746357917786, + -0.03277081623673439, + -0.04837489873170853, + 0.02711542509496212, + -0.09185922145843506, + -0.03453921154141426, + 0.03532274439930916, + -0.024472877383232117, + -0.09732313454151154, + 0.008513586595654488, + 0.03207352012395859, + 0.030766190961003304, + -2.5405224720352836e-33, + 0.08067575097084045, + -0.008160247467458248, + 0.03101508319377899, + 0.005022854544222355, + 0.0010000152979046106, + 0.06958161294460297, + 0.10594375431537628, + -0.06177408620715141, + -0.01650322414934635, + 0.03043895959854126, + -0.018333615735173225, + 0.06436911225318909, + 0.02439672127366066, + -0.01213911734521389, + 0.09937868267297745, + 0.02093592658638954, + -0.003140130080282688, + -0.016580305993556976, + 
0.10430759936571121, + -0.019038936123251915, + 0.05574416741728783, + 0.07455366849899292, + -0.029936065897345543, + 0.0017704741330817342, + 0.07022519409656525, + 0.02108696848154068, + -0.050372399389743805, + 0.027957700192928314, + 0.005698348395526409, + 0.03494682535529137, + -0.02610155940055847, + -0.0068950653076171875, + 0.1346345692873001, + 0.03947756066918373, + -0.05966781824827194, + -0.010489783249795437, + -0.04099087417125702, + -0.027186688035726547, + -0.04620056599378586, + -0.02478279173374176, + -0.007060651201754808, + -0.023514581844210625, + 0.06082035228610039, + -0.05849218741059303, + -0.0036981222219765186, + 0.04169313609600067, + 0.09352244436740875, + 0.020741308107972145, + -0.0019505824893712997, + -0.10489305853843689, + 0.08789870887994766, + 0.039999835193157196, + -0.014334832318127155, + 0.008347390219569206, + 0.03113699145615101, + 0.10894497483968735, + 0.027165820822119713, + 0.0064145540818572044, + -0.00803150050342083, + -0.055484138429164886, + -0.03251631557941437, + 0.02290980890393257, + 0.04825572296977043, + 0.01608354039490223, + -0.04969468340277672, + -0.004120110999792814, + -0.03278858959674835, + 0.009696378372609615, + -0.04376300796866417, + -0.0009336083312518895, + -0.0313178189098835, + -0.05882050469517708, + 0.057815250009298325, + -0.02050788328051567, + -0.024381538853049278, + 0.06283771246671677, + 0.06954411417245865, + 0.09720556437969208, + -0.056403111666440964, + -0.04526498168706894, + -0.07259991019964218, + 0.001844316371716559, + 0.04090375825762749, + 0.06941819936037064, + -0.041316937655210495, + 0.002292247023433447, + -0.03425106778740883, + -0.0628972053527832, + 0.0063382769003510475, + 0.09668626636266708, + 0.062228571623563766, + 0.03658486530184746, + -0.08789398521184921, + 0.0009696391643956304, + -0.004108588211238384, + -3.183854957455878e-08, + 0.09346635639667511, + 0.042943451553583145, + -0.0005091542261652648, + 0.026524608954787254, + 0.009858721867203712, + 
0.03737989068031311, + -0.014056878164410591, + 0.038327474147081375, + -0.010239921510219574, + 0.05757640674710274, + 0.014411918818950653, + 0.038742948323488235, + 0.06475342065095901, + -0.0011537548853084445, + -0.017729472368955612, + 0.05167960748076439, + 0.024419916793704033, + 0.08681508898735046, + 0.015235288999974728, + 0.036676522344350815, + -0.03421042487025261, + -0.019762659445405006, + 0.09467294812202454, + -0.053133491426706314, + -0.04356615990400314, + -0.03919585049152374, + 0.015591761097311974, + 0.021372869610786438, + -0.0058142030611634254, + -0.022864477708935738, + -0.02901598811149597, + 0.0458187572658062, + -0.030826766043901443, + -0.008975986391305923, + 0.03365883231163025, + -0.010484383441507816, + -0.1445801854133606, + 0.05828193947672844, + -0.028194306418299675, + 0.05975533276796341, + 0.014028828591108322, + 0.0036430624313652515, + 0.024983061477541924, + 0.01454286277294159, + -0.006972659844905138, + 0.037446193397045135, + -0.06949692964553833, + -0.09630566090345383, + 0.03263894096016884, + 0.048720087856054306, + -0.06886433064937592, + 0.018142051994800568, + 0.03894415125250816, + 0.05843216925859451, + 0.06860719621181488, + 0.04971907287836075, + -0.025701280683279037, + -0.06293400377035141, + -0.05422094464302063, + 0.01912975125014782, + 0.009564549662172794, + 0.055643752217292786, + -0.0027948219794780016, + 0.0329461470246315 + ], + "model": "Chook air 5", + "pickup_zone": "POLYGON((-87.6848 41.9331, -87.5748 41.9331, -87.5748 41.8231, -87.6848 41.8231, -87.6848 41.9331))", + "price": 815, + "store_location": "-87.6298,41.8781" + }, + { + "brand": "Eva", + "condition": "used", + "description": "The sister company to Nord, Eva launched in 2005 as the first and only women-dedicated bicycle brand. Designed by women for women, allEva bikes are optimized for the feminine physique using analytics from a body metrics database. If you like 29ers, try the Eva 291. It\u2019s a brand new bike for 2022.. 
This full-suspension, cross-country ride has been designed for velocity. The 291 has 100mm of front and rear travel, a superlight aluminum frame and fast-rolling 29-inch wheels. Yippee!", + "description_embeddings": [ + 0.035103436559438705, + 0.02666405402123928, + -0.06364161521196365, + 0.02551678754389286, + -0.005281867925077677, + -0.04041111096739769, + -0.020820949226617813, + -0.03485208749771118, + -0.07683732360601425, + 0.021527189761400223, + 0.007516156416386366, + -0.0034474905114620924, + 0.030465560033917427, + -0.06572654843330383, + -0.020914506167173386, + 0.032637566328048706, + 0.0815017819404602, + -0.022764643654227257, + 0.07385077327489853, + 0.058004263788461685, + -0.03062591142952442, + -0.03927106410264969, + 0.01410673838108778, + 0.05480317771434784, + -0.05647570267319679, + -0.008725483901798725, + -0.05373937636613846, + 0.03546926751732826, + -0.026455262675881386, + -0.09718386828899384, + -0.040623344480991364, + 0.03622450679540634, + 0.08971034735441208, + -0.038574062287807465, + -0.05105822533369064, + 0.021020587533712387, + 0.003312851767987013, + -0.008969941176474094, + -0.0568903312087059, + 0.033502694219350815, + -0.0578635074198246, + -0.03721226751804352, + -0.036668986082077026, + 0.0022641047835350037, + 0.038053013384342194, + 0.03350543603301048, + 0.05258995667099953, + -0.007619654294103384, + -0.05145309492945671, + 0.026286069303750992, + 0.09238694608211517, + -0.06733417510986328, + 0.05791507661342621, + -0.07623173296451569, + -0.0052175214514136314, + -0.04393303394317627, + -0.15249019861221313, + -0.019764462485909462, + -0.03331150859594345, + -0.11831824481487274, + 0.05683637782931328, + 0.01903577335178852, + -0.023615414276719093, + 0.04120348393917084, + -0.05669170990586281, + 0.002014430705457926, + 0.009953595697879791, + 0.008390115574002266, + -0.025982441380620003, + -0.05977262556552887, + 0.0611012764275074, + 0.010146236047148705, + -0.047625984996557236, + 0.09061623364686966, + 
0.02551751770079136, + 0.06041749566793442, + 0.1113404780626297, + 0.017673874273896217, + -0.05184035003185272, + 0.023109234869480133, + -0.04408435523509979, + -0.09029985964298248, + 0.06733208149671555, + 0.049696795642375946, + 0.05554559826850891, + 0.01563390903174877, + -0.019527558237314224, + -0.014796985313296318, + 0.003595865098759532, + 0.012917418964207172, + -0.06858907639980316, + -0.008697138167917728, + 0.04721860587596893, + 0.03153855353593826, + -0.05275731533765793, + 0.019923491403460503, + -0.009707989171147346, + -0.018920045346021652, + 0.03773808106780052, + 0.042773403227329254, + -0.03819388151168823, + 0.04029008001089096, + 0.060834936797618866, + 0.07531940191984177, + -0.04695741459727287, + 0.00488079572096467, + 0.13518987596035004, + -0.014429144561290741, + 0.0314130075275898, + 0.04135369136929512, + -0.021350333467125893, + -0.01289785373955965, + 0.032701168209314346, + -0.037027571350336075, + -0.018168980255723, + -0.07188623398542404, + -0.036501020193099976, + 0.03303954005241394, + 0.10676111280918121, + -0.03277475759387016, + -0.06245853006839752, + -0.011879103258252144, + 0.0533418171107769, + -0.01446340698748827, + 0.03530410677194595, + 0.05820532143115997, + -0.0015766610158607364, + -8.477015062817539e-34, + -0.10837127268314362, + 0.015582970343530178, + 0.0182977095246315, + 0.055213626474142075, + -0.03584470599889755, + 0.02778925560414791, + 0.08698058873414993, + -0.04999695345759392, + -0.0842076763510704, + -0.040448326617479324, + -0.06311061233282089, + 0.02668358013033867, + 0.04676111042499542, + -0.006417605560272932, + 0.12788759171962738, + 0.02197197638452053, + 0.07035308331251144, + -0.04392284154891968, + -0.008788101375102997, + -0.001108768628910184, + 0.07960236072540283, + -0.0019975434988737106, + -0.010499294847249985, + 0.03599945828318596, + 0.02329486794769764, + -0.01962442137300968, + 0.15486162900924683, + 0.027215363457798958, + -0.022036811336874962, + 0.04227740690112114, + 
-0.07894206047058105, + -0.03180933743715286, + 0.020022651180624962, + -0.05859709531068802, + -0.0009564562351442873, + 0.009732870385050774, + -0.09007531404495239, + 0.041766807436943054, + 0.003291067900136113, + 0.0950121283531189, + 0.02693203091621399, + -0.01629827171564102, + -0.01884087361395359, + -0.029889501631259918, + -0.003714299062266946, + 0.09247232973575592, + 0.06151247024536133, + 0.09120925515890121, + -0.011266379617154598, + 0.03680066391825676, + -0.1628604233264923, + -0.008045083843171597, + -0.023876454681158066, + 0.046465914696455, + 0.01841152086853981, + 0.01759498566389084, + -0.012628139927983284, + 0.02123965509235859, + -0.07098977267742157, + 0.025028834119439125, + -0.032171521335840225, + 0.0538402758538723, + -0.00211805896833539, + -0.019548164680600166, + -0.046793293207883835, + 0.04394293949007988, + 0.022090770304203033, + -0.058215122669935226, + -0.05101722851395607, + 0.014057288877665997, + -0.05860457569360733, + 0.006166193168610334, + 0.04641130194067955, + 0.03165149688720703, + 0.045289356261491776, + 0.038580797612667084, + -0.0335664264857769, + 0.013378577306866646, + 0.058321163058280945, + -0.03730795532464981, + -0.01996016688644886, + 0.025774186477065086, + 0.001352976425550878, + 0.023636724799871445, + 0.026727410033345222, + -0.0730309784412384, + -0.01613243669271469, + -0.009525856003165245, + -0.03985843434929848, + -0.003976964857429266, + -0.006743384525179863, + -0.06324627995491028, + 0.03959967568516731, + 0.021898096427321434, + -0.004184887744486332, + -3.110083923319741e-34, + 0.03726857528090477, + -0.016945818439126015, + 0.05512943118810654, + 0.0471712127327919, + 0.097488172352314, + 0.02456829510629177, + 0.08276744186878204, + -0.02230781689286232, + -0.03785759210586548, + 0.010082835331559181, + 0.07151538133621216, + -0.04426267370581627, + 0.03402319550514221, + 0.05808456242084503, + 0.07566764950752258, + 0.07006501406431198, + 0.002404613886028528, + -0.10078012198209763, + 
0.0029093879275023937, + -0.12370351701974869, + -0.03431423008441925, + 0.08334372937679291, + 0.001441179309040308, + -0.08001153916120529, + 0.020076438784599304, + 0.011177998036146164, + 0.034560561180114746, + 0.041609298437833786, + -0.04921843111515045, + 0.03598331660032272, + -0.10671709477901459, + 0.02969883382320404, + 0.048317357897758484, + 0.12375228852033615, + 0.040479566901922226, + 0.0330270379781723, + -0.06994733214378357, + 0.008698385208845139, + 0.03447363153100014, + -0.012882913462817669, + -0.02672695368528366, + -0.06423910707235336, + -0.0506032295525074, + 0.0053747911006212234, + 0.0682206079363823, + -0.005316274706274271, + 0.027013784274458885, + -0.006269444711506367, + 0.08528510481119156, + -0.0731915608048439, + 0.0046186731196939945, + -0.024244142696261406, + 0.0360478051006794, + 0.020591434091329575, + 0.04302601516246796, + -0.14392279088497162, + 0.05873512104153633, + -0.010028650052845478, + -0.04548479616641998, + -0.013535127975046635, + 0.01113649271428585, + 0.028603754937648773, + -0.04036158323287964, + 0.0704532265663147, + -0.09373150765895844, + -0.07198052108287811, + -0.002203285926952958, + -0.0855080857872963, + -0.08842256665229797, + -9.343179408460855e-05, + -0.029457710683345795, + -0.02197202481329441, + -0.048936717212200165, + 0.06165122613310814, + -0.0626755878329277, + 0.009602922946214676, + 0.07251632958650589, + 0.03113914094865322, + -0.0010769155342131853, + -0.040936876088380814, + -0.04328843951225281, + -0.020654935389757156, + 0.08143945783376694, + 0.03210373967885971, + 0.03849901258945465, + 0.08388370275497437, + -0.04882088676095009, + -0.04764509201049805, + -0.01594746671617031, + 0.08005915582180023, + -0.0007682700525037944, + 0.04356953501701355, + -0.08784972876310349, + -0.009003485552966595, + -0.012332507409155369, + -3.9398202034135466e-08, + 0.006421600468456745, + 0.04334808140993118, + 0.0013437344459816813, + -0.009495558217167854, + -0.022657591849565506, + 
-0.012704327702522278, + 0.018703341484069824, + -0.07545771449804306, + -0.07807240635156631, + 0.0031038280576467514, + -0.023932253941893578, + 0.040701914578676224, + 0.10603249073028564, + 0.03773405775427818, + 0.05925403907895088, + -0.02083730883896351, + 0.052759915590286255, + 0.08195225894451141, + -0.057340435683727264, + -0.03538660705089569, + 0.04492119327187538, + -0.07101765275001526, + 0.02868317998945713, + -0.11165601015090942, + -0.03126508370041847, + -0.07953942567110062, + -0.022893071174621582, + -0.08839578926563263, + 0.00727836275473237, + -0.08685773611068726, + 0.026177138090133667, + 0.028831886127591133, + 0.03626682609319687, + -0.045381225645542145, + -0.013574030250310898, + 0.028583087027072906, + -0.004301569424569607, + 0.04326719045639038, + -0.023370176553726196, + 0.057897310703992844, + -0.029195263981819153, + -0.001789534231647849, + 0.032133687287569046, + 0.003419605316594243, + 0.028055502101778984, + -0.0038284522015601397, + -0.022835813462734222, + -0.07568305730819702, + 0.017481982707977295, + 0.013085611164569855, + 0.009357997216284275, + -0.030347391963005066, + -0.0020967889577150345, + 0.03544173017144203, + 0.004640925209969282, + 0.025202661752700806, + -0.0916307270526886, + -0.06341513246297836, + -0.017053432762622833, + 0.06747056543827057, + 0.0467485710978508, + -0.14364545047283173, + 0.024641912430524826, + -0.04414863884449005 + ], + "model": "Eva 291", + "pickup_zone": "POLYGON((-80.2433 25.8067, -80.1333 25.8067, -80.1333 25.6967, -80.2433 25.6967, -80.2433 25.8067))", + "price": 3400, + "store_location": "-80.1918,25.7617" + }, + { + "brand": "Noka Bikes", + "condition": "used", + "description": "Whether you want to try your hand at XC racing or are looking for a lively trail bike that's just as inspiring on the climbs as it is over rougher ground, the Wilder is one heck of a bike built specifically for short women. 
Both the frames and components have been tweaked to include a women\u2019s saddle, different bars and unique colourway.", + "description_embeddings": [ + 0.07420087605714798, + -0.01575474441051483, + 0.010670951567590237, + 0.07449527084827423, + 0.0060751838609576225, + 0.03211340680718422, + -0.008086569607257843, + 0.025549469515681267, + -0.07154879719018936, + 0.014914358966052532, + -0.030715830624103546, + -0.0064881909638643265, + -0.0071744490414857864, + -0.0495535172522068, + 0.03856685757637024, + 0.056132566183805466, + 0.10794447362422943, + -0.010181701742112637, + 0.0811777114868164, + 0.10135157406330109, + -0.0769873708486557, + -0.019640743732452393, + -0.028702793642878532, + 0.07654724270105362, + -0.07045057415962219, + -0.10104052722454071, + -0.004679638426750898, + -0.0027665267698466778, + 0.030919065698981285, + -0.05461210012435913, + -0.020669877529144287, + 0.031864751130342484, + 0.0835040882229805, + -0.05930671840906143, + -0.006361816544085741, + -0.044132985174655914, + 0.035945694893598557, + -0.0027891425415873528, + -0.07598478347063065, + -0.03105698712170124, + -0.059601303189992905, + -0.018435664474964142, + -0.03314891457557678, + 0.06877987831830978, + -0.016064301133155823, + 0.01545438077300787, + 0.056307148188352585, + -0.08382677286863327, + -0.05274924635887146, + 0.01139853335916996, + 0.10750263929367065, + -0.004042057786136866, + 0.003356053726747632, + -0.01083006989210844, + -0.06153054162859917, + -0.054990023374557495, + -0.1360272616147995, + 0.025806479156017303, + 0.043233320116996765, + -0.009591769427061081, + 0.04422037675976753, + 0.02436559833586216, + -0.0012137923622503877, + -0.015348327346146107, + 0.02425999939441681, + -0.03409525752067566, + -0.08487176895141602, + 0.017345253378152847, + 0.035498712211847305, + -0.05615586042404175, + 0.02745831571519375, + 0.04017677903175354, + -0.0936250239610672, + 0.048734042793512344, + 0.024287866428494453, + -0.030274393036961555, + 
0.08800201117992401, + 0.0879313051700592, + -0.0550895519554615, + -0.0026407642289996147, + -0.09552258998155594, + -0.05870680510997772, + 0.09009534120559692, + 0.05674433708190918, + 0.05360940843820572, + -0.0023017164785414934, + 0.005036584101617336, + -0.017987212166190147, + -0.05129106715321541, + -0.0012550759129226208, + -0.05818512290716171, + 0.06725005060434341, + 0.02540011890232563, + 0.054589059203863144, + 0.01723511517047882, + -0.04752320423722267, + 0.10586173087358475, + 0.05530202388763428, + -0.03287632390856743, + 0.05258375406265259, + 0.01655220240354538, + 0.011849797330796719, + 0.032285332679748535, + 0.03760116547346115, + -0.017522728070616722, + -0.05457543954253197, + 0.006817341782152653, + -0.028400525450706482, + -0.0027318967040628195, + -0.04200681298971176, + -0.026171568781137466, + -0.03272707015275955, + 0.016305547207593918, + 0.01545319240540266, + 0.0056663742288947105, + -0.0977277085185051, + -0.0009383464348502457, + -0.005693590268492699, + 0.039240892976522446, + 0.01645808108150959, + -0.01950509287416935, + 0.007762903813272715, + -0.004406424704939127, + 0.016364052891731262, + -0.006125410553067923, + -0.029666034504771233, + 0.044946398586034775, + 1.8357034061219326e-33, + -0.053835414350032806, + 0.03495992720127106, + -0.05977822467684746, + -0.009059705771505833, + 0.004659880883991718, + 0.022712793201208115, + 0.04409413039684296, + -0.10094501823186874, + -0.08702167868614197, + 0.0066354661248624325, + 0.011878615245223045, + 0.0033176038414239883, + -0.012380938045680523, + 0.0010130798909813166, + 0.0644172728061676, + -0.07130810618400574, + -0.0045928386971354485, + -0.03375661373138428, + -0.022520871832966805, + 0.07504180818796158, + 0.009861051104962826, + 0.07191396504640579, + -0.014361198991537094, + 0.01449025422334671, + -0.010663183405995369, + -0.028888769447803497, + 0.09297508746385574, + 0.04386264830827713, + -0.09579017758369446, + 0.007718369830399752, + -0.11800525337457657, + 
-0.020133504644036293, + -0.0034684871789067984, + -0.03735410049557686, + 0.006975250784307718, + -0.012902614660561085, + 0.04201272875070572, + 0.04273228347301483, + -0.07133302837610245, + 0.09145014733076096, + 0.0048939259722828865, + -0.058044496923685074, + -0.017591776326298714, + -0.03502552956342697, + -0.05678534135222435, + 0.05471408739686012, + 0.09568087011575699, + 0.09486310184001923, + -0.045761823654174805, + 0.018188882619142532, + -0.12331638485193253, + -0.023129422217607498, + -0.05455191433429718, + 0.06732556223869324, + 0.048286039382219315, + -0.0014048831071704626, + 0.03308422118425369, + -0.009839850477874279, + -0.05661558359861374, + 0.03290552645921707, + 0.03661618381738663, + 0.0047610136680305, + -0.05254451557993889, + 0.006052205804735422, + -0.13153813779354095, + -0.006893055979162455, + 0.037368785589933395, + -0.03058353252708912, + -0.0017269864911213517, + -0.008851475082337856, + -0.0740399956703186, + 0.057353563606739044, + 0.12296456098556519, + 0.0413251556456089, + 0.09911972284317017, + 0.016817500814795494, + -0.048074446618556976, + 0.05658562108874321, + -0.012700358405709267, + -0.08663205802440643, + 0.0014393541496247053, + 0.00613820506259799, + -0.04446813836693764, + 0.04673916846513748, + -0.02757285162806511, + -0.05133393779397011, + -0.028082016855478287, + -0.04668722674250603, + -0.03849129378795624, + -0.047927819192409515, + -0.06503037363290787, + -0.08361010253429413, + 0.0393000952899456, + 0.04658354073762894, + 0.009724845178425312, + -2.0251938301584547e-33, + 0.07265108078718185, + -0.04307040572166443, + 0.04996086284518242, + 0.03520473092794418, + 0.1084771379828453, + 0.005833788774907589, + 0.046216338872909546, + -0.04042917117476463, + -0.04179416596889496, + 0.03598152473568916, + 0.04082870855927467, + -0.10612837225198746, + -0.010728210210800171, + 0.03610331192612648, + 0.027484672144055367, + -0.008130725473165512, + 0.026157474145293236, + -0.07420457899570465, + 
0.025218356400728226, + -0.0898737907409668, + -0.02742879092693329, + 0.03535477817058563, + -0.062408916652202606, + -0.1569257229566574, + -0.018789052963256836, + 0.04845775291323662, + -0.015149657614529133, + -0.0018356313230469823, + -0.04590461030602455, + 0.023325689136981964, + -0.13069787621498108, + -0.01994006149470806, + -0.021577563136816025, + -0.0231519415974617, + -0.002478161361068487, + 0.022444186732172966, + -0.04681165888905525, + 0.02769998274743557, + 0.003438781714066863, + 0.03829822316765785, + 0.0243418887257576, + -0.020368436351418495, + -0.005010040942579508, + 0.04648580029606819, + 0.061260052025318146, + 0.03784928843379021, + 0.008547516539692879, + 0.027585946023464203, + 0.043608926236629486, + 0.047329846769571304, + 0.10433284938335419, + 0.0069401939399540424, + 0.043503858149051666, + 0.06499945372343063, + -0.01040438748896122, + -0.11596397310495377, + 0.06451629102230072, + 0.032956890761852264, + 0.0007023254293017089, + 0.0898808017373085, + -0.055529963225126266, + -0.006572310347110033, + -0.06089714914560318, + 0.022685443982481956, + -0.0013110691215842962, + -0.09440209716558456, + -0.03121936321258545, + -0.003408250631764531, + -0.08051110059022903, + 0.01802976056933403, + 0.032015636563301086, + -9.986010991269723e-05, + -0.011806532740592957, + 0.06818002462387085, + 0.028290249407291412, + -0.005312844179570675, + 0.06016719341278076, + 0.014397598803043365, + 0.07122596353292465, + 0.007864296436309814, + 0.03002011403441429, + -0.05413847416639328, + 0.09744291007518768, + -0.01965983398258686, + 0.04150940105319023, + 0.14307892322540283, + -0.11320102959871292, + 0.03503091260790825, + 0.003515399992465973, + -0.023677969351410866, + 0.03955819085240364, + 0.0534457303583622, + 0.01904974691569805, + 0.031267836689949036, + -0.018026702105998993, + -3.930426117904062e-08, + 0.0003057526773773134, + 0.013398992829024792, + -0.10113491117954254, + -0.016498351469635963, + -0.00534934364259243, + 
0.017148228362202644, + -0.03217773512005806, + -0.030934104695916176, + -0.06334194540977478, + 0.07242853194475174, + -0.0047585368156433105, + -0.01996637135744095, + -0.0016463182400912046, + 0.01777566224336624, + -0.04115547239780426, + 0.02906038798391819, + 0.10273877531290054, + 0.022776247933506966, + 0.030065912753343582, + -0.014570299535989761, + 0.047994960099458694, + -0.044575825333595276, + 0.025753378868103027, + -0.002644166350364685, + -0.09429919719696045, + -0.11280428618192673, + 0.021306486800312996, + 0.010426397435367107, + 0.021093064919114113, + -0.00363176385872066, + -0.024409662932157516, + 0.11473581194877625, + 0.0020395806059241295, + -0.08371622860431671, + -0.03764123469591141, + 0.039440982043743134, + -0.022172994911670685, + 0.0410802997648716, + -0.05086303874850273, + 0.053952641785144806, + -0.06084167957305908, + -0.036132071167230606, + 0.02080748789012432, + -0.043120622634887695, + 0.04048219695687294, + 0.05917482078075409, + 0.04085611552000046, + -0.05118294060230255, + 0.015727631747722626, + 0.01915580965578556, + 0.03531794995069504, + -0.058007121086120605, + 0.08498086780309677, + 0.0036822939291596413, + 0.018125539645552635, + 0.10145474225282669, + -0.05804500728845596, + -0.008451723493635654, + -0.029611116275191307, + 0.008798911236226559, + -0.03220565244555473, + -0.05617227777838707, + 0.03482669219374657, + -0.03508705273270607 + ], + "model": "Kahuna", + "pickup_zone": "POLYGON((-122.4644 37.8199, -122.3544 37.8199, -122.3544 37.7099, -122.4644 37.7099, -122.4644 37.8199))", + "price": 3200, + "store_location": "-122.4194,37.7749" + }, + { + "brand": "Breakout", + "condition": "new", + "description": "The XBN 2.1 Alloy is our entry-level road bike \u2013 but that\u2019s not to say that it\u2019s a basic machine. 
With an internal weld aluminium frame, a full carbon fork, and the slick-shifting Claris gears from Shimano\u2019s, this is a bike which doesn\u2019t break the bank and delivers craved performance.", + "description_embeddings": [ + -0.03482655808329582, + -0.024290302768349648, + 0.029588153585791588, + -0.04068901762366295, + 0.006173150148242712, + -0.06071849912405014, + 0.021348219364881516, + 0.032668184489011765, + -0.12000397592782974, + -0.026135660707950592, + -0.0262606181204319, + -0.014927615411579609, + 0.06986956298351288, + -0.006424048915505409, + -0.002834598533809185, + -0.005902229342609644, + 0.07471967488527298, + -0.10197333246469498, + 0.08836513012647629, + 0.0504477359354496, + 0.005102860741317272, + 0.0359189435839653, + -0.0477643720805645, + 0.02106260135769844, + 0.030146656557917595, + 0.04048784077167511, + -0.023416390642523766, + -0.0016326266340911388, + 0.039820387959480286, + -0.04475116729736328, + -0.03422572463750839, + 0.09266746789216995, + -0.04106999188661575, + -0.02442021667957306, + -0.006325263995677233, + -0.057226430624723434, + 0.12074039876461029, + 0.012658749707043171, + -0.09748225659132004, + -0.048439353704452515, + -0.004550903104245663, + -0.014449959620833397, + 0.03684601932764053, + 0.018772389739751816, + 0.09577438980340958, + 0.04358510673046112, + 0.06987427175045013, + -0.0561334528028965, + -0.03684744983911514, + 0.02584502473473549, + 0.039536770433187485, + -0.061133887618780136, + 0.05351594462990761, + -0.025124184787273407, + 0.09431525319814682, + 0.05062168091535568, + -0.11715397238731384, + 0.044368330389261246, + -0.024262988939881325, + -0.024561986327171326, + 0.036067184060811996, + 0.025171849876642227, + 0.033648427575826645, + 0.046933531761169434, + 0.09548858553171158, + -0.06253167241811752, + -0.045195333659648895, + -0.04183578118681908, + -0.040385909378528595, + -0.027590686455368996, + 0.08240272104740143, + -0.04743463918566704, + 0.004482025280594826, + 
0.03219064325094223, + 0.0007497097249142826, + -0.04202196002006531, + 0.10904812812805176, + -0.06721051037311554, + -0.023717032745480537, + -0.01718866638839245, + -0.11568725854158401, + 0.038704562932252884, + 0.04929516091942787, + -0.03311185538768768, + -0.0017465045675635338, + -0.06153444945812225, + 0.019559966400265694, + -0.010708335787057877, + -0.06623212993144989, + 0.03114122338593006, + 0.03581896796822548, + 0.05368824675679207, + 0.016093581914901733, + -0.0077278027310967445, + 0.025581376627087593, + 0.02375067211687565, + -0.024797597900032997, + 0.08371419459581375, + -0.023117447271943092, + 0.04535554349422455, + -0.007224411237984896, + 0.06131899356842041, + 0.02283627539873123, + -0.052097078412771225, + -0.07922855019569397, + 0.0016958570340648293, + 0.051785532385110855, + 0.06701217591762543, + -0.02913537621498108, + 0.020526740700006485, + 0.03941992297768593, + -0.02217993699014187, + -0.08051256835460663, + -0.10155311226844788, + -0.05453299731016159, + -0.06470758467912674, + 0.002029003808274865, + -0.013401892967522144, + -0.011006158776581287, + 0.045922696590423584, + -0.040529411286115646, + 0.00991761963814497, + -0.06682797521352768, + 0.05892198532819748, + -0.028617633506655693, + -0.06339140236377716, + 0.0044001322239637375, + -4.362402064591546e-33, + -0.10147389024496078, + 0.039431530982255936, + -0.04599899426102638, + -0.049156975001096725, + -0.02249804697930813, + -0.026725362986326218, + 0.04369090870022774, + -0.0005211823736317456, + -0.030739039182662964, + -0.0065231663174927235, + 0.025333819910883904, + 0.10470504313707352, + 0.021332278847694397, + 0.05556178465485573, + 0.035125020891427994, + -0.1538446545600891, + 0.007054235320538282, + -0.036041803658008575, + 0.05932909622788429, + 0.0337519533932209, + 0.014841740019619465, + 0.030220910906791687, + 0.04296395927667618, + 0.02538421005010605, + -0.006869863253086805, + -0.0031259972602128983, + 0.11390665918588638, + -0.046557214111089706, + 
0.00429841224104166, + 0.0428440123796463, + -0.1384042203426361, + 0.04695465788245201, + -0.0574827715754509, + -0.030760489404201508, + -0.07478123158216476, + -0.029447706416249275, + -0.06204935908317566, + -0.03058161959052086, + -0.02502700313925743, + -0.019530421122908592, + -0.02094511315226555, + -0.03218250721693039, + -0.05274674668908119, + -0.02680223248898983, + -0.01955571211874485, + 0.018619371578097343, + 0.007585515268146992, + 0.07155954837799072, + -0.018960801884531975, + -0.05782055854797363, + 0.017819860950112343, + 0.04109930992126465, + -0.03562675416469574, + 0.016705146059393883, + 0.0415029413998127, + 0.05103381723165512, + 0.035572972148656845, + -0.015020242892205715, + -0.03179502114653587, + 0.0891512930393219, + 0.007475084625184536, + 0.042234599590301514, + -0.0542532242834568, + 0.05480321869254112, + -0.10602187365293503, + 0.054763536900281906, + 0.04739809036254883, + -0.03327919542789459, + 0.0082072913646698, + -0.0650608167052269, + -0.08271145075559616, + -0.10922762006521225, + 0.03824940696358681, + 0.05978637561202049, + 0.10005365312099457, + 0.029007570818066597, + -0.034110404551029205, + 0.02545103058218956, + 0.05190473049879074, + -0.003596875350922346, + -0.08473207801580429, + 0.012021052651107311, + 0.018175840377807617, + 0.017920689657330513, + -0.03348258137702942, + 0.04048585146665573, + 0.030843179672956467, + 0.019531769677996635, + -0.00910205114632845, + 0.05721440538764, + 0.007200753781944513, + -0.11394353210926056, + 0.03483451530337334, + 0.029621547088027, + -0.03755185008049011, + 9.393710556981876e-34, + -0.009642334654927254, + -0.001342272269539535, + 0.01180787943303585, + 0.026348579674959183, + -0.011158440262079239, + -0.0019478596514090896, + 0.028490403667092323, + -0.052807874977588654, + -0.029139718040823936, + 0.08380713313817978, + 0.09657946228981018, + 0.00991911068558693, + 0.03502354025840759, + -0.016754556447267532, + 0.017460109665989876, + -0.01642783358693123, + 
0.022211389616131783, + -0.008778599090874195, + 3.654864485724829e-05, + -0.09229966253042221, + 0.04083291441202164, + 0.08299239724874496, + -0.04151370748877525, + -0.05360836163163185, + -0.07950830459594727, + 0.03959093242883682, + -0.13340364396572113, + 0.054901909083127975, + 0.05736061558127403, + 0.04770279303193092, + -0.11388063430786133, + 0.0237369854003191, + 0.02305987849831581, + -0.013226242735981941, + -0.020716888830065727, + 0.03368164971470833, + -0.002396275522187352, + -0.012604329735040665, + -0.02715742215514183, + 0.0019254887010902166, + -0.00887741707265377, + -0.028728270903229713, + -0.05864499509334564, + 0.10937061905860901, + -0.004027256276458502, + 0.019197454676032066, + -0.03928225114941597, + -0.018220100551843643, + 0.02466396614909172, + 0.00980517640709877, + 0.05513712763786316, + 0.10625418275594711, + 0.08366440236568451, + -0.02691512741148472, + -0.026599230244755745, + -0.049529410898685455, + -0.05376818776130676, + -0.003328752936795354, + -0.04345694184303284, + 0.11166279017925262, + 0.0407760851085186, + -0.00792837142944336, + 0.03168283402919769, + -0.0018123856279999018, + 0.0357111431658268, + -0.1368872970342636, + 0.00755245191976428, + -0.010281031019985676, + -0.07121222466230392, + 0.03101137839257717, + 0.0662078782916069, + 0.007085337769240141, + -0.007063422352075577, + -0.05238816514611244, + -0.004185437690466642, + 0.00880429521203041, + 0.05528423562645912, + -0.08964741230010986, + 0.0042182342149317265, + -0.029020531103014946, + 0.014597366563975811, + 0.0002752764557953924, + 0.032267484813928604, + 0.11250412464141846, + 0.002768452512100339, + -0.026546282693743706, + 0.017761094495654106, + 0.023686127737164497, + -0.012699384242296219, + 0.018606871366500854, + -0.009665136225521564, + -0.06902427226305008, + -0.04932688549160957, + 0.051991984248161316, + -0.05918996408581734, + -4.18107468647122e-08, + -0.06375139951705933, + -0.0029491656459867954, + 0.032401617616415024, + 
-0.024946069344878197, + -0.026347795501351357, + 0.020192604511976242, + -0.025759287178516388, + 0.037004951387643814, + 0.043629322201013565, + 0.06278910487890244, + 0.02097572200000286, + -0.07064115256071091, + -0.005097216460853815, + 0.016486743465065956, + -0.0032796994782984257, + 0.07957274466753006, + 0.03776371479034424, + 0.04259084165096283, + -0.005312138702720404, + -0.11992045491933823, + 0.0859612300992012, + -0.0500827394425869, + 0.02553708106279373, + -0.052115052938461304, + -0.07368568331003189, + -0.11394031345844269, + -0.04513951390981674, + 0.022121727466583252, + -0.010451988317072392, + 0.003997989930212498, + -0.13713733851909637, + 0.030619636178016663, + 0.05786789208650589, + 0.05840904265642166, + 0.04732450470328331, + -0.012547117657959461, + -0.027409126982092857, + 0.049672048538923264, + -0.00018399256805423647, + -0.004273589234799147, + -0.011101006530225277, + 0.017240682616829872, + -0.002812016289681196, + 0.034935809671878815, + 0.006601597648113966, + 0.014211715199053288, + -0.06332860141992569, + -0.05397455021739006, + -0.038572490215301514, + -0.06450864672660828, + 0.07963147014379501, + 0.029951799660921097, + 0.03153093531727791, + -0.03852103650569916, + -0.025755248963832855, + 0.13077980279922485, + -0.08185151219367981, + -0.09442253410816193, + 0.01002975832670927, + 0.02509395219385624, + -0.029981056228280067, + -0.025097224861383438, + 0.04001101851463318, + 0.006130080670118332 + ], + "model": "XBN 2.1 Alloy", + "pickup_zone": "POLYGON((-0.1778 51.5524, 0.0822 51.5524, 0.0822 51.4024, -0.1778 51.4024, -0.1778 51.5524))", + "price": 810, + "store_location": "-0.1278,51.5074" + }, + { + "brand": "ScramBikes", + "condition": "new", + "description": "The WattBike is the best e-bike for people who still feel young at heart. It has a Bafang 1000W mid-drive system and a 48V 17.5AH Samsung Lithium-Ion battery, allowing you to ride for more than 60 miles on one charge. 
It\u2019s great for tackling hilly terrain or if you just fancy a more leisurely ride. With three working modes, you can choose between E-bike, assisted bicycle, and normal bike modes.", + "description_embeddings": [ + -0.006927311420440674, + 0.15900678932666779, + -0.005495064426213503, + 0.06652409583330154, + 0.027506737038493156, + 0.03928178921341896, + 0.0212758406996727, + 0.08142763376235962, + -0.004556896630674601, + 0.04261007905006409, + 0.0852041020989418, + -0.048658017069101334, + 0.04896014556288719, + 0.008289206773042679, + 0.09533551335334778, + 0.042673129588365555, + 0.11990982294082642, + -0.09381455183029175, + -0.001154645229689777, + -0.009989846497774124, + 0.042972203344106674, + 0.055328626185655594, + 0.044754382222890854, + 0.01764347031712532, + 0.04503790661692619, + -0.003830699948593974, + -0.03431522846221924, + 0.03080279380083084, + -0.07722456008195877, + -0.09261710941791534, + 0.04810558632016182, + 0.04917953535914421, + 0.027227703481912613, + -0.04199009761214256, + -0.11837311834096909, + 0.028090154752135277, + 0.08678824454545975, + -0.052036624401807785, + -0.06585966050624847, + -0.04037070646882057, + -0.08040877431631088, + 0.0018185328226536512, + 0.043989114463329315, + -0.011817573569715023, + 0.006730342749506235, + -0.011594205163419247, + 0.008361537009477615, + -0.054372794926166534, + 0.033345889300107956, + -0.03933562710881233, + 0.10115987062454224, + -0.08934503048658371, + 0.04519934952259064, + 0.03582148626446724, + -0.021424520760774612, + -0.07357319444417953, + -0.07426925748586655, + 0.018053170293569565, + 0.04639451205730438, + -0.07499486207962036, + 0.057791586965322495, + -0.05478687211871147, + 0.030340412631630898, + -0.0056327772326767445, + -0.0452428013086319, + -0.044913534075021744, + 0.0526629202067852, + -0.08259481936693192, + -0.05616062134504318, + 0.0005400424124673009, + -0.04933836683630943, + 1.7464293705415912e-05, + -0.007984945550560951, + 0.009467384777963161, + 
-0.04055757448077202, + -0.0056908270344138145, + 0.036597516387701035, + 0.023873118683695793, + 0.00418807053938508, + 0.04676923528313637, + -0.053795333951711655, + -0.015527212992310524, + -0.080276258289814, + 0.026996882632374763, + 0.1144171804189682, + -0.011178486980497837, + 0.06854862719774246, + 0.022201502695679665, + -0.01913747377693653, + 0.009713435545563698, + 0.01423699501901865, + 0.04772024229168892, + 0.034634001553058624, + 0.0756441131234169, + -0.01204933412373066, + -0.031088251620531082, + -0.02462930977344513, + -0.04386857897043228, + -0.07706759870052338, + -0.002071107504889369, + -0.019828464835882187, + 0.10314348340034485, + 0.07235224545001984, + 0.03167572245001793, + -0.02631053328514099, + -0.036559153348207474, + 0.012040914967656136, + 0.08063311874866486, + 0.008269569836556911, + -0.012434666976332664, + 0.00650354428216815, + -0.05601133033633232, + -0.028117282316088676, + -0.025071244686841965, + -0.01907099038362503, + 0.003760937135666609, + -0.052525606006383896, + 0.07173370569944382, + 0.09372185170650482, + 0.1006019189953804, + 0.023999499157071114, + 0.041847433894872665, + 0.02114962227642536, + 0.05726422369480133, + 0.008218048140406609, + -0.06967302411794662, + 0.01968124881386757, + 1.844028477519647e-33, + -0.031731415539979935, + 0.03648626059293747, + -0.026035945862531662, + -0.026796391233801842, + 0.009511047042906284, + 0.050696976482868195, + -0.03008556365966797, + -0.027142174541950226, + -0.07182826101779938, + -0.02520623430609703, + -0.005983793176710606, + 0.07632830739021301, + 0.09301365166902542, + 0.0744069442152977, + 0.10173527896404266, + -0.08317957073450089, + -0.06219454109668732, + -0.0776224136352539, + 0.013563201762735844, + 0.005839265417307615, + 0.011761599220335484, + -0.0701083242893219, + 0.005647188518196344, + -0.007439272943884134, + -4.4855809392174706e-05, + -0.05095696449279785, + 0.13620494306087494, + 0.017342044040560722, + -0.01352923084050417, + 
0.03086051531136036, + -0.0740780308842659, + -0.06721310317516327, + -0.06769067794084549, + 0.01185466069728136, + -0.07234086841344833, + 0.0020215404219925404, + -0.0108563881367445, + 0.007788154762238264, + 0.07912690192461014, + -0.02952556498348713, + -0.010350959375500679, + 0.025074176490306854, + -0.050776757299900055, + 0.00947178341448307, + 0.03259417414665222, + 0.0319671556353569, + 0.0379379540681839, + 0.05242982879281044, + -0.07516643404960632, + 0.020074544474482536, + -0.08712191134691238, + -0.01204316969960928, + -0.006554455496370792, + -0.005875407252460718, + -0.06922540068626404, + 0.041081465780735016, + -0.003257623640820384, + 0.0391659140586853, + -0.03731334209442139, + -0.03579063341021538, + -0.04008869081735611, + 0.05648549646139145, + -0.01142149232327938, + -0.05515728518366814, + -0.06406072527170181, + -0.013155528344213963, + 0.0029902313835918903, + -0.020829875022172928, + -0.13008210062980652, + -0.039412979036569595, + 0.05974568799138069, + -0.06617670506238937, + 0.09968123584985733, + -0.04378099367022514, + 0.008272947743535042, + -0.007245620246976614, + -0.05228377878665924, + -0.05929296463727951, + -0.07816570997238159, + 0.04665905237197876, + -0.01630396395921707, + -0.020880484953522682, + -0.06532212346792221, + -0.003213261254131794, + 0.0416768379509449, + -0.05093088001012802, + -0.0871049091219902, + -0.014571009203791618, + 0.0010796786518767476, + 0.007087667006999254, + 0.05550219491124153, + -0.07753361761569977, + 0.022848013788461685, + 0.02596968039870262, + 0.04259824752807617, + -2.380165022444961e-33, + 0.05820159241557121, + 0.046025633811950684, + 0.10894608497619629, + 0.05244075506925583, + 0.15792003273963928, + 0.024086199700832367, + 0.029237573966383934, + -0.023072607815265656, + -0.07495220005512238, + -0.013094011694192886, + -0.004260784015059471, + -0.053080882877111435, + -0.00895928218960762, + -0.03149857744574547, + 0.13309381902217865, + 0.038643430918455124, + 
-0.02136029675602913, + -0.030247559770941734, + 0.08822915703058243, + -0.026076463982462883, + 0.050446517765522, + 0.011856893077492714, + -0.06957101821899414, + -0.02773614600300789, + 0.04625667631626129, + 0.008786994963884354, + -0.07701855897903442, + 0.001076446962542832, + -0.004858669359236956, + 0.0067254831083118916, + -0.02791707031428814, + -0.019495468586683273, + 0.0703381896018982, + 0.030558260157704353, + -0.05546342208981514, + 0.0420801043510437, + -0.03730127215385437, + -0.01322201732546091, + 0.038336288183927536, + 0.06427455693483353, + 0.028238974511623383, + -0.06713984161615372, + 0.03150900453329086, + 0.024600861594080925, + 0.016221575438976288, + 0.025838203728199005, + -0.02313261851668358, + 0.0557485967874527, + 0.015211678110063076, + 0.029895616695284843, + 0.06008606031537056, + 0.04473312199115753, + -0.050083715468645096, + -0.017571061849594116, + -0.014091495424509048, + -0.06641074270009995, + -0.008071008138358593, + 0.03312089666724205, + -0.09482058882713318, + -0.08763624727725983, + 0.030528157949447632, + 0.013274747878313065, + 0.06158939003944397, + 0.0748405009508133, + -0.12128522247076035, + -0.12076683342456818, + 0.0464591421186924, + 0.0045278542675077915, + -0.002574144396930933, + -0.028266169130802155, + -0.04953430965542793, + -0.007604515179991722, + -0.01084962673485279, + -0.06370151042938232, + 0.00812828354537487, + -0.027805447578430176, + 0.044624149799346924, + 0.016610005870461464, + -0.027103105559945107, + -0.046117331832647324, + -0.002852817066013813, + 0.05479831248521805, + 0.05342277139425278, + -0.03752776235342026, + -0.0053502353839576244, + -0.018950853496789932, + -0.07712738960981369, + -0.13029317557811737, + -0.013171681202948093, + 0.02069253847002983, + 0.029317859560251236, + 0.018668048083782196, + -0.061371881514787674, + 0.05376929044723511, + 0.01483837515115738, + -3.964297690117746e-08, + 0.03559036925435066, + -0.005289694294333458, + -0.013635152950882912, + 
-0.020672710612416267, + 0.039649732410907745, + -0.06206038221716881, + 0.04933065176010132, + 0.010378899984061718, + 0.020493512973189354, + 0.027486076578497887, + 0.07419533282518387, + -0.03473059833049774, + 0.05251400172710419, + 0.013588557951152325, + 0.01814397983253002, + -0.024619124829769135, + 0.11751414835453033, + 0.05483951419591904, + 0.022471528500318527, + 0.0301313828676939, + 0.018649602308869362, + -0.06219587102532387, + 0.04238469526171684, + -0.053293377161026, + 0.002427657600492239, + -0.0019911346025764942, + 0.004944113548845053, + -0.061433035880327225, + -0.013191037811338902, + -0.030179856345057487, + -0.051340680569410324, + 0.022094713523983955, + -0.035341646522283554, + -0.007219062652438879, + -0.06182244420051575, + -0.06059279292821884, + -0.041057273745536804, + 0.002089729532599449, + -0.043397895991802216, + 0.07170896232128143, + 0.036471735686063766, + -0.015439609996974468, + -0.01848314143717289, + 0.0056609525345265865, + -0.0012752920156344771, + -0.056571539491415024, + -0.01979043148458004, + -0.12011151015758514, + -0.02223217859864235, + 0.029830070212483406, + 0.027551136910915375, + -0.0010704555315896869, + 0.000702365068718791, + -0.005173679441213608, + -0.019360698759555817, + 0.17009279131889343, + -0.04601005092263222, + -0.04016156122088432, + -0.01600208878517151, + 0.04198170080780983, + 0.02032368630170822, + -0.06263051927089691, + -0.048464443534612656, + 0.009351130574941635 + ], + "model": "WattBike", + "pickup_zone": "POLYGON((2.1767 48.9016, 2.5267 48.9016, 2.5267 48.5516, 2.1767 48.5516, 2.1767 48.9016))", + "price": 2300, + "store_location": "2.3522,48.8566" + }, + { + "brand": "Peaknetic", + "condition": "new", + "description": "If you struggle with stiff fingers or a kinked neck or back after a few minutes on the road, this lightweight, aluminum bike alleviates those issues and allows you to enjoy the ride. 
From the ergonomic grips to the lumbar-supporting seat position, the Roll Low-Entry offers incredible comfort. The rear-inclined seat tube facilitates stability by allowing you to put a foot on the ground to balance at a stop, and the low step-over frame makes it accessible for all ability and mobility levels. The saddle is very soft, with a wide back to support your hip joints and a cutout in the center to redistribute that pressure. Rim brakes deliver satisfactory braking control, and the wide tires provide a smooth, stable ride on paved roads and gravel. Rack and fender mounts facilitate setting up the Roll Low-Entry as your preferred commuter, and the BMX-like handlebar offers space for mounting a flashlight, bell, or phone holder.", + "description_embeddings": [ + 0.0022135202307254076, + 0.0681997612118721, + 0.0064607178792357445, + 0.007070810534060001, + -0.054231829941272736, + 0.008059866726398468, + 0.040072083473205566, + 0.0956423208117485, + 0.049143481999635696, + 0.0379914790391922, + -0.020757146179676056, + 0.06460786610841751, + 0.07982552796602249, + -0.0321788415312767, + -0.006194670218974352, + 0.0375053696334362, + 0.1270381063222885, + -0.02341461181640625, + -0.007247396744787693, + 0.0682845488190651, + -0.026470346376299858, + 0.005658577661961317, + 0.04653697833418846, + 0.03987714648246765, + -0.12344618886709213, + 0.03188862279057503, + -0.02708076685667038, + 0.06964577734470367, + 0.02201199159026146, + -0.02890665829181671, + -0.040280576795339584, + 0.06318672746419907, + 0.07309666275978088, + -0.08932560682296753, + -0.11648543179035187, + -0.029937371611595154, + 0.10487373173236847, + 0.041655637323856354, + -0.022871389985084534, + -0.04677774757146835, + 0.02746077999472618, + 0.01847464218735695, + 0.08173345029354095, + 0.06561841815710068, + 0.05970228090882301, + -0.024480478838086128, + 0.06907600909471512, + -0.03947169706225395, + 0.024855393916368484, + -0.013199429027736187, + 0.09996063262224197, + 
0.0028578736819326878, + 0.08506831526756287, + 0.014810853637754917, + -0.03929787874221802, + 0.0018542942125350237, + -0.14215077459812164, + 0.06970464438199997, + 0.0265016071498394, + -0.06698428839445114, + 0.07732488960027695, + -0.04583248123526573, + 0.028696633875370026, + 0.013155206106603146, + 0.02578958310186863, + -0.029291296377778053, + -0.00031816380214877427, + -0.125708669424057, + -0.042060736566782, + -0.05381627008318901, + -0.0538906455039978, + 0.0014778936747461557, + -0.09889215975999832, + -0.0429794080555439, + -0.013458915054798126, + -0.04558553919196129, + 0.09005370736122131, + -0.04213777929544449, + -0.1349189579486847, + 0.055852413177490234, + 0.006197084207087755, + 0.016338912770152092, + 0.032463233917951584, + 0.02548987604677677, + 0.030681701377034187, + -0.0703219398856163, + 0.05395807698369026, + -0.009628057479858398, + -0.031475286930799484, + -0.027579376474022865, + 0.04098684713244438, + 0.03901920095086098, + -0.01192146260291338, + -0.04101356491446495, + -0.08753280341625214, + 0.0705665871500969, + 0.03578994423151016, + 0.032069701701402664, + -0.03211764618754387, + 0.09590566158294678, + 0.0611797571182251, + 0.004700345452874899, + 0.10438291728496552, + 0.07409676164388657, + -0.0090691689401865, + 0.011407091282308102, + 0.05370165407657623, + -0.020438825711607933, + -0.04398776963353157, + 0.032895661890506744, + -0.0012730252929031849, + 0.018297234550118446, + -0.04333885386586189, + 0.06208101287484169, + -0.08955910801887512, + -0.04672509804368019, + -0.06376977264881134, + 0.022537674754858017, + 0.02881341427564621, + -0.0030273371376097202, + -0.06542699784040451, + 0.007231372408568859, + 0.01855703443288803, + 0.012365416623651981, + 0.018434280529618263, + -0.04855069890618324, + -0.08907819539308548, + -2.615521077916409e-34, + -0.046131156384944916, + -0.028526470065116882, + 0.01342072058469057, + -0.010418130084872246, + -0.019669201225042343, + -0.07430055737495422, + 
-0.006000629626214504, + -0.03320513665676117, + -0.05303066596388817, + 0.07680810242891312, + -0.0061429338529706, + 0.06154831871390343, + 0.09432042390108109, + 0.0038354273419827223, + 0.14506474137306213, + -0.07032876461744308, + -0.07443196326494217, + -0.04439792409539223, + -0.005104243289679289, + -3.3934433304239064e-05, + -0.08257872611284256, + -0.056131236255168915, + 0.04174238070845604, + 0.0017745267832651734, + -0.002772972686216235, + 0.006998051423579454, + 0.09371285885572433, + -0.013138788752257824, + -0.042773280292749405, + 0.00896062795072794, + -0.09441300481557846, + -0.007952050305902958, + -0.015586148016154766, + -0.017506571486592293, + 0.01347337942570448, + 0.027937229722738266, + -0.11657726019620895, + -0.03324989974498749, + -0.018427638337016106, + -0.04819897934794426, + -0.04077771306037903, + -0.0001697312545729801, + -0.05697356536984444, + 0.07191669940948486, + 0.017228955402970314, + 0.03711475431919098, + 0.042057864367961884, + 0.004786928184330463, + -0.06956200301647186, + 0.06036132574081421, + -0.0016305814497172832, + 0.02468821406364441, + -0.013794313184916973, + -0.025994934141635895, + -0.07489914447069168, + 0.01067660003900528, + -0.010298662818968296, + -0.022154971957206726, + -0.09746527671813965, + 0.06335768103599548, + -0.022728655487298965, + -0.0005628599901683629, + -0.04313022270798683, + -0.04123309254646301, + -0.08378659188747406, + 0.020546691492199898, + 0.01680913008749485, + -0.010155763477087021, + 0.008547178469598293, + -0.0211178008466959, + 0.023050235584378242, + 0.06479462236166, + 0.061528537422418594, + 0.022156352177262306, + 0.019619867205619812, + 0.07456795871257782, + 0.049971628934144974, + -0.07878398895263672, + 0.04972408711910248, + -0.015442566946148872, + 0.044758349657058716, + -0.037393294274806976, + -0.03831558674573898, + -0.016534404829144478, + -0.05692937225103378, + 0.02438316121697426, + -0.008604591712355614, + -0.06003899499773979, + 0.04884570837020874, + 
-0.05407913774251938, + -0.004864877089858055, + -0.039421387016773224, + -0.0335439033806324, + 0.05149518698453903, + -0.00761334178969264, + -4.919814414418271e-34, + 0.0605306513607502, + 0.013155259191989899, + 0.0022042335476726294, + -0.016238724812865257, + 0.017820321023464203, + -0.029504502192139626, + 0.1179562360048294, + -0.08494631201028824, + -0.05847567319869995, + -0.03412671759724617, + 0.01584434323012829, + 0.0205944012850523, + 0.011703073978424072, + 0.024820921942591667, + 0.07683313637971878, + 0.016703670844435692, + -0.0465223602950573, + -0.08929521590471268, + 0.013778958469629288, + -0.06448621302843094, + 0.047237128019332886, + 0.11140323430299759, + -0.036479681730270386, + -0.055252984166145325, + -0.08155377954244614, + -0.0011454259511083364, + -0.07414430379867554, + 0.04839516803622246, + -0.036127883940935135, + 0.04249665141105652, + -0.05564387887716293, + 0.008439254947006702, + 0.056384216994047165, + 0.0037136792670935392, + -0.06267311424016953, + 0.06599505990743637, + -0.0944608598947525, + -0.01900491677224636, + 0.0017078104428946972, + 0.018181079998612404, + 0.0072936019860208035, + 0.03901578113436699, + 0.022009696811437607, + -0.02042953297495842, + 0.04422936588525772, + 0.099826380610466, + 0.0022175773046910763, + 0.05569764971733093, + 0.005229232832789421, + -0.06281892955303192, + 0.06203164905309677, + 0.05493532121181488, + 0.06598878651857376, + 0.06272768974304199, + 0.05623844638466835, + -0.053184594959020615, + 0.012609804049134254, + -0.009191329590976238, + -0.07956400513648987, + -0.005095178727060556, + -0.06295286118984222, + 0.05799472704529762, + 0.004298996180295944, + -0.0245245061814785, + -0.010027579963207245, + -0.02338206022977829, + -0.03576310724020004, + -0.0735088586807251, + -0.10477573424577713, + 0.010318689979612827, + -0.031293049454689026, + -0.06913845986127853, + 0.07762496173381805, + -0.029345618560910225, + 0.03347797319293022, + 0.014305013231933117, + 
0.06321006268262863, + -0.09872464835643768, + -0.040937263518571854, + 0.028350593522191048, + -0.08784928917884827, + 0.005375882610678673, + 0.059733789414167404, + 0.027322115376591682, + -0.04657347500324249, + 0.02783248759806156, + -0.12815773487091064, + 0.004414730705320835, + -0.009058866649866104, + 0.06062375009059906, + 0.04072198644280434, + 0.05082780495285988, + -0.036888547241687775, + 0.07301440089941025, + 0.022250277921557426, + -4.836826761334123e-08, + -0.02333931438624859, + -0.0006597689352929592, + -0.011001646518707275, + 0.01690472476184368, + -0.0037487675435841084, + 0.039681874215602875, + -0.012263616546988487, + 0.010128140449523926, + 0.03165998309850693, + 0.0028211181052029133, + 0.009292887523770332, + -0.05068610981106758, + -0.05350435897707939, + 0.0027450143825262785, + -0.02869512513279915, + 0.18523184955120087, + 0.010015024803578854, + 0.04042758792638779, + 0.010014397092163563, + -0.07638011872768402, + -0.04126984626054764, + -0.06051325798034668, + 0.04609273001551628, + -0.00863336119800806, + -0.010159372352063656, + -0.0686831921339035, + -0.022274712100625038, + 0.054596077650785446, + -0.004217579495161772, + 0.005010589957237244, + -0.0023027928546071053, + 0.054350487887859344, + 0.051168326288461685, + -0.0005977987311780453, + 0.0008231411338783801, + -0.005349422339349985, + -0.04023698344826698, + 0.01875622756779194, + 0.051792532205581665, + 0.0725129023194313, + 0.003990984987467527, + -0.0412585511803627, + -0.030975697562098503, + 0.02229507826268673, + -0.023536084219813347, + 0.004254089202731848, + -0.047596003860235214, + 0.054139409214258194, + 0.024750487878918648, + 0.00830126740038395, + 0.04204926639795303, + -0.05580601468682289, + 0.014396552927792072, + 0.0785103291273117, + -0.01755904220044613, + 0.09679318964481354, + -0.01722336746752262, + -0.03680667281150818, + -0.021823544055223465, + 0.044612541794776917, + -0.0008680447936058044, + 0.027066197246313095, + -0.008826435543596745, + 
-0.034606464207172394 + ], + "model": "Secto", + "pickup_zone": "POLYGON((13.3260 52.5700, 13.6550 52.5700, 13.6550 52.2700, 13.3260 52.2700, 13.3260 52.5700))", + "price": 430, + "store_location": "13.4050,52.5200" + }, + { + "brand": "nHill", + "condition": "new", + "description": "This budget mountain bike from nHill performs well both on bike paths and on the trail. The fork with 100mm of travel absorbs rough terrain. Fat Kenda Booster tires give you grip in corners and on wet trails. The Shimano Tourney drivetrain offered enough gears for finding a comfortable pace to ride uphill, and the Tektro hydraulic disc brakes break smoothly. Whether you want an affordable bike that you can take to work, but also take trail in mountains on the weekends or you\u2019re just after a stable, comfortable ride for the bike path, the Summit gives a good value for money.", + "description_embeddings": [ + -0.024333441630005836, + 0.029308300465345383, + -0.012847754172980785, + -0.013123984448611736, + -0.039905961602926254, + -0.04596070945262909, + -0.01287133153527975, + 0.03984961286187172, + -0.043438445776700974, + 0.012349214404821396, + -0.021342391148209572, + -0.027458613738417625, + -0.004793129395693541, + 0.016479987651109695, + 0.02052542380988598, + -0.009542089886963367, + 0.10066469013690948, + 0.017397526651620865, + 0.07893014699220657, + -0.0765308141708374, + -0.007650941610336304, + 9.538845188217238e-05, + 0.03314477205276489, + 0.026631362736225128, + -0.01621824875473976, + 0.03593063727021217, + -0.014871303923428059, + 0.05259871482849121, + 0.027785824611783028, + -0.044416770339012146, + -0.026691904291510582, + -0.01390022411942482, + -0.0573929138481617, + -0.08761026710271835, + 0.012642575427889824, + 0.04252056032419205, + 0.04015251249074936, + -0.0052475095726549625, + -0.08608037978410721, + 0.0028531427960842848, + -0.003371630096808076, + -0.02032443881034851, + -0.029511747881770134, + -0.043690912425518036, + 0.042122989892959595, + 
-0.057133886963129044, + 0.019293654710054398, + -0.019127611070871353, + 0.036713697016239166, + -0.00833116751164198, + 0.009040397591888905, + -0.1087319627404213, + 0.09127230197191238, + -0.048213958740234375, + -0.023604029789566994, + 0.0008899428648874164, + -0.093184694647789, + -0.03436879441142082, + 0.00807907897979021, + -0.0520450733602047, + 0.02262270636856556, + -0.03259866312146187, + -0.0391012541949749, + 0.001295328140258789, + 0.06636986881494522, + -0.04248524457216263, + -0.07093628495931625, + -0.06715728342533112, + 0.007415436673909426, + -0.06185786426067352, + 0.036563266068696976, + 0.030749274417757988, + 0.007705106865614653, + 0.0023627770133316517, + -0.026215652003884315, + -0.020864665508270264, + 0.05270038917660713, + 0.021477004513144493, + 0.0068344962783157825, + 0.024355463683605194, + -0.03746088594198227, + 0.031043346971273422, + 0.09879995882511139, + -0.050035204738378525, + 0.08045479655265808, + -0.04274336248636246, + 0.02994127944111824, + -0.03783947601914406, + 0.039749279618263245, + 0.046901557594537735, + 0.08667739480733871, + 0.02831847593188286, + -0.008148318156599998, + 0.007401767186820507, + -0.10989774018526077, + -0.014092149212956429, + 9.036048140842468e-06, + 0.054770372807979584, + -0.012413500808179379, + 0.01861056312918663, + 0.060349978506565094, + 0.06291640549898148, + 0.007055714726448059, + -0.021361181512475014, + 0.03852728381752968, + -0.018908292055130005, + 0.047424111515283585, + 0.054501011967659, + 0.017648596316576004, + 0.11429999768733978, + -0.041358742862939835, + -0.03363256901502609, + 0.0030338421929627657, + 0.030056871473789215, + 0.008060693740844727, + -0.0688784271478653, + -0.09984277933835983, + 0.030267179012298584, + -0.028509166091680527, + 0.02268315851688385, + -0.07580948621034622, + -0.0405895970761776, + 0.03384138271212578, + 0.07081738114356995, + 0.019772004336118698, + -0.06172173097729683, + 0.008248022757470608, + 1.9840379362262432e-33, + 
0.037930894643068314, + 0.04577890411019325, + -0.03732670471072197, + -0.032416991889476776, + 0.014588817022740841, + -0.06602118909358978, + -0.013814577832818031, + -0.10539791733026505, + -0.12850415706634521, + -0.005076196976006031, + -0.07302963733673096, + 0.07235066592693329, + -0.04646436870098114, + 0.059371400624513626, + 0.09111672639846802, + -0.10133209824562073, + -0.05483068525791168, + -0.053797587752342224, + -0.03709062561392784, + 0.08241501450538635, + 0.03958267718553543, + 0.01433452870696783, + 0.05912068858742714, + -0.0029660870786756277, + 0.01806815154850483, + -0.0680701732635498, + 0.013625666499137878, + 0.015582672320306301, + -0.02142208069562912, + 0.06357447057962418, + -0.12094264477491379, + -0.09666852653026581, + -0.03333089128136635, + -0.06621623784303665, + -0.03821765258908272, + -0.010776739567518234, + -0.08198492974042892, + -0.03277386352419853, + 0.014794036746025085, + 0.013517632149159908, + -0.0034003634937107563, + 0.005664682947099209, + -0.022207774221897125, + 0.015086682513356209, + -0.06799489259719849, + 0.08191754668951035, + 0.15600024163722992, + 0.07579171657562256, + -0.05843832343816757, + -0.008412746712565422, + -0.07857701182365417, + -0.0038521387614309788, + -0.02901686355471611, + 0.0006679021753370762, + -0.0697081908583641, + -0.05330892279744148, + 0.053166620433330536, + 0.004783661104738712, + 0.013741954229772091, + 0.07203977555036545, + -0.015861764550209045, + 0.033332549035549164, + -0.022547632455825806, + -0.011850795708596706, + -0.09526507556438446, + -0.016078950837254524, + -0.004922114312648773, + 0.004731250926852226, + -0.006536174099892378, + 0.004852978512644768, + 0.01268527377396822, + 0.05300697684288025, + 0.11899229139089584, + 0.025274425745010376, + 0.11763036996126175, + -0.08781222999095917, + -0.016429787501692772, + -0.02661072462797165, + -0.01769474893808365, + 0.05961911380290985, + 0.005877051502466202, + -0.03780582174658775, + -0.028037618845701218, + 
0.023397648707032204, + -0.006716609466820955, + 0.015020519495010376, + 0.040884219110012054, + -0.06197969987988472, + -0.029691174626350403, + -0.004976964555680752, + -0.03422437235713005, + 0.018984060734510422, + -0.00813105795532465, + -0.020747167989611626, + 0.0281930360943079, + -3.2326105667872445e-33, + 0.12847235798835754, + 0.026267321780323982, + 0.10983140766620636, + 0.011159190908074379, + -0.044980239123106, + 0.017553674057126045, + -0.004794386215507984, + -0.015802551060914993, + -0.01226617582142353, + -0.019019361585378647, + 0.011876302771270275, + -0.023136194795370102, + 0.03209578990936279, + 0.07442695647478104, + 0.05641649663448334, + -0.05300089344382286, + -0.03638714924454689, + -0.014843880198895931, + 0.036886364221572876, + -0.1257546991109848, + -0.008151554502546787, + 0.07542898505926132, + -0.11252967268228531, + -0.09049033373594284, + -0.0030374047346413136, + 0.04011000320315361, + -0.1538117229938507, + 0.07717141509056091, + -0.04436258599162102, + 0.07290291786193848, + -0.0072043901309370995, + 0.03950807452201843, + 0.002569766016677022, + -0.006248284596949816, + 0.001176450401544571, + 0.12515375018119812, + 0.02005600742995739, + -0.00013864059292245656, + 0.024221621453762054, + 0.05465780198574066, + 0.09940708428621292, + -0.053944025188684464, + 0.09602796286344528, + 0.00014552564243786037, + 0.026602625846862793, + 0.016811460256576538, + 0.0015170868718996644, + 0.08731727302074432, + 0.005771235562860966, + -0.009904555976390839, + 0.06958170980215073, + 0.09265917539596558, + 0.005830116104334593, + 0.10210693627595901, + 0.0011325017549097538, + -0.006749417632818222, + 0.01503710262477398, + -0.03714510798454285, + -0.0939728170633316, + -0.03357759863138199, + -0.050196800380945206, + -0.006316252052783966, + -0.06414957344532013, + 0.007235861383378506, + -0.01670873910188675, + -0.07423098385334015, + 0.018221961334347725, + -0.05320512503385544, + -0.010248126462101936, + 0.013013344258069992, + 
-0.12916545569896698, + -0.04244375228881836, + 0.08774643391370773, + -0.03917357325553894, + -0.027767449617385864, + 0.02099072001874447, + 0.0595061257481575, + 0.01945589855313301, + 0.03815269470214844, + -0.028747402131557465, + -0.025533201172947884, + -0.038349006325006485, + -0.014486055821180344, + -0.013576257973909378, + 0.051731329411268234, + 0.06515145301818848, + -0.011720783077180386, + -0.1163521260023117, + -0.03931708261370659, + 0.03394515439867973, + 0.09380073100328445, + 0.00931628793478012, + -0.01171857863664627, + 0.04104544222354889, + -0.008916886523365974, + -4.140364140425845e-08, + 0.061763226985931396, + 0.02101300284266472, + -0.019075985997915268, + -0.002017116406932473, + 0.015830116346478462, + -0.009857675060629845, + -0.018932191655039787, + -0.0007055550813674927, + 0.013249439187347889, + 0.0642324835062027, + 0.12860508263111115, + 0.03213700279593468, + -0.0670522004365921, + 0.041800037026405334, + -0.0426088348031044, + 0.0437115803360939, + 0.021514814347028732, + 0.12901803851127625, + 0.015236952342092991, + -0.02232329733669758, + -0.05031055584549904, + -0.052778035402297974, + 0.000233030179515481, + -0.022497626021504402, + -0.014370465651154518, + 0.017518600448966026, + 0.014312495477497578, + 0.010902844369411469, + 0.0027771666646003723, + -0.04195915535092354, + -0.06504479050636292, + 0.038553908467292786, + -0.01374481339007616, + 0.03843652829527855, + 0.09058628976345062, + -0.0230315700173378, + -0.11761228740215302, + 0.06487669050693512, + -0.005885662976652384, + 0.06288695335388184, + 0.05191958323121071, + 0.011840583756566048, + -0.027309859171509743, + 0.040654800832271576, + -0.005980184301733971, + 0.03252403438091278, + -0.043744225054979324, + 0.006068602204322815, + -0.00032751375692896545, + -0.022902367636561394, + -0.050694938749074936, + 0.026743048802018166, + 0.06005466729402542, + 0.02092430181801319, + 0.02072584442794323, + 0.08342021703720093, + -0.05768749862909317, + 
-0.08585236966609955, + -0.03160274401307106, + 0.05222279205918312, + 0.029474787414073944, + -0.08763033896684647, + -0.01591801643371582, + -0.07798203080892563 + ], + "model": "Summit", + "pickup_zone": "POLYGON((1.9450 41.4301, 2.4018 41.4301, 2.4018 41.1987, 1.9450 41.1987, 1.9450 41.4301))", + "price": 1200, + "store_location": "2.1734, 41.3851" + }, + { + "brand": "BikeShind", + "condition": "refurbished", + "description": "An artsy, retro-inspired bicycle that\u2019s as functional as it is pretty: The ThrillCycle steel frame offers a smooth ride. A 9-speed drivetrain has enough gears for coasting in the city, but we wouldn\u2019t suggest taking it to the mountains. Fenders protect you from mud, and a rear basket lets you transport groceries, flowers and books. The ThrillCycle comes with a limited lifetime warranty, so this little guy will last you long past graduation.", + "description_embeddings": [ + -0.005154624115675688, + 0.10099548101425171, + 0.04890008643269539, + 0.0625317171216011, + -0.0363173745572567, + 0.025850096717476845, + 0.032137978821992874, + -0.03442877531051636, + -0.10096120089292526, + -0.007441149093210697, + 0.002616145182400942, + 0.002316742204129696, + 0.042584337294101715, + 0.026208963245153427, + 0.018415318801999092, + 0.050033390522003174, + 0.14831797778606415, + -0.006160213612020016, + 0.05086936056613922, + 0.0131875304505229, + -0.05973455682396889, + 0.045886117964982986, + -0.025052331387996674, + 0.04576171189546585, + -0.006168896332383156, + 0.03315199911594391, + -0.07763667404651642, + 0.05837876722216606, + -0.03229084238409996, + -0.021293187513947487, + -0.008595496416091919, + 0.06880632787942886, + -0.04528025537729263, + -0.020664094015955925, + -0.029125794768333435, + 0.0445701889693737, + 0.023799002170562744, + -0.013527574017643929, + -0.01646343804895878, + 0.027023920789361, + 0.0014843698590993881, + 0.014952401630580425, + -0.010172702372074127, + 0.07603776454925537, + 0.017975280061364174, + 
-0.08911248296499252, + 0.055482957512140274, + -0.027819648385047913, + 0.019395964220166206, + 0.05424710363149643, + 0.06634850054979324, + -0.05045575276017189, + -0.0006240577204152942, + -0.06339468061923981, + -0.023632608354091644, + -0.060364823788404465, + -0.11193189769983292, + 0.046562716364860535, + -0.010353066958487034, + -0.043506212532520294, + 0.051063187420368195, + 0.003231980837881565, + 0.027102531865239143, + 0.002380193443968892, + -0.0056939758360385895, + -0.026826998218894005, + -0.014482468366622925, + -0.0185822993516922, + 0.013927196152508259, + -0.10704471915960312, + 0.0673733651638031, + -0.0010599792003631592, + -0.07455477863550186, + -0.014452059753239155, + -0.02294282428920269, + -0.010201872326433659, + 0.023037923499941826, + -0.05595972761511803, + -0.09120096266269684, + -0.029177049174904823, + -0.02343042939901352, + -0.029726101085543633, + 0.07366959005594254, + -0.046458736062049866, + 0.006261167582124472, + -0.013148028403520584, + -0.0014217026764526963, + 0.017440352588891983, + -0.04511420056223869, + 0.008813070133328438, + -0.022654451429843903, + -0.014097515493631363, + -0.020799456164240837, + -0.05456947907805443, + -0.07338607311248779, + 0.017073828727006912, + -0.04925030097365379, + -0.024428002536296844, + -0.00979585014283657, + 0.025696750730276108, + 0.07033320516347885, + 0.0389082096517086, + 0.12711860239505768, + 0.07472020387649536, + -0.019501805305480957, + 0.01663217693567276, + 0.027799105271697044, + 0.039703886955976486, + -0.029871169477701187, + 0.005997804459184408, + -0.0015701024094596505, + 0.0411832258105278, + 8.85713889147155e-05, + -0.029442811384797096, + 0.009789851494133472, + -0.04723270237445831, + -0.09891993552446365, + 0.05102938041090965, + -0.007144609931856394, + 0.01668076030910015, + 0.004902719985693693, + 0.02218448370695114, + 0.03827798366546631, + 0.001542922342196107, + 0.004610520787537098, + -0.12595036625862122, + 0.07361572980880737, + 
-1.521196586481568e-33, + -0.01484542153775692, + 0.024973904713988304, + 0.006633534096181393, + 0.01254032738506794, + 0.06355929374694824, + -0.04809075966477394, + 0.02005922608077526, + -0.06251886487007141, + -0.06941430270671844, + 0.02304103970527649, + -0.020975833758711815, + 0.08137834072113037, + 0.049319952726364136, + 0.11945393681526184, + 0.17472721636295319, + -0.03430051729083061, + -0.07276412844657898, + -0.11828659474849701, + 0.04918891564011574, + -0.07279565185308456, + -0.06228293105959892, + -0.03494764119386673, + 0.004517378751188517, + -0.0431801863014698, + -0.07457974553108215, + -0.0003888327337335795, + 0.05853814631700516, + 0.05362739413976669, + 0.01866878569126129, + 0.004751134198158979, + -0.07936650514602661, + 0.033309247344732285, + -0.01660560630261898, + -0.08851712942123413, + 0.042956508696079254, + 0.03460550308227539, + -0.054352447390556335, + -0.016094308346509933, + 0.019760286435484886, + -0.03591147065162659, + -0.04307156056165695, + -0.04029138758778572, + -0.09315019100904465, + 0.055713504552841187, + 0.003642909461632371, + 0.0391569547355175, + 0.1300928145647049, + 0.026774607598781586, + -0.054334208369255066, + -0.060861144214868546, + -0.020662454888224602, + -0.034525640308856964, + 0.004193050786852837, + 0.02453654631972313, + -0.09028972685337067, + 0.004444751888513565, + 0.06320545822381973, + 0.01895289309322834, + -0.04660886526107788, + 0.08611723780632019, + 0.05705415457487106, + 0.030612647533416748, + 0.019753821194171906, + -0.061483170837163925, + -0.025262145325541496, + 0.054222241044044495, + 0.017605014145374298, + -0.013110420666635036, + 0.04744894057512283, + -0.004785404074937105, + 0.04680679365992546, + 0.001831064117141068, + 0.07773134112358093, + 0.008347200229763985, + 0.08674833923578262, + 0.024294976145029068, + -0.03695613518357277, + -0.0440739244222641, + 0.00895663257688284, + -0.0060102143324911594, + -0.0361272394657135, + -0.08447428047657013, + 
-0.08971717208623886, + 0.03131894767284393, + 0.0721370130777359, + 0.031180810183286667, + 0.039390258491039276, + -0.09044987708330154, + -0.007888346910476685, + 0.008131768554449081, + -0.03624662756919861, + -0.033649034798145294, + 0.031620997935533524, + 0.05870470404624939, + 0.018141048029065132, + -5.7662626416839565e-34, + 0.09117024391889572, + -0.023953478783369064, + 0.080879345536232, + 0.0035459804348647594, + -0.013222851790487766, + -0.025237491354346275, + 0.019038159400224686, + -0.06588941067457199, + -0.11689981818199158, + 0.0007165187271311879, + -0.0933104082942009, + 0.014618181623518467, + 0.12644915282726288, + 0.0027253602165728807, + 0.15038658678531647, + 0.0053163510747253895, + -0.005645217839628458, + 0.009321562945842743, + 0.002719732467085123, + -0.01651378907263279, + 0.0063886744901537895, + 0.05738385394215584, + -0.14213474094867706, + -0.0669025406241417, + -0.05766160413622856, + 0.0031909833196550608, + -0.13478563725948334, + -0.03741385415196419, + 0.041654907166957855, + 0.04030514881014824, + -0.05102578178048134, + 0.0028968513943254948, + 0.0738285705447197, + -0.03643025830388069, + -0.015445208176970482, + 0.06674294173717499, + 0.029759766533970833, + -0.024196317419409752, + -0.009355752728879452, + 0.023269686847925186, + 0.01623220555484295, + -0.08251868188381195, + 0.020170293748378754, + 0.0749305933713913, + 0.08206077665090561, + 0.012237507849931717, + -0.026570556685328484, + 0.016253553330898285, + -0.003425935748964548, + 0.005059909541159868, + 0.12109038233757019, + 0.08107315003871918, + -0.04097432270646095, + 0.0009462210582569242, + 0.058092083781957626, + -0.03609737753868103, + -0.01580999232828617, + -0.0342475026845932, + 0.012185491621494293, + -0.058348096907138824, + -0.05934947729110718, + 0.026715252548456192, + -0.031246516853570938, + 0.012632215395569801, + -0.01100403256714344, + -0.05962579324841499, + -0.000801989808678627, + -0.06919746845960617, + -0.1299775391817093, + 
-0.03721063956618309, + -0.03989635780453682, + 0.04918158799409866, + -0.0022671804763376713, + -0.04244564101099968, + -0.02631748840212822, + -0.04100838303565979, + 0.08634430915117264, + 0.05031242221593857, + 0.06688959896564484, + 0.024407675489783287, + 0.0018788984743878245, + -0.026026425883173943, + 0.04438408091664314, + 0.02198263816535473, + -0.014974343590438366, + -0.026490231975913048, + -0.09914876520633698, + -0.09834708273410797, + 0.017346272245049477, + 0.00089951854897663, + 0.05497178062796593, + 0.06180766969919205, + -0.050425756722688675, + 0.013637324795126915, + -0.03599657490849495, + -4.404103037813911e-08, + -0.0024933917447924614, + 0.049585312604904175, + -0.020710783079266548, + -0.014283453114330769, + -0.0064896950498223305, + 0.04579087719321251, + 0.02242390066385269, + 0.02515084482729435, + -0.031156959012150764, + 0.006718308199197054, + 0.061417631804943085, + -0.009935885667800903, + -0.021248064935207367, + 0.011152776889503002, + -0.059073783457279205, + 0.05840323865413666, + 0.06523993611335754, + 0.03690555691719055, + 0.03174441307783127, + 0.02336188219487667, + 0.003073832020163536, + -0.017117813229560852, + 0.02877660095691681, + 0.0017827276606112719, + -0.07995487004518509, + -0.032443612813949585, + -0.01474942360073328, + -0.016080526635050774, + 0.0771399736404419, + 0.017354682087898254, + -0.02578366920351982, + 0.004991421941667795, + 0.006590475793927908, + -0.032372940331697464, + 0.038421064615249634, + -0.1204696074128151, + -0.05457846075296402, + 0.036750562489032745, + 0.013932188041508198, + 0.0031626380514353514, + 0.009978784248232841, + -0.0138266421854496, + 0.006769631989300251, + 0.059896957129240036, + 0.00822784099727869, + -0.05025117099285126, + -0.10305225104093552, + -0.07187453657388687, + 0.00836241990327835, + 0.049537766724824905, + 0.007570290472358465, + -0.08729138970375061, + -0.020592620596289635, + 0.04918212443590164, + 0.054685790091753006, + 0.04091939702630043, + 
-0.05420788377523422, + -0.04473182559013367, + -0.11178043484687805, + 0.09808126091957092, + -0.015394269488751888, + -0.0039725536480546, + 0.03283742815256119, + -0.05677533522248268 + ], + "model": "ThrillCycle", + "pickup_zone": "POLYGON((12.4464 42.1028, 12.5464 42.1028, 12.5464 41.7028, 12.4464 41.7028, 12.4464 42.1028))", + "price": 815, + "store_location": "12.4964,41.9028" + } +] \ No newline at end of file diff --git a/doctests/dt-bitfield.js b/doctests/dt-bitfield.js new file mode 100644 index 00000000000..9685372de41 --- /dev/null +++ b/doctests/dt-bitfield.js @@ -0,0 +1,76 @@ +// EXAMPLE: bitfield_tutorial +// HIDE_START +import assert from 'assert'; +import { createClient } from 'redis'; + +const client = createClient(); +await client.connect(); +// HIDE_END + +// REMOVE_START +await client.flushDb(); +// REMOVE_END + +// STEP_START bf +let res1 = await client.bitField("bike:1:stats", [{ + operation: 'SET', + encoding: 'u32', + offset: '#0', + value: 1000 +}]); +console.log(res1); // >>> [0] + +let res2 = await client.bitField('bike:1:stats', [ + { + operation: 'INCRBY', + encoding: 'u32', + offset: '#0', + increment: -50 + }, + { + operation: 'INCRBY', + encoding: 'u32', + offset: '#1', + increment: 1 + } +]); +console.log(res2); // >>> [950, 1] + +let res3 = await client.bitField('bike:1:stats', [ + { + operation: 'INCRBY', + encoding: 'u32', + offset: '#0', + increment: 500 + }, + { + operation: 'INCRBY', + encoding: 'u32', + offset: '#1', + increment: 1 + } +]); +console.log(res3); // >>> [1450, 2] + +let res4 = await client.bitField('bike:1:stats', [ + { + operation: 'GET', + encoding: 'u32', + offset: '#0' + }, + { + operation: 'GET', + encoding: 'u32', + offset: '#1' + } +]); +console.log(res4); // >>> [1450, 2] +// STEP_END + +// REMOVE_START +assert.deepEqual(res1, [0]) +assert.deepEqual(res2, [950, 1]) +assert.deepEqual(res3, [1450, 2]) +assert.deepEqual(res4, [1450, 2]) +await client.close(); +// REMOVE_END diff --git 
a/doctests/dt-bitmap.js b/doctests/dt-bitmap.js new file mode 100644 index 00000000000..0d6a292d0f1 --- /dev/null +++ b/doctests/dt-bitmap.js @@ -0,0 +1,159 @@ +// EXAMPLE: bitmap_tutorial +// REMOVE_START +import assert from 'assert'; +// REMOVE_END +import { createClient, RESP_TYPES } from 'redis'; + +const client = createClient({ + commandOptions: { + typeMapping: { + [RESP_TYPES.BLOB_STRING]: Buffer + } + } +}); +await client.connect(); + +// REMOVE_START +await client.flushDb(); +// REMOVE_END + +// STEP_START ping +const res1 = await client.setBit("pings:2024-01-01-00:00", 123, 1) +console.log(res1) // >>> 0 + +const res2 = await client.getBit("pings:2024-01-01-00:00", 123) +console.log(res2) // >>> 1 + +const res3 = await client.getBit("pings:2024-01-01-00:00", 456) +console.log(res3) // >>> 0 +// STEP_END + +// REMOVE_START +assert.equal(res1, 0) +// REMOVE_END + +// STEP_START bitcount +// HIDE_START +await client.setBit("pings:2024-01-01-00:00", 123, 1) +// HIDE_END +const res4 = await client.bitCount("pings:2024-01-01-00:00") +console.log(res4) // >>> 1 +// STEP_END +// REMOVE_START +assert.equal(res4, 1) +// REMOVE_END + +// STEP_START bitop_setup +await client.setBit("A", 0, 1) +await client.setBit("A", 1, 1) +await client.setBit("A", 3, 1) +await client.setBit("A", 4, 1) + +const res5 = await client.get("A") +console.log(res5.readUInt8(0).toString(2).padStart(8, '0')) +// >>> 11011000 + +await client.setBit("B", 3, 1) +await client.setBit("B", 4, 1) +await client.setBit("B", 7, 1) + +const res6 = await client.get("B") +console.log(res6.readUInt8(0).toString(2).padStart(8, '0')) +// >>> 00011001 + +await client.setBit("C", 1, 1) +await client.setBit("C", 2, 1) +await client.setBit("C", 4, 1) +await client.setBit("C", 5, 1) + +const res7 = await client.get("C") +console.log(res7.readUInt8(0).toString(2).padStart(8, '0')) +// >>> 01101100 +// STEP_END +// REMOVE_START +assert.equal(res5.readUInt8(0), 0b11011000) +assert.equal(res6.readUInt8(0), 
0b00011001) +assert.equal(res7.readUInt8(0), 0b01101100) +// REMOVE_END + +// STEP_START bitop_and +await client.bitOp("AND", "R", ["A", "B", "C"]) +const res8 = await client.get("R") +console.log(res8.readUInt8(0).toString(2).padStart(8, '0')) +// >>> 00001000 +// STEP_END +// REMOVE_START +assert.equal(res8.readUInt8(0), 0b00001000) +// REMOVE_END + +// STEP_START bitop_or +await client.bitOp("OR", "R", ["A", "B", "C"]) +const res9 = await client.get("R") +console.log(res9.readUInt8(0).toString(2).padStart(8, '0')) +// >>> 11111101 +// STEP_END +// REMOVE_START +assert.equal(res9.readUInt8(0), 0b11111101) +// REMOVE_END + +// STEP_START bitop_xor +await client.bitOp("XOR", "R", ["A", "B"]) // XOR uses two keys here +const res10 = await client.get("R") +console.log(res10.readUInt8(0).toString(2).padStart(8, '0')) +// >>> 11000001 +// STEP_END +// REMOVE_START +assert.equal(res10.readUInt8(0), 0b11000001) +// REMOVE_END + +// STEP_START bitop_not +await client.bitOp("NOT", "R", "A") +const res11 = await client.get("R") +console.log(res11.readUInt8(0).toString(2).padStart(8, '0')) +// >>> 00100111 +// STEP_END +// REMOVE_START +assert.equal(res11.readUInt8(0), 0b00100111) +// REMOVE_END + +// STEP_START bitop_diff +await client.bitOp("DIFF", "R", ["A", "B", "C"]) +const res12 = await client.get("R") +console.log(res12.readUInt8(0).toString(2).padStart(8, '0')) +// >>> 10000000 +// STEP_END +// REMOVE_START +assert.equal(res12.readUInt8(0), 0b10000000) +// REMOVE_END + +// STEP_START bitop_diff1 +await client.bitOp("DIFF1", "R", ["A", "B", "C"]) +const res13 = await client.get("R") +console.log(res13.readUInt8(0).toString(2).padStart(8, '0')) +// >>> 00100101 +// STEP_END +// REMOVE_START +assert.equal(res13.readUInt8(0), 0b00100101) +// REMOVE_END + +// STEP_START bitop_andor +await client.bitOp("ANDOR", "R", ["A", "B", "C"]) +const res14 = await client.get("R") +console.log(res14.readUInt8(0).toString(2).padStart(8, '0')) +// >>> 01011000 +// STEP_END +// 
REMOVE_START +assert.equal(res14.readUInt8(0), 0b01011000) +// REMOVE_END + +// STEP_START bitop_one +await client.bitOp("ONE", "R", ["A", "B", "C"]) +const res15 = await client.get("R") +console.log(res15.readUInt8(0).toString(2).padStart(8, '0')) +// >>> 10100101 +// STEP_END +// REMOVE_START +assert.equal(res15.readUInt8(0), 0b10100101) + +await client.close(); +// REMOVE_END \ No newline at end of file diff --git a/doctests/dt-bloom.js b/doctests/dt-bloom.js new file mode 100644 index 00000000000..d065937e763 --- /dev/null +++ b/doctests/dt-bloom.js @@ -0,0 +1,46 @@ +// EXAMPLE: bf_tutorial +// HIDE_START +import assert from 'assert'; +import { createClient } from 'redis'; + +const client = createClient(); +await client.connect(); +// HIDE_END + +// REMOVE_START +await client.flushDb(); +// REMOVE_END + +// STEP_START bloom +const res1 = await client.bf.reserve('bikes:models', 0.01, 1000); +console.log(res1); // >>> OK + +const res2 = await client.bf.add('bikes:models', 'Smoky Mountain Striker'); +console.log(res2); // >>> true + +const res3 = await client.bf.exists('bikes:models', 'Smoky Mountain Striker'); +console.log(res3); // >>> true + +const res4 = await client.bf.mAdd('bikes:models', [ + 'Rocky Mountain Racer', + 'Cloudy City Cruiser', + 'Windy City Wippet' +]); +console.log(res4); // >>> [true, true, true] + +const res5 = await client.bf.mExists('bikes:models', [ + 'Rocky Mountain Racer', + 'Cloudy City Cruiser', + 'Windy City Wippet' +]); +console.log(res5); // >>> [true, true, true] +// STEP_END + +// REMOVE_START +assert.equal(res1, 'OK') +assert.equal(res2, true) +assert.equal(res3, true) +assert.deepEqual(res4, [true, true, true]) +assert.deepEqual(res5, [true, true, true]) +await client.close(); +// REMOVE_END diff --git a/doctests/dt-cms.js b/doctests/dt-cms.js new file mode 100644 index 00000000000..b0d9fa68469 --- /dev/null +++ b/doctests/dt-cms.js @@ -0,0 +1,50 @@ +// EXAMPLE: cms_tutorial +// HIDE_START +import assert from 'assert'; +import 
{ createClient } from 'redis'; + +const client = createClient(); +await client.connect(); +// HIDE_END + +// REMOVE_START +await client.flushDb(); +// REMOVE_END + +// STEP_START cms +const res1 = await client.cms.initByProb('bikes:profit', 0.001, 0.002); +console.log(res1); // >>> OK + +const res2 = await client.cms.incrBy('bikes:profit', { + item: 'Smoky Mountain Striker', + incrementBy: 100 +}); +console.log(res2); // >>> [100] + +const res3 = await client.cms.incrBy('bikes:profit', [ + { + item: 'Rocky Mountain Racer', + incrementBy: 200 + }, + { + item: 'Cloudy City Cruiser', + incrementBy: 150 + } +]); +console.log(res3); // >>> [200, 150] + +const res4 = await client.cms.query('bikes:profit', 'Smoky Mountain Striker'); +console.log(res4); // >>> [100] + +const res5 = await client.cms.info('bikes:profit'); +console.log(res5.width, res5.depth, res5.count); // >>> 2000 9 450 +// STEP_END + +// REMOVE_START +assert.equal(res1, 'OK') +assert.deepEqual(res2, [100]) +assert.deepEqual(res3, [200, 150]) +assert.deepEqual(res4, [100]) +assert.deepEqual(res5, { width: 2000, depth: 9, count: 450 }) +await client.close(); +// REMOVE_END \ No newline at end of file diff --git a/doctests/dt-cuckoo.js b/doctests/dt-cuckoo.js new file mode 100644 index 00000000000..4b11bca2345 --- /dev/null +++ b/doctests/dt-cuckoo.js @@ -0,0 +1,38 @@ +// EXAMPLE: cuckoo_tutorial +// HIDE_START +import assert from 'assert'; +import { createClient } from 'redis'; + +const client = createClient(); +await client.connect(); +// HIDE_END + +// REMOVE_START +await client.flushDb(); +// REMOVE_END + +// STEP_START cuckoo +const res1 = await client.cf.reserve('bikes:models', 1000000); +console.log(res1); // >>> OK + +const res2 = await client.cf.add('bikes:models', 'Smoky Mountain Striker'); +console.log(res2); // >>> true + +const res3 = await client.cf.exists('bikes:models', 'Smoky Mountain Striker'); +console.log(res3); // >>> true + +const res4 = await client.cf.exists('bikes:models', 'Terrible 
Bike Name'); +console.log(res4); // >>> false + +const res5 = await client.cf.del('bikes:models', 'Smoky Mountain Striker'); +console.log(res5); // >>> true +// STEP_END + +// REMOVE_START +assert.equal(res1, 'OK') +assert.equal(res2, true) +assert.equal(res3, true) +assert.equal(res4, false) +assert.equal(res5, true) +await client.close(); +// REMOVE_END diff --git a/doctests/dt-geo.js b/doctests/dt-geo.js new file mode 100644 index 00000000000..7ec9376a8a7 --- /dev/null +++ b/doctests/dt-geo.js @@ -0,0 +1,59 @@ +// EXAMPLE: geo_tutorial +// HIDE_START +import assert from 'assert'; +import { createClient } from 'redis'; + +const client = createClient(); +await client.connect(); +// HIDE_END + +// REMOVE_START +await client.del('bikes:rentable') +// REMOVE_END + +// STEP_START geoAdd +const res1 = await client.geoAdd('bikes:rentable', { + longitude: -122.27652, + latitude: 37.805186, + member: 'station:1' +}); +console.log(res1) // 1 + +const res2 = await client.geoAdd('bikes:rentable', { + longitude: -122.2674626, + latitude: 37.8062344, + member: 'station:2' +}); +console.log(res2) // 1 + +const res3 = await client.geoAdd('bikes:rentable', { + longitude: -122.2469854, + latitude: 37.8104049, + member: 'station:3' +}) +console.log(res3) // 1 +// STEP_END + +// REMOVE_START +assert.equal(res1, 1); +assert.equal(res2, 1); +assert.equal(res3, 1); +// REMOVE_END + +// STEP_START geoSearch +const res4 = await client.geoSearch( + 'bikes:rentable', { + longitude: -122.27652, + latitude: 37.805186, + }, + { radius: 5, + unit: 'km' + } +); +console.log(res4) // ['station:1', 'station:2', 'station:3'] +// STEP_END + +// REMOVE_START +assert.deepEqual(res4, ['station:1', 'station:2', 'station:3']); +// REMOVE_END +await client.close() diff --git a/doctests/dt-hash.js b/doctests/dt-hash.js new file mode 100644 index 00000000000..e77d6fd7b95 --- /dev/null +++ b/doctests/dt-hash.js @@ -0,0 +1,98 @@ +// EXAMPLE: hash_tutorial +// HIDE_START +import assert from 'assert'; +import 
{ createClient } from 'redis'; + +const client = createClient(); +await client.connect(); +// HIDE_END +// STEP_START set_get_all +const res1 = await client.hSet( + 'bike:1', + { + 'model': 'Deimos', + 'brand': 'Ergonom', + 'type': 'Enduro bikes', + 'price': 4972, + } +) +console.log(res1) // 4 + +const res2 = await client.hGet('bike:1', 'model') +console.log(res2) // 'Deimos' + +const res3 = await client.hGet('bike:1', 'price') +console.log(res3) // '4972' + +const res4 = await client.hGetAll('bike:1') +console.log(res4) +/* +{ + brand: 'Ergonom', + model: 'Deimos', + price: '4972', + type: 'Enduro bikes' +} +*/ +// STEP_END + +// REMOVE_START +assert.equal(res1, 4); +assert.equal(res2, 'Deimos'); +assert.equal(res3, '4972'); +assert.deepEqual(res4, { + model: 'Deimos', + brand: 'Ergonom', + type: 'Enduro bikes', + price: '4972' +}); +// REMOVE_END + +// STEP_START hmGet +const res5 = await client.hmGet('bike:1', ['model', 'price']) +console.log(res5) // ['Deimos', '4972'] +// STEP_END + +// REMOVE_START +assert.deepEqual(Object.values(res5), ['Deimos', '4972']) +// REMOVE_END + +// STEP_START hIncrBy +const res6 = await client.hIncrBy('bike:1', 'price', 100) +console.log(res6) // 5072 +const res7 = await client.hIncrBy('bike:1', 'price', -100) +console.log(res7) // 4972 +// STEP_END + +// REMOVE_START +assert.equal(res6, 5072) +assert.equal(res7, 4972) +// REMOVE_END + +// STEP_START hIncrBy_hGet_hMget +const res11 = await client.hIncrBy('bike:1:stats', 'rides', 1) +console.log(res11) // 1 +const res12 = await client.hIncrBy('bike:1:stats', 'rides', 1) +console.log(res12) // 2 +const res13 = await client.hIncrBy('bike:1:stats', 'rides', 1) +console.log(res13) // 3 +const res14 = await client.hIncrBy('bike:1:stats', 'crashes', 1) +console.log(res14) // 1 +const res15 = await client.hIncrBy('bike:1:stats', 'owners', 1) +console.log(res15) // 1 +const res16 = await client.hGet('bike:1:stats', 'rides') +console.log(res16) // 3 +const res17 = await 
client.hmGet('bike:1:stats', ['crashes', 'owners']) +console.log(res17) // ['1', '1'] +// STEP_END + +// REMOVE_START +assert.equal(res11, 1); +assert.equal(res12, 2); +assert.equal(res13, 3); +assert.equal(res14, 1); +assert.equal(res15, 1); +assert.equal(res16, '3'); +assert.deepEqual(res17, ['1', '1']); +await client.close(); +// REMOVE_END \ No newline at end of file diff --git a/doctests/dt-hll.js b/doctests/dt-hll.js new file mode 100644 index 00000000000..762ce5db15e --- /dev/null +++ b/doctests/dt-hll.js @@ -0,0 +1,38 @@ +// EXAMPLE: hll_tutorial +// HIDE_START +import assert from 'assert'; +import { createClient } from 'redis'; + +const client = createClient(); +await client.connect(); +// HIDE_END + +// REMOVE_START +await client.flushDb(); +// REMOVE_END + +// STEP_START pfadd +const res1 = await client.pfAdd('bikes', ['Hyperion', 'Deimos', 'Phoebe', 'Quaoar']); +console.log(res1); // >>> 1 + +const res2 = await client.pfCount('bikes'); +console.log(res2); // >>> 4 + +const res3 = await client.pfAdd('commuter_bikes', ['Salacia', 'Mimas', 'Quaoar']); +console.log(res3); // >>> 1 + +const res4 = await client.pfMerge('all_bikes', ['bikes', 'commuter_bikes']); +console.log(res4); // >>> OK + +const res5 = await client.pfCount('all_bikes'); +console.log(res5); // >>> 6 +// STEP_END + +// REMOVE_START +assert.equal(res1, 1) +assert.equal(res2, 4) +assert.equal(res3, 1) +assert.equal(res4, 'OK') +assert.equal(res5, 6) +await client.close(); +// REMOVE_END diff --git a/doctests/dt-json.js b/doctests/dt-json.js new file mode 100644 index 00000000000..00578f48f33 --- /dev/null +++ b/doctests/dt-json.js @@ -0,0 +1,425 @@ +// EXAMPLE: json_tutorial +// HIDE_START +import assert from 'assert'; +import { + createClient +} from 'redis'; + +const client = await createClient(); +await client.connect(); +// HIDE_END +// REMOVE_START +await client.flushDb(); +// REMOVE_END + +// STEP_START set_get +const res1 = await client.json.set("bike", "$", '"Hyperion"'); 
+console.log(res1); // OK + +const res2 = await client.json.get("bike", { path: "$" }); +console.log(res2); // ['"Hyperion"'] + +const res3 = await client.json.type("bike", { path: "$" }); +console.log(res3); // [ 'string' ] +// STEP_END + +// REMOVE_START +assert.deepEqual(res2, ['"Hyperion"']); +// REMOVE_END + +// STEP_START str +const res4 = await client.json.strLen("bike", { path: "$" }); +console.log(res4) // [10] + +const res5 = await client.json.strAppend("bike", '" (Enduro bikes)"'); +console.log(res5) // 27 + +const res6 = await client.json.get("bike", { path: "$" }); +console.log(res6) // ['"Hyperion"" (Enduro bikes)"'] +// STEP_END + +// REMOVE_START +assert.deepEqual(res6, ['"Hyperion"" (Enduro bikes)"']); +// REMOVE_END + +// STEP_START num +const res7 = await client.json.set("crashes", "$", 0); +console.log(res7) // OK + +const res8 = await client.json.numIncrBy("crashes", "$", 1); +console.log(res8) // [1] + +const res9 = await client.json.numIncrBy("crashes", "$", 1.5); +console.log(res9) // [2.5] + +const res10 = await client.json.numIncrBy("crashes", "$", -0.75); +console.log(res10) // [1.75] +// STEP_END + +// REMOVE_START +assert.deepEqual(res10, [1.75]) +// REMOVE_END + +// STEP_START arr +const res11 = await client.json.set("newbike", "$", ["Deimos", {"crashes": 0 }, null]); +console.log(res11); // OK + +const res12 = await client.json.get("newbike", { path: "$" }); +console.log(res12); // [[ 'Deimos', { crashes: 0 }, null ]] + +const res13 = await client.json.get("newbike", { path: "$[1].crashes" }); +console.log(res13); // [0] + +const res14 = await client.json.del("newbike", { path: "$.[-1]"} ); +console.log(res14); // 1 + +const res15 = await client.json.get("newbike", { path: "$" }); +console.log(res15); // [[ 'Deimos', { crashes: 0 } ]] +// STEP_END + +// REMOVE_START +assert.deepEqual(res15, [["Deimos", { + "crashes": 0 +}]]); +// REMOVE_END + +// STEP_START arr2 +const res16 = await client.json.set("riders", "$", []); 
+console.log(res16); // OK + +const res17 = await client.json.arrAppend("riders", "$", "Norem"); +console.log(res17); // [1] + +const res18 = await client.json.get("riders", { path: "$" }); +console.log(res18); // [[ 'Norem' ]] + +const res19 = await client.json.arrInsert("riders", "$", 1, "Prickett", "Royse", "Castilla"); +console.log(res19); // [4] + +const res20 = await client.json.get("riders", { path: "$" }); +console.log(res20); // [[ 'Norem', 'Prickett', 'Royse', 'Castilla' ]] + +const res21 = await client.json.arrTrim("riders", "$", 1, 1); +console.log(res21); // [1] + +const res22 = await client.json.get("riders", { path: "$" }); +console.log(res22); // [[ 'Prickett' ]] + +const res23 = await client.json.arrPop("riders", { path: "$" }); +console.log(res23); // [ 'Prickett' ] + +const res24 = await client.json.arrPop("riders", { path: "$" }); +console.log(res24); // [null] +// STEP_END + +// REMOVE_START +assert.deepEqual(res24, [null]); +// REMOVE_END + +// STEP_START obj +const res25 = await client.json.set( + "bike:1", "$", { + "model": "Deimos", + "brand": "Ergonom", + "price": 4972 + } +); +console.log(res25); // OK + +const res26 = await client.json.objLen("bike:1", { path: "$" }); +console.log(res26); // [3] + +const res27 = await client.json.objKeys("bike:1", { path: "$" }); +console.log(res27); // [['model', 'brand', 'price']] +// STEP_END + +// REMOVE_START +assert.deepEqual(res27, [ + ["model", "brand", "price"] +]); +// REMOVE_END + +// STEP_START set_bikes +// HIDE_START +const inventoryJSON = { + "inventory": { + "mountain_bikes": [{ + "id": "bike:1", + "model": "Phoebe", + "description": "This is a mid-travel trail slayer that is a fantastic daily driver or one bike quiver. The Shimano Claris 8-speed groupset gives plenty of gear range to tackle hills and there\u2019s room for mudguards and a rack too. 
This is the bike for the rider who wants trail manners with low fuss ownership.", + "price": 1920, + "specs": { + "material": "carbon", + "weight": 13.1 + }, + "colors": ["black", "silver"], + }, + { + "id": "bike:2", + "model": "Quaoar", + "description": "Redesigned for the 2020 model year, this bike impressed our testers and is the best all-around trail bike we've ever tested. The Shimano gear system effectively does away with an external cassette, so is super low maintenance in terms of wear and teaawait client. All in all it's an impressive package for the price, making it very competitive.", + "price": 2072, + "specs": { + "material": "aluminium", + "weight": 7.9 + }, + "colors": ["black", "white"], + }, + { + "id": "bike:3", + "model": "Weywot", + "description": "This bike gives kids aged six years and older a durable and uberlight mountain bike for their first experience on tracks and easy cruising through forests and fields. A set of powerful Shimano hydraulic disc brakes provide ample stopping ability. If you're after a budget option, this is one of the best bikes you could get.", + "price": 3264, + "specs": { + "material": "alloy", + "weight": 13.8 + }, + }, + ], + "commuter_bikes": [{ + "id": "bike:4", + "model": "Salacia", + "description": "This bike is a great option for anyone who just wants a bike to get about on With a slick-shifting Claris gears from Shimano\u2019s, this is a bike which doesn\u2019t break the bank and delivers craved performance. It\u2019s for the rider who wants both efficiency and capability.", + "price": 1475, + "specs": { + "material": "aluminium", + "weight": 16.6 + }, + "colors": ["black", "silver"], + }, + { + "id": "bike:5", + "model": "Mimas", + "description": "A real joy to ride, this bike got very high scores in last years Bike of the year report. 
The carefully crafted 50-34 tooth chainset and 11-32 tooth cassette give an easy-on-the-legs bottom gear for climbing, and the high-quality Vittoria Zaffiro tires give balance and grip.It includes a low-step frame , our memory foam seat, bump-resistant shocks and conveniently placed thumb throttle. Put it all together and you get a bike that helps redefine what can be done for this price.", + "price": 3941, + "specs": { + "material": "alloy", + "weight": 11.6 + }, + }, + ], + } +}; +// HIDE_END + +const res28 = await client.json.set("bikes:inventory", "$", inventoryJSON); +console.log(res28); // OK +// STEP_END + +// STEP_START get_bikes +const res29 = await client.json.get("bikes:inventory", { + path: "$.inventory.*" +}); +console.log(res29); +/* +[ + [ + { + id: 'bike:1', + model: 'Phoebe', + description: 'This is a mid-travel trail slayer that is a fantastic daily driver or one bike quiver. The Shimano Claris 8-speed groupset gives plenty of gear range to tackle hills and there’s room for mudguards and a rack too. This is the bike for the rider who wants trail manners with low fuss ownership.', + price: 1920, + specs: [Object], + colors: [Array] + }, + { + id: 'bike:2', + model: 'Quaoar', + description: "Redesigned for the 2020 model year, this bike impressed our testers and is the best all-around trail bike we've ever tested. The Shimano gear system effectively does away with an external cassette, so is super low maintenance in terms of wear and teaawait client. All in all it's an impressive package for the price, making it very competitive.", + price: 2072, + specs: [Object], + colors: [Array] + }, + { + id: 'bike:3', + model: 'Weywot', + description: "This bike gives kids aged six years and older a durable and uberlight mountain bike for their first experience on tracks and easy cruising through forests and fields. A set of powerful Shimano hydraulic disc brakes provide ample stopping ability. 
If you're after a budget option, this is one of the best bikes you could get.", + price: 3264, + specs: [Object] + } + ], + [ + { + id: 'bike:4', + model: 'Salacia', + description: 'This bike is a great option for anyone who just wants a bike to get about on With a slick-shifting Claris gears from Shimano’s, this is a bike which doesn’t break the bank and delivers craved performance. It’s for the rider who wants both efficiency and capability.', + price: 1475, + specs: [Object], + colors: [Array] + }, + { + id: 'bike:5', + model: 'Mimas', + description: 'A real joy to ride, this bike got very high scores in last years Bike of the year report. The carefully crafted 50-34 tooth chainset and 11-32 tooth cassette give an easy-on-the-legs bottom gear for climbing, and the high-quality Vittoria Zaffiro tires give balance and grip.It includes a low-step frame , our memory foam seat, bump-resistant shocks and conveniently placed thumb throttle. Put it all together and you get a bike that helps redefine what can be done for this price.', + price: 3941, + specs: [Object] + } + ] +] +*/ +// STEP_END + +// STEP_START get_mtnbikes +const res30 = await client.json.get("bikes:inventory", { + path: "$.inventory.mountain_bikes[*].model" +}); +console.log(res30); // ['Phoebe', 'Quaoar', 'Weywot'] + +const res31 = await client.json.get("bikes:inventory", { + path: '$.inventory["mountain_bikes"][*].model' +}); +console.log(res31); // ['Phoebe', 'Quaoar', 'Weywot'] + +const res32 = await client.json.get("bikes:inventory", { + path: "$..mountain_bikes[*].model" +}); +console.log(res32); // ['Phoebe', 'Quaoar', 'Weywot'] +// STEP_END + +// REMOVE_START +assert.deepEqual(res30, ["Phoebe", "Quaoar", "Weywot"]); +assert.deepEqual(res31, ["Phoebe", "Quaoar", "Weywot"]); +assert.deepEqual(res32, ["Phoebe", "Quaoar", "Weywot"]); +// REMOVE_END + +// STEP_START get_models +const res33 = await client.json.get("bikes:inventory", { + path: "$..model" +}); +console.log(res33); // ['Phoebe', 
'Quaoar', 'Weywot', 'Salacia', 'Mimas'] +// STEP_END + +// REMOVE_START +assert.deepEqual(res33, ["Phoebe", "Quaoar", "Weywot", "Salacia", "Mimas"]); +// REMOVE_END + +// STEP_START get2mtnbikes +const res34 = await client.json.get("bikes:inventory", { + path: "$..mountain_bikes[0:2].model" +}); +console.log(res34); // ['Phoebe', 'Quaoar'] +// STEP_END + +// REMOVE_START +assert.deepEqual(res34, ["Phoebe", "Quaoar"]); +// REMOVE_END + +// STEP_START filter1 +const res35 = await client.json.get("bikes:inventory", { + path: "$..mountain_bikes[?(@.price < 3000 && @.specs.weight < 10)]" +}); +console.log(res35); +/* +[ + { + id: 'bike:2', + model: 'Quaoar', + description: "Redesigned for the 2020 model year, this bike impressed our testers and is the best all-around trail bike we've ever tested. The Shimano gear system effectively does away with an external cassette, so is super low maintenance in terms of wear and teaawait client. All in all it's an impressive package for the price, making it very competitive.", + price: 2072, + specs: { material: 'aluminium', weight: 7.9 }, + colors: [ 'black', 'white' ] + } +] +*/ +// STEP_END + +// STEP_START filter2 +// names of bikes made from an alloy +const res36 = await client.json.get("bikes:inventory", { + path: "$..[?(@.specs.material == 'alloy')].model" +}); +console.log(res36); // ['Weywot', 'Mimas'] +// STEP_END +// REMOVE_START +assert.deepEqual(res36, ["Weywot", "Mimas"]); +// REMOVE_END + +// STEP_START filter3 +const res37 = await client.json.get("bikes:inventory", { + path: "$..[?(@.specs.material =~ '(?i)al')].model" +}); +console.log(res37); // ['Quaoar', 'Weywot', 'Salacia', 'Mimas'] +// STEP_END + +// REMOVE_START +assert.deepEqual(res37, ["Quaoar", "Weywot", "Salacia", "Mimas"]); +// REMOVE_END + +// STEP_START filter4 +const res37a = await client.json.set( + 'bikes:inventory', + '$.inventory.mountain_bikes[0].regex_pat', + '(?i)al' +); + +const res37b = await client.json.set( + 'bikes:inventory', + 
'$.inventory.mountain_bikes[1].regex_pat', + '(?i)al' +); + +const res37c = await client.json.set( + 'bikes:inventory', + '$.inventory.mountain_bikes[2].regex_pat', + '(?i)al' +); + +const res37d = await client.json.get( + 'bikes:inventory', + { path: '$.inventory.mountain_bikes[?(@.specs.material =~ @.regex_pat)].model' } +); +console.log(res37d); // ['Quaoar', 'Weywot'] +// STEP_END + +// STEP_START update_bikes +const res38 = await client.json.get("bikes:inventory", { + path: "$..price" +}); +console.log(res38); // [1920, 2072, 3264, 1475, 3941] + +const res39 = await client.json.numIncrBy("bikes:inventory", "$..price", -100); +console.log(res39); // [1820, 1972, 3164, 1375, 3841] + +const res40 = await client.json.numIncrBy("bikes:inventory", "$..price", 100); +console.log(res40); // [1920, 2072, 3264, 1475, 3941] +// STEP_END + +// REMOVE_START +assert.deepEqual(res40.sort(), [1475, 1920, 2072, 3264, 3941]); +// REMOVE_END + +// STEP_START update_filters1 +const res40a = await client.json.set( + 'bikes:inventory', + '$.inventory.*[?(@.price<2000)].price', + 1500 +); + +// Get all prices from the inventory +const res40b = await client.json.get( + 'bikes:inventory', + { path: "$..price" } +); +console.log(res40b); // [1500, 2072, 3264, 1500, 3941] +// STEP_END + +// STEP_START update_filters2 +const res41 = await client.json.arrAppend( + "bikes:inventory", "$.inventory.*[?(@.price<2000)].colors", "pink" +); +console.log(res41); // [3, 3] + +const res42 = await client.json.get("bikes:inventory", { + path: "$..[*].colors" +}); +console.log(res42); // [['black', 'silver', 'pink'], ['black', 'white'], ['black', 'silver', 'pink']] +// STEP_END + +// REMOVE_START +assert.deepEqual(res42, [ + ["black", "silver", "pink"], + ["black", "white"], + ["black", "silver", "pink"], +]); +await client.close(); +// REMOVE_END diff --git a/doctests/dt-list.js b/doctests/dt-list.js new file mode 100644 index 00000000000..a2d0fb86c66 --- /dev/null +++ b/doctests/dt-list.js @@ -0,0 
+1,329 @@ +// EXAMPLE: list_tutorial +// HIDE_START +import assert from 'assert'; +import { createClient } from 'redis'; + +const client = createClient(); +await client.connect(); +// HIDE_END +// REMOVE_START +await client.del('bikes:repairs'); +await client.del('bikes:finished'); +// REMOVE_END + +// STEP_START queue +const res1 = await client.lPush('bikes:repairs', 'bike:1'); +console.log(res1); // 1 + +const res2 = await client.lPush('bikes:repairs', 'bike:2'); +console.log(res2); // 2 + +const res3 = await client.rPop('bikes:repairs'); +console.log(res3); // bike:1 + +const res4 = await client.rPop('bikes:repairs'); +console.log(res4); // bike:2 +// STEP_END + +// REMOVE_START +assert.equal(res1, 1); +assert.equal(res2, 2); +assert.equal(res3, 'bike:1'); +assert.equal(res4, 'bike:2'); +// REMOVE_END + +// STEP_START stack +const res5 = await client.lPush('bikes:repairs', 'bike:1'); +console.log(res5); // 1 + +const res6 = await client.lPush('bikes:repairs', 'bike:2'); +console.log(res6); // 2 + +const res7 = await client.lPop('bikes:repairs'); +console.log(res7); // bike:2 + +const res8 = await client.lPop('bikes:repairs'); +console.log(res8); // bike:1 +// STEP_END + +// REMOVE_START +assert.equal(res5, 1); +assert.equal(res6, 2); +assert.equal(res7, 'bike:2'); +assert.equal(res8, 'bike:1'); +// REMOVE_END + +// STEP_START lLen +const res9 = await client.lLen('bikes:repairs'); +console.log(res9); // 0 +// STEP_END + +// REMOVE_START +assert.equal(res9, 0); +// REMOVE_END + +// STEP_START lMove_lRange +const res10 = await client.lPush('bikes:repairs', 'bike:1'); +console.log(res10); // 1 + +const res11 = await client.lPush('bikes:repairs', 'bike:2'); +console.log(res11); // 2 + +const res12 = await client.lMove('bikes:repairs', 'bikes:finished', 'LEFT', 'LEFT'); +console.log(res12); // 'bike:2' + +const res13 = await client.lRange('bikes:repairs', 0, -1); +console.log(res13); // ['bike:1'] + +const res14 = await client.lRange('bikes:finished', 0, -1); 
+console.log(res14); // ['bike:2'] +// STEP_END + +// REMOVE_START +assert.equal(res10, 1); +assert.equal(res11, 2); +assert.equal(res12, 'bike:2'); +assert.deepEqual(res13, ['bike:1']); +assert.deepEqual(res14, ['bike:2']); +await client.del('bikes:repairs'); +// REMOVE_END + +// STEP_START lPush_rPush +const res15 = await client.rPush('bikes:repairs', 'bike:1'); +console.log(res15); // 1 + +const res16 = await client.rPush('bikes:repairs', 'bike:2'); +console.log(res16); // 2 + +const res17 = await client.lPush('bikes:repairs', 'bike:important_bike'); +console.log(res17); // 3 + +const res18 = await client.lRange('bikes:repairs', 0, -1); +console.log(res18); // ['bike:important_bike', 'bike:1', 'bike:2'] +// STEP_END + +// REMOVE_START +assert.equal(res15, 1); +assert.equal(res16, 2); +assert.equal(res17, 3); +assert.deepEqual(res18, ['bike:important_bike', 'bike:1', 'bike:2']); +await client.del('bikes:repairs'); +// REMOVE_END + +// STEP_START variadic +const res19 = await client.rPush('bikes:repairs', ['bike:1', 'bike:2', 'bike:3']); +console.log(res19); // 3 + +const res20 = await client.lPush( + 'bikes:repairs', ['bike:important_bike', 'bike:very_important_bike'] +); +console.log(res20); // 5 + +const res21 = await client.lRange('bikes:repairs', 0, -1); +console.log(res21); // ['bike:very_important_bike', 'bike:important_bike', 'bike:1', 'bike:2', 'bike:3'] +// STEP_END + +// REMOVE_START +assert.equal(res19, 3); +assert.equal(res20, 5); +assert.deepEqual(res21, [ + 'bike:very_important_bike', + 'bike:important_bike', + 'bike:1', + 'bike:2', + 'bike:3', +]); +await client.del('bikes:repairs'); +// REMOVE_END + +// STEP_START lPop_rPop +const res22 = await client.rPush('bikes:repairs', ['bike:1', 'bike:2', 'bike:3']); +console.log(res22); // 3 + +const res23 = await client.rPop('bikes:repairs'); +console.log(res23); // 'bike:3' + +const res24 = await client.lPop('bikes:repairs'); +console.log(res24); // 'bike:1' + +const res25 = await 
client.rPop('bikes:repairs'); +console.log(res25); // 'bike:2' + +const res26 = await client.rPop('bikes:repairs'); +console.log(res26); // null +// STEP_END + +// REMOVE_START +assert.deepEqual(res22, 3); +assert.equal(res23, 'bike:3'); +assert.equal(res24, 'bike:1'); +assert.equal(res25, 'bike:2'); +assert.equal(res26, null); +// REMOVE_END + +// STEP_START lTrim +const res27 = await client.lPush( + 'bikes:repairs', ['bike:1', 'bike:2', 'bike:3', 'bike:4', 'bike:5'] +); +console.log(res27); // 5 + +const res28 = await client.lTrim('bikes:repairs', 0, 2); +console.log(res28); // OK + +const res29 = await client.lRange('bikes:repairs', 0, -1); +console.log(res29); // ['bike:5', 'bike:4', 'bike:3'] +// STEP_END + +// REMOVE_START +assert.equal(res27, 5); +assert.equal(res28, 'OK'); +assert.deepEqual(res29, ['bike:5', 'bike:4', 'bike:3']); +await client.del('bikes:repairs'); +// REMOVE_END + +// STEP_START lTrim_end_of_list +const res27eol = await client.rPush( + 'bikes:repairs', ['bike:1', 'bike:2', 'bike:3', 'bike:4', 'bike:5'] +); +console.log(res27eol); // 5 + +const res28eol = await client.lTrim('bikes:repairs', -3, -1); +console.log(res28eol); // 'OK' + +const res29eol = await client.lRange('bikes:repairs', 0, -1); +console.log(res29eol); // ['bike:3', 'bike:4', 'bike:5'] +// STEP_END + +// REMOVE_START +assert.equal(res27eol, 5); +assert.equal(res28eol, 'OK'); +assert.deepEqual(res29eol, ['bike:3', 'bike:4', 'bike:5']); +await client.del('bikes:repairs'); +// REMOVE_END + +// STEP_START brPop +const res31 = await client.rPush('bikes:repairs', ['bike:1', 'bike:2']); +console.log(res31); // 2 + +const res32 = await client.brPop('bikes:repairs', 1); +console.log(res32); // { key: 'bikes:repairs', element: 'bike:2' } + +const res33 = await client.brPop('bikes:repairs', 1); +console.log(res33); // { key: 'bikes:repairs', element: 'bike:1' } + +const res34 = await client.brPop('bikes:repairs', 1); +console.log(res34); // null +// STEP_END + +// REMOVE_START 
+assert.equal(res31, 2); +assert.deepEqual(res32, { key: 'bikes:repairs', element: 'bike:2' }); +assert.deepEqual(res33, { key: 'bikes:repairs', element: 'bike:1' }); +assert.equal(res34, null); +await client.del('bikes:repairs'); +await client.del('new_bikes'); +// REMOVE_END + +// STEP_START rule_1 +const res35 = await client.del('new_bikes'); +console.log(res35); // 0 + +const res36 = await client.lPush('new_bikes', ['bike:1', 'bike:2', 'bike:3']); +console.log(res36); // 3 +// STEP_END + +// REMOVE_START +assert.equal(res35, 0); +assert.equal(res36, 3); +await client.del('new_bikes'); +// REMOVE_END + +// STEP_START rule_1.1 +const res37 = await client.set('new_bikes', 'bike:1'); +console.log(res37); // 'OK' + +const res38 = await client.type('new_bikes'); +console.log(res38); // 'string' + +try { + const res39 = await client.lPush('new_bikes', ['bike:2', 'bike:3']); + // Throws: + // [SimpleError: WRONGTYPE Operation against a key holding the wrong kind of value] +} +catch(e){ + console.log(e); +} +// STEP_END + +// REMOVE_START +assert.equal(res37, 'OK'); +assert.equal(res38, 'string'); +await client.del('new_bikes'); +// REMOVE_END + +// STEP_START rule_2 +const res39a = await client.lPush('bikes:repairs', ['bike:1', 'bike:2', 'bike:3']); +console.log(res39a); // 3 + +const res40 = await client.exists('bikes:repairs') +console.log(res40); // 1 + +const res41 = await client.lPop('bikes:repairs'); +console.log(res41); // 'bike:3' + +const res42 = await client.lPop('bikes:repairs'); +console.log(res42); // 'bike:2' + +const res43 = await client.lPop('bikes:repairs'); +console.log(res43); // 'bike:1' + +const res44 = await client.exists('bikes:repairs'); +console.log(res44); // 0 +// STEP_END + +// REMOVE_START +assert.equal(res40, 1); +assert.equal(res41, 'bike:3'); +assert.equal(res42, 'bike:2'); +assert.equal(res43, 'bike:1'); +assert.equal(res44, 0); +await client.del('bikes:repairs'); +// REMOVE_END + +// STEP_START rule_3 +const res45 = await 
client.del('bikes:repairs'); +console.log(res45); // 0 + +const res46 = await client.lLen('bikes:repairs'); +console.log(res46); // 0 + +const res47 = await client.lPop('bikes:repairs'); +console.log(res47); // null +// STEP_END + +// REMOVE_START +assert.equal(res45, 0); +assert.equal(res46, 0); +assert.equal(res47, null); +// REMOVE_END + +// STEP_START lTrim.1 +const res48 = await client.lPush( + 'bikes:repairs', ['bike:1', 'bike:2', 'bike:3', 'bike:4', 'bike:5'] +); +console.log(res48); // 5 + +const res49 = await client.lTrim('bikes:repairs', 0, 2); +console.log(res49); // 'OK' + +const res50 = await client.lRange('bikes:repairs', 0, -1); +console.log(res50); // ['bike:5', 'bike:4', 'bike:3'] +// STEP_END + +// REMOVE_START +assert.equal(res48, 5); +assert.equal(res49, 'OK'); +assert.deepEqual(res50, ['bike:5', 'bike:4', 'bike:3']); +await client.del('bikes:repairs'); +await client.close(); +// REMOVE_END \ No newline at end of file diff --git a/doctests/dt-set.js b/doctests/dt-set.js new file mode 100644 index 00000000000..e3d34096399 --- /dev/null +++ b/doctests/dt-set.js @@ -0,0 +1,176 @@ +// EXAMPLE: sets_tutorial +// HIDE_START +import assert from 'assert'; +import { createClient } from 'redis'; + +const client = createClient(); +await client.connect(); +// HIDE_END +// REMOVE_START +await client.del('bikes:racing:france') +await client.del('bikes:racing:usa') +// REMOVE_END + +// STEP_START sAdd +const res1 = await client.sAdd('bikes:racing:france', 'bike:1') +console.log(res1) // >>> 1 + +const res2 = await client.sAdd('bikes:racing:france', 'bike:1') +console.log(res2) // >>> 0 +const res3 = await client.sAdd('bikes:racing:france', ['bike:2', 'bike:3']) +console.log(res3) // >>> 2 +const res4 = await client.sAdd('bikes:racing:usa', ['bike:1', 'bike:4']) +console.log(res4) // >>> 2 +// STEP_END + +// REMOVE_START +assert.equal(res1, 1) +assert.equal(res2, 0) +assert.equal(res3, 2) +assert.equal(res4, 2) +// REMOVE_END + +// STEP_START sIsMember +// 
HIDE_START +await client.del('bikes:racing:france') +await client.del('bikes:racing:usa') +await client.sAdd('bikes:racing:france', ['bike:1', 'bike:2', 'bike:3']) +await client.sAdd('bikes:racing:usa', ['bike:1', 'bike:4']) +// HIDE_END +const res5 = await client.sIsMember('bikes:racing:usa', 'bike:1') +console.log(res5) // >>> 1 + +const res6 = await client.sIsMember('bikes:racing:usa', 'bike:2') +console.log(res6) // >>> 0 +// STEP_END + +// REMOVE_START +assert.equal(res5, 1) +assert.equal(res6, 0) +// REMOVE_END + +// STEP_START sinster +// HIDE_START +await client.del('bikes:racing:france') +await client.del('bikes:racing:usa') +await client.sAdd('bikes:racing:france', ['bike:1', 'bike:2', 'bike:3']) +await client.sAdd('bikes:racing:usa', ['bike:1', 'bike:4']) +// HIDE_END +const res7 = await client.sInter(['bikes:racing:france', 'bikes:racing:usa']) +console.log(res7) // >>> {'bike:1'} +// STEP_END + +// REMOVE_START +assert.deepEqual(res7, [ 'bike:1' ]) +// REMOVE_END + +// STEP_START sCard +// HIDE_START +await client.del('bikes:racing:france') +await client.sAdd('bikes:racing:france', ['bike:1', 'bike:2', 'bike:3']) +// HIDE_END +const res8 = await client.sCard('bikes:racing:france') +console.log(res8) // >>> 3 +// STEP_END + +// REMOVE_START +assert.equal(res8, 3) +await client.del('bikes:racing:france') +// REMOVE_END + +// STEP_START sAdd_sMembers +const res9 = await client.sAdd('bikes:racing:france', ['bike:1', 'bike:2', 'bike:3']) +console.log(res9) // >>> 3 + +const res10 = await client.sMembers('bikes:racing:france') +console.log(res10) // >>> ['bike:1', 'bike:2', 'bike:3'] +// STEP_END + +// REMOVE_START +assert.equal(res9, 3) +assert.deepEqual(res10.sort(), ['bike:1', 'bike:2', 'bike:3']) +// REMOVE_END + +// STEP_START smIsMember +const res11 = await client.sIsMember('bikes:racing:france', 'bike:1') +console.log(res11) // >>> 1 + +const res12 = await client.smIsMember('bikes:racing:france', ['bike:2', 'bike:3', 'bike:4']) +console.log(res12) // >>> [1, 1, 
0] +// STEP_END + +// REMOVE_START +assert.equal(res11, 1) +assert.deepEqual(res12, [1, 1, 0]) +// REMOVE_END + +// STEP_START sDiff +await client.sAdd('bikes:racing:france', ['bike:1', 'bike:2', 'bike:3']) +await client.sAdd('bikes:racing:usa', ['bike:1', 'bike:4']) +const res13 = await client.sDiff(['bikes:racing:france', 'bikes:racing:usa']) +console.log(res13) // >>> [ 'bike:2', 'bike:3' ] +// STEP_END + +// REMOVE_START +assert.deepEqual(res13.sort(), ['bike:2', 'bike:3'].sort()) +await client.del('bikes:racing:france') +await client.del('bikes:racing:usa') +// REMOVE_END + +// STEP_START multisets +await client.sAdd('bikes:racing:france', ['bike:1', 'bike:2', 'bike:3']) +await client.sAdd('bikes:racing:usa', ['bike:1', 'bike:4']) +await client.sAdd('bikes:racing:italy', ['bike:1', 'bike:2', 'bike:3', 'bike:4']) + +const res14 = await client.sInter( + ['bikes:racing:france', 'bikes:racing:usa', 'bikes:racing:italy'] +) +console.log(res14) // >>> ['bike:1'] + +const res15 = await client.sUnion( + ['bikes:racing:france', 'bikes:racing:usa', 'bikes:racing:italy'] +) +console.log(res15) // >>> ['bike:1', 'bike:2', 'bike:3', 'bike:4'] + +const res16 = await client.sDiff(['bikes:racing:france', 'bikes:racing:usa', 'bikes:racing:italy']) +console.log(res16) // >>> [] + +const res17 = await client.sDiff(['bikes:racing:usa', 'bikes:racing:france']) +console.log(res17) // >>> ['bike:4'] + +const res18 = await client.sDiff(['bikes:racing:france', 'bikes:racing:usa']) +console.log(res18) // >>> ['bike:2', 'bike:3'] +// STEP_END + +// REMOVE_START +assert.deepEqual(res14, ['bike:1']) +assert.deepEqual(res15.sort(), ['bike:1', 'bike:2', 'bike:3', 'bike:4']) +assert.deepEqual(res16, []) +assert.deepEqual(res17, ['bike:4']) +assert.deepEqual(res18.sort(), ['bike:2', 'bike:3'].sort()) +await client.del('bikes:racing:france') +await client.del('bikes:racing:usa') +await client.del('bikes:racing:italy') +// REMOVE_END + +// STEP_START sRem +await 
client.sAdd('bikes:racing:france', ['bike:1', 'bike:2', 'bike:3', 'bike:4', 'bike:5']) + +const res19 = await client.sRem('bikes:racing:france', 'bike:1') +console.log(res19) // >>> 1 + +const res20 = await client.sPop('bikes:racing:france') +console.log(res20) // >>> bike:3 or other random value + +const res21 = await client.sMembers('bikes:racing:france') +console.log(res21) // >>> ['bike:2', 'bike:4', 'bike:5']; depends on previous result + +const res22 = await client.sRandMember('bikes:racing:france') +console.log(res22) // >>> bike:4 or other random value +// STEP_END + +// REMOVE_START +assert.equal(res19, 1) +await client.close() +// none of the other results are deterministic +// REMOVE_END diff --git a/doctests/dt-ss.js b/doctests/dt-ss.js new file mode 100644 index 00000000000..bba85b11580 --- /dev/null +++ b/doctests/dt-ss.js @@ -0,0 +1,162 @@ +// EXAMPLE: ss_tutorial +// HIDE_START +import assert from 'assert'; +import { createClient } from 'redis'; + +const client = createClient(); +await client.connect(); +// HIDE_END + +// REMOVE_START +await client.flushDb(); +// REMOVE_END + +// STEP_START zadd +const res1 = await client.zAdd('racer_scores', { score: 10, value: 'Norem' }); +console.log(res1); // >>> 1 + +const res2 = await client.zAdd('racer_scores', { score: 12, value: 'Castilla' }); +console.log(res2); // >>> 1 + +const res3 = await client.zAdd('racer_scores', [ + { score: 8, value: 'Sam-Bodden' }, + { score: 10, value: 'Royce' }, + { score: 6, value: 'Ford' }, + { score: 14, value: 'Prickett' }, + { score: 12, value: 'Castilla' } +]); +console.log(res3); // >>> 4 +// STEP_END + +// REMOVE_START +assert.equal(res1, 1) +assert.equal(res2, 1) +assert.equal(res3, 4) +// REMOVE_END + +// REMOVE_START +const count = await client.zCard('racer_scores'); +console.assert(count === 6); +// REMOVE_END + +// STEP_START zrange +const res4 = await client.zRange('racer_scores', 0, -1); +console.log(res4); // >>> ['Ford', 'Sam-Bodden', 'Norem', 'Royce', 
'Castilla', 'Prickett'] +// STEP_END + +// REMOVE_START +assert.deepEqual(res4, ['Ford', 'Sam-Bodden', 'Norem', 'Royce', 'Castilla', 'Prickett']) +// REMOVE_END + +// STEP_START zrange_withscores +const res6 = await client.zRangeWithScores('racer_scores', 0, -1); +console.log(res6); +// >>> [ +// { value: 'Ford', score: 6 }, { value: 'Sam-Bodden', score: 8 }, +// { value: 'Norem', score: 10 }, { value: 'Royce', score: 10 }, +// { value: 'Castilla', score: 12 }, { value: 'Prickett', score: 14 } +// ] +// STEP_END + +// REMOVE_START +assert.deepEqual(res6, [ { value: 'Ford', score: 6 }, { value: 'Sam-Bodden', score: 8 }, { value: 'Norem', score: 10 }, { value: 'Royce', score: 10 }, { value: 'Castilla', score: 12 }, { value: 'Prickett', score: 14 } ] +) +// REMOVE_END + +// STEP_START zrangebyscore +const res7 = await client.zRangeByScore('racer_scores', '-inf', 10); +console.log(res7); // >>> ['Ford', 'Sam-Bodden', 'Norem', 'Royce'] +// STEP_END + +// REMOVE_START +assert.deepEqual(res7, ['Ford', 'Sam-Bodden', 'Norem', 'Royce']) +// REMOVE_END + +// STEP_START zremrangebyscore +const res8 = await client.zRem('racer_scores', 'Castilla'); +console.log(res8); // >>> 1 + +const res9 = await client.zRemRangeByScore('racer_scores', '-inf', 9); +console.log(res9); // >>> 2 + +// REMOVE_START +assert.equal(res8, 1) +assert.equal(res9, 2) +// REMOVE_END + +const res10 = await client.zRange('racer_scores', 0, -1); +console.log(res10); // >>> ['Norem', 'Royce', 'Prickett'] +// STEP_END + +// REMOVE_START +assert.deepEqual(res10, ['Norem', 'Royce', 'Prickett']) +// REMOVE_END + +// REMOVE_START +const count2 = await client.zCard('racer_scores'); +console.assert(count2 === 3); +// REMOVE_END + +// STEP_START zrank +const res11 = await client.zRank('racer_scores', 'Norem'); +console.log(res11); // >>> 0 + +const res12 = await client.zRevRank('racer_scores', 'Norem'); +console.log(res12); // >>> 2 +// STEP_END + +// STEP_START zadd_lex +const res13 = await 
client.zAdd('racer_scores', [ + { score: 0, value: 'Norem' }, + { score: 0, value: 'Sam-Bodden' }, + { score: 0, value: 'Royce' }, + { score: 0, value: 'Ford' }, + { score: 0, value: 'Prickett' }, + { score: 0, value: 'Castilla' } +]); +console.log(res13); // >>> 3 + +// REMOVE_START +assert.equal(count2, 3) +assert.equal(res11, 0) +assert.equal(res12, 2) +assert.equal(res13, 3) +// REMOVE_END + +const res14 = await client.zRange('racer_scores', 0, -1); +console.log(res14); // >>> ['Castilla', 'Ford', 'Norem', 'Prickett', 'Royce', 'Sam-Bodden'] + +const res15 = await client.zRangeByLex('racer_scores', '[A', '[L'); +console.log(res15); // >>> ['Castilla', 'Ford'] +// STEP_END + +// REMOVE_START +assert.deepEqual(res14, ['Castilla', 'Ford', 'Norem', 'Prickett', 'Royce', 'Sam-Bodden']) +assert.deepEqual(res15, ['Castilla', 'Ford']) +// REMOVE_END + +// STEP_START leaderboard +const res16 = await client.zAdd('racer_scores', { score: 100, value: 'Wood' }); +console.log(res16); // >>> 1 + +const res17 = await client.zAdd('racer_scores', { score: 100, value: 'Henshaw' }); +console.log(res17); // >>> 1 + +const res18 = await client.zAdd('racer_scores', { score: 150, value: 'Henshaw' }, { nx: true }); +console.log(res18); // >>> 0 + +const res19 = await client.zIncrBy('racer_scores', 50, 'Wood'); +console.log(res19); // >>> 150.0 + +const res20 = await client.zIncrBy('racer_scores', 50, 'Henshaw'); +console.log(res20); // >>> 200.0 +// STEP_END + +// REMOVE_START +assert.equal(res16, 1) +assert.equal(res17, 1) +assert.equal(res18, 0) +assert.equal(res19, 150.0) +assert.equal(res20, 200.0) +await client.close(); +// REMOVE_END diff --git a/doctests/dt-streams.js b/doctests/dt-streams.js new file mode 100644 index 00000000000..00768654730 --- /dev/null +++ b/doctests/dt-streams.js @@ -0,0 +1,366 @@ +// EXAMPLE: stream_tutorial +// HIDE_START +import assert from 'assert'; +import { + createClient +} from 'redis'; + +const client = await createClient(); +await client.connect(); 
+// HIDE_END +// REMOVE_START +await client.flushDb(); +// REMOVE_END + +// STEP_START xAdd +const res1 = await client.xAdd( + 'race:france', '*', { + 'rider': 'Castilla', + 'speed': '30.2', + 'position': '1', + 'location_id': '1' + } +); +console.log(res1); // >>> 1700073067968-0 N.B. actual values will differ from these examples + +const res2 = await client.xAdd( + 'race:france', '*', { + 'rider': 'Norem', + 'speed': '28.8', + 'position': '3', + 'location_id': '1' + }, +); +console.log(res2); // >>> 1692629594113-0 + +const res3 = await client.xAdd( + 'race:france', '*', { + 'rider': 'Prickett', + 'speed': '29.7', + 'position': '2', + 'location_id': '1' + }, +); +console.log(res3); // >>> 1692629613374-0 +// STEP_END + +// REMOVE_START +assert.equal(await client.xLen('race:france'), 3); +// REMOVE_END + +// STEP_START xRange +const res4 = await client.xRange('race:france', '1691765278160-0', '+', {COUNT: 2}); +console.log(res4); // >>> [{ id: '1692629576966-0', message: { rider: 'Castilla', speed: '30.2', position: '1', location_id: '1' } }, { id: '1692629594113-0', message: { rider: 'Norem', speed: '28.8', position: '3', location_id: '1' } }] +// STEP_END + +// STEP_START xread_block +const res5 = await client.xRead({ + key: 'race:france', + id: '0-0' +}, { + COUNT: 100, + BLOCK: 300 +}); +console.log(res5); // >>> [{ name: 'race:france', messages: [{ id: '1692629576966-0', message: { rider: 'Castilla', speed: '30.2', position: '1', location_id: '1' } }, { id: '1692629594113-0', message: { rider: 'Norem', speed: '28.8', position: '3', location_id: '1' } }, { id: '1692629613374-0', message: { rider: 'Prickett', speed: '29.7', position: '2', location_id: '1' } }] }] +// STEP_END + +// STEP_START xAdd_2 +const res6 = await client.xAdd( + 'race:france', '*', { + 'rider': 'Castilla', + 'speed': '29.9', + 'position': '1', + 'location_id': '2' + } +); +console.log(res6); // >>> 1692629676124-0 +// STEP_END + +// STEP_START xlen +const res7 = await 
client.xLen('race:france'); +console.log(res7); // >>> 4 +// STEP_END + + +// STEP_START xAdd_id +const res8 = await client.xAdd('race:usa', '0-1', { + 'racer': 'Castilla' +}); +console.log(res8); // >>> 0-1 + +const res9 = await client.xAdd('race:usa', '0-2', { + 'racer': 'Norem' +}); +console.log(res9); // >>> 0-2 +// STEP_END + +// STEP_START xAdd_bad_id +try { + const res10 = await client.xAdd('race:usa', '0-1', { + 'racer': 'Prickett' + }); + console.log(res10); // >>> 0-1 +} catch (error) { + console.error(error); // >>> [SimpleError: ERR The ID specified in XADD is equal or smaller than the target stream top item] +} +// STEP_END + +// STEP_START xadd_7 +const res11a = await client.xAdd('race:usa', '0-*', { racer: 'Norem' }); +console.log(res11a); // >>> 0-3 +// STEP_END + +// STEP_START xRange_all +const res11 = await client.xRange('race:france', '-', '+'); +console.log(res11); // >>> [{ id: '1692629576966-0', message: { rider: 'Castilla', speed: '30.2', position: '1', location_id: '1' } }, { id: '1692629594113-0', message: { rider: 'Norem', speed: '28.8', position: '3', location_id: '1' } }, { id: '1692629613374-0', message: { rider: 'Prickett', speed: '29.7', position: '2', location_id: '1' } }, { id: '1692629676124-0', message: { rider: 'Castilla', speed: '29.9', position: '1', location_id: '2' } }] +// STEP_END + +// STEP_START xRange_time +const res12 = await client.xRange('race:france', '1692629576965', '1692629576967'); +console.log(res12); // >>> [{ id: '1692629576966-0', message: { rider: 'Castilla', speed: '30.2', position: '1', location_id: '1' } }] +// STEP_END + +// STEP_START xRange_step_1 +const res13 = await client.xRange('race:france', '-', '+', {COUNT: 2}); +console.log(res13); // >>> [{ id: '1692629576966-0', message: { rider: 'Castilla', speed: '30.2', position: '1', location_id: '1' } }, { id: '1692629594113-0', message: { rider: 'Norem', speed: '28.8', position: '3', location_id: '1' } }] +// STEP_END + +// STEP_START xRange_step_2 
+const res14 = await client.xRange('race:france', '(1692629594113-0', '+', {COUNT: 2}); +console.log(res14); // >>> [{ id: '1692629613374-0', message: { rider: 'Prickett', speed: '29.7', position: '2', location_id: '1' } }, { id: '1692629676124-0', message: { rider: 'Castilla', speed: '29.9', position: '1', location_id: '2' } }] +// STEP_END + +// STEP_START xRange_empty +const res15 = await client.xRange('race:france', '(1692629676124-0', '+', {COUNT: 2}); +console.log(res15); // >>> [] +// STEP_END + +// STEP_START xrevrange +const res16 = await client.xRevRange('race:france', '+', '-', {COUNT: 1}); +console.log( + res16 +); // >>> [{ id: '1692629676124-0', message: { rider: 'Castilla', speed: '29.9', position: '1', location_id: '2' } }] +// STEP_END + +// STEP_START xread +const res17 = await client.xRead({ + key: 'race:france', + id: '0-0' +}, { + COUNT: 2 +}); +console.log(res17); // >>> [{ name: 'race:france', messages: [{ id: '1692629576966-0', message: { rider: 'Castilla', speed: '30.2', position: '1', location_id: '1' } }, { id: '1692629594113-0', message: { rider: 'Norem', speed: '28.8', position: '3', location_id: '1' } }] }] +// STEP_END + +// STEP_START xgroup_create +const res18 = await client.xGroupCreate('race:france', 'france_riders', '$'); +console.log(res18); // >>> OK +// STEP_END + +// STEP_START xgroup_create_mkstream +const res19 = await client.xGroupCreate('race:italy', 'italy_riders', '$', { + MKSTREAM: true +}); +console.log(res19); // >>> OK +// STEP_END + +// STEP_START xgroup_read +await client.xAdd('race:italy', '*', { + 'rider': 'Castilla' +}); +await client.xAdd('race:italy', '*', { + 'rider': 'Royce' +}); +await client.xAdd('race:italy', '*', { + 'rider': 'Sam-Bodden' +}); +await client.xAdd('race:italy', '*', { + 'rider': 'Prickett' +}); +await client.xAdd('race:italy', '*', { + 'rider': 'Norem' +}); + +const res20 = await client.xReadGroup( + 'italy_riders', + 'Alice', { + key: 'race:italy', + id: '>' + }, { + COUNT: 1 + } +); 
+console.log(res20); // >>> [{ name: 'race:italy', messages: [{ id: '1692629925771-0', message: { rider: 'Castilla' } }] }] +// STEP_END + +// STEP_START xgroup_read_id +const res21 = await client.xReadGroup( + 'italy_riders', + 'Alice', { + key: 'race:italy', + id: '0' + }, { + COUNT: 1 + } +); +console.log(res21); // >>> [{ name: 'race:italy', messages: [{ id: '1692629925771-0', message: { rider: 'Castilla' } }] }] +// STEP_END + +// STEP_START xack +const res22 = await client.xAck('race:italy', 'italy_riders', '1692629925771-0') +console.log(res22); // >>> 1 + +const res23 = await client.xReadGroup( + 'italy_riders', + 'Alice', { + key: 'race:italy', + id: '0' + }, { + COUNT: 1 + } +); +console.log(res23); // >>> [{ name: 'race:italy', messages: [] }] +// STEP_END + +// STEP_START xgroup_read_bob +const res24 = await client.xReadGroup( + 'italy_riders', + 'Bob', { + key: 'race:italy', + id: '>' + }, { + COUNT: 2 + } +); +console.log(res24); // >>> [{ name: 'race:italy', messages: [{ id: '1692629925789-0', message: { rider: 'Royce' } }, { id: '1692629925790-0', message: { rider: 'Sam-Bodden' } }] }] +// STEP_END + +// STEP_START xpending +const res25 = await client.xPending('race:italy', 'italy_riders'); +console.log(res25); // >>> {'pending': 2, 'firstId': '1692629925789-0', 'lastId': '1692629925790-0', 'consumers': [{'name': 'Bob', 'deliveriesCounter': 2}]} +// STEP_END + +// STEP_START xpending_plus_minus +const res26 = await client.xPendingRange('race:italy', 'italy_riders', '-', '+', 10); +console.log(res26); // >>> [{'id': '1692629925789-0', 'consumer': 'Bob', 'millisecondsSinceLastDelivery': 31084, 'deliveriesCounter:': 1}, {'id': '1692629925790-0', 'consumer': 'Bob', 'millisecondsSinceLastDelivery': 31084, 'deliveriesCounter': 1}] +// STEP_END + +// STEP_START xRange_pending +const res27 = await client.xRange('race:italy', '1692629925789-0', '1692629925789-0'); +console.log(res27); // >>> [{ id: '1692629925789-0', message: { rider: 'Royce' } }] +// 
STEP_END + +// STEP_START xclaim +const res28 = await client.xClaim( + 'race:italy', 'italy_riders', 'Alice', 60000, ['1692629925789-0'] +); +console.log(res28); // >>> [{ id: '1692629925789-0', message: { rider: 'Royce' } }] +// STEP_END + +// STEP_START xautoclaim +const res29 = await client.xAutoClaim('race:italy', 'italy_riders', 'Alice', 1, '0-0', { + COUNT: 1 +}); +console.log(res29); // >>> { nextId: '1692629925790-0', messages: [{ id: '1692629925789-0', message: { rider: 'Royce' } }], deletedMessages: [] } +// STEP_END + +// STEP_START xautoclaim_cursor +const res30 = await client.xAutoClaim( + 'race:italy', 'italy_riders', 'Alice', 1, '(1692629925789-0', + { + COUNT: 1 + } +); +console.log(res30); // >>> { nextId: '0-0', messages: [{ id: '1692629925790-0', message: { rider: 'Sam-Bodden' } }], deletedMessages: [] } +// STEP_END + +// STEP_START xinfo +const res31 = await client.xInfoStream('race:italy'); +console.log(res31); // >>> { length: 5, 'radix-tree-keys': 1, 'radix-tree-nodes': 2, 'last-generated-id': '1692629926436-0', 'max-deleted-entry-id': '0-0', 'entries-added': 5, 'recorded-first-entry-id': '1692629925771-0', groups: 1, 'first-entry': { id: '1692629925771-0', message: { rider: 'Castilla' } }, 'last-entry': { id: '1692629926436-0', message: { rider: 'Norem' } } } +// STEP_END + +// STEP_START xinfo_groups +const res32 = await client.xInfoGroups('race:italy'); +console.log(res32); // >>> [{ name: 'italy_riders', consumers: 2, pending: 3, 'last-delivered-id': '1692629925790-0', 'entries-read': 3, lag: 2 }] +// STEP_END + +// STEP_START xinfo_consumers +const res33 = await client.xInfoConsumers('race:italy', 'italy_riders'); +console.log(res33); // >>> [{ name: 'Alice', pending: 3, idle: 170582, inactive: 170582 }, { name: 'Bob', pending: 0, idle: 489404, inactive: 489404 }] +// STEP_END + +// STEP_START maxlen +await client.xAdd('race:italy', '*', { + 'rider': 'Jones' +}, { + TRIM: { + strategy: 'MAXLEN', + strategyModifier: '~', + threshold: 2 + 
} +}); +await client.xAdd('race:italy', '*', { + 'rider': 'Wood' +}, { + TRIM: { + strategy: 'MAXLEN', + strategyModifier: '~', + threshold: 2 + } +}); +await client.xAdd('race:italy', '*', { + 'rider': 'Henshaw' +}, { + TRIM: { + strategy: 'MAXLEN', + strategyModifier: '~', + threshold: 2 + } +}); + +const res34 = await client.xLen('race:italy'); +console.log(res34); // >>> 8 + +const res35 = await client.xRange('race:italy', '-', '+'); +console.log(res35); // >>> [{ id: '1692629925771-0', message: { rider: 'Castilla' } }, { id: '1692629925789-0', message: { rider: 'Royce' } }, { id: '1692629925790-0', message: { rider: 'Sam-Bodden' } }, { id: '1692629925791-0', message: { rider: 'Prickett' } }, { id: '1692629926436-0', message: { rider: 'Norem' } }, { id: '1692630612602-0', message: { rider: 'Jones' } }, { id: '1692630641947-0', message: { rider: 'Wood' } }, { id: '1692630648281-0', message: { rider: 'Henshaw' } }] + +await client.xAdd('race:italy', '*', { + 'rider': 'Smith' +}, { + TRIM: { + strategy: 'MAXLEN', + strategyModifier: '=', + threshold: 2 + } +}); + +const res36 = await client.xRange('race:italy', '-', '+'); +console.log(res36); // >>> [{ id: '1692630648281-0', message: { rider: 'Henshaw' } }, { id: '1692631018238-0', message: { rider: 'Smith' } }] +// STEP_END + +// STEP_START xTrim +const res37 = await client.xTrim('race:italy', 'MAXLEN', 10, { + strategyModifier: '=', +}); +console.log(res37); // >>> 0 +// STEP_END + +// STEP_START xTrim2 +const res38 = await client.xTrim('race:italy', "MAXLEN", 10); +console.log(res38); // >>> 0 +// STEP_END + +// STEP_START xDel +const res39 = await client.xRange('race:italy', '-', '+'); +console.log(res39); // >>> [{ id: '1692630648281-0', message: { rider: 'Henshaw' } }, { id: '1692631018238-0', message: { rider: 'Smith' } }] + +const res40 = await client.xDel('race:italy', '1692631018238-0'); +console.log(res40); // >>> 1 + +const res41 = await client.xRange('race:italy', '-', '+'); +console.log(res41); // 
>>> [{ id: '1692630648281-0', message: { rider: 'Henshaw' } }] +// STEP_END + +// REMOVE_START +await client.close(); +// REMOVE_END \ No newline at end of file diff --git a/doctests/dt-string.js b/doctests/dt-string.js new file mode 100644 index 00000000000..6f77709abfa --- /dev/null +++ b/doctests/dt-string.js @@ -0,0 +1,68 @@ +// EXAMPLE: set_tutorial +// HIDE_START +import assert from 'assert'; +import { createClient } from 'redis'; + +const client = createClient(); +await client.connect(); +// HIDE_END +// REMOVE_START +await client.flushDb(); +// REMOVE_END + +// STEP_START set_get +const res1 = await client.set("bike:1", "Deimos"); +console.log(res1); // OK +const res2 = await client.get("bike:1"); +console.log(res2); // Deimos +// STEP_END + +// REMOVE_START +assert.equal(res1, 'OK'); +assert.equal(res2, 'Deimos'); +// REMOVE_END + +// STEP_START setnx_xx +const res3 = await client.set("bike:1", "bike", {'NX': true}); +console.log(res3); // null +console.log(await client.get("bike:1")); // Deimos +const res4 = await client.set("bike:1", "bike", {'XX': true}); +console.log(res4); // OK +// STEP_END + +// REMOVE_START +assert.equal(res3, null); +assert.equal(res4, 'OK'); +// REMOVE_END + +// STEP_START mset +const res5 = await client.mSet([ + ["bike:1", "Deimos"], + ["bike:2", "Ares"], + ["bike:3", "Vanth"] +]); + +console.log(res5); // OK +const res6 = await client.mGet(["bike:1", "bike:2", "bike:3"]); +console.log(res6); // ['Deimos', 'Ares', 'Vanth'] +// STEP_END + +// REMOVE_START +assert.equal(res5, 'OK'); +assert.deepEqual(res6, ["Deimos", "Ares", "Vanth"]); +// REMOVE_END + +// STEP_START incr +await client.set("total_crashes", 0); +const res7 = await client.incr("total_crashes"); +console.log(res7); // 1 +const res8 = await client.incrBy("total_crashes", 10); +console.log(res8); // 11 +// STEP_END + +// REMOVE_START +assert.equal(res7, 1); +assert.equal(res8, 11); + +await client.close(); +// REMOVE_END diff --git a/doctests/dt-tdigest.js 
b/doctests/dt-tdigest.js new file mode 100644 index 00000000000..c59168076e3 --- /dev/null +++ b/doctests/dt-tdigest.js @@ -0,0 +1,85 @@ +// EXAMPLE: tdigest_tutorial +// HIDE_START +import assert from 'assert'; +import { createClient } from 'redis'; + +const client = createClient(); +await client.connect(); +// HIDE_END + +// REMOVE_START +await client.flushDb(); +// REMOVE_END + +// STEP_START tdig_start +const res1 = await client.tDigest.create('bikes:sales', 100); +console.log(res1); // >>> OK + +const res2 = await client.tDigest.add('bikes:sales', [21]); +console.log(res2); // >>> OK + +const res3 = await client.tDigest.add('bikes:sales', [150, 95, 75, 34]); +console.log(res3); // >>> OK +// STEP_END + +// REMOVE_START +assert.equal(res1, 'OK') +assert.equal(res2, 'OK') +assert.equal(res3, 'OK') +// REMOVE_END + +// STEP_START tdig_cdf +const res4 = await client.tDigest.create('racer_ages'); +console.log(res4); // >>> OK + +const res5 = await client.tDigest.add('racer_ages', [ + 45.88, 44.2, 58.03, 19.76, 39.84, 69.28, 50.97, 25.41, 19.27, 85.71, 42.63 +]); +console.log(res5); // >>> OK + +const res6 = await client.tDigest.rank('racer_ages', [50]); +console.log(res6); // >>> [7] + +const res7 = await client.tDigest.rank('racer_ages', [50, 40]); +console.log(res7); // >>> [7, 4] +// STEP_END + +// REMOVE_START +assert.equal(res4, 'OK') +assert.equal(res5, 'OK') +assert.deepEqual(res6, [7]) +assert.deepEqual(res7, [7, 4]) +// REMOVE_END + +// STEP_START tdig_quant +const res8 = await client.tDigest.quantile('racer_ages', [0.5]); +console.log(res8); // >>> [44.2] + +const res9 = await client.tDigest.byRank('racer_ages', [4]); +console.log(res9); // >>> [42.63] +// STEP_END + +// STEP_START tdig_min +const res10 = await client.tDigest.min('racer_ages'); +console.log(res10); // >>> 19.27 + +const res11 = await client.tDigest.max('racer_ages'); +console.log(res11); // >>> 85.71 +// STEP_END + +// REMOVE_START +assert.deepEqual(res8, [44.2]) +assert.deepEqual(res9, 
[42.63]) +assert.equal(res10, 19.27) +assert.equal(res11, 85.71) +// REMOVE_END + +// STEP_START tdig_reset +const res12 = await client.tDigest.reset('racer_ages'); +console.log(res12); // >>> OK +// STEP_END + +// REMOVE_START +assert.equal(res12, 'OK') +await client.close(); +// REMOVE_END diff --git a/doctests/dt-time-series.js b/doctests/dt-time-series.js new file mode 100644 index 00000000000..d2d94e7dc40 --- /dev/null +++ b/doctests/dt-time-series.js @@ -0,0 +1,635 @@ +// EXAMPLE: time_series_tutorial +// HIDE_START +import assert from 'assert'; +import { createClient } from 'redis'; +import { TIME_SERIES_AGGREGATION_TYPE, TIME_SERIES_REDUCERS } from '@redis/time-series'; + +const client = createClient(); +await client.connect(); +// HIDE_END + +// REMOVE_START +await client.del([ + 'thermometer:1', 'thermometer:2', 'thermometer:3', + 'rg:1', 'rg:2', 'rg:3', 'rg:4', + 'sensor3', + 'wind:1', 'wind:2', 'wind:3', 'wind:4', + 'hyg:1', 'hyg:compacted' +]); +// REMOVE_END + +// STEP_START create +const res1 = await client.ts.create('thermometer:1'); +console.log(res1); // >>> OK + +const res2 = await client.type('thermometer:1'); +console.log(res2); // >>> TSDB-TYPE + +const res3 = await client.ts.info('thermometer:1'); +console.log(res3); +// >>> { rules: [], ... totalSamples: 0, ... +// STEP_END +// REMOVE_START +assert.equal(res1, 'OK'); +assert.equal(res2, 'TSDB-TYPE'); +assert.equal(res3.totalSamples, 0); +// REMOVE_END + +// STEP_START create_retention +const res4 = await client.ts.add('thermometer:2', 1, 10.8, { RETENTION: 100 }); +console.log(res4); // >>> 1 + +const res5 = await client.ts.info('thermometer:2'); +console.log(res5); +// >>> { rules: [], ... retentionTime: 100, ... 
+// STEP_END +// REMOVE_START +assert.equal(res4, 1); +assert.equal(res5.retentionTime, 100); +// REMOVE_END + +// STEP_START create_labels +const res6 = await client.ts.add('thermometer:3', 1, 10.4, { + LABELS: { location: 'UK', type: 'Mercury' } +}); +console.log(res6); // >>> 1 + +const res7 = await client.ts.info('thermometer:3'); +console.log(res7); +// >>> { labels: [{ name: 'location', value: 'UK' }, { name: 'type', value: 'Mercury' }], ... } +// STEP_END +// REMOVE_START +assert.equal(res6, 1); +assert.deepEqual(res7.labels, [ + { name: 'location', value: 'UK' }, + { name: 'type', value: 'Mercury' }, +]); +// REMOVE_END + +// STEP_START madd +const res8 = await client.ts.mAdd([ + { key: 'thermometer:1', timestamp: 1, value: 9.2 }, + { key: 'thermometer:1', timestamp: 2, value: 9.9 }, + { key: 'thermometer:2', timestamp: 2, value: 10.3 } +]); +console.log(res8); // >>> [1, 2, 2] +// STEP_END +// REMOVE_START +assert.deepEqual(res8, [1, 2, 2]); +// REMOVE_END + +// STEP_START get +// The last recorded temperature for thermometer:2 +// was 10.3 at time 2. +const res9 = await client.ts.get('thermometer:2'); +console.log(res9); // >>> { timestamp: 2, value: 10.3 } +// STEP_END +// REMOVE_START +assert.equal(res9.timestamp, 2); +assert.equal(res9.value, 10.3); +// REMOVE_END + +// STEP_START range +// Add 5 data points to a time series named "rg:1". +const res10 = await client.ts.create('rg:1'); +console.log(res10); // >>> OK + +const res11 = await client.ts.mAdd([ + { key: 'rg:1', timestamp: 0, value: 18 }, + { key: 'rg:1', timestamp: 1, value: 14 }, + { key: 'rg:1', timestamp: 2, value: 22 }, + { key: 'rg:1', timestamp: 3, value: 18 }, + { key: 'rg:1', timestamp: 4, value: 24 } +]); +console.log(res11); // >>> [0, 1, 2, 3, 4] + +// Retrieve all the data points in ascending order. +const res12 = await client.ts.range('rg:1', '-', '+'); +console.log(res12); +// >>> [{ timestamp: 0, value: 18 }, { timestamp: 1, value: 14 }, ...] 
+ +// Retrieve data points up to time 1 (inclusive). +const res13 = await client.ts.range('rg:1', '-', 1); +console.log(res13); +// >>> [{ timestamp: 0, value: 18 }, { timestamp: 1, value: 14 }] + +// Retrieve data points from time 3 onwards. +const res14 = await client.ts.range('rg:1', 3, '+'); +console.log(res14); +// >>> [{ timestamp: 3, value: 18 }, { timestamp: 4, value: 24 }] + +// Retrieve all the data points in descending order. +const res15 = await client.ts.revRange('rg:1', '-', '+'); +console.log(res15); +// >>> [{ timestamp: 4, value: 24 }, { timestamp: 3, value: 18 }, ...] + +// Retrieve data points up to time 1 (inclusive), but return them +// in descending order. +const res16 = await client.ts.revRange('rg:1', '-', 1); +console.log(res16); +// >>> [{ timestamp: 1, value: 14 }, { timestamp: 0, value: 18 }] +// STEP_END +// REMOVE_START +assert.equal(res10, 'OK'); +assert.deepEqual(res11, [0, 1, 2, 3, 4]); + +assert.deepEqual(res12, [ + { timestamp: 0, value: 18 }, + { timestamp: 1, value: 14 }, + { timestamp: 2, value: 22 }, + { timestamp: 3, value: 18 }, + { timestamp: 4, value: 24 } +]); +assert.deepEqual(res13, [ + { timestamp: 0, value: 18 }, + { timestamp: 1, value: 14 } +]); +assert.deepEqual(res14, [ + { timestamp: 3, value: 18 }, + { timestamp: 4, value: 24 } +]); +assert.deepEqual(res15, [ + { timestamp: 4, value: 24 }, + { timestamp: 3, value: 18 }, + { timestamp: 2, value: 22 }, + { timestamp: 1, value: 14 }, + { timestamp: 0, value: 18 } +]); +assert.deepEqual(res16, [ + { timestamp: 1, value: 14 }, + { timestamp: 0, value: 18 } +]); +// REMOVE_END + +// STEP_START range_filter +const res17 = await client.ts.range('rg:1', '-', '+', { + FILTER_BY_TS: [0, 2, 4] +}); +console.log(res17); +// >>> [{ timestamp: 0, value: 18 }, { timestamp: 2, value: 22 }, { timestamp: 4, value: 24 }] + +const res18 = await client.ts.revRange('rg:1', '-', '+', { + FILTER_BY_TS: [0, 2, 4], + FILTER_BY_VALUE: { min: 20, max: 25 } +}); +console.log(res18); +// >>> 
[{ timestamp: 4, value: 24 }, { timestamp: 2, value: 22 }] + +const res19 = await client.ts.revRange('rg:1', '-', '+', { + FILTER_BY_TS: [0, 2, 4], + FILTER_BY_VALUE: { min: 22, max: 22 }, + COUNT: 1 +}); +console.log(res19); +// >>> [{ timestamp: 2, value: 22 }] +// STEP_END +// REMOVE_START +assert.deepEqual(res17, [ + { timestamp: 0, value: 18 }, + { timestamp: 2, value: 22 }, + { timestamp: 4, value: 24 } +]); +assert.deepEqual(res18, [ + { timestamp: 4, value: 24 }, + { timestamp: 2, value: 22 } +]); +assert.deepEqual(res19, [ + { timestamp: 2, value: 22 } +]); +// REMOVE_END + +// STEP_START query_multi +// Create three new "rg:" time series (two in the US +// and one in the UK, with different units) and add some +// data points. +const res20 = await client.ts.create('rg:2', { + LABELS: { location: 'us', unit: 'cm' } +}); +console.log(res20); // >>> OK + +const res21 = await client.ts.create('rg:3', { + LABELS: { location: 'us', unit: 'in' } +}); +console.log(res21); // >>> OK + +const res22 = await client.ts.create('rg:4', { + LABELS: { location: 'uk', unit: 'mm' } +}); +console.log(res22); // >>> OK + +const res23 = await client.ts.mAdd([ + { key: 'rg:2', timestamp: 0, value: 1.8 }, + { key: 'rg:3', timestamp: 0, value: 0.9 }, + { key: 'rg:4', timestamp: 0, value: 25 } +]); +console.log(res23); // >>> [0, 0, 0] + +const res24 = await client.ts.mAdd([ + { key: 'rg:2', timestamp: 1, value: 2.1 }, + { key: 'rg:3', timestamp: 1, value: 0.77 }, + { key: 'rg:4', timestamp: 1, value: 18 } +]); +console.log(res24); // >>> [1, 1, 1] + +const res25 = await client.ts.mAdd([ + { key: 'rg:2', timestamp: 2, value: 2.3 }, + { key: 'rg:3', timestamp: 2, value: 1.1 }, + { key: 'rg:4', timestamp: 2, value: 21 } +]); +console.log(res25); // >>> [2, 2, 2] + +const res26 = await client.ts.mAdd([ + { key: 'rg:2', timestamp: 3, value: 1.9 }, + { key: 'rg:3', timestamp: 3, value: 0.81 }, + { key: 'rg:4', timestamp: 3, value: 19 } +]); +console.log(res26); // >>> [3, 3, 3] + +const 
res27 = await client.ts.mAdd([ + { key: 'rg:2', timestamp: 4, value: 1.78 }, + { key: 'rg:3', timestamp: 4, value: 0.74 }, + { key: 'rg:4', timestamp: 4, value: 23 } +]); +console.log(res27); // >>> [4, 4, 4] + +// Retrieve the last data point from each US time series. +const res28 = await client.ts.mGet(['location=us']); +console.log(res28); +// >>> { "rg:2": { sample: { timestamp: 4, value: 1.78 } }, "rg:3": { sample: { timestamp: 4, value: 0.74 } } } + +// Retrieve the same data points, but include the `unit` +// label in the results. +const res29 = await client.ts.mGetSelectedLabels(['location=us'], ['unit']); +console.log(res29); +// >>> { "rg:2": { labels: { unit: 'cm' }, sample: { timestamp: 4, value: 1.78 } }, "rg:3": { labels: { unit: 'in' }, sample: { timestamp: 4, value: 0.74 } } } + +// Retrieve data points up to time 2 (inclusive) from all +// time series that use millimeters as the unit. Include all +// labels in the results. +const res30 = await client.ts.mRangeWithLabels('-', 2, 'unit=mm'); +console.log(res30); +// >>> { "rg:4": { labels: { location: 'uk', unit: 'mm' }, samples: [ +// { timestamp: 0, value: 25 }, +// { timestamp: 1, value: 18 }, +// { timestamp: 2, value: 21 } +// ] } } + +// Retrieve data points from time 1 to time 3 (inclusive) from +// all time series that use centimeters or millimeters as the unit, +// but only return the `location` label. Return the results +// in descending order of timestamp. 
+const res31 = await client.ts.mRevRangeSelectedLabels( + 1, 3, + ['location'], + ['unit=(cm,mm)'] +); +console.log(res31); +// >>> { "rg:2": { labels: { location: 'us' }, samples: [ +// { timestamp: 3, value: 1.9 }, +// { timestamp: 2, value: 2.3 }, +// { timestamp: 1, value: 2.1 } +// ] }, "rg:4": { labels: { location: 'uk' }, samples: [ +// { timestamp: 3, value: 19 }, +// { timestamp: 2, value: 21 }, +// { timestamp: 1, value: 18 } +// ] } } +// STEP_END +// REMOVE_START +assert.equal(res20, 'OK'); +assert.equal(res21, 'OK'); +assert.equal(res22, 'OK'); +assert.deepEqual(res23, [0, 0, 0]); +assert.deepEqual(res24, [1, 1, 1]); +assert.deepEqual(res25, [2, 2, 2]); +assert.deepEqual(res26, [3, 3, 3]); +assert.deepEqual(res27, [4, 4, 4]); + +assert.deepEqual(res28, { + "rg:2": { sample: { timestamp: 4, value: 1.78 } }, + "rg:3": { sample: { timestamp: 4, value: 0.74 } } +}); +assert.deepEqual(res29, { + "rg:2": { labels: { unit: 'cm' }, sample: { timestamp: 4, value: 1.78 } }, + "rg:3": { labels: { unit: 'in' }, sample: { timestamp: 4, value: 0.74 } } +}); + +assert.deepEqual(res30, { + "rg:4": { + labels: { location: 'uk', unit: 'mm' }, + samples: [ + { timestamp: 0, value: 25 }, + { timestamp: 1, value: 18 }, + { timestamp: 2, value: 21 } + ] + } +}); +assert.deepEqual(res31, { + "rg:2": { + labels: { location: 'us' }, + samples: [ + { timestamp: 3, value: 1.9 }, + { timestamp: 2, value: 2.3 }, + { timestamp: 1, value: 2.1 } + ] + }, + "rg:4": { + labels: { location: 'uk' }, + samples: [ + { timestamp: 3, value: 19 }, + { timestamp: 2, value: 21 }, + { timestamp: 1, value: 18 } + ] + } +}); +// REMOVE_END + +// STEP_START agg +const res32 = await client.ts.range('rg:2', '-', '+', { + AGGREGATION: { + type: TIME_SERIES_AGGREGATION_TYPE.AVG, + timeBucket: 2 + } +}); +console.log(res32); +// >>> [{ timestamp: 0, value: 1.9500000000000002 },{ timestamp: 2, value: 2.0999999999999996 }, { timestamp: 4, value: 1.78 }] +// STEP_END +// REMOVE_START 
+assert.deepEqual(res32, [ + { timestamp: 0, value: 1.9500000000000002 }, + { timestamp: 2, value: 2.0999999999999996 }, + { timestamp: 4, value: 1.78 } +]); +// REMOVE_END + +// STEP_START agg_bucket +const res33 = await client.ts.create('sensor3'); +console.log(res33); // >>> OK + +const res34 = await client.ts.mAdd([ + { key: 'sensor3', timestamp: 10, value: 1000 }, + { key: 'sensor3', timestamp: 20, value: 2000 }, + { key: 'sensor3', timestamp: 30, value: 3000 }, + { key: 'sensor3', timestamp: 40, value: 4000 }, + { key: 'sensor3', timestamp: 50, value: 5000 }, + { key: 'sensor3', timestamp: 60, value: 6000 }, + { key: 'sensor3', timestamp: 70, value: 7000 } +]); +console.log(res34); // >>> [10, 20, 30, 40, 50, 60, 70] + +const res35 = await client.ts.range('sensor3', 10, 70, { + AGGREGATION: { + type: TIME_SERIES_AGGREGATION_TYPE.MIN, + timeBucket: 25 + } +}); +console.log(res35); +// >>> [{ timestamp: 0, value: 1000 }, { timestamp: 25, value: 3000 }, { timestamp: 50, value: 5000 }] +// STEP_END +// REMOVE_START +assert.equal(res33, 'OK'); +assert.deepEqual(res34, [10, 20, 30, 40, 50, 60, 70]); +assert.deepEqual(res35, [ + { timestamp: 0, value: 1000 }, + { timestamp: 25, value: 3000 }, + { timestamp: 50, value: 5000 } +]); +// REMOVE_END + +// STEP_START agg_align +const res36 = await client.ts.range('sensor3', 10, 70, { + AGGREGATION: { + type: TIME_SERIES_AGGREGATION_TYPE.MIN, + timeBucket: 25 + }, + ALIGN: 'START' +}); +console.log(res36); +// >>> [{ timestamp: 10, value: 1000 }, { timestamp: 35, value: 4000 }, { timestamp: 60, value: 6000 }] +// STEP_END +// REMOVE_START +assert.deepEqual(res36, [ + { timestamp: 10, value: 1000 }, + { timestamp: 35, value: 4000 }, + { timestamp: 60, value: 6000 } +]); +// REMOVE_END + +// STEP_START agg_multi +const res37 = await client.ts.create('wind:1', { + LABELS: { country: 'uk' } +}); +console.log(res37); // >>> OK + +const res38 = await client.ts.create('wind:2', { + LABELS: { country: 'uk' } +}); 
+console.log(res38); // >>> OK + +const res39 = await client.ts.create('wind:3', { + LABELS: { country: 'us' } +}); +console.log(res39); // >>> OK + +const res40 = await client.ts.create('wind:4', { + LABELS: { country: 'us' } +}); +console.log(res40); // >>> OK + +const res41 = await client.ts.mAdd([ + { key: 'wind:1', timestamp: 1, value: 12 }, + { key: 'wind:2', timestamp: 1, value: 18 }, + { key: 'wind:3', timestamp: 1, value: 5 }, + { key: 'wind:4', timestamp: 1, value: 20 } +]); +console.log(res41); // >>> [1, 1, 1, 1] + +const res42 = await client.ts.mAdd([ + { key: 'wind:1', timestamp: 2, value: 14 }, + { key: 'wind:2', timestamp: 2, value: 21 }, + { key: 'wind:3', timestamp: 2, value: 4 }, + { key: 'wind:4', timestamp: 2, value: 25 } +]); +console.log(res42); // >>> [2, 2, 2, 2] + +const res43 = await client.ts.mAdd([ + { key: 'wind:1', timestamp: 3, value: 10 }, + { key: 'wind:2', timestamp: 3, value: 24 }, + { key: 'wind:3', timestamp: 3, value: 8 }, + { key: 'wind:4', timestamp: 3, value: 18 } +]); +console.log(res43); // >>> [3, 3, 3, 3] + +// The result pairs contain the timestamp and the maximum sample value +// for the country at that timestamp. +const res44 = await client.ts.mRangeGroupBy( + '-', '+', ['country=(us,uk)'], + {label: 'country', REDUCE: TIME_SERIES_REDUCERS.MAX} +); +console.log(res44); +// >>> { "country=uk": { samples: [ +// { timestamp: 1, value: 18 }, +// { timestamp: 2, value: 21 }, +// { timestamp: 3, value: 24 } +// ] }, "country=us": { samples: [ +// { timestamp: 1, value: 20 }, +// { timestamp: 2, value: 25 }, +// { timestamp: 3, value: 18 } +// ] } } + +// The result pairs contain the timestamp and the average sample value +// for the country at that timestamp. 
+const res45 = await client.ts.mRangeGroupBy( + '-', '+', ['country=(us,uk)'], + { label: 'country', REDUCE: TIME_SERIES_REDUCERS.AVG} +); +console.log(res45); +// >>> { +// "country=uk": { +// samples: [{ timestamp: 1, value: 15 }, { timestamp: 2, value: 17.5 }, { timestamp: 3, value: 17 }] +// }, +// "country=us": { +// samples: [{ timestamp: 1, value: 12.5 }, { timestamp: 2, value: 14.5 }, { timestamp: 3, value: 13 }] +// } +// } +// STEP_END +// REMOVE_START +assert.equal(res37, 'OK'); +assert.equal(res38, 'OK'); +assert.equal(res39, 'OK'); +assert.equal(res40, 'OK'); +assert.deepEqual(res41, [1, 1, 1, 1]); +assert.deepEqual(res42, [2, 2, 2, 2]); +assert.deepEqual(res43, [3, 3, 3, 3]); + +assert.deepEqual(res44, { + "country=uk": { + samples: [ + { timestamp: 1, value: 18 }, + { timestamp: 2, value: 21 }, + { timestamp: 3, value: 24 } + ] + }, + "country=us": { + samples: [ + { timestamp: 1, value: 20 }, + { timestamp: 2, value: 25 }, + { timestamp: 3, value: 18 } + ] + } +}); +assert.deepEqual(res45, { + "country=uk": { + samples: [ + { timestamp: 1, value: 15 }, + { timestamp: 2, value: 17.5 }, + { timestamp: 3, value: 17 } + ] + }, + "country=us": { + samples: [ + { timestamp: 1, value: 12.5 }, + { timestamp: 2, value: 14.5 }, + { timestamp: 3, value: 13 } + ] + } +}); +// REMOVE_END + +// STEP_START create_compaction +const res46 = await client.ts.create('hyg:1'); +console.log(res46); // >>> OK + +const res47 = await client.ts.create('hyg:compacted'); +console.log(res47); // >>> OK + +const res48 = await client.ts.createRule('hyg:1', 'hyg:compacted', TIME_SERIES_AGGREGATION_TYPE.MIN, 3); +console.log(res48); // >>> OK + +const res49 = await client.ts.info('hyg:1'); +console.log(res49.rules); +// >>> [{ aggregationType: 'MIN', key: 'hyg:compacted', timeBucket: 3}] + +const res50 = await client.ts.info('hyg:compacted'); +console.log(res50.sourceKey); // >>> 'hyg:1' +// STEP_END +// REMOVE_START +assert.equal(res46, 'OK'); +assert.equal(res47, 'OK'); 
+assert.equal(res48, 'OK'); +assert.deepEqual(res49.rules, [ + { aggregationType: 'MIN', key: 'hyg:compacted', timeBucket: 3} +]); +assert.equal(res50.sourceKey, 'hyg:1'); +// REMOVE_END + +// STEP_START comp_add +const res51 = await client.ts.mAdd([ + { key: 'hyg:1', timestamp: 0, value: 75 }, + { key: 'hyg:1', timestamp: 1, value: 77 }, + { key: 'hyg:1', timestamp: 2, value: 78 } +]); +console.log(res51); // >>> [0, 1, 2] + +const res52 = await client.ts.range('hyg:compacted', '-', '+'); +console.log(res52); // >>> [] + +const res53 = await client.ts.add('hyg:1', 3, 79); +console.log(res53); // >>> 3 + +const res54 = await client.ts.range('hyg:compacted', '-', '+'); +console.log(res54); // >>> [{ timestamp: 0, value: 75 }] +// STEP_END +// REMOVE_START +assert.deepEqual(res51, [0, 1, 2]); +assert.deepEqual(res52, []); +assert.equal(res53, 3); +assert.deepEqual(res54, [{ timestamp: 0, value: 75 }]); +// REMOVE_END + +// STEP_START del +const res55 = await client.ts.info('thermometer:1'); +console.log(res55.totalSamples); // >>> 2 +console.log(res55.firstTimestamp); // >>> 1 +console.log(res55.lastTimestamp); // >>> 2 + +const res56 = await client.ts.add('thermometer:1', 3, 9.7); +console.log(res56); // >>> 3 + +const res57 = await client.ts.info('thermometer:1'); +console.log(res57.totalSamples); // >>> 3 +console.log(res57.firstTimestamp); // >>> 1 +console.log(res57.lastTimestamp); // >>> 3 + +const res58 = await client.ts.del('thermometer:1', 1, 2); +console.log(res58); // >>> 2 + +const res59 = await client.ts.info('thermometer:1'); +console.log(res59.totalSamples); // >>> 1 +console.log(res59.firstTimestamp); // >>> 3 +console.log(res59.lastTimestamp); // >>> 3 + +const res60 = await client.ts.del('thermometer:1', 3, 3); +console.log(res60); // >>> 1 + +const res61 = await client.ts.info('thermometer:1'); +console.log(res61.totalSamples); // >>> 0 +// STEP_END +// REMOVE_START +assert.equal(res55.totalSamples, 2); +assert.equal(res55.firstTimestamp, 1); 
+assert.equal(res55.lastTimestamp, 2); +assert.equal(res56, 3); +assert.equal(res57.totalSamples, 3); +assert.equal(res57.firstTimestamp, 1); +assert.equal(res57.lastTimestamp, 3); +assert.equal(res58, 2); +assert.equal(res59.totalSamples, 1); +assert.equal(res59.firstTimestamp, 3); +assert.equal(res59.lastTimestamp, 3); +assert.equal(res60, 1); +assert.equal(res61.totalSamples, 0); +// REMOVE_END + +// HIDE_START +await client.quit(); +// HIDE_END \ No newline at end of file diff --git a/doctests/dt-topk.js b/doctests/dt-topk.js new file mode 100644 index 00000000000..49b929aed8a --- /dev/null +++ b/doctests/dt-topk.js @@ -0,0 +1,48 @@ +// EXAMPLE: topk_tutorial +// HIDE_START +import assert from 'assert'; +import { createClient } from 'redis'; + +const client = createClient(); +await client.connect(); +// HIDE_END + +// REMOVE_START +await client.flushDb(); +// REMOVE_END + +// STEP_START topk +const res1 = await client.topK.reserve('bikes:keywords', 5, { + width: 2000, + depth: 7, + decay: 0.925 +}); +console.log(res1); // >>> OK + +const res2 = await client.topK.add('bikes:keywords', [ + 'store', + 'seat', + 'handlebars', + 'handles', + 'pedals', + 'tires', + 'store', + 'seat' +]); +console.log(res2); // >>> [null, null, null, null, null, 'handlebars', null, null] + +const res3 = await client.topK.list('bikes:keywords'); +console.log(res3); // >>> ['store', 'seat', 'pedals', 'tires', 'handles'] + +const res4 = await client.topK.query('bikes:keywords', ['store', 'handlebars']); +console.log(res4); // >>> [true, false] +// STEP_END + +// REMOVE_START +assert.equal(res1, 'OK') +assert.deepEqual(res2, [null, null, null, null, null, 'handlebars', null, null]) +assert.deepEqual(res3, ['store', 'seat', 'pedals', 'tires', 'handles']) +assert.deepEqual(res4, [1, 0]) +await client.close(); +// REMOVE_END + diff --git a/doctests/dt-vec-set.js b/doctests/dt-vec-set.js new file mode 100644 index 00000000000..0e8cb918d7e --- /dev/null +++ b/doctests/dt-vec-set.js @@ -0,0 
+1,281 @@ +// EXAMPLE: vecset_tutorial +// REMOVE_START +/** + * Code samples for Vector set doc pages: + * https://redis.io/docs/latest/develop/data-types/vector-sets/ + */ + +import assert from 'assert'; +// REMOVE_END +// HIDE_START +import { createClient } from 'redis'; + +const client = createClient({ + RESP: 3 // Required for vector set commands +}); + +await client.connect(); +// HIDE_END + +// REMOVE_START +await client.del([ + "points", "quantSetQ8", "quantSetNoQ", + "quantSetBin", "setNotReduced", "setReduced" +]); +// REMOVE_END + +// STEP_START vadd +const res1 = await client.vAdd("points", [1.0, 1.0], "pt:A"); +console.log(res1); // >>> true + +const res2 = await client.vAdd("points", [-1.0, -1.0], "pt:B"); +console.log(res2); // >>> true + +const res3 = await client.vAdd("points", [-1.0, 1.0], "pt:C"); +console.log(res3); // >>> true + +const res4 = await client.vAdd("points", [1.0, -1.0], "pt:D"); +console.log(res4); // >>> true + +const res5 = await client.vAdd("points", [1.0, 0], "pt:E"); +console.log(res5); // >>> true + +const res6 = await client.type("points"); +console.log(res6); // >>> vectorset +// STEP_END +// REMOVE_START +assert.equal(res1, true); +assert.equal(res2, true); +assert.equal(res3, true); +assert.equal(res4, true); +assert.equal(res5, true); +assert.equal(res6, "vectorset"); +// REMOVE_END + +// STEP_START vcardvdim +const res7 = await client.vCard("points"); +console.log(res7); // >>> 5 + +const res8 = await client.vDim("points"); +console.log(res8); // >>> 2 +// STEP_END +// REMOVE_START +assert.equal(res7, 5); +assert.equal(res8, 2); +// REMOVE_END + +// STEP_START vemb +const res9 = await client.vEmb("points", "pt:A"); +console.log(res9); // >>> [0.9999999403953552, 0.9999999403953552] + +const res10 = await client.vEmb("points", "pt:B"); +console.log(res10); // >>> [-0.9999999403953552, -0.9999999403953552] + +const res11 = await client.vEmb("points", "pt:C"); +console.log(res11); // >>> [-0.9999999403953552, 
0.9999999403953552] + +const res12 = await client.vEmb("points", "pt:D"); +console.log(res12); // >>> [0.9999999403953552, -0.9999999403953552] + +const res13 = await client.vEmb("points", "pt:E"); +console.log(res13); // >>> [1, 0] +// STEP_END +// REMOVE_START +assert(Math.abs(1 - res9[0]) < 0.001); +assert(Math.abs(1 - res9[1]) < 0.001); +assert(Math.abs(1 + res10[0]) < 0.001); +assert(Math.abs(1 + res10[1]) < 0.001); +assert(Math.abs(1 + res11[0]) < 0.001); +assert(Math.abs(1 - res11[1]) < 0.001); +assert(Math.abs(1 - res12[0]) < 0.001); +assert(Math.abs(1 + res12[1]) < 0.001); +assert.deepEqual(res13, [1, 0]); +// REMOVE_END + +// STEP_START attr +const res14 = await client.vSetAttr("points", "pt:A", { + name: "Point A", + description: "First point added" +}); +console.log(res14); // >>> true + +const res15 = await client.vGetAttr("points", "pt:A"); +console.log(res15); +// >>> {name: 'Point A', description: 'First point added'} + +const res16 = await client.vSetAttr("points", "pt:A", ""); +console.log(res16); // >>> true + +const res17 = await client.vGetAttr("points", "pt:A"); +console.log(res17); // >>> null +// STEP_END +// REMOVE_START +assert.equal(res14, true); +assert.deepEqual(res15, {name: "Point A", description: "First point added"}); +assert.equal(res16, true); +assert.equal(res17, null); +// REMOVE_END + +// STEP_START vrem +const res18 = await client.vAdd("points", [0, 0], "pt:F"); +console.log(res18); // >>> true + +const res19 = await client.vCard("points"); +console.log(res19); // >>> 6 + +const res20 = await client.vRem("points", "pt:F"); +console.log(res20); // >>> true + +const res21 = await client.vCard("points"); +console.log(res21); // >>> 5 +// STEP_END +// REMOVE_START +assert.equal(res18, true); +assert.equal(res19, 6); +assert.equal(res20, true); +assert.equal(res21, 5); +// REMOVE_END + +// STEP_START vsim_basic +const res22 = await client.vSim("points", [0.9, 0.1]); +console.log(res22); +// >>> ['pt:E', 'pt:A', 'pt:D', 'pt:C', 
'pt:B'] +// STEP_END +// REMOVE_START +assert.deepEqual(res22, ["pt:E", "pt:A", "pt:D", "pt:C", "pt:B"]); +// REMOVE_END + +// STEP_START vsim_options +const res23 = await client.vSimWithScores("points", "pt:A", { COUNT: 4 }); +console.log(res23); +// >>> {pt:A: 1.0, pt:E: 0.8535534143447876, pt:D: 0.5, pt:C: 0.5} +// STEP_END +// REMOVE_START +assert.equal(res23["pt:A"], 1.0); +assert.equal(res23["pt:C"], 0.5); +assert.equal(res23["pt:D"], 0.5); +assert(Math.abs(res23["pt:E"] - 0.85) < 0.005); +// REMOVE_END + +// STEP_START vsim_filter +const res24 = await client.vSetAttr("points", "pt:A", { + size: "large", + price: 18.99 +}); +console.log(res24); // >>> true + +const res25 = await client.vSetAttr("points", "pt:B", { + size: "large", + price: 35.99 +}); +console.log(res25); // >>> true + +const res26 = await client.vSetAttr("points", "pt:C", { + size: "large", + price: 25.99 +}); +console.log(res26); // >>> true + +const res27 = await client.vSetAttr("points", "pt:D", { + size: "small", + price: 21.00 +}); +console.log(res27); // >>> true + +const res28 = await client.vSetAttr("points", "pt:E", { + size: "small", + price: 17.75 +}); +console.log(res28); // >>> true + +// Return elements in order of distance from point A whose +// `size` attribute is `large`. +const res29 = await client.vSim("points", "pt:A", { + FILTER: '.size == "large"' +}); +console.log(res29); // >>> ['pt:A', 'pt:C', 'pt:B'] + +// Return elements in order of distance from point A whose size is +// `large` and whose price is greater than 20.00. 
+const res30 = await client.vSim("points", "pt:A", { + FILTER: '.size == "large" && .price > 20.00' +}); +console.log(res30); // >>> ['pt:C', 'pt:B'] +// STEP_END +// REMOVE_START +assert.equal(res24, true); +assert.equal(res25, true); +assert.equal(res26, true); +assert.equal(res27, true); +assert.equal(res28, true); +assert.deepEqual(res29, ['pt:A', 'pt:C', 'pt:B']); +assert.deepEqual(res30, ['pt:C', 'pt:B']); +// REMOVE_END + +// STEP_START add_quant +const res31 = await client.vAdd("quantSetQ8", [1.262185, 1.958231], "quantElement", { + QUANT: 'Q8' +}); +console.log(res31); // >>> true + +const res32 = await client.vEmb("quantSetQ8", "quantElement"); +console.log(`Q8: ${res32}`); +// >>> Q8: [1.2643694877624512, 1.958230972290039] + +const res33 = await client.vAdd("quantSetNoQ", [1.262185, 1.958231], "quantElement", { + QUANT: 'NOQUANT' +}); +console.log(res33); // >>> true + +const res34 = await client.vEmb("quantSetNoQ", "quantElement"); +console.log(`NOQUANT: ${res34}`); +// >>> NOQUANT: [1.262184977531433, 1.958230972290039] + +const res35 = await client.vAdd("quantSetBin", [1.262185, 1.958231], "quantElement", { + QUANT: 'BIN' +}); +console.log(res35); // >>> true + +const res36 = await client.vEmb("quantSetBin", "quantElement"); +console.log(`BIN: ${res36}`); +// >>> BIN: [1, 1] +// STEP_END +// REMOVE_START +assert.equal(res31, true); +assert(Math.abs(res32[0] - 1.2643694877624512) < 0.001); +assert(Math.abs(res32[1] - 1.958230972290039) < 0.001); +assert.equal(res33, true); +assert(Math.abs(res34[0] - 1.262184977531433) < 0.001); +assert(Math.abs(res34[1] - 1.958230972290039) < 0.001); +assert.equal(res35, true); +assert.deepEqual(res36, [1, 1]); +// REMOVE_END + +// STEP_START add_reduce +// Create a list of 300 arbitrary values. 
+const values = Array.from({length: 300}, (_, x) => x / 299); + +const res37 = await client.vAdd("setNotReduced", values, "element"); +console.log(res37); // >>> true + +const res38 = await client.vDim("setNotReduced"); +console.log(res38); // >>> 300 + +const res39 = await client.vAdd("setReduced", values, "element", { + REDUCE: 100 +}); +console.log(res39); // >>> true + +const res40 = await client.vDim("setReduced"); +console.log(res40); // >>> 100 +// STEP_END +// REMOVE_START +assert.equal(res37, true); +assert.equal(res38, 300); +assert.equal(res39, true); +assert.equal(res40, 100); +// REMOVE_END + +// HIDE_START +await client.quit(); +// HIDE_END diff --git a/doctests/package-lock.json b/doctests/package-lock.json new file mode 100644 index 00000000000..2f30678e529 --- /dev/null +++ b/doctests/package-lock.json @@ -0,0 +1,889 @@ +{ + "name": "node-redis-doctests", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "node-redis-doctests", + "version": "1.0.0", + "dependencies": { + "@xenova/transformers": "^2.17.2", + "redis": "file:/packages/redis" + } + }, + "..": { + "name": "redis-monorepo", + "extraneous": true, + "workspaces": [ + "./packages/client", + "./packages/test-utils", + "./packages/bloom", + "./packages/json", + "./packages/search", + "./packages/time-series", + "./packages/entraid", + "./packages/redis" + ], + "devDependencies": { + "@istanbuljs/nyc-config-typescript": "^1.0.2", + "@release-it/bumper": "^7.0.5", + "@types/mocha": "^10.0.6", + "@types/node": "^20.11.16", + "gh-pages": "^6.1.1", + "mocha": "^10.2.0", + "nyc": "^15.1.0", + "release-it": "^19.0.2", + "ts-node": "^10.9.2", + "tsx": "^4.7.0", + "typedoc": "^0.25.7", + "typescript": "^5.3.3" + } + }, + "../../../../../packages/redis": {}, + "node_modules/@huggingface/jinja": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/@huggingface/jinja/-/jinja-0.2.2.tgz", + "integrity": 
"sha512-/KPde26khDUIPkTGU82jdtTW9UAuvUTumCAbFs/7giR0SxsvZC4hru51PBvpijH6BVkHcROcvZM/lpy5h1jRRA==", + "license": "MIT", + "engines": { + "node": ">=18" + } + }, + "node_modules/@protobufjs/aspromise": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz", + "integrity": "sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/base64": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/base64/-/base64-1.1.2.tgz", + "integrity": "sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/codegen": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@protobufjs/codegen/-/codegen-2.0.4.tgz", + "integrity": "sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/eventemitter": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz", + "integrity": "sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/fetch": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/fetch/-/fetch-1.1.0.tgz", + "integrity": "sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==", + "license": "BSD-3-Clause", + "dependencies": { + "@protobufjs/aspromise": "^1.1.1", + "@protobufjs/inquire": "^1.1.0" + } + }, + "node_modules/@protobufjs/float": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@protobufjs/float/-/float-1.0.2.tgz", + "integrity": "sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ==", + "license": 
"BSD-3-Clause" + }, + "node_modules/@protobufjs/inquire": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/inquire/-/inquire-1.1.0.tgz", + "integrity": "sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/path": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/path/-/path-1.1.2.tgz", + "integrity": "sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/pool": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/pool/-/pool-1.1.0.tgz", + "integrity": "sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/utf8": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz", + "integrity": "sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==", + "license": "BSD-3-Clause" + }, + "node_modules/@types/long": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.2.tgz", + "integrity": "sha512-MqTGEo5bj5t157U6fA/BiDynNkn0YknVdh48CMPkTSpFTVmvao5UQmm7uEF6xBEo7qIMAlY/JSleYaE6VOdpaA==", + "license": "MIT" + }, + "node_modules/@types/node": { + "version": "24.0.10", + "resolved": "https://registry.npmjs.org/@types/node/-/node-24.0.10.tgz", + "integrity": "sha512-ENHwaH+JIRTDIEEbDK6QSQntAYGtbvdDXnMXnZaZ6k13Du1dPMmprkEHIL7ok2Wl2aZevetwTAb5S+7yIF+enA==", + "license": "MIT", + "dependencies": { + "undici-types": "~7.8.0" + } + }, + "node_modules/@xenova/transformers": { + "version": "2.17.2", + "resolved": "https://registry.npmjs.org/@xenova/transformers/-/transformers-2.17.2.tgz", + "integrity": 
"sha512-lZmHqzrVIkSvZdKZEx7IYY51TK0WDrC8eR0c5IMnBsO8di8are1zzw8BlLhyO2TklZKLN5UffNGs1IJwT6oOqQ==", + "license": "Apache-2.0", + "dependencies": { + "@huggingface/jinja": "^0.2.2", + "onnxruntime-web": "1.14.0", + "sharp": "^0.32.0" + }, + "optionalDependencies": { + "onnxruntime-node": "1.14.0" + } + }, + "node_modules/b4a": { + "version": "1.6.7", + "resolved": "https://registry.npmjs.org/b4a/-/b4a-1.6.7.tgz", + "integrity": "sha512-OnAYlL5b7LEkALw87fUVafQw5rVR9RjwGd4KUwNQ6DrrNmaVaUCgLipfVlzrPQ4tWOR9P0IXGNOx50jYCCdSJg==", + "license": "Apache-2.0" + }, + "node_modules/bare-events": { + "version": "2.5.4", + "resolved": "https://registry.npmjs.org/bare-events/-/bare-events-2.5.4.tgz", + "integrity": "sha512-+gFfDkR8pj4/TrWCGUGWmJIkBwuxPS5F+a5yWjOHQt2hHvNZd5YLzadjmDUtFmMM4y429bnKLa8bYBMHcYdnQA==", + "license": "Apache-2.0", + "optional": true + }, + "node_modules/bare-fs": { + "version": "4.1.6", + "resolved": "https://registry.npmjs.org/bare-fs/-/bare-fs-4.1.6.tgz", + "integrity": "sha512-25RsLF33BqooOEFNdMcEhMpJy8EoR88zSMrnOQOaM3USnOK2VmaJ1uaQEwPA6AQjrv1lXChScosN6CzbwbO9OQ==", + "license": "Apache-2.0", + "optional": true, + "dependencies": { + "bare-events": "^2.5.4", + "bare-path": "^3.0.0", + "bare-stream": "^2.6.4" + }, + "engines": { + "bare": ">=1.16.0" + }, + "peerDependencies": { + "bare-buffer": "*" + }, + "peerDependenciesMeta": { + "bare-buffer": { + "optional": true + } + } + }, + "node_modules/bare-os": { + "version": "3.6.1", + "resolved": "https://registry.npmjs.org/bare-os/-/bare-os-3.6.1.tgz", + "integrity": "sha512-uaIjxokhFidJP+bmmvKSgiMzj2sV5GPHaZVAIktcxcpCyBFFWO+YlikVAdhmUo2vYFvFhOXIAlldqV29L8126g==", + "license": "Apache-2.0", + "optional": true, + "engines": { + "bare": ">=1.14.0" + } + }, + "node_modules/bare-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/bare-path/-/bare-path-3.0.0.tgz", + "integrity": "sha512-tyfW2cQcB5NN8Saijrhqn0Zh7AnFNsnczRcuWODH0eYAXBsJ5gVxAUuNr7tsHSC6IZ77cA0SitzT+s47kot8Mw==", + "license": 
"Apache-2.0", + "optional": true, + "dependencies": { + "bare-os": "^3.0.1" + } + }, + "node_modules/bare-stream": { + "version": "2.6.5", + "resolved": "https://registry.npmjs.org/bare-stream/-/bare-stream-2.6.5.tgz", + "integrity": "sha512-jSmxKJNJmHySi6hC42zlZnq00rga4jjxcgNZjY9N5WlOe/iOoGRtdwGsHzQv2RlH2KOYMwGUXhf2zXd32BA9RA==", + "license": "Apache-2.0", + "optional": true, + "dependencies": { + "streamx": "^2.21.0" + }, + "peerDependencies": { + "bare-buffer": "*", + "bare-events": "*" + }, + "peerDependenciesMeta": { + "bare-buffer": { + "optional": true + }, + "bare-events": { + "optional": true + } + } + }, + "node_modules/base64-js": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/bl": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", + "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", + "license": "MIT", + "dependencies": { + "buffer": "^5.5.0", + "inherits": "^2.0.4", + "readable-stream": "^3.4.0" + } + }, + "node_modules/buffer": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", + "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + 
"dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.1.13" + } + }, + "node_modules/chownr": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz", + "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==", + "license": "ISC" + }, + "node_modules/color": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/color/-/color-4.2.3.tgz", + "integrity": "sha512-1rXeuUUiGGrykh+CeBdu5Ie7OJwinCgQY0bc7GCRxy5xVHy+moaqkpL/jqQq0MtQOeYcrqEz4abc5f0KtU7W4A==", + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1", + "color-string": "^1.9.0" + }, + "engines": { + "node": ">=12.5.0" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "license": "MIT", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "license": "MIT" + }, + "node_modules/color-string": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/color-string/-/color-string-1.9.1.tgz", + "integrity": "sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg==", + "license": "MIT", + "dependencies": { + "color-name": "^1.0.0", + "simple-swizzle": "^0.2.2" + } + }, + "node_modules/decompress-response": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz", + "integrity": "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==", + "license": "MIT", + 
"dependencies": { + "mimic-response": "^3.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/deep-extend": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz", + "integrity": "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==", + "license": "MIT", + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/detect-libc": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.4.tgz", + "integrity": "sha512-3UDv+G9CsCKO1WKMGw9fwq/SWJYbI0c5Y7LU1AXYoDdbhE2AHQ6N6Nb34sG8Fj7T5APy8qXDCKuuIHd1BR0tVA==", + "license": "Apache-2.0", + "engines": { + "node": ">=8" + } + }, + "node_modules/end-of-stream": { + "version": "1.4.5", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.5.tgz", + "integrity": "sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg==", + "license": "MIT", + "dependencies": { + "once": "^1.4.0" + } + }, + "node_modules/expand-template": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/expand-template/-/expand-template-2.0.3.tgz", + "integrity": "sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg==", + "license": "(MIT OR WTFPL)", + "engines": { + "node": ">=6" + } + }, + "node_modules/fast-fifo": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/fast-fifo/-/fast-fifo-1.3.2.tgz", + "integrity": "sha512-/d9sfos4yxzpwkDkuN7k2SqFKtYNmCTzgfEpz82x34IM9/zc8KGxQoXg1liNC/izpRM/MBdt44Nmx41ZWqk+FQ==", + "license": "MIT" + }, + "node_modules/flatbuffers": { + "version": "1.12.0", + "resolved": "https://registry.npmjs.org/flatbuffers/-/flatbuffers-1.12.0.tgz", + "integrity": "sha512-c7CZADjRcl6j0PlvFy0ZqXQ67qSEZfrVPynmnL+2zPc+NtMvrF8Y0QceMo7QqnSPc7+uWjUIAbvCQ5WIKlMVdQ==", + "license": "SEE 
LICENSE IN LICENSE.txt" + }, + "node_modules/fs-constants": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz", + "integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==", + "license": "MIT" + }, + "node_modules/github-from-package": { + "version": "0.0.0", + "resolved": "https://registry.npmjs.org/github-from-package/-/github-from-package-0.0.0.tgz", + "integrity": "sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw==", + "license": "MIT" + }, + "node_modules/guid-typescript": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/guid-typescript/-/guid-typescript-1.0.9.tgz", + "integrity": "sha512-Y8T4vYhEfwJOTbouREvG+3XDsjr8E3kIr7uf+JZ0BYloFsttiHU0WfvANVsR7TxNUJa/WpCnw/Ino/p+DeBhBQ==", + "license": "ISC" + }, + "node_modules/ieee754": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", + "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "BSD-3-Clause" + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "license": "ISC" + }, + "node_modules/ini": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", + "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==", + "license": "ISC" + }, + "node_modules/is-arrayish": { + "version": "0.3.2", + "resolved": 
"https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.2.tgz", + "integrity": "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==", + "license": "MIT" + }, + "node_modules/long": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz", + "integrity": "sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA==", + "license": "Apache-2.0" + }, + "node_modules/mimic-response": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz", + "integrity": "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==", + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/minimist": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", + "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/mkdirp-classic": { + "version": "0.5.3", + "resolved": "https://registry.npmjs.org/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz", + "integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==", + "license": "MIT" + }, + "node_modules/napi-build-utils": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/napi-build-utils/-/napi-build-utils-2.0.0.tgz", + "integrity": "sha512-GEbrYkbfF7MoNaoh2iGG84Mnf/WZfB0GdGEsM8wz7Expx/LlWf5U8t9nvJKXSp3qr5IsEbK04cBGhol/KwOsWA==", + "license": "MIT" + }, + "node_modules/node-abi": { + "version": "3.75.0", + "resolved": "https://registry.npmjs.org/node-abi/-/node-abi-3.75.0.tgz", + "integrity": 
"sha512-OhYaY5sDsIka7H7AtijtI9jwGYLyl29eQn/W623DiN/MIv5sUqc4g7BIDThX+gb7di9f6xK02nkp8sdfFWZLTg==", + "license": "MIT", + "dependencies": { + "semver": "^7.3.5" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/node-addon-api": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-6.1.0.tgz", + "integrity": "sha512-+eawOlIgy680F0kBzPUNFhMZGtJ1YmqM6l4+Crf4IkImjYrO/mqPwRMh352g23uIaQKFItcQ64I7KMaJxHgAVA==", + "license": "MIT" + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "license": "ISC", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/onnx-proto": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/onnx-proto/-/onnx-proto-4.0.4.tgz", + "integrity": "sha512-aldMOB3HRoo6q/phyB6QRQxSt895HNNw82BNyZ2CMh4bjeKv7g/c+VpAFtJuEMVfYLMbRx61hbuqnKceLeDcDA==", + "license": "MIT", + "dependencies": { + "protobufjs": "^6.8.8" + } + }, + "node_modules/onnxruntime-common": { + "version": "1.14.0", + "resolved": "https://registry.npmjs.org/onnxruntime-common/-/onnxruntime-common-1.14.0.tgz", + "integrity": "sha512-3LJpegM2iMNRX2wUmtYfeX/ytfOzNwAWKSq1HbRrKc9+uqG/FsEA0bbKZl1btQeZaXhC26l44NWpNUeXPII7Ew==", + "license": "MIT" + }, + "node_modules/onnxruntime-node": { + "version": "1.14.0", + "resolved": "https://registry.npmjs.org/onnxruntime-node/-/onnxruntime-node-1.14.0.tgz", + "integrity": "sha512-5ba7TWomIV/9b6NH/1x/8QEeowsb+jBEvFzU6z0T4mNsFwdPqXeFUM7uxC6QeSRkEbWu3qEB0VMjrvzN/0S9+w==", + "license": "MIT", + "optional": true, + "os": [ + "win32", + "darwin", + "linux" + ], + "dependencies": { + "onnxruntime-common": "~1.14.0" + } + }, + "node_modules/onnxruntime-web": { + "version": "1.14.0", + "resolved": "https://registry.npmjs.org/onnxruntime-web/-/onnxruntime-web-1.14.0.tgz", + "integrity": 
"sha512-Kcqf43UMfW8mCydVGcX9OMXI2VN17c0p6XvR7IPSZzBf/6lteBzXHvcEVWDPmCKuGombl997HgLqj91F11DzXw==", + "license": "MIT", + "dependencies": { + "flatbuffers": "^1.12.0", + "guid-typescript": "^1.0.9", + "long": "^4.0.0", + "onnx-proto": "^4.0.4", + "onnxruntime-common": "~1.14.0", + "platform": "^1.3.6" + } + }, + "node_modules/platform": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/platform/-/platform-1.3.6.tgz", + "integrity": "sha512-fnWVljUchTro6RiCFvCXBbNhJc2NijN7oIQxbwsyL0buWJPG85v81ehlHI9fXrJsMNgTofEoWIQeClKpgxFLrg==", + "license": "MIT" + }, + "node_modules/prebuild-install": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/prebuild-install/-/prebuild-install-7.1.3.tgz", + "integrity": "sha512-8Mf2cbV7x1cXPUILADGI3wuhfqWvtiLA1iclTDbFRZkgRQS0NqsPZphna9V+HyTEadheuPmjaJMsbzKQFOzLug==", + "license": "MIT", + "dependencies": { + "detect-libc": "^2.0.0", + "expand-template": "^2.0.3", + "github-from-package": "0.0.0", + "minimist": "^1.2.3", + "mkdirp-classic": "^0.5.3", + "napi-build-utils": "^2.0.0", + "node-abi": "^3.3.0", + "pump": "^3.0.0", + "rc": "^1.2.7", + "simple-get": "^4.0.0", + "tar-fs": "^2.0.0", + "tunnel-agent": "^0.6.0" + }, + "bin": { + "prebuild-install": "bin.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/prebuild-install/node_modules/tar-fs": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.3.tgz", + "integrity": "sha512-090nwYJDmlhwFwEW3QQl+vaNnxsO2yVsd45eTKRBzSzu+hlb1w2K9inVq5b0ngXuLVqQ4ApvsUHHnu/zQNkWAg==", + "license": "MIT", + "dependencies": { + "chownr": "^1.1.1", + "mkdirp-classic": "^0.5.2", + "pump": "^3.0.0", + "tar-stream": "^2.1.4" + } + }, + "node_modules/prebuild-install/node_modules/tar-stream": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-2.2.0.tgz", + "integrity": "sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==", + "license": "MIT", + 
"dependencies": { + "bl": "^4.0.3", + "end-of-stream": "^1.4.1", + "fs-constants": "^1.0.0", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/protobufjs": { + "version": "6.11.4", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.11.4.tgz", + "integrity": "sha512-5kQWPaJHi1WoCpjTGszzQ32PG2F4+wRY6BmAT4Vfw56Q2FZ4YZzK20xUYQH4YkfehY1e6QSICrJquM6xXZNcrw==", + "hasInstallScript": true, + "license": "BSD-3-Clause", + "dependencies": { + "@protobufjs/aspromise": "^1.1.2", + "@protobufjs/base64": "^1.1.2", + "@protobufjs/codegen": "^2.0.4", + "@protobufjs/eventemitter": "^1.1.0", + "@protobufjs/fetch": "^1.1.0", + "@protobufjs/float": "^1.0.2", + "@protobufjs/inquire": "^1.1.0", + "@protobufjs/path": "^1.1.2", + "@protobufjs/pool": "^1.1.0", + "@protobufjs/utf8": "^1.1.0", + "@types/long": "^4.0.1", + "@types/node": ">=13.7.0", + "long": "^4.0.0" + }, + "bin": { + "pbjs": "bin/pbjs", + "pbts": "bin/pbts" + } + }, + "node_modules/pump": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.3.tgz", + "integrity": "sha512-todwxLMY7/heScKmntwQG8CXVkWUOdYxIvY2s0VWAAMh/nd8SoYiRaKjlr7+iCs984f2P8zvrfWcDDYVb73NfA==", + "license": "MIT", + "dependencies": { + "end-of-stream": "^1.1.0", + "once": "^1.3.1" + } + }, + "node_modules/rc": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz", + "integrity": "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==", + "license": "(BSD-2-Clause OR MIT OR Apache-2.0)", + "dependencies": { + "deep-extend": "^0.6.0", + "ini": "~1.3.0", + "minimist": "^1.2.0", + "strip-json-comments": "~2.0.1" + }, + "bin": { + "rc": "cli.js" + } + }, + "node_modules/readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": 
"sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "license": "MIT", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/redis": { + "resolved": "../../../../../packages/redis", + "link": true + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/semver": { + "version": "7.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", + "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/sharp": { + "version": "0.32.6", + "resolved": "https://registry.npmjs.org/sharp/-/sharp-0.32.6.tgz", + "integrity": "sha512-KyLTWwgcR9Oe4d9HwCwNM2l7+J0dUQwn/yf7S0EnTtb0eVS4RxO0eUSvxPtzT4F3SY+C4K6fqdv/DO27sJ/v/w==", + "hasInstallScript": true, + "license": "Apache-2.0", + "dependencies": { + "color": "^4.2.3", + "detect-libc": "^2.0.2", + "node-addon-api": "^6.1.0", + "prebuild-install": "^7.1.1", + "semver": "^7.5.4", + "simple-get": "^4.0.1", + "tar-fs": "^3.0.4", + "tunnel-agent": "^0.6.0" + }, + "engines": { + "node": ">=14.15.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/simple-concat": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/simple-concat/-/simple-concat-1.0.1.tgz", + 
"integrity": "sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/simple-get": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/simple-get/-/simple-get-4.0.1.tgz", + "integrity": "sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "decompress-response": "^6.0.0", + "once": "^1.3.1", + "simple-concat": "^1.0.0" + } + }, + "node_modules/simple-swizzle": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/simple-swizzle/-/simple-swizzle-0.2.2.tgz", + "integrity": "sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg==", + "license": "MIT", + "dependencies": { + "is-arrayish": "^0.3.1" + } + }, + "node_modules/streamx": { + "version": "2.22.1", + "resolved": "https://registry.npmjs.org/streamx/-/streamx-2.22.1.tgz", + "integrity": "sha512-znKXEBxfatz2GBNK02kRnCXjV+AA4kjZIUxeWSr3UGirZMJfTE9uiwKHobnbgxWyL/JWro8tTq+vOqAK1/qbSA==", + "license": "MIT", + "dependencies": { + "fast-fifo": "^1.3.2", + "text-decoder": "^1.1.0" + }, + "optionalDependencies": { + "bare-events": "^2.2.0" + } + }, + "node_modules/string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": 
"sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "license": "MIT", + "dependencies": { + "safe-buffer": "~5.2.0" + } + }, + "node_modules/strip-json-comments": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", + "integrity": "sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/tar-fs": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-3.1.0.tgz", + "integrity": "sha512-5Mty5y/sOF1YWj1J6GiBodjlDc05CUR8PKXrsnFAiSG0xA+GHeWLovaZPYUDXkH/1iKRf2+M5+OrRgzC7O9b7w==", + "license": "MIT", + "dependencies": { + "pump": "^3.0.0", + "tar-stream": "^3.1.5" + }, + "optionalDependencies": { + "bare-fs": "^4.0.1", + "bare-path": "^3.0.0" + } + }, + "node_modules/tar-stream": { + "version": "3.1.7", + "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-3.1.7.tgz", + "integrity": "sha512-qJj60CXt7IU1Ffyc3NJMjh6EkuCFej46zUqJ4J7pqYlThyd9bO0XBTmcOIhSzZJVWfsLks0+nle/j538YAW9RQ==", + "license": "MIT", + "dependencies": { + "b4a": "^1.6.4", + "fast-fifo": "^1.2.0", + "streamx": "^2.15.0" + } + }, + "node_modules/text-decoder": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/text-decoder/-/text-decoder-1.2.3.tgz", + "integrity": "sha512-3/o9z3X0X0fTupwsYvR03pJ/DjWuqqrfwBgTQzdWDiQSm9KitAyz/9WqsT2JQW7KV2m+bC2ol/zqpW37NHxLaA==", + "license": "Apache-2.0", + "dependencies": { + "b4a": "^1.6.4" + } + }, + "node_modules/tunnel-agent": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", + "integrity": "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==", + "license": "Apache-2.0", + "dependencies": { + "safe-buffer": "^5.0.1" + }, + "engines": { + "node": "*" + } + }, + 
"node_modules/undici-types": { + "version": "7.8.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.8.0.tgz", + "integrity": "sha512-9UJ2xGDvQ43tYyVMpuHlsgApydB8ZKfVYTsLDhXkFL/6gfkp+U8xTGdh8pMJv1SpZna0zxG1DwsKZsreLbXBxw==", + "license": "MIT" + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "license": "MIT" + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "license": "ISC" + } + } +} diff --git a/doctests/package.json b/doctests/package.json new file mode 100644 index 00000000000..ee07eb44fb8 --- /dev/null +++ b/doctests/package.json @@ -0,0 +1,12 @@ +{ + "name": "node-redis-doctests", + "version": "1.0.0", + "description": "Code examples for redis.io", + "main": "index.js", + "private": true, + "type": "module", + "dependencies": { + "redis": "file:/packages/redis", + "@xenova/transformers": "^2.17.2" + } +} diff --git a/doctests/query-agg.js b/doctests/query-agg.js new file mode 100644 index 00000000000..82d378b86b2 --- /dev/null +++ b/doctests/query-agg.js @@ -0,0 +1,139 @@ +// EXAMPLE: query_agg +// HIDE_START +import assert from 'node:assert'; +import fs from 'node:fs'; +import { createClient } from 'redis'; +import { SCHEMA_FIELD_TYPE, FT_AGGREGATE_STEPS, FT_AGGREGATE_GROUP_BY_REDUCERS } from '@redis/search'; + +const client = createClient(); + +await client.connect().catch(console.error); + +// create index +await client.ft.create('idx:bicycle', { + '$.condition': { + type: SCHEMA_FIELD_TYPE.TAG, + AS: 'condition' + }, + '$.price': { + type: SCHEMA_FIELD_TYPE.NUMERIC, + AS: 'price' + } +}, { + ON: 'JSON', + PREFIX: 'bicycle:' +}) + +// 
load data +const bicycles = JSON.parse(fs.readFileSync('data/query_em.json', 'utf8')); + +await Promise.all( + bicycles.map((bicycle, bid) => { + return client.json.set(`bicycle:${bid}`, '$', bicycle); + }) +); +// HIDE_END + +// STEP_START agg1 +const res1 = await client.ft.aggregate('idx:bicycle', '@condition:{new}', { + LOAD: ['__key', 'price'], + APPLY: { + expression: '@price - (@price * 0.1)', + AS: 'discounted' + } +}); + +console.log(res1.results.length); // >>> 5 +console.log(res1.results); // >>> +//[ +// [Object: null prototype] { __key: 'bicycle:0', price: '270' }, +// [Object: null prototype] { __key: 'bicycle:5', price: '810' }, +// [Object: null prototype] { __key: 'bicycle:6', price: '2300' }, +// [Object: null prototype] { __key: 'bicycle:7', price: '430' }, +// [Object: null prototype] { __key: 'bicycle:8', price: '1200' } +//] +// REMOVE_START +assert.strictEqual(res1.results.length, 5); +// REMOVE_END +// STEP_END + +// STEP_START agg2 +const res2 = await client.ft.aggregate('idx:bicycle', '*', { + LOAD: ['@price'], + STEPS: [{ + type: FT_AGGREGATE_STEPS.APPLY, + expression: '@price<1000', + AS: 'price_category' + },{ + type: FT_AGGREGATE_STEPS.GROUPBY, + properties: '@condition', + REDUCE:[{ + type: FT_AGGREGATE_GROUP_BY_REDUCERS.SUM, + property: '@price_category', + AS: 'num_affordable' + }] + }] +}); +console.log(res2.results.length); // >>> 3 +console.log(res2.results); // >>> +//[[Object: null prototype] { condition: 'refurbished', num_affordable: '1' }, +// [Object: null prototype] { condition: 'used', num_affordable: '1' }, +// [Object: null prototype] { condition: 'new', num_affordable: '3' } +//] +// REMOVE_START +assert.strictEqual(res2.results.length, 3); +// REMOVE_END +// STEP_END + +// STEP_START agg3 +const res3 = await client.ft.aggregate('idx:bicycle', '*', { + STEPS: [{ + type: FT_AGGREGATE_STEPS.APPLY, + expression: "'bicycle'", + AS: 'type' + }, { + type: FT_AGGREGATE_STEPS.GROUPBY, + properties: '@type', + REDUCE: [{ + type: 
FT_AGGREGATE_GROUP_BY_REDUCERS.COUNT, + property: null, + AS: 'num_total' + }] + }] +}); +console.log(res3.results.length); // >>> 1 +console.log(res3.results); // >>> +//[ [Object: null prototype] { type: 'bicycle', num_total: '10' } ] +// REMOVE_START +assert.strictEqual(res3.results.length, 1); +// REMOVE_END +// STEP_END + +// STEP_START agg4 +const res4 = await client.ft.aggregate('idx:bicycle', '*', { + LOAD: ['__key'], + STEPS: [{ + type: FT_AGGREGATE_STEPS.GROUPBY, + properties: '@condition', + REDUCE: [{ + type: FT_AGGREGATE_GROUP_BY_REDUCERS.TOLIST, + property: '__key', + AS: 'bicycles' + }] + }] +}); +console.log(res4.results.length); // >>> 3 +console.log(res4.results); // >>> +//[[Object: null prototype] {condition: 'refurbished', bicycles: [ 'bicycle:9' ]}, +// [Object: null prototype] {condition: 'used', bicycles: [ 'bicycle:1', 'bicycle:2', 'bicycle:3', 'bicycle:4' ]}, +// [Object: null prototype] {condition: 'new', bicycles: [ 'bicycle:5', 'bicycle:6', 'bicycle:7', 'bicycle:0', 'bicycle:8' ]}] +// REMOVE_START +assert.strictEqual(res4.results.length, 3); +// REMOVE_END +// STEP_END + +// REMOVE_START +// destroy index and data +await client.ft.dropIndex('idx:bicycle', { DD: true }); +await client.close(); +// REMOVE_END \ No newline at end of file diff --git a/doctests/query-combined.js b/doctests/query-combined.js new file mode 100644 index 00000000000..a2ad8d43c93 --- /dev/null +++ b/doctests/query-combined.js @@ -0,0 +1,191 @@ +// EXAMPLE: query_combined +// HIDE_START +import assert from 'node:assert'; +import fs from 'node:fs'; +import { createClient } from 'redis'; +import { SCHEMA_FIELD_TYPE, SCHEMA_VECTOR_FIELD_ALGORITHM } from '@redis/search'; +import { pipeline } from '@xenova/transformers'; + +function float32Buffer(arr) { + const floatArray = new Float32Array(arr); + const float32Buffer = Buffer.from(floatArray.buffer); + return float32Buffer; +} + +async function embedText(sentence) { + let modelName = 'Xenova/all-MiniLM-L6-v2'; + let 
pipe = await pipeline('feature-extraction', modelName); + + let vectorOutput = await pipe(sentence, { + pooling: 'mean', + normalize: true, + }); + + if (vectorOutput == null) { + throw new Error('vectorOutput is undefined'); + } + + const embedding = Object.values(vectorOutput.data); + + return embedding; +} + +let vector_query = float32Buffer(await embedText('That is a very happy person')); + +const client = createClient(); +await client.connect().catch(console.error); + +// create index +await client.ft.create('idx:bicycle', { + '$.description': { + type: SCHEMA_FIELD_TYPE.TEXT, + AS: 'description' + }, + '$.condition': { + type: SCHEMA_FIELD_TYPE.TAG, + AS: 'condition' + }, + '$.price': { + type: SCHEMA_FIELD_TYPE.NUMERIC, + AS: 'price' + }, + '$.description_embeddings': { + type: SCHEMA_FIELD_TYPE.VECTOR, + TYPE: 'FLOAT32', + ALGORITHM: SCHEMA_VECTOR_FIELD_ALGORITHM.FLAT, + DIM: 384, + DISTANCE_METRIC: 'COSINE', + AS: 'vector', + } +}, { + ON: 'JSON', + PREFIX: 'bicycle:' +}); + +// load data +const bicycles = JSON.parse(fs.readFileSync('data/query_vector.json', 'utf8')); + +await Promise.all( + bicycles.map((bicycle, bid) => { + return client.json.set(`bicycle:${bid}`, '$', bicycle); + }) +); +// HIDE_END + +// STEP_START combined1 +const res1 = await client.ft.search('idx:bicycle', '@price:[500 1000] @condition:{new}'); +console.log(res1.total); // >>> 1 +console.log(res1); // >>> +//{ +// total: 1, +// documents: [ { id: 'bicycle:5', value: [Object: null prototype] } ] +//} +// REMOVE_START +assert.strictEqual(res1.total, 1); +// REMOVE_END +// STEP_END + +// STEP_START combined2 +const res2 = await client.ft.search('idx:bicycle', 'kids @price:[500 1000] @condition:{used}'); +console.log(res2.total); // >>> 1 +console.log(res2); // >>> +// { +// total: 1, +// documents: [ { id: 'bicycle:2', value: [Object: null prototype] } ] +// } +// REMOVE_START +assert.strictEqual(res2.total, 1); +// REMOVE_END +// STEP_END + +// STEP_START combined3 +const res3 = await 
client.ft.search('idx:bicycle', '(kids | small) @condition:{used}'); +console.log(res3.total); // >>> 2 +console.log(res3); // >>> +//{ +// total: 2, +// documents: [ +// { id: 'bicycle:2', value: [Object: null prototype] }, +// { id: 'bicycle:1', value: [Object: null prototype] } +// ] +//} +// REMOVE_START +assert.strictEqual(res3.total, 2); +// REMOVE_END +// STEP_END + +// STEP_START combined4 +const res4 = await client.ft.search('idx:bicycle', '@description:(kids | small) @condition:{used}'); +console.log(res4.total); // >>> 2 +console.log(res4); // >>> +//{ +// total: 2, +// documents: [ +// { id: 'bicycle:2', value: [Object: null prototype] }, +// { id: 'bicycle:1', value: [Object: null prototype] } +// ] +//} +// REMOVE_START +assert.strictEqual(res4.total, 2); +// REMOVE_END +// STEP_END + +// STEP_START combined5 +const res5 = await client.ft.search('idx:bicycle', '@description:(kids | small) @condition:{new | used}'); +console.log(res5.total); // >>> 3 +console.log(res5); // >>> +//{ +// total: 3, +// documents: [ +// { id: 'bicycle:1', value: [Object: null prototype] }, +// { id: 'bicycle:0', value: [Object: null prototype] }, +// { id: 'bicycle:2', value: [Object: null prototype] } +// ] +//} +// REMOVE_START +assert.strictEqual(res5.total, 3); +// REMOVE_END +// STEP_END + +// STEP_START combined6 +const res6 = await client.ft.search('idx:bicycle', '@price:[500 1000] -@condition:{new}'); +console.log(res6.total); // >>> 2 +console.log(res6); // >>> +//{ +// total: 2, +// documents: [ +// { id: 'bicycle:2', value: [Object: null prototype] }, +// { id: 'bicycle:9', value: [Object: null prototype] } +// ] +//} +// REMOVE_START +assert.strictEqual(res6.total, 2); +// REMOVE_END +// STEP_END + +// STEP_START combined7 +const res7 = await client.ft.search('idx:bicycle', + '(@price:[500 1000] -@condition:{new})=>[KNN 3 @vector $query_vector]', { + PARAMS: { query_vector: vector_query }, + DIALECT: 2 + } +); +console.log(res7.total); // >>> 2 
+console.log(res7); // >>> +//{ +// total: 2, +// documents: [ +// { id: 'bicycle:2', value: [Object: null prototype] }, +// { id: 'bicycle:9', value: [Object: null prototype] } +// ] +//} +// REMOVE_START +assert.strictEqual(res7.total, 2); +// REMOVE_END +// STEP_END + +// REMOVE_START +// destroy index and data +await client.ft.dropIndex('idx:bicycle', { DD: true }); +await client.close(); +// REMOVE_END \ No newline at end of file diff --git a/doctests/query-em.js b/doctests/query-em.js new file mode 100644 index 00000000000..9b5782e09ce --- /dev/null +++ b/doctests/query-em.js @@ -0,0 +1,121 @@ +// EXAMPLE: query_em +// HIDE_START +import assert from 'node:assert'; +import fs from 'node:fs'; +import { createClient, SCHEMA_FIELD_TYPE } from 'redis'; + +const client = createClient(); + +await client.connect().catch(console.error); + +// create index +await client.ft.create('idx:bicycle', { + '$.description': { + type: SCHEMA_FIELD_TYPE.TEXT, + AS: 'description' + }, + '$.price': { + type: SCHEMA_FIELD_TYPE.NUMERIC, + AS: 'price' + }, + '$.condition': { + type: SCHEMA_FIELD_TYPE.TAG, + AS: 'condition' + } +}, { + ON: 'JSON', + PREFIX: 'bicycle:' +}) + +// load data +const bicycles = JSON.parse(fs.readFileSync('data/query_em.json', 'utf8')); + +await Promise.all( + bicycles.map((bicycle, bid) => { + return client.json.set(`bicycle:${bid}`, '$', bicycle); + }) +); +// HIDE_END + +// STEP_START em1 +const res1 = await client.ft.search('idx:bicycle', '@price:[270 270]'); +console.log(res1.total); // >>> 1 +// REMOVE_START +assert.strictEqual(res1.total, 1); +// REMOVE_END + +try { + const res2 = await client.ft.search('idx:bicycle', '@price:[270]'); + console.log(res2.total); // >>> 1 + assert.strictEqual(res2.total, 1); +} catch (err) { + console.log("'@price:[270]' syntax not yet supported."); +} + +try { + const res3 = await client.ft.search('idx:bicycle', '@price==270'); + console.log(res3.total); // >>> 1 + assert.strictEqual(res3.total, 1); +} catch (err) { + 
console.log("'@price==270' syntax not yet supported."); +} + +// FILTER is not supported +// const res4 = await client.ft.search('idx:bicycle', '*', { +// FILTER: { +// field: 'price', +// min: 270, +// max: 270, +// } +// }); +// console.log(res4.total); // >>> 1 +// REMOVE_START +// assert.strictEqual(res4.total, 1); +// REMOVE_END +// STEP_END + +// STEP_START em2 +const res5 = await client.ft.search('idx:bicycle', '@condition:{new}'); +console.log(res5.total); // >>> 5 +// REMOVE_START +assert.strictEqual(res5.total, 5); +// REMOVE_END +// STEP_END + +// STEP_START em3 +await client.ft.create('idx:email', { + '$.email': { + type: SCHEMA_FIELD_TYPE.TAG, + AS: 'email' + } +}, { + ON: 'JSON', + PREFIX: 'key:' +}) + +await client.json.set('key:1', '$', { email: 'test@redis.com' }); + +try { + const res6 = await client.ft.search('idx:email', 'test@redis.com', { DIALECT: 2 }); + console.log(res6); +} catch (err) { + console.log("'test@redis.com' syntax not yet supported."); +} +// REMOVE_START +await client.ft.dropIndex('idx:email', { DD: true }); +// REMOVE_END +// STEP_END + +// STEP_START em4 +const res7 = await client.ft.search('idx:bicycle', '@description:"rough terrain"'); +console.log(res7.total); // >>> 1 (Result{1 total, docs: [Document {'id': 'bicycle:8'...) 
+// REMOVE_START +assert.strictEqual(res7.total, 1); +// REMOVE_END +// STEP_END + +// REMOVE_START +// destroy index and data +await client.ft.dropIndex('idx:bicycle', { DD: true }); +await client.close(); +// REMOVE_END diff --git a/doctests/query-ft.js b/doctests/query-ft.js new file mode 100644 index 00000000000..a70b2ea3f5d --- /dev/null +++ b/doctests/query-ft.js @@ -0,0 +1,84 @@ +// EXAMPLE: query_ft +// HIDE_START +import assert from 'node:assert'; +import fs from 'node:fs'; +import { createClient, SCHEMA_FIELD_TYPE } from 'redis'; + +const client = createClient(); + +await client.connect().catch(console.error); + +// create index +await client.ft.create('idx:bicycle', { + '$.model': { + type: SCHEMA_FIELD_TYPE.TEXT, + AS: 'model' + }, + '$.brand': { + type: SCHEMA_FIELD_TYPE.TEXT, + AS: 'brand' + }, + '$.description': { + type: SCHEMA_FIELD_TYPE.TEXT, + AS: 'description' + } +}, { + ON: 'JSON', + PREFIX: 'bicycle:' +}) + +// load data +const bicycles = JSON.parse(fs.readFileSync('data/query_em.json', 'utf8')); + +await Promise.all( + bicycles.map((bicycle, bid) => { + return client.json.set(`bicycle:${bid}`, '$', bicycle); + }) +); +// HIDE_END + +// STEP_START ft1 +const res1 = await client.ft.search('idx:bicycle', '@description: kids'); +console.log(res1.total); // >>> 2 +// REMOVE_START +assert.strictEqual(res1.total, 2); +// REMOVE_END +// STEP_END + +// STEP_START ft2 +const res2 = await client.ft.search('idx:bicycle', '@model: ka*'); +console.log(res2.total); // >>> 1 +// REMOVE_START +assert.strictEqual(res2.total, 1); +// REMOVE_END +// STEP_END + +// STEP_START ft3 +const res3 = await client.ft.search('idx:bicycle', '@brand: *bikes'); +console.log(res3.total); // >>> 2 +// REMOVE_START +assert.strictEqual(res3.total, 2); +// REMOVE_END +// STEP_END + +// STEP_START ft4 +const res4 = await client.ft.search('idx:bicycle', '%optamized%'); +console.log(res4); // >>> { total: 1, documents: [ { id: 'bicycle:3', value: [Object: null prototype] } ]} +// 
REMOVE_START +assert.strictEqual(res4.total, 1); +// REMOVE_END +// STEP_END + +// STEP_START ft5 +const res5 = await client.ft.search('idx:bicycle', '%%optamised%%'); +console.log(res5); // >>> { total: 1, documents: [ { id: 'bicycle:3', value: [Object: null prototype] } ]} +// REMOVE_START +assert.strictEqual(res5.total, 1); +// REMOVE_END +// STEP_END + +// REMOVE_START +// destroy index and data +await client.ft.dropIndex('idx:bicycle', { DD: true }); +await client.close(); +// REMOVE_END \ No newline at end of file diff --git a/doctests/query-geo.js b/doctests/query-geo.js new file mode 100644 index 00000000000..41ef2141eb8 --- /dev/null +++ b/doctests/query-geo.js @@ -0,0 +1,82 @@ +// EXAMPLE: query_geo +// HIDE_START +import assert from 'node:assert'; +import fs from 'node:fs'; +import { createClient } from 'redis'; +import { SCHEMA_FIELD_TYPE } from '@redis/search'; + +const client = createClient(); + +await client.connect().catch(console.error); + +// create index +await client.ft.create('idx:bicycle', { + '$.store_location': { + type: SCHEMA_FIELD_TYPE.GEO, + AS: 'store_location' + }, + '$.pickup_zone': { + type: SCHEMA_FIELD_TYPE.GEOSHAPE, + AS: 'pickup_zone' + } +}, { + ON: 'JSON', + PREFIX: 'bicycle:' +}) + +// load data +const bicycles = JSON.parse(fs.readFileSync('data/query_em.json', 'utf8')); + +await Promise.all( + bicycles.map((bicycle, bid) => { + return client.json.set(`bicycle:${bid}`, '$', bicycle); + }) +); +// HIDE_END + +// STEP_START geo1 +const res1= await client.ft.search('idx:bicycle', '@store_location:[-0.1778 51.5524 20 mi]'); +console.log(res1.total); // >>> 1 +console.log(res1); // >>> {total: 1, documents: [ { id: 'bicycle:5', value: [Object: null prototype] } ]} +// REMOVE_START +assert.strictEqual(res1.total, 1); +// REMOVE_END +// STEP_END + +// STEP_START geo2 +const params_dict_geo2 = { bike: 'POINT(-0.1278 51.5074)' }; +const q_geo2 = '@pickup_zone:[CONTAINS $bike]'; +const res2 = await client.ft.search('idx:bicycle', 
q_geo2, { PARAMS: params_dict_geo2, DIALECT: 3 }); +console.log(res2.total); // >>> 1 +console.log(res2); // >>> {total: 1, documents: [ { id: 'bicycle:5', value: [Object: null prototype] } ]} +// REMOVE_START +assert.strictEqual(res2.total, 1); +// REMOVE_END +// STEP_END + +// STEP_START geo3 +const params_dict_geo3 = { europe: 'POLYGON((-25 35, 40 35, 40 70, -25 70, -25 35))' }; +const q_geo3 = '@pickup_zone:[WITHIN $europe]'; +const res3 = await client.ft.search('idx:bicycle', q_geo3, { PARAMS: params_dict_geo3, DIALECT: 3 }); +console.log(res3.total); // >>> 5 +console.log(res3); // >>> +// { +// total: 5, +// documents: [ +// { id: 'bicycle:5', value: [Object: null prototype] }, +// { id: 'bicycle:6', value: [Object: null prototype] }, +// { id: 'bicycle:7', value: [Object: null prototype] }, +// { id: 'bicycle:8', value: [Object: null prototype] }, +// { id: 'bicycle:9', value: [Object: null prototype] } +// ] +// } +// REMOVE_START +assert.strictEqual(res3.total, 5); +// REMOVE_END +// STEP_END + +// REMOVE_START +// destroy index and data +await client.ft.dropIndex('idx:bicycle', { DD: true }); +await client.close(); +// REMOVE_END \ No newline at end of file diff --git a/doctests/query-range.js b/doctests/query-range.js new file mode 100644 index 00000000000..29b269e37b5 --- /dev/null +++ b/doctests/query-range.js @@ -0,0 +1,98 @@ +// EXAMPLE: query_range +// HIDE_START +import assert from 'node:assert'; +import fs from 'node:fs'; +import { createClient, SCHEMA_FIELD_TYPE,} from 'redis'; + +const client = createClient(); + +await client.connect().catch(console.error); + +// create index +await client.ft.create('idx:bicycle', { + '$.description': { + type: SCHEMA_FIELD_TYPE.TEXT, + AS: 'description' + }, + '$.price': { + type: SCHEMA_FIELD_TYPE.NUMERIC, + AS: 'price' + }, + '$.condition': { + type: SCHEMA_FIELD_TYPE.TAG, + AS: 'condition' + } +}, { + ON: 'JSON', + PREFIX: 'bicycle:' +}) + +// load data +const bicycles = 
JSON.parse(fs.readFileSync('data/query_em.json', 'utf8')); + +await Promise.all( + bicycles.map((bicycle, bid) => { + return client.json.set(`bicycle:${bid}`, '$', bicycle); + }) +); +// HIDE_END + +// STEP_START range1 +const res1 = await client.ft.search('idx:bicycle', '@price:[500 1000]'); +console.log(res1.total); // >>> 3 +// REMOVE_START +assert.strictEqual(res1.total, 3); +// REMOVE_END +// STEP_END + +// STEP_START range2 +// FILTER is not supported +// const res2 = await client.ft.search('idx:bicycle', '*', { +// FILTER: { +// field: 'price', +// min: 500, +// max: 1000, +// } +// }); +// console.log(res2.total); // >>> 3 +// REMOVE_START +// assert.strictEqual(res2.total, 3); +// REMOVE_END +// STEP_END + +// STEP_START range3 +// FILTER is not supported +// const res3 = await client.ft.search('idx:bicycle', '*', { +// FILTER: { +// field: 'price', +// min: '(1000', +// max: '+inf', +// } +// }); +// console.log(res3.total); // >>> 5 +// REMOVE_START +// assert.strictEqual(res3.total, 5); +// REMOVE_END +// STEP_END + +// STEP_START range4 +const res4 = await client.ft.search( + 'idx:bicycle', + '@price:[-inf 2000]', + { + SORTBY: 'price', + LIMIT: { from: 0, size: 5 } + } +); +console.log(res4.total); // >>> 7 +console.log(res4); // >>> { total: 7, documents: [ { id: 'bicycle:0', value: [Object: null prototype] }, { id: 'bicycle:7', value: [Object: null prototype] }, { id: 'bicycle:5', value: [Object: null prototype] }, { id: 'bicycle:2', value: [Object: null prototype] }, { id: 'bicycle:9', value: [Object: null prototype] } ] } +// REMOVE_START +assert.strictEqual(res4.total, 7); +// REMOVE_END +// STEP_END + +// REMOVE_START +// destroy index and data +await client.ft.dropIndex('idx:bicycle', { DD: true }); +await client.close(); +// REMOVE_END \ No newline at end of file diff --git a/doctests/query-vector.js b/doctests/query-vector.js new file mode 100644 index 00000000000..91ee63120d3 --- /dev/null +++ b/doctests/query-vector.js @@ -0,0 +1,110 @@ +// 
EXAMPLE: query_vector +// HIDE_START +import assert from 'node:assert'; +import fs from 'node:fs'; +import { createClient } from 'redis'; +import { SCHEMA_FIELD_TYPE, SCHEMA_VECTOR_FIELD_ALGORITHM } from '@redis/search'; +import { pipeline } from '@xenova/transformers'; + +function float32Buffer(arr) { + const floatArray = new Float32Array(arr); + const float32Buffer = Buffer.from(floatArray.buffer); + return float32Buffer; +} + +async function embedText(sentence) { + let modelName = 'Xenova/all-MiniLM-L6-v2'; + let pipe = await pipeline('feature-extraction', modelName); + + let vectorOutput = await pipe(sentence, { + pooling: 'mean', + normalize: true, + }); + + const embedding = Object.values(vectorOutput?.data); + + return embedding; +} + +const vector_query = float32Buffer(await embedText('That is a very happy person')); + +const client = createClient(); +await client.connect().catch(console.error); + +// create index +await client.ft.create('idx:bicycle', { + '$.description': { + type: SCHEMA_FIELD_TYPE.TEXT, + AS: 'description' + }, + '$.description_embeddings': { + type: SCHEMA_FIELD_TYPE.VECTOR, + TYPE: 'FLOAT32', + ALGORITHM: SCHEMA_VECTOR_FIELD_ALGORITHM.FLAT, + DIM: 384, + DISTANCE_METRIC: 'COSINE', + AS: 'vector' + } +}, { + ON: 'JSON', + PREFIX: 'bicycle:' +}); + +// load data +const bicycles = JSON.parse(fs.readFileSync('data/query_vector.json', 'utf8')); + +await Promise.all( + bicycles.map((bicycle, bid) => { + return client.json.set(`bicycle:${bid}`, '$', bicycle); + }) +); +// HIDE_END + +// STEP_START vector1 +const res1 = await client.ft.search('idx:bicycle', + '*=>[KNN 3 @vector $query_vector AS score]', { + PARAMS: { query_vector: vector_query }, + RETURN: ['description'], + DIALECT: 2 + } +); +console.log(res1.total); // >>> 3 +console.log(res1); // >>> +//{ +// total: 3, +// documents: [ +// { id: 'bicycle:0', value: [Object: null prototype] {} }, +// { id: 'bicycle:2', value: [Object: null prototype] {} }, +// { id: 'bicycle:9', value: 
[Object: null prototype] {} } +// ] +//} +// REMOVE_START +assert.strictEqual(res1.total, 3); +// REMOVE_END +// STEP_END + +// STEP_START vector2 +const res2 = await client.ft.search('idx:bicycle', + '@vector:[VECTOR_RANGE 0.9 $query_vector]=>{$YIELD_DISTANCE_AS: vector_dist}', { + PARAMS: { query_vector: vector_query }, + SORTBY: 'vector_dist', + RETURN: ['vector_dist', 'description'], + DIALECT: 2 + } +); +console.log(res2.total); // >>> 1 +console.log(res2); // >>> +//{ +// total: 1, +// documents: [ { id: 'bicycle:0', value: [Object: null prototype] } ] +//} +// REMOVE_START +assert.strictEqual(res2.total, 1); +// REMOVE_END +// STEP_END + +// REMOVE_START +// destroy index and data +await client.ft.dropIndex('idx:bicycle', { DD: true }); +await client.close(); +// REMOVE_END \ No newline at end of file diff --git a/doctests/run_examples.sh b/doctests/run_examples.sh new file mode 100755 index 00000000000..ee7b50dc69a --- /dev/null +++ b/doctests/run_examples.sh @@ -0,0 +1,15 @@ +#!/bin/sh + + +basepath=`readlink -f $1` +if [ $? 
-ne 0 ]; then +basepath=`readlink -f $(dirname $0)` +fi +echo "No path specified, using ${basepath}" + +set -e +cd ${basepath} +for i in `ls ${basepath}/*.js`; do + redis-cli flushdb + node $i +done \ No newline at end of file diff --git a/doctests/search-quickstart.js b/doctests/search-quickstart.js new file mode 100644 index 00000000000..ec3f65a5e3a --- /dev/null +++ b/doctests/search-quickstart.js @@ -0,0 +1,231 @@ +// EXAMPLE: search_quickstart +// REMOVE_START +import assert from 'assert'; +// REMOVE_END +// HIDE_START +import { createClient, SCHEMA_FIELD_TYPE } from 'redis'; +// HIDE_END +// STEP_START connect +const client = createClient(); +client.on('error', err => console.log('Redis Client Error', err)); + +await client.connect(); +// STEP_END + +// STEP_START data_sample +const bicycle1 = { + brand: 'Velorim', + model: 'Jigger', + price: 270, + description: + 'Small and powerful, the Jigger is the best ' + + 'ride for the smallest of tikes! This is the tiniest kids\u2019 ' + + 'pedal bike on the market available without a coaster brake, the ' + + 'Jigger is the vehicle of choice for the rare tenacious little' + + 'rider raring to go.', + condition: 'new' +}; +// STEP_END +const bicycles = [ + bicycle1, + { + brand: 'Bicyk', + model: 'Hillcraft', + price: 1200, + description: 'Kids want to ride with as little weight as possible. Especially on an incline! They may be at the age when a 27.5\" wheel bike is just too clumsy coming off a 24\" bike. The Hillcraft 26 is just the solution they need!', + condition: 'used' + }, + { + brand: 'Nord', + model: 'Chook air 5', + price: 815, + description: 'The Chook Air 5 gives kids aged six years and older a durable and uberlight mountain bike for their first experience on tracks and easy cruising through forests and fields. 
The lower top tube makes it easy to mount and dismount in any situation, giving your kids greater safety on the trails.', + condition: 'used' + }, + { + brand: 'Eva', + model: 'Eva 291', + price: 3400, + description: 'The sister company to Nord, Eva launched in 2005 as the first and only women-dedicated bicycle brand. Designed by women for women, allEva bikes are optimized for the feminine physique using analytics from a body metrics database. If you like 29ers, try the Eva 291. It\u2019s a brand new bike for 2022.. This full-suspension, cross-country ride has been designed for velocity. The 291 has 100mm of front and rear travel, a superlight aluminum frame and fast-rolling 29-inch wheels. Yippee!', + condition: 'used' + }, + { + brand: 'Noka Bikes', + model: 'Kahuna', + price: 3200, + description: 'Whether you want to try your hand at XC racing or are looking for a lively trail bike that\'s just as inspiring on the climbs as it is over rougher ground, the Wilder is one heck of a bike built specifically for short women. Both the frames and components have been tweaked to include a women\u2019s saddle, different bars and unique colourway.', + condition: 'used' + }, + { + brand: 'Breakout', + model: 'XBN 2.1 Alloy', + price: 810, + description: 'The XBN 2.1 Alloy is our entry-level road bike \u2013 but that\u2019s not to say that it\u2019s a basic machine. With an internal weld aluminium frame, a full carbon fork, and the slick-shifting Claris gears from Shimano\u2019s, this is a bike which doesn\u2019t break the bank and delivers craved performance.', + condition: 'new' + }, + { + brand: 'ScramBikes', + model: 'WattBike', + price: 2300, + description: 'The WattBike is the best e-bike for people who still feel young at heart. It has a Bafang 1000W mid-drive system and a 48V 17.5AH Samsung Lithium-Ion battery, allowing you to ride for more than 60 miles on one charge. It\u2019s great for tackling hilly terrain or if you just fancy a more leisurely ride. 
With three working modes, you can choose between E-bike, assisted bicycle, and normal bike modes.', + condition: 'new' + }, + { + brand: 'Peaknetic', + model: 'Secto', + price: 430, + description: 'If you struggle with stiff fingers or a kinked neck or back after a few minutes on the road, this lightweight, aluminum bike alleviates those issues and allows you to enjoy the ride. From the ergonomic grips to the lumbar-supporting seat position, the Roll Low-Entry offers incredible comfort. The rear-inclined seat tube facilitates stability by allowing you to put a foot on the ground to balance at a stop, and the low step-over frame makes it accessible for all ability and mobility levels. The saddle is very soft, with a wide back to support your hip joints and a cutout in the center to redistribute that pressure. Rim brakes deliver satisfactory braking control, and the wide tires provide a smooth, stable ride on paved roads and gravel. Rack and fender mounts facilitate setting up the Roll Low-Entry as your preferred commuter, and the BMX-like handlebar offers space for mounting a flashlight, bell, or phone holder.', + condition: 'new' + }, + { + brand: 'nHill', + model: 'Summit', + price: 1200, + description: 'This budget mountain bike from nHill performs well both on bike paths and on the trail. The fork with 100mm of travel absorbs rough terrain. Fat Kenda Booster tires give you grip in corners and on wet trails. The Shimano Tourney drivetrain offered enough gears for finding a comfortable pace to ride uphill, and the Tektro hydraulic disc brakes break smoothly. 
Whether you want an affordable bike that you can take to work, but also take trail in mountains on the weekends or you\u2019re just after a stable, comfortable ride for the bike path, the Summit gives a good value for money.', + condition: 'new' + }, + { + model: 'ThrillCycle', + brand: 'BikeShind', + price: 815, + description: 'An artsy, retro-inspired bicycle that\u2019s as functional as it is pretty: The ThrillCycle steel frame offers a smooth ride. A 9-speed drivetrain has enough gears for coasting in the city, but we wouldn\u2019t suggest taking it to the mountains. Fenders protect you from mud, and a rear basket lets you transport groceries, flowers and books. The ThrillCycle comes with a limited lifetime warranty, so this little guy will last you long past graduation.', + condition: 'refurbished' + } +]; +// STEP_START create_index +const schema = { + '$.brand': { + type: SCHEMA_FIELD_TYPE.TEXT, + SORTABLE: true, + AS: 'brand' + }, + '$.model': { + type: SCHEMA_FIELD_TYPE.TEXT, + AS: 'model' + }, + '$.description': { + type: SCHEMA_FIELD_TYPE.TEXT, + AS: 'description' + }, + '$.price': { + type: SCHEMA_FIELD_TYPE.NUMERIC, + AS: 'price' + }, + '$.condition': { + type: SCHEMA_FIELD_TYPE.TAG, + AS: 'condition' + } +}; + +try { + await client.ft.create('idx:bicycle', schema, { + ON: 'JSON', + PREFIX: 'bicycle:' + }); +} catch (e) { + if (e.message === 'Index already exists') { + console.log('Index exists already, skipped creation.'); + } else { + // Something went wrong, perhaps RediSearch isn't installed... + console.error(e); + process.exit(1); + } +} +// STEP_END + +// STEP_START add_documents +await Promise.all( + bicycles.map((bicycle, i) => client.json.set(`bicycle:${i}`, '$', bicycle)) +); +// STEP_END + +// STEP_START wildcard_query +let result = await client.ft.search('idx:bicycle', '*', { + LIMIT: { + from: 0, + size: 10 + } +}); + +console.log(JSON.stringify(result, null, 2)); + +/* +{ + "total": 10, + "documents": ... 
+} +*/ +// STEP_END + +// REMOVE_START +assert.equal(result.documents[0].id, 'bicycle:0'); +// REMOVE_END + +// STEP_START query_single_term +result = await client.ft.search( + 'idx:bicycle', + '@model:Jigger', + { + LIMIT: { + from: 0, + size: 10 + } +}); + +console.log(JSON.stringify(result, null, 2)); +/* +{ + "total": 1, + "documents": [{ + "id": "bicycle:0", + "value": { + "brand": "Velorim", + "model": "Jigger", + "price": 270, + "description": "Small and powerful, the Jigger is the best ride for the smallest of tikes! This is the tiniest kids’ pedal bike on the market available without a coaster brake, the Jigger is the vehicle of choice for the rare tenacious little rider raring to go.", + "condition": "new" + } + }] +} + */ +// STEP_END +// REMOVE_START +assert.equal(result.documents[0].id, 'bicycle:0'); +// REMOVE_END + +// STEP_START query_exact_matching +result = await client.ft.search( + 'idx:bicycle', + '@brand:"Noka Bikes"', + { + LIMIT: { + from: 0, + size: 10 + } + } +); + +console.log(JSON.stringify(result, null, 2)); + +/* +{ + "total": 1, + "documents": [{ + "id": "bicycle:4", + "value": { + "brand": "Noka Bikes", + "model": "Kahuna", + "price": 3200, + "description": "Whether you want to try your hand at XC racing or are looking for a lively trail bike that's just as inspiring on the climbs as it is over rougher ground, the Wilder is one heck of a bike built specifically for short women. 
Both the frames and components have been tweaked to include a women’s saddle, different bars and unique colourway.", + "condition": "used" + } + }] +} +*/ +// STEP_END + +// REMOVE_START +assert.equal(result.documents[0].id, 'bicycle:4'); +// REMOVE END + +await client.close(); diff --git a/doctests/string-set-get-example.js b/doctests/string-set-get-example.js new file mode 100644 index 00000000000..b9be382285e --- /dev/null +++ b/doctests/string-set-get-example.js @@ -0,0 +1,27 @@ +// EXAMPLE: set_and_get +// REMOVE_START +import assert from "node:assert"; +// REMOVE_END + +// HIDE_START +import { createClient } from 'redis'; + +const client = createClient(); + +client.on('error', err => console.log('Redis Client Error', err)); + +await client.connect().catch(console.error); + +// HIDE_END +await client.set('bike:1', 'Process 134'); +const value = await client.get('bike:1'); +console.log(value); +// returns 'Process 134' +//REMOVE_START +assert.equal(value, 'Process 134'); +await client.del('bike:1'); +//REMOVE_END + +// HIDE_START +await client.close(); +// HIDE_END diff --git a/examples/.gitignore b/examples/.gitignore new file mode 100644 index 00000000000..d8b83df9cdb --- /dev/null +++ b/examples/.gitignore @@ -0,0 +1 @@ +package-lock.json diff --git a/examples/README.md b/examples/README.md new file mode 100644 index 00000000000..47c9912fd31 --- /dev/null +++ b/examples/README.md @@ -0,0 +1,95 @@ +# Node Redis: Examples + +This folder contains example scripts showing how to use Node Redis in different scenarios. + +| File Name | Description | +|------------------------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------| +| `blocking-list-pop.js` | Block until an element is pushed to a list. 
| +| `bloom-filter.js` | Space efficient set membership checks with a [Bloom Filter](https://en.wikipedia.org/wiki/Bloom_filter) using [RedisBloom](https://redisbloom.io). | +| `check-connection-status.js` | Check the client's connection status. | +| `command-with-modifiers.js` | Define a script that allows to run a command with several modifiers. | +| `connect-as-acl-user.js` | Connect to Redis 6 using an ACL user. | +| `connect-to-cluster.js` | Connect to a Redis cluster. | +| `count-min-sketch.js` | Estimate the frequency of a given event using the [RedisBloom](https://redisbloom.io) Count-Min Sketch. | +| `cuckoo-filter.js` | Space efficient set membership checks with a [Cuckoo Filter](https://en.wikipedia.org/wiki/Cuckoo_filter) using [RedisBloom](https://redisbloom.io). | +| `dump-and-restore.js` | Demonstrates the use of the [`DUMP`](https://redis.io/commands/dump/) and [`RESTORE`](https://redis.io/commands/restore/) commands | +| `get-server-time.js` | Get the time from the Redis server. | +| `hyperloglog.js` | Showing use of Hyperloglog commands [PFADD, PFCOUNT and PFMERGE](https://redis.io/commands/?group=hyperloglog). | +| `lua-multi-incr.js` | Define a custom lua script that allows you to perform INCRBY on multiple keys. | +| `managing-json.js` | Store, retrieve and manipulate JSON data atomically with [RedisJSON](https://redisjson.io/). | +| `pubsub-publisher.js` | Adds multiple messages on 2 different channels messages to Redis. | +| `pubsub-subscriber.js` | Reads messages from channels using `PSUBSCRIBE` command. | +| `search-hashes.js` | Uses [RediSearch](https://redisearch.io) to index and search data in hashes. | +| `search-json.js` | Uses [RediSearch](https://redisearch.io/) and [RedisJSON](https://redisjson.io/) to index and search JSON data. | +| `search-knn.js` | Uses [RediSearch vector similarity]([https://redisearch.io/](https://redis.io/docs/stack/search/reference/vectors/)) to index and run KNN queries. 
| +| `set-scan.js` | An example script that shows how to use the SSCAN iterator functionality. | +| `sorted-set.js` | Add members with scores to a Sorted Set and retrieve them using the ZSCAN iteractor functionality. | +| `stream-producer.js` | Adds entries to a [Redis Stream](https://redis.io/topics/streams-intro) using the `XADD` command. | +| `stream-consumer.js` | Reads entries from a [Redis Stream](https://redis.io/topics/streams-intro) using the blocking `XREAD` command. | +| `time-series.js` | Create, populate and query timeseries data with [Redis Timeseries](https://redistimeseries.io). | +| `topk.js` | Use the [RedisBloom](https://redisbloom.io) TopK to track the most frequently seen items. | +| `stream-consumer-group.js` | Reads entries from a [Redis Stream](https://redis.io/topics/streams-intro) as part of a consumer group using the blocking `XREADGROUP` command. | +| `transaction-with-arbitrary-commands.js` | Mix and match supported commands with arbitrary command strings in a Redis transaction. | +| `transaction-with-watch.js` | An Example of [Redis transaction](https://redis.io/docs/manual/transactions) with `WATCH` command on isolated connection with optimistic locking. | + +## Contributing + +We'd love to see more examples here. If you have an idea that you'd like to see included here, submit a Pull Request and we'll be sure to review it! Don't forget to check out our [contributing guide](../CONTRIBUTING.md). 
+ +## Setup + +To set up the examples folder so that you can run an example / develop one of your own: + +```bash +git clone https://github.com/redis/node-redis.git +cd node-redis +npm install -ws && npm run build +cd examples +npm install +``` + +### Coding Guidelines for Examples + +When adding a new example, please follow these guidelines: + +- Add your code in a single JavaScript or TypeScript file per example, directly in the `examples` folder +- Do not introduce other dependencies in your example +- Give your `.js` file a meaningful name using `-` separators e.g. `adding-to-a-stream.js` / `adding-to-a-stream.ts` +- Indent your code using 2 spaces +- Use the single line `//` comment style and comment your code +- Add a comment at the top of your `.js` / `.ts` file describing what your example does +- Add a comment at the top of your `.js` / `.ts` file describing any Redis commands that need to be run to set up data for your example (try and keep this minimal) +- Use semicolons +- Use `async` and `await` +- Use single quotes, `'hello'` not `"hello"` +- Use [template literals](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Template_literals) when embedding expressions in strings +- Unless your example requires a connection string, assume Redis is on the default localhost port 6379 with no password +- Use meaningful example data, let's not use `foo`, `bar`, `baz` etc! +- Leave an empty line at the end of your `.js` file +- Update this `README.md` file to add your example to the table + +Use [connect-as-acl-user.js](./connect-as-acl-user.js) as a guide to develop a well formatted example script. + +### Example Template + +Here's a starter template for adding a new example, imagine this is stored in `do-something.js`: + +```javascript +// This comment should describe what the example does +// and can extend to multiple lines. 
+ +// Set up the data in redis-cli using these commands: +// +// +// Alternatively, add code that sets up the data. + +import { createClient } from 'redis'; + +const client = createClient(); + +await client.connect(); + +// Add your example code here... + +client.close(); +``` diff --git a/examples/auth.js b/examples/auth.js deleted file mode 100644 index 6c0a563cd8b..00000000000 --- a/examples/auth.js +++ /dev/null @@ -1,5 +0,0 @@ -var redis = require("redis"), - client = redis.createClient(); - -// This command is magical. Client stashes the password and will issue on every connect. -client.auth("somepass"); diff --git a/examples/backpressure_drain.js b/examples/backpressure_drain.js deleted file mode 100644 index 3488ef4d3f7..00000000000 --- a/examples/backpressure_drain.js +++ /dev/null @@ -1,33 +0,0 @@ -var redis = require("../index"), - client = redis.createClient(null, null, { - command_queue_high_water: 5, - command_queue_low_water: 1 - }), - remaining_ops = 100000, paused = false; - -function op() { - if (remaining_ops <= 0) { - console.error("Finished."); - process.exit(0); - } - - remaining_ops--; - if (client.hset("test hash", "val " + remaining_ops, remaining_ops) === false) { - console.log("Pausing at " + remaining_ops); - paused = true; - } else { - process.nextTick(op); - } -} - -client.on("drain", function () { - if (paused) { - console.log("Resuming at " + remaining_ops); - paused = false; - process.nextTick(op); - } else { - console.log("Got drain while not paused at " + remaining_ops); - } -}); - -op(); diff --git a/examples/blocking-list-pop.js b/examples/blocking-list-pop.js new file mode 100644 index 00000000000..f29a409398c --- /dev/null +++ b/examples/blocking-list-pop.js @@ -0,0 +1,29 @@ +// This example shows how to use the blocking LPUSH command. + +// This code shows how to run with isolation the blPop Command to block the script while waiting for a value to be pushed to the list. 
+// The script will be blocked until the LPUSH command is executed. +// After which we log the list and quit the client. + +import { createClientPool } from 'redis'; + +const client = createClientPool(); + +await client.connect(); + +const keyName = 'keyName'; + +const blpopPromise = client.blPop( + keyName, + 0 +); + +await client.lPush(keyName, 'value'); + +const listItem = await blpopPromise; + +console.log('blpopPromise resolved'); +// listItem will be: +// {"key":"keyName","element":"value"} +console.log(`listItem is '${JSON.stringify(listItem)}'`); + +client.destroy(); diff --git a/examples/bloom-filter.js b/examples/bloom-filter.js new file mode 100644 index 00000000000..a133b0274f2 --- /dev/null +++ b/examples/bloom-filter.js @@ -0,0 +1,80 @@ +// This example demonstrates the use of the Bloom Filter +// in the RedisBloom module (https://redis.io/docs/stack/bloom/) + +import { createClient } from 'redis'; + +const client = createClient(); + +await client.connect(); + +// Delete any pre-existing Bloom Filter. +await client.del('mybloom'); + +// Reserve a Bloom Filter with configurable error rate and capacity. +// https://redis.io/commands/bf.reserve/ +try { + await client.bf.reserve('mybloom', 0.01, 1000); + console.log('Reserved Bloom Filter.'); +} catch (e) { + if (e.message.endsWith('item exists')) { + console.log('Bloom Filter already reserved.'); + } else { + console.log('Error, maybe RedisBloom is not installed?:'); + console.log(e); + } +} + +// Add items to Bloom Filter individually with BF.ADD command. 
+// https://redis.io/commands/bf.add/ +await Promise.all([ + client.bf.add('mybloom', 'leibale'), + client.bf.add('mybloom', 'simon'), + client.bf.add('mybloom', 'guy'), + client.bf.add('mybloom', 'suze'), + client.bf.add('mybloom', 'brian'), + client.bf.add('mybloom', 'steve'), + client.bf.add('mybloom', 'kyle'), + client.bf.add('mybloom', 'josefin'), + client.bf.add('mybloom', 'alex'), + client.bf.add('mybloom', 'nava'), +]); + +// Add multiple items to Bloom Filter at once with BF.MADD command. +// https://redis.io/commands/bf.madd/ +await client.bf.mAdd('mybloom', [ + 'kaitlyn', + 'rachel' +]); + +console.log('Added members to Bloom Filter.'); + +// Check whether a member exists with the BF.EXISTS command. +// https://redis.io/commands/bf.exists/ +const simonExists = await client.bf.exists('mybloom', 'simon'); +console.log(`simon ${simonExists ? 'may' : 'does not'} exist in the Bloom Filter.`); + +// Check whether multiple members exist with the BF.MEXISTS command. +// https://redis.io/commands/bf.mexists/ +const [ lanceExists, leibaleExists ] = await client.bf.mExists('mybloom', [ + 'lance', + 'leibale' +]); + +console.log(`lance ${lanceExists ? 'may' : 'does not'} exist in the Bloom Filter.`); +console.log(`leibale ${leibaleExists ? 'may' : 'does not'} exist in the Bloom Filter.`); + +// Get stats for the Bloom Filter with the BF.INFO command. +// https://redis.io/commands/bf.info/ +const info = await client.bf.info('mybloom'); +// info looks like this: +// +// { +// capacity: 1000, +// size: 1531, +// numberOfFilters: 1, +// numberOfInsertedItems: 12, +// expansionRate: 2 +// } +console.log(info); + +client.destroy(); diff --git a/examples/check-connection-status.js b/examples/check-connection-status.js new file mode 100644 index 00000000000..ae3c863fb14 --- /dev/null +++ b/examples/check-connection-status.js @@ -0,0 +1,28 @@ +// Check the connection status of the Redis client instance. 
+import { createClient } from 'redis'; + +const client = createClient(); + +console.log('Before client.connect()...'); + +// isOpen will return False here as the client's socket is not open yet. +// isReady will return False here, client is not yet ready to use. +console.log(`client.isOpen: ${client.isOpen}, client.isReady: ${client.isReady}`); + +// Begin connection process... +const connectPromise = client.connect(); + +console.log('After client.connect()...'); + +// isOpen will return True here as the client's socket is open now. +// isReady will return False here as the promise hasn't resolved yet. +console.log(`client.isOpen: ${client.isOpen}, client.isReady: ${client.isReady}`); + +await connectPromise; +console.log('After connectPromise has resolved...'); + +// isOpen will return True here as the client's socket is open now. +// isReady will return True here, client is ready to use. +console.log(`client.isOpen: ${client.isOpen}, client.isReady: ${client.isReady}`); + +client.destroy(); diff --git a/examples/command-with-modifiers.js b/examples/command-with-modifiers.js new file mode 100644 index 00000000000..356304722c0 --- /dev/null +++ b/examples/command-with-modifiers.js @@ -0,0 +1,31 @@ +// Define a custom script that shows an example of the SET command +// with several modifiers. + +import { createClient } from 'redis'; + +const client = createClient(); + +await client.connect(); +await client.del('mykey'); + +console.log( + await client.set('mykey', 'myvalue', { + expiration: { + type: 'EX', + value: 60 + }, + GET: true + }) +); // null + +console.log( + await client.set('mykey', 'newvalue', { + expiration: { + type: 'EX', + value: 60 + }, + GET: true + }) +); // 'myvalue' + +await client.close(); diff --git a/examples/connect-as-acl-user.js b/examples/connect-as-acl-user.js new file mode 100644 index 00000000000..bc3069b5bbc --- /dev/null +++ b/examples/connect-as-acl-user.js @@ -0,0 +1,26 @@ +// Connect to Redis 6.x as an ACL user. 
Attempt to run a command +// that the user is allowed to execute, and a command that the +// user is not allowed to execute. + +// Create the test user in redis-cli with this command: +// acl setuser testuser on >testpassword +ping + +import { createClient } from 'redis'; + +const client = createClient({ + url: 'redis://testuser:testpassword@127.0.0.1:6379' +}); + +await client.connect(); + +// Returns PONG +console.log(`Response from PING command: ${await client.ping()}`); + +try { + // This will error as this user is not allowed to run this command... + console.log(`Response from GET command: ${await client.get('somekey')}`); +} catch (e) { + console.log(`GET command failed: ${e.message}`); +} + +client.destroy(); diff --git a/examples/connect-to-cluster.js b/examples/connect-to-cluster.js new file mode 100644 index 00000000000..86e45b87968 --- /dev/null +++ b/examples/connect-to-cluster.js @@ -0,0 +1,32 @@ +// This is an example script to connect to a running cluster. +// After connecting to the cluster the code sets and gets a value. 
+ +// To setup this cluster you can follow the guide here: +// https://redis.io/docs/manual/scaling/ +// In this guide the ports which are being used are 7000 - 7005 + + +import { createCluster } from 'redis'; + +const cluster = createCluster({ + rootNodes : [ + { + url : 'redis://127.0.0.1:7001' + }, + { + url : 'redis://127.0.0.1:7002' + }, + { + url : 'redis://127.0.0.1:7003' + } + ] +}); + +cluster.on('error', (err) => console.log('Redis Client Error', err)); + +await cluster.connect(); + +await cluster.set('hello', 'cluster'); +const value = await cluster.get('hello'); +console.log(value); +await cluster.close(); diff --git a/examples/count-min-sketch.js b/examples/count-min-sketch.js new file mode 100644 index 00000000000..ffbe13a7c27 --- /dev/null +++ b/examples/count-min-sketch.js @@ -0,0 +1,80 @@ +// This example demonstrates the use of the Count-Min Sketch +// in the RedisBloom module (https://redis.io/docs/stack/bloom/) + +import { createClient } from 'redis'; + +const client = createClient(); + +await client.connect(); + +// Delete any pre-existing Count-Min Sketch. +await client.del('mycms'); + +// Initialize a Count-Min Sketch with error rate and probability: +// https://redis.io/commands/cms.initbyprob/ +try { + await client.cms.initByProb('mycms', 0.001, 0.01); + console.log('Reserved Count Min Sketch.'); +} catch (e) { + console.log('Error, maybe RedisBloom is not installed?:'); + console.log(e); +} + +const teamMembers = [ + 'leibale', + 'simon', + 'guy', + 'suze', + 'brian', + 'steve', + 'kyleb', + 'kyleo', + 'josefin', + 'alex', + 'nava', + 'lance', + 'rachel', + 'kaitlyn' +]; + +// Store actual counts for comparison with CMS. +let actualCounts = {}; + +// Randomly emit a team member and count them with the CMS. 
+// https://redis.io/commands/cms.incrby/ +for (let n = 0; n < 1000; n++) { + const teamMember = teamMembers[Math.floor(Math.random() * teamMembers.length)]; + await client.cms.incrBy('mycms', { + item: teamMember, + incrementBy: 1 + }); + + actualCounts[teamMember] = actualCounts[teamMember] ? actualCounts[teamMember] + 1 : 1; + + console.log(`Incremented score for ${teamMember}.`); +} + +// Get count estimate for some team members: +// https://redis.io/commands/cms.query/ +const [ alexCount, rachelCount ] = await client.cms.query('mycms', [ + 'alex', + 'rachel' +]); + +console.log(`Count estimate for alex: ${alexCount} (actual ${actualCounts.alex}).`); +console.log(`Count estimate for rachel: ${rachelCount} (actual ${actualCounts.rachel}).`); + +// Get overall information about the Count-Min Sketch: +// https://redis.io/commands/cms.info/ +const info = await client.cms.info('mycms'); +console.log('Count-Min Sketch info:'); + +// info looks like this: +// { +// width: 2000, +// depth: 7, +// count: 1000 +// } +console.log(info); + +client.destroy(); diff --git a/examples/cuckoo-filter.js b/examples/cuckoo-filter.js new file mode 100644 index 00000000000..6ab58fbfa5c --- /dev/null +++ b/examples/cuckoo-filter.js @@ -0,0 +1,79 @@ +// This example demonstrates the use of the Cuckoo Filter +// in the RedisBloom module (https://redis.io/docs/stack/bloom/) + +import { createClient } from 'redis'; + +const client = createClient(); + +await client.connect(); + +// Delete any pre-existing Cuckoo Filter. +await client.del('mycuckoo'); + +// Reserve a Cuckoo Filter with a capacity of 10000 items. +// https://redis.io/commands/cf.reserve/ +try { + await client.cf.reserve('mycuckoo', 10000); + console.log('Reserved Cuckoo Filter.'); +} catch (e) { + console.log('Error, maybe RedisBloom is not installed?:'); + console.log(e); +} + +// Add items to Cuckoo Filter individually with CF.ADD command. 
+// https://redis.io/commands/cf.add/ +await Promise.all([ + client.cf.add('mycuckoo', 'leibale'), + client.cf.add('mycuckoo', 'simon'), + client.cf.add('mycuckoo', 'guy'), + client.cf.add('mycuckoo', 'suze'), + client.cf.add('mycuckoo', 'brian'), + client.cf.add('mycuckoo', 'steve'), + client.cf.add('mycuckoo', 'kyle'), + client.cf.add('mycuckoo', 'josefin'), + client.cf.add('mycuckoo', 'alex'), + client.cf.add('mycuckoo', 'nava'), +]); + +// Add items to the Cuckoo Filter only if they don't exist in it... +// https://redis.io/commands/cf.addnx/ +const nxReply = await Promise.all([ + client.cf.addNX('mycuckoo', 'kaitlyn'), // New + client.cf.addNX('mycuckoo', 'rachel'), // New + client.cf.addNX('mycuckoo', 'brian') // Previously added +]); + +console.log('Added members to Cuckoo Filter.'); +console.log('nxReply:'); + +// nxReply looks like this: +// [ +// true, +// true, +// false +// ] +console.log(nxReply); + +// Check whether a member exists with the CF.EXISTS command. +// https://redis.io/commands/cf.exists/ +const simonExists = await client.cf.exists('mycuckoo', 'simon'); +console.log(`simon ${simonExists ? 
'may' : 'does not'} exist in the Cuckoo Filter.`); + +// Get stats for the Cuckoo Filter with the CF.INFO command: +// https://redis.io/commands/cf.info/ +const info = await client.cf.info('mycuckoo'); + +// info looks like this: +// { +// size: 16440, +// numberOfBuckets: 8192, +// numberOfFilters: 1, +// numberOfInsertedItems: 12, +// numberOfDeletedItems: 0, +// bucketSize: 2, +// expansionRate: 1, +// maxIteration: 20 +// } +console.log(info); + +client.destroy(); diff --git a/examples/dump-and-restore.js b/examples/dump-and-restore.js new file mode 100644 index 00000000000..f464fd38be1 --- /dev/null +++ b/examples/dump-and-restore.js @@ -0,0 +1,32 @@ +// This example demonstrates the use of the DUMP and RESTORE commands + +import { createClient, RESP_TYPES } from 'redis'; + +const client = await createClient({ + commandOptions: { + typeMapping: { + [RESP_TYPES.BLOB_STRING]: Buffer + } + } +}).on('error', err => { + console.log('Redis Client Error', err); +}).connect(); + +// Make sure the source key exists +await client.set('source', 'value'); + +// Make sure destination doesnt exist +await client.del('destination'); + +// DUMP a specific key into a local variable +const dump = await client.dump('source'); + +// RESTORE into a new key +await client.restore('destination', 0, dump); + +// RESTORE and REPLACE an existing key +await client.restore('destination', 0, dump, { + REPLACE: true +}); + +await client.close(); diff --git a/examples/eval.js b/examples/eval.js deleted file mode 100644 index c1fbf8a5568..00000000000 --- a/examples/eval.js +++ /dev/null @@ -1,9 +0,0 @@ -var redis = require("./index"), - client = redis.createClient(); - -redis.debug_mode = true; - -client.eval("return 100.5", 0, function (err, res) { - console.dir(err); - console.dir(res); -}); diff --git a/examples/extend.js b/examples/extend.js deleted file mode 100644 index 488b8c2dc5d..00000000000 --- a/examples/extend.js +++ /dev/null @@ -1,24 +0,0 @@ -var redis = require("redis"), - 
client = redis.createClient(); - -// Extend the RedisClient prototype to add a custom method -// This one converts the results from "INFO" into a JavaScript Object - -redis.RedisClient.prototype.parse_info = function (callback) { - this.info(function (err, res) { - var lines = res.toString().split("\r\n").sort(); - var obj = {}; - lines.forEach(function (line) { - var parts = line.split(':'); - if (parts[1]) { - obj[parts[0]] = parts[1]; - } - }); - callback(obj) - }); -}; - -client.parse_info(function (info) { - console.dir(info); - client.quit(); -}); diff --git a/examples/file.js b/examples/file.js deleted file mode 100644 index 4d2b5d1c987..00000000000 --- a/examples/file.js +++ /dev/null @@ -1,32 +0,0 @@ -// Read a file from disk, store it in Redis, then read it back from Redis. - -var redis = require("redis"), - client = redis.createClient(), - fs = require("fs"), - filename = "kids_in_cart.jpg"; - -// Get the file I use for testing like this: -// curl http://ranney.com/kids_in_cart.jpg -o kids_in_cart.jpg -// or just use your own file. - -// Read a file from fs, store it in Redis, get it back from Redis, write it back to fs. -fs.readFile(filename, function (err, data) { - if (err) throw err - console.log("Read " + data.length + " bytes from filesystem."); - - client.set(filename, data, redis.print); // set entire file - client.get(filename, function (err, reply) { // get entire file - if (err) { - console.log("Get error: " + err); - } else { - fs.writeFile("duplicate_" + filename, reply, function (err) { - if (err) { - console.log("Error on write: " + err) - } else { - console.log("File written."); - } - client.end(); - }); - } - }); -}); diff --git a/examples/get-server-time.js b/examples/get-server-time.js new file mode 100644 index 00000000000..752264df349 --- /dev/null +++ b/examples/get-server-time.js @@ -0,0 +1,18 @@ +// Get the time from the Redis Server. 
+ +import { createClient } from 'redis'; + +const client = createClient(); +await client.connect(); + +const serverTime = await client.time(); +// In v5, TIME returns [unixTimestamp: string, microseconds: string] instead of Date +// Example: ['1708956789', '123456'] +console.log(serverTime); + +// Convert to JavaScript Date if needed +const [seconds, microseconds] = serverTime; +const date = new Date(parseInt(seconds) * 1000 + parseInt(microseconds) / 1000); +console.log('Converted to Date:', date); + +client.close(); diff --git a/examples/hyperloglog.js b/examples/hyperloglog.js new file mode 100644 index 00000000000..1f8f04f2a6c --- /dev/null +++ b/examples/hyperloglog.js @@ -0,0 +1,51 @@ +// Example to log traffic data at intersections for the city of San Francisco. +// Log license plates of each car scanned at each intersection and add to the intersections Hyperloglog. +// Reference: https://www.youtube.com/watch?v=MunL8nnwscQ + +import { createClient } from 'redis'; + +const client = createClient(); + +await client.connect(); + +// Use `pfAdd` to add an element to a Hyperloglog, creating the Hyperloglog if necessary. +// await client.pfAdd(key, value) // returns 1 or 0 + +// To get a count, the `pfCount` method is used. +// await client.pfCount(key) + +try { + // Corner of Market Street (ID: 12) and 10th street (ID:27). + await client.pfAdd('count:sf:12:27', 'GHN34X'); + await client.pfAdd('count:sf:12:27', 'ECN94Y'); + await client.pfAdd('count:sf:12:27', 'VJL12V'); + await client.pfAdd('count:sf:12:27', 'ORV87O'); + + // To get the count of Corner of Market Street (ID: 12) and 10th street (ID:27). + const countForMarket10thStreet = await client.pfCount('count:sf:12:27'); + console.log(`Count for Market Street & 10th Street is ${countForMarket10thStreet}`); + // Count for Market Street & 10th Street is 4. + + // Corner of Market Street (ID: 12) and 11 street (ID:26). 
+ await client.pfAdd('count:sf:12:26', 'GHN34X'); + await client.pfAdd('count:sf:12:26', 'ECN94Y'); + await client.pfAdd('count:sf:12:26', 'IRV84E'); + await client.pfAdd('count:sf:12:26', 'ORV87O'); + await client.pfAdd('count:sf:12:26', 'TEY34S'); + + // To get the count of Corner of Market Street (ID: 12) and 11th street (ID:26). + const countForMarket11thStreet = await client.pfCount('count:sf:12:26'); + console.log(`Count for Market Street & 11th Street is ${countForMarket11thStreet}`); + // Count for Market Street & 11th Street is 5. + + // To merge the Hyperloglogs `count:sf:12:26` and `count:sf:12:27`. + await client.pfMerge('count:merge', ['count:sf:12:27', 'count:sf:12:26']); + const countMerge = await client.pfCount('count:merge'); + console.log(`Count for the merge is ${countMerge}`); + // Count for the merge is 6. +} catch (e) { + // something went wrong. + console.error(e); +} + +client.close(); diff --git a/examples/lua-multi-incr.js b/examples/lua-multi-incr.js new file mode 100644 index 00000000000..71b12bdab0f --- /dev/null +++ b/examples/lua-multi-incr.js @@ -0,0 +1,29 @@ +// Define a custom Lua script that accepts two keys and an amount to 
manipulate JSON data atomically with RedisJSON. + +import { createClient } from 'redis'; + +const client = createClient(); + +await client.connect(); +await client.del('noderedis:jsondata'); + +// Store a JSON object... +await client.json.set('noderedis:jsondata', '$', { + name: 'Roberta McDonald', + pets: [ + { + name: 'Fluffy', + species: 'dog', + age: 5, + isMammal: true + }, + { + name: 'Rex', + species: 'dog', + age: 3, + isMammal: true + }, + { + name: 'Goldie', + species: 'fish', + age: 2, + isMammal: false + } + ], + address: { + number: 99, + street: 'Main Street', + city: 'Springfield', + state: 'OH', + country: 'USA' + } +}); + +// Retrieve the name and age of the second pet in the pets array. +let results = await client.json.get('noderedis:jsondata', { + path: [ + '$.pets[1].name', + '$.pets[1].age' + ] +}); + +// { '$.pets[1].name': [ 'Rex' ], '$.pets[1].age': [ 3 ] } +console.log(results); + +// Goldie had a birthday, increment the age... +await client.json.numIncrBy('noderedis:jsondata', '$.pets[2].age', 1); +results = await client.json.get('noderedis:jsondata', { + path: '$.pets[2].age' +}); + +// Goldie is 3 years old now. +console.log(`Goldie is ${JSON.parse(results)[0]} years old now.`); + +// Add a new pet... +await client.json.arrAppend('noderedis:jsondata', '$.pets', { + name: 'Robin', + species: 'bird', + isMammal: false, + age: 1 +}); + +// How many pets do we have now? +const numPets = await client.json.arrLen('noderedis:jsondata', { path: '$.pets' }); + +// We now have 4 pets. 
+console.log(`We now have ${numPets} pets.`); + +const rex = { name: 'Rex', species: 'dog', age: 3, isMammal: true } + +const index = await client.json.arrIndex( 'noderedis:jsondata', '$.pets', rex); +console.log(`Rex is at index ${index}`); + +client.close(); diff --git a/examples/mget.js b/examples/mget.js deleted file mode 100644 index 936740d32f3..00000000000 --- a/examples/mget.js +++ /dev/null @@ -1,5 +0,0 @@ -var client = require("redis").createClient(); - -client.mget(["sessions started", "sessions started", "foo"], function (err, res) { - console.dir(res); -}); \ No newline at end of file diff --git a/examples/monitor.js b/examples/monitor.js deleted file mode 100644 index 2cb6a4e1ecb..00000000000 --- a/examples/monitor.js +++ /dev/null @@ -1,10 +0,0 @@ -var client = require("../index").createClient(), - util = require("util"); - -client.monitor(function (err, res) { - console.log("Entering monitoring mode."); -}); - -client.on("monitor", function (time, args) { - console.log(time + ": " + util.inspect(args)); -}); diff --git a/examples/multi.js b/examples/multi.js deleted file mode 100644 index 35c08e18403..00000000000 --- a/examples/multi.js +++ /dev/null @@ -1,46 +0,0 @@ -var redis = require("redis"), - client = redis.createClient(), set_size = 20; - -client.sadd("bigset", "a member"); -client.sadd("bigset", "another member"); - -while (set_size > 0) { - client.sadd("bigset", "member " + set_size); - set_size -= 1; -} - -// multi chain with an individual callback -client.multi() - .scard("bigset") - .smembers("bigset") - .keys("*", function (err, replies) { - client.mget(replies, redis.print); - }) - .dbsize() - .exec(function (err, replies) { - console.log("MULTI got " + replies.length + " replies"); - replies.forEach(function (reply, index) { - console.log("Reply " + index + ": " + reply.toString()); - }); - }); - -client.mset("incr thing", 100, "incr other thing", 1, redis.print); - -// start a separate multi command queue -var multi = 
client.multi(); -multi.incr("incr thing", redis.print); -multi.incr("incr other thing", redis.print); - -// runs immediately -client.get("incr thing", redis.print); // 100 - -// drains multi queue and runs atomically -multi.exec(function (err, replies) { - console.log(replies); // 101, 2 -}); - -// you can re-run the same transaction if you like -multi.exec(function (err, replies) { - console.log(replies); // 102, 3 - client.quit(); -}); diff --git a/examples/multi2.js b/examples/multi2.js deleted file mode 100644 index 8be4d7313cc..00000000000 --- a/examples/multi2.js +++ /dev/null @@ -1,29 +0,0 @@ -var redis = require("redis"), - client = redis.createClient(), multi; - -// start a separate command queue for multi -multi = client.multi(); -multi.incr("incr thing", redis.print); -multi.incr("incr other thing", redis.print); - -// runs immediately -client.mset("incr thing", 100, "incr other thing", 1, redis.print); - -// drains multi queue and runs atomically -multi.exec(function (err, replies) { - console.log(replies); // 101, 2 -}); - -// you can re-run the same transaction if you like -multi.exec(function (err, replies) { - console.log(replies); // 102, 3 - client.quit(); -}); - -client.multi([ - ["mget", "multifoo", "multibar", redis.print], - ["incr", "multifoo"], - ["incr", "multibar"] -]).exec(function (err, replies) { - console.log(replies.toString()); -}); diff --git a/examples/package.json b/examples/package.json new file mode 100644 index 00000000000..c350c0b248b --- /dev/null +++ b/examples/package.json @@ -0,0 +1,12 @@ +{ + "name": "node-redis-examples", + "version": "1.0.0", + "description": "node-redis 5 example script", + "main": "index.js", + "private": true, + "type": "module", + "dependencies": { + "redis": "../packages/redis" + } +} + diff --git a/examples/psubscribe.js b/examples/psubscribe.js deleted file mode 100644 index c57117b8a63..00000000000 --- a/examples/psubscribe.js +++ /dev/null @@ -1,33 +0,0 @@ -var redis = require("redis"), - 
client1 = redis.createClient(), - client2 = redis.createClient(), - client3 = redis.createClient(), - client4 = redis.createClient(), - msg_count = 0; - -redis.debug_mode = false; - -client1.on("psubscribe", function (pattern, count) { - console.log("client1 psubscribed to " + pattern + ", " + count + " total subscriptions"); - client2.publish("channeltwo", "Me!"); - client3.publish("channelthree", "Me too!"); - client4.publish("channelfour", "And me too!"); -}); - -client1.on("punsubscribe", function (pattern, count) { - console.log("client1 punsubscribed from " + pattern + ", " + count + " total subscriptions"); - client4.end(); - client3.end(); - client2.end(); - client1.end(); -}); - -client1.on("pmessage", function (pattern, channel, message) { - console.log("("+ pattern +")" + " client1 received message on " + channel + ": " + message); - msg_count += 1; - if (msg_count === 3) { - client1.punsubscribe(); - } -}); - -client1.psubscribe("channel*"); diff --git a/examples/pub_sub.js b/examples/pub_sub.js deleted file mode 100644 index aa508d6c9d2..00000000000 --- a/examples/pub_sub.js +++ /dev/null @@ -1,41 +0,0 @@ -var redis = require("redis"), - client1 = redis.createClient(), msg_count = 0, - client2 = redis.createClient(); - -redis.debug_mode = false; - -// Most clients probably don't do much on "subscribe". This example uses it to coordinate things within one program. 
-client1.on("subscribe", function (channel, count) { - console.log("client1 subscribed to " + channel + ", " + count + " total subscriptions"); - if (count === 2) { - client2.publish("a nice channel", "I am sending a message."); - client2.publish("another one", "I am sending a second message."); - client2.publish("a nice channel", "I am sending my last message."); - } -}); - -client1.on("unsubscribe", function (channel, count) { - console.log("client1 unsubscribed from " + channel + ", " + count + " total subscriptions"); - if (count === 0) { - client2.end(); - client1.end(); - } -}); - -client1.on("message", function (channel, message) { - console.log("client1 channel " + channel + ": " + message); - msg_count += 1; - if (msg_count === 3) { - client1.unsubscribe(); - } -}); - -client1.on("ready", function () { - // if you need auth, do it here - client1.incr("did a thing"); - client1.subscribe("a nice channel", "another one"); -}); - -client2.on("ready", function () { - // if you need auth, do it here -}); diff --git a/examples/pubsub-publisher.js b/examples/pubsub-publisher.js new file mode 100644 index 00000000000..354e0ae2f0a --- /dev/null +++ b/examples/pubsub-publisher.js @@ -0,0 +1,20 @@ +// A sample publisher using the publish function to put message on different channels. +// https://redis.io/commands/publish/ +import { createClient } from 'redis'; + +const client = createClient(); + +await client.connect(); + +// Declare constant variables for the name of the clients we will publish to as they will be required for logging. +const channel1 = 'chan1nel'; +const channel2 = 'chan2nel'; + +for (let i = 0; i < 10000; i++) { + // 1st channel created to publish 10000 messages. + await client.publish(channel1, `channel1_message_${i}`); + console.log(`publishing message on ${channel1}`); + // 2nd channel created to publish 10000 messages. 
+ await client.publish(channel2, `channel2_message_${i}`); + console.log(`publishing message on ${channel2}`); +} diff --git a/examples/pubsub-subscriber.js b/examples/pubsub-subscriber.js new file mode 100644 index 00000000000..ff4c05f083e --- /dev/null +++ b/examples/pubsub-subscriber.js @@ -0,0 +1,41 @@ +// A sample subscriber showing how the subscribe method and pSubscribe method work. +// https://redis.io/commands/subscribe/ +// https://redis.io/commands/pSubscribe/ +// This consumes messages published by pubsub-publisher.js + +import { createClient} from 'redis'; + +// Create and connect client before executing any Redis commands. +const client = createClient(); +await client.connect(); + +// Each subscriber needs to connect individually therefore we duplicate the client. +const channel1Sub = client.duplicate(); +const channel2Sub = client.duplicate(); +const noChannelsSub = client.duplicate(); +const allChannelsSub = client.duplicate(); + +await channel1Sub.connect(); +await channel2Sub.connect(); +await noChannelsSub.connect(); +await allChannelsSub.connect(); + +// This subscriber only will receive messages from channel 1 as they are using the subscribe method and subscribed to chan1nel. +await channel1Sub.subscribe('chan1nel', (message) => { + console.log(`Channel1 subscriber collected message: ${message}`); +},true); + +// This subscriber only will receive messages from channel 2 as they are using the subscribe method and subscribed to chan2nel. +await channel2Sub.subscribe('chan2nel', (message) => { + console.log(`Channel2 subscriber collected message: ${message}`); +},true); + +// This subscriber will not receive any messages as its channel does not exist. +await noChannelsSub.subscribe('chan*nel', (message) => { + console.log(`This message will never be seen as we are not using pSubscribe here. ${message}`); +},true); + +// This subscriber receive messages from both channel 1 and channel 2 using the pSubscribe method. 
+await allChannelsSub.pSubscribe('chan*nel', (message, channel) => { + console.log(`Channel ${channel} sent message: ${message}`); +},true); \ No newline at end of file diff --git a/examples/search-hashes.js b/examples/search-hashes.js new file mode 100644 index 00000000000..a496fec823a --- /dev/null +++ b/examples/search-hashes.js @@ -0,0 +1,88 @@ +// This example demonstrates how to use RediSearch to index and query data +// stored in Redis hashes. + +import { createClient, SCHEMA_FIELD_TYPE } from 'redis'; + +const client = createClient(); + +await client.connect(); + +// Create an index... +try { + // Documentation: https://redis.io/commands/ft.create/ + await client.ft.create('idx:animals', { + name: { + type: SCHEMA_FIELD_TYPE.TEXT, + SORTABLE: true + }, + species: SCHEMA_FIELD_TYPE.TAG, + age: SCHEMA_FIELD_TYPE.NUMERIC + }, { + ON: 'HASH', + PREFIX: 'noderedis:animals' + }); +} catch (e) { + if (e.message === 'Index already exists') { + console.log('Index exists already, skipped creation.'); + } else { + // Something went wrong, perhaps RediSearch isn't installed... + console.error(e); + process.exit(1); + } +} + +// Add some sample data... +// https://redis.io/commands/hset/ +await Promise.all([ + client.hSet('noderedis:animals:1', {name: 'Fluffy', species: 'cat', age: 3}), + client.hSet('noderedis:animals:2', {name: 'Ginger', species: 'cat', age: 4}), + client.hSet('noderedis:animals:3', {name: 'Rover', species: 'dog', age: 9}), + client.hSet('noderedis:animals:4', {name: 'Fido', species: 'dog', age: 7}) +]); + +// Perform a search query, find all the dogs... sort by age, descending. 
+// Documentation: https://redis.io/commands/ft.search/
+// Query syntax: https://redis.io/docs/stack/search/reference/query_syntax/
+const results = await client.ft.search(
+  'idx:animals',
+  '@species:{dog}',
+  {
+    SORTBY: {
+      BY: 'age',
+      DIRECTION: 'DESC' // or 'ASC' (default if DIRECTION is not present)
+    }
+  }
+);
+
+// results:
+// {
+//   total: 2,
+//   documents: [
+//     {
+//       id: 'noderedis:animals:3',
+//       value: {
+//         name: 'Rover',
+//         species: 'dog',
+//         age: '9'
+//       }
+//     },
+//     {
+//       id: 'noderedis:animals:4',
+//       value: {
+//         name: 'Fido',
+//         species: 'dog',
+//         age: '7'
+//       }
+//     }
+//   ]
+// }
+
+console.log(`Results found: ${results.total}.`);
+
+for (const doc of results.documents) {
+  // noderedis:animals:3: Rover, 9 years old.
+  // noderedis:animals:4: Fido, 7 years old.
+  console.log(`${doc.id}: ${doc.value.name}, ${doc.value.age} years old.`);
+}
+
+client.destroy();
diff --git a/examples/search-json.js b/examples/search-json.js
new file mode 100644
index 00000000000..60f2ff095ed
--- /dev/null
+++ b/examples/search-json.js
@@ -0,0 +1,148 @@
+// This example demonstrates how to use RediSearch and RedisJSON together.
+// Requires both the RediSearch and RedisJSON modules:
+// https://redis.io/docs/stack/search/
+// https://redis.io/docs/stack/json/
+
+import { createClient, SCHEMA_FIELD_TYPE, FT_AGGREGATE_GROUP_BY_REDUCERS, FT_AGGREGATE_STEPS } from 'redis';
+
+const client = createClient();
+
+await client.connect();
+
+// Create an index.
+// https://redis.io/commands/ft.create/ +try { + await client.ft.create('idx:users', { + '$.name': { + type: SCHEMA_FIELD_TYPE.TEXT, + SORTABLE: true + }, + '$.age': { + type: SCHEMA_FIELD_TYPE.NUMERIC, + AS: 'age' + }, + '$.coins': { + type: SCHEMA_FIELD_TYPE.NUMERIC, + AS: 'coins' + }, + '$.email': { + type: SCHEMA_FIELD_TYPE.TAG, + AS: 'email' + } + }, { + ON: 'JSON', + PREFIX: 'noderedis:users' + }); +} catch (e) { + if (e.message === 'Index already exists') { + console.log('Index exists already, skipped creation.'); + } else { + // Something went wrong, perhaps RediSearch isn't installed... + console.error(e); + process.exit(1); + } +} + +// Add some users. +// https://redis.io/commands/json.set/ +await Promise.all([ + client.json.set('noderedis:users:1', '$', { + name: 'Alice', + age: 32, + coins: 100, + email: 'alice@nonexist.com' + }), + client.json.set('noderedis:users:2', '$', { + name: 'Bob', + age: 23, + coins: 15, + email: 'bob@somewhere.gov' + }) +]); + +// Search all users under 30 +console.log('Users under 30 years old:'); +console.log( + // https://redis.io/commands/ft.search/ + JSON.stringify( + await client.ft.search('idx:users', '@age:[0 30]'), + null, + 2 + ) +); +// { +// "total": 1, +// "documents": [ +// { +// "id": "noderedis:users:2", +// "value": { +// "name": "Bob", +// "age": 23, +// "coins": 15, +// "email": "bob@somewhere.gov" +// } +// } +// ] +// } + +// Find a user by email - note we need to escape . and @ characters +// in the email address. This applies for other punctuation too. 
+// https://redis.io/docs/stack/search/reference/tags/#including-punctuation-in-tags
+console.log('Users with email "bob@somewhere.gov":');
+const emailAddress = 'bob@somewhere.gov'.replace(/[.@\\]/g, '\\$&');
+console.log(
+  JSON.stringify(
+    await client.ft.search('idx:users', `@email:{${emailAddress}}`),
+    null,
+    2
+  )
+);
+// {
+//   "total": 1,
+//   "documents": [
+//     {
+//       "id": "noderedis:users:2",
+//       "value": {
+//         "name": "Bob",
+//         "age": 23,
+//         "coins": 15,
+//         "email": "bob@somewhere.gov"
+//       }
+//     }
+//   ]
+// }
+
+// Some aggregations, what's the average age and total number of coins...
+// https://redis.io/commands/ft.aggregate/
+console.log('Aggregation Demo:');
+console.log(
+  JSON.stringify(
+    await client.ft.aggregate('idx:users', '*', {
+      STEPS: [{
+        type: FT_AGGREGATE_STEPS.GROUPBY,
+        REDUCE: [{
+          type: FT_AGGREGATE_GROUP_BY_REDUCERS.AVG,
+          property: 'age',
+          AS: 'averageAge'
+        }, {
+          type: FT_AGGREGATE_GROUP_BY_REDUCERS.SUM,
+          property: 'coins',
+          AS: 'totalCoins'
+        }]
+      }]
+    }),
+    null,
+    2
+  )
+);
+// {
+//   "total": 1,
+//   "results": [
+//     {
+//       "averageAge": "27.5",
+//       "totalCoins": "115"
+//     }
+//   ]
+// }
+
+client.destroy();
diff --git a/examples/search-knn.js b/examples/search-knn.js
new file mode 100644
index 00000000000..abfce990189
--- /dev/null
+++ b/examples/search-knn.js
@@ -0,0 +1,91 @@
+// This example demonstrates how to use RediSearch to index and query data
+// stored in Redis hashes using vector similarity search.
+//
+// Inspired by RediSearch Python tests:
+// https://github.com/RediSearch/RediSearch/blob/06e36d48946ea08bd0d8b76394a4e82eeb919d78/tests/pytests/test_vecsim.py#L96
+
+import { createClient, SCHEMA_FIELD_TYPE, SCHEMA_VECTOR_FIELD_ALGORITHM } from 'redis';
+
+const client = createClient();
+
+await client.connect();
+
+// Create an index...
+try { + // Documentation: https://redis.io/docs/stack/search/reference/vectors/ + await client.ft.create('idx:knn-example', { + v: { + type: SCHEMA_FIELD_TYPE.VECTOR, + ALGORITHM: SCHEMA_VECTOR_FIELD_ALGORITHM.HNSW, + TYPE: 'FLOAT32', + DIM: 2, + DISTANCE_METRIC: 'COSINE' + } + }, { + ON: 'HASH', + PREFIX: 'noderedis:knn' + }); +} catch (e) { + if (e.message === 'Index already exists') { + console.log('Index exists already, skipped creation.'); + } else { + // Something went wrong, perhaps RediSearch isn't installed... + console.error(e); + process.exit(1); + } +} + +function float32Buffer(arr) { + return Buffer.from(new Float32Array(arr).buffer); +} + +// Add some sample data... +// https://redis.io/commands/hset/ +await Promise.all([ + client.hSet('noderedis:knn:a', { v: float32Buffer([0.1, 0.1]) }), + client.hSet('noderedis:knn:b', { v: float32Buffer([0.1, 0.2]) }), + client.hSet('noderedis:knn:c', { v: float32Buffer([0.1, 0.3]) }), + client.hSet('noderedis:knn:d', { v: float32Buffer([0.1, 0.4]) }), +]); +// Perform a K-Nearest Neighbors vector similarity search +// Documentation: https://redis.io/docs/stack/search/reference/vectors/#pure-knn-queries +const results = await client.ft.search('idx:knn-example', '*=>[KNN 4 @v $BLOB AS dist]', { + PARAMS: { + BLOB: float32Buffer([0.1, 0.1]) + }, + SORTBY: 'dist', + DIALECT: 2, + RETURN: ['dist'] +}); +console.log(JSON.stringify(results, null, 2)); +// results: +// { +// "total": 4, +// "documents": [ +// { +// "id": "noderedis:knn:a", +// "value": { +// "dist": "5.96046447754e-08" +// } +// }, +// { +// "id": "noderedis:knn:b", +// "value": { +// "dist": "0.0513167381287" +// } +// }, +// { +// "id": "noderedis:knn:c", +// "value": { +// "dist": "0.10557281971" +// } +// }, +// { +// "id": "noderedis:knn:d", +// "value": { +// "dist": "0.142507016659" +// } +// } +// ] +// } +client.destroy(); diff --git a/examples/set-scan.js b/examples/set-scan.js new file mode 100644 index 00000000000..698b05983b0 --- /dev/null 
+++ b/examples/set-scan.js @@ -0,0 +1,21 @@ +// An example script that shows how to use the SSCAN iterator functionality to retrieve the contents of a Redis set. +// Create the set in redis-cli with this command: +// sadd setName a b c d e f g h i j k l m n o p q + +import { createClient } from 'redis'; + +const client = createClient(); +await client.connect(); + +const setName = 'setName'; + +for await (const members of client.sScanIterator(setName)) { + console.log('Batch of members:', members); + + // Process each member in the batch if needed + for (const member of members) { + console.log('Individual member:', member); + } +} + +client.close(); diff --git a/examples/simple.js b/examples/simple.js deleted file mode 100644 index f1f2e3209b8..00000000000 --- a/examples/simple.js +++ /dev/null @@ -1,24 +0,0 @@ -var redis = require("redis"), - client = redis.createClient(); - -client.on("error", function (err) { - console.log("error event - " + client.host + ":" + client.port + " - " + err); -}); - -client.set("string key", "string val", redis.print); -client.hset("hash key", "hashtest 1", "some value", redis.print); -client.hset(["hash key", "hashtest 2", "some other value"], redis.print); -client.hkeys("hash key", function (err, replies) { - if (err) { - return console.error("error response - " + err); - } - - console.log(replies.length + " replies:"); - replies.forEach(function (reply, i) { - console.log(" " + i + ": " + reply); - }); -}); - -client.quit(function (err, res) { - console.log("Exiting from quit command."); -}); diff --git a/examples/sort.js b/examples/sort.js deleted file mode 100644 index e7c6249e405..00000000000 --- a/examples/sort.js +++ /dev/null @@ -1,17 +0,0 @@ -var redis = require("redis"), - client = redis.createClient(); - -client.sadd("mylist", 1); -client.sadd("mylist", 2); -client.sadd("mylist", 3); - -client.set("weight_1", 5); -client.set("weight_2", 500); -client.set("weight_3", 1); - -client.set("object_1", "foo"); 
-client.set("object_2", "bar"); -client.set("object_3", "qux"); - -client.sort("mylist", "by", "weight_*", "get", "object_*", redis.print); -// Prints Reply: qux,foo,bar \ No newline at end of file diff --git a/examples/sorted-set.js b/examples/sorted-set.js new file mode 100644 index 00000000000..830427bea9a --- /dev/null +++ b/examples/sorted-set.js @@ -0,0 +1,53 @@ +// Add several values with their scores to a Sorted Set, +// then retrieve them all using ZSCAN. + +import { createClient } from 'redis'; + +const client = createClient(); +await client.connect(); + +await client.zAdd('mysortedset', [ + { + score: 99, + value: 'Ninety Nine' + }, + { + score: 100, + value: 'One Hundred' + }, + { + score: 101, + value: 'One Hundred and One' + } +]); + +// Get all of the values/scores from the sorted set using +// the scan approach: +// https://redis.io/commands/zscan +for await (const membersWithScores of client.zScanIterator('mysortedset')) { + console.log('Batch of members with scores:', membersWithScores); + + for (const memberWithScore of membersWithScores) { + console.log('Individual member with score:', memberWithScore); + } +} + +await client.zAdd('anothersortedset', [ + { + score: 99, + value: 'Ninety Nine' + }, + { + score: 102, + value: 'One Hundred and Two' + } +]); + +// Intersection of two sorted sets +const intersection = await client.zInter([ + { key: 'mysortedset', weight: 1 }, + { key: 'anothersortedset', weight: 1 } +]); +console.log('Intersection:', intersection); + +client.close(); diff --git a/examples/stream-consumer-group.js b/examples/stream-consumer-group.js new file mode 100644 index 00000000000..cf82b5e96af --- /dev/null +++ b/examples/stream-consumer-group.js @@ -0,0 +1,105 @@ +// A sample stream consumer using the blocking variant of XREADGROUP. 
+// https://redis.io/commands/xreadgroup/
+
+// This consumer works in collaboration with other instances of itself
+// in the same consumer group such that the group as a whole receives
+// every entry from the stream.
+//
+// This consumes entries from a stream created by stream-producer.js
+//
+// Run this as follows:
+//
+// $ node stream-consumer-group.js <consumerName>
+//
+// Run multiple instances with different values of <consumerName>
+// to see them processing the stream as a group:
+//
+// $ node stream-consumer-group.js consumer1
+//
+// In another terminal:
+//
+// $ node stream-consumer-group.js consumer2
+
+import { createClient } from 'redis';
+
+const client = createClient();
+
+if (process.argv.length !== 3) {
+  console.log(`usage: node stream-consumer-group.js <consumerName>`);
+  process.exit(1);
+}
+
+const consumerName = process.argv[2];
+
+await client.connect();
+
+// Create the consumer group (and stream) if needed...
+try {
+  // https://redis.io/commands/xgroup-create/
+  await client.xGroupCreate('mystream', 'myconsumergroup', '0', {
+    MKSTREAM: true
+  });
+  console.log('Created consumer group.');
+} catch (e) {
+  console.log('Consumer group already exists, skipped creation.');
+}
+
+console.log(`Starting consumer ${consumerName}.`);
+
+const pool = client.createPool();
+
+while (true) {
+  try {
+    // https://redis.io/commands/xreadgroup/
+    let response = await pool.xReadGroup(
+      'myconsumergroup',
+      consumerName, [
+        // XREADGROUP can read from multiple streams, starting at a
+        // different ID for each...
+        {
+          key: 'mystream',
+          id: '>' // Next entry ID that no consumer in this group has read
+        }
+      ], {
+        // Read 1 entry at a time, block for 5 seconds if there are none.
+ COUNT: 1, + BLOCK: 5000 + } + ); + + if (response) { + // Response is an array of streams, each containing an array of + // entries: + // + // [ + // { + // "name": "mystream", + // "messages": [ + // { + // "id": "1642088708425-0", + // "message": { + // "num": "999" + // } + // } + // ] + // } + // ] + console.log(JSON.stringify(response)); + + // Use XACK to acknowledge successful processing of this + // stream entry. + // https://redis.io/commands/xack/ + const entryId = response[0].messages[0].id; + const ackResult = await pool.xAck('mystream', 'myconsumergroup', entryId); + + // ackResult will be 1 if the message was successfully acknowledged, 0 otherwise + console.log(`Acknowledged processing of entry ${entryId}. Result: ${ackResult}`); + } else { + // Response is null, we have read everything that is + // in the stream right now... + console.log('No new stream entries.'); + } + } catch (err) { + console.error(err); + } +} diff --git a/examples/stream-consumer.js b/examples/stream-consumer.js new file mode 100644 index 00000000000..3d5fc20fb43 --- /dev/null +++ b/examples/stream-consumer.js @@ -0,0 +1,61 @@ +// A sample stream consumer using the blocking variant of XREAD. +// https://redis.io/commands/xread/ +// This consumes entries from a stream created by stream-producer.js + +import { createClient, commandOptions } from 'redis'; + +const client = createClient(); + +await client.connect(); + +let currentId = '0-0'; // Start at lowest possible stream ID + +while (true) { + try { + let response = await client.xRead( + commandOptions({ + isolated: true + }), [ + // XREAD can read from multiple streams, starting at a + // different ID for each... + { + key: 'mystream', + id: currentId + } + ], { + // Read 1 entry at a time, block for 5 seconds if there are none. 
+ COUNT: 1, + BLOCK: 5000 + } + ); + + if (response) { + // Response is an array of streams, each containing an array of + // entries: + // [ + // { + // "name": "mystream", + // "messages": [ + // { + // "id": "1642088708425-0", + // "message": { + // "num": "999" + // } + // } + // ] + // } + // ] + console.log(JSON.stringify(response)); + + // Get the ID of the first (only) entry returned. + currentId = response[0].messages[0].id; + console.log(currentId); + } else { + // Response is null, we have read everything that is + // in the stream right now... + console.log('No new stream entries.'); + } + } catch (err) { + console.error(err); + } +} diff --git a/examples/stream-producer.js b/examples/stream-producer.js new file mode 100644 index 00000000000..113265dbd40 --- /dev/null +++ b/examples/stream-producer.js @@ -0,0 +1,50 @@ +// A sample stream producer using XADD. +// https://redis.io/commands/xadd/ +import { createClient } from 'redis'; + +const client = createClient(); + +await client.connect(); + +for (let i = 0; i < 10000; i++) { + await client.xAdd( + 'mystream', + '*', // * = Let Redis generate a timestamp ID for this new entry. + // Payload to add to the stream: + { + i: i.toString() + // Other name/value pairs can go here as required... + } + ); + + // Also add to a stream whose length we will cap at approximately + // 1000 entries using the MAXLEN trimming strategy: + // https://redis.io/commands/xadd/ + + await client.xAdd( + 'mytrimmedstream', + '*', + // Payload to add to the stream: + { + i: i.toString() + // Other name/value pairs can go here as required... + }, + // Specify a trimming strategy... + { + TRIM: { + strategy: 'MAXLEN', // Trim by length. + strategyModifier: '~', // Approximate trimming. + threshold: 1000 // Retain around 1000 entries. + } + } + ); +} + +// Take a look at how many entries are in the streams... 
+// https://redis.io/commands/xlen/ +// Should be 10000: +console.log(`Length of mystream: ${await client.xLen('mystream')}.`); +// Should be approximately 1000: +console.log(`Length of mytrimmedstream: ${await client.xLen('mytrimmedstream')}.`); + +client.destroy(); diff --git a/examples/subqueries.js b/examples/subqueries.js deleted file mode 100644 index 560db2404e1..00000000000 --- a/examples/subqueries.js +++ /dev/null @@ -1,15 +0,0 @@ -// Sending commands in response to other commands. -// This example runs "type" against every key in the database -// -var client = require("redis").createClient(); - -client.keys("*", function (err, keys) { - keys.forEach(function (key, pos) { - client.type(key, function (err, keytype) { - console.log(key + " is " + keytype); - if (pos === (keys.length - 1)) { - client.quit(); - } - }); - }); -}); diff --git a/examples/subquery.js b/examples/subquery.js deleted file mode 100644 index 861657e1f3a..00000000000 --- a/examples/subquery.js +++ /dev/null @@ -1,19 +0,0 @@ -var client = require("redis").createClient(); - -function print_results(obj) { - console.dir(obj); -} - -// build a map of all keys and their types -client.keys("*", function (err, all_keys) { - var key_types = {}; - - all_keys.forEach(function (key, pos) { // use second arg of forEach to get pos - client.type(key, function (err, type) { - key_types[key] = type; - if (pos === all_keys.length - 1) { // callbacks all run in order - print_results(key_types); - } - }); - }); -}); diff --git a/examples/time-series.js b/examples/time-series.js new file mode 100644 index 00000000000..75df2736f81 --- /dev/null +++ b/examples/time-series.js @@ -0,0 +1,122 @@ +// Add data to a Redis TimeSeries and query it. 
+// Requires the RedisTimeSeries module: https://redis.io/docs/stack/timeseries/ + +import { createClient } from 'redis'; +import { TIME_SERIES_DUPLICATE_POLICIES, TIME_SERIES_ENCODING, TIME_SERIES_AGGREGATION_TYPE } from '@redis/time-series'; + +const client = createClient(); + +await client.connect(); +await client.del('mytimeseries'); + +try { + // Create a timeseries + // https://redis.io/commands/ts.create/ + const created = await client.ts.create('mytimeseries', { + RETENTION: 86400000, // 1 day in milliseconds + ENCODING: TIME_SERIES_ENCODING.UNCOMPRESSED, // No compression + DUPLICATE_POLICY: TIME_SERIES_DUPLICATE_POLICIES.BLOCK // No duplicates + }); + + if (created === 'OK') { + console.log('Created timeseries.'); + } else { + console.log('Error creating timeseries :('); + process.exit(1); + } + + let value = Math.floor(Math.random() * 1000) + 1; // Random data point value + let currentTimestamp = 1640995200000; // Jan 1 2022 00:00:00 + let num = 0; + + while (num < 10000) { + // Add a new value to the timeseries, providing our own timestamp: + // https://redis.io/commands/ts.add/ + await client.ts.add('mytimeseries', currentTimestamp, value); + console.log(`Added timestamp ${currentTimestamp}, value ${value}.`); + + num += 1; + value = Math.floor(Math.random() * 1000) + 1; // Get another random value + currentTimestamp += 1000; // Move on one second. + } + + // Add multiple values to the timeseries in round trip to the server: + // https://redis.io/commands/ts.madd/ + const response = await client.ts.mAdd([{ + key: 'mytimeseries', + timestamp: currentTimestamp + 60000, + value: Math.floor(Math.random() * 1000) + 1 + }, { + key: 'mytimeseries', + timestamp: currentTimestamp + 120000, + value: Math.floor(Math.random() * 1000) + 1 + }]); + + // response = array of timestamps added by TS.MADD command. 
+ if (response.length === 2) { + console.log('Added 2 entries to timeseries with TS.MADD.'); + } + + // Update timeseries retention with TS.ALTER: + // https://redis.io/commands/ts.alter/ + const alterResponse = await client.ts.alter('mytimeseries', { + RETENTION: 0 // Keep the entries forever + }); + + if (alterResponse === 'OK') { + console.log('Timeseries retention settings altered successfully.'); + } + + // Query the timeseries with TS.RANGE: + // https://redis.io/commands/ts.range/ + const fromTimestamp = 1640995200000; // Jan 1 2022 00:00:00 + const toTimestamp = 1640995260000; // Jan 1 2022 00:01:00 + const rangeResponse = await client.ts.range('mytimeseries', fromTimestamp, toTimestamp, { + // Group into 10 second averages. + AGGREGATION: { + type: TIME_SERIES_AGGREGATION_TYPE.AVG, + timeBucket: 10000 + } + }); + + console.log('RANGE RESPONSE:'); + // rangeResponse looks like: + // [ + // { timestamp: 1640995200000, value: 356.8 }, + // { timestamp: 1640995210000, value: 534.8 }, + // { timestamp: 1640995220000, value: 481.3 }, + // { timestamp: 1640995230000, value: 437 }, + // { timestamp: 1640995240000, value: 507.3 }, + // { timestamp: 1640995250000, value: 581.2 }, + // { timestamp: 1640995260000, value: 600 } + // ] + + console.log(rangeResponse); + + // Get some information about the state of the timeseries. 
+ // https://redis.io/commands/ts.info/ + const tsInfo = await client.ts.info('mytimeseries'); + + // tsInfo looks like this: + // { + // totalSamples: 1440, + // memoryUsage: 28904, + // firstTimestamp: 1641508920000, + // lastTimestamp: 1641595320000, + // retentionTime: 86400000, + // chunkCount: 7, + // chunkSize: 4096, + // chunkType: 'uncompressed', + // duplicatePolicy: 'block', + // labels: [], + // sourceKey: null, + // rules: [] + // } + + console.log('Timeseries info:'); + console.log(tsInfo); +} catch (e) { + console.error(e); +} + +client.close(); diff --git a/examples/topk.js b/examples/topk.js new file mode 100644 index 00000000000..10cc29950ed --- /dev/null +++ b/examples/topk.js @@ -0,0 +1,113 @@ +// This example demonstrates the use of the Top K +// in the RedisBloom module (https://redis.io/docs/stack/bloom/) + +import { createClient } from 'redis'; + +const client = createClient(); + +await client.connect(); + +// Delete any pre-existing Top K. +await client.del('mytopk'); + +// Reserve a Top K to track the 10 most common items. 
+// https://redis.io/commands/topk.reserve/ +try { + await client.topK.reserve('mytopk', 10, { width: 400, depth: 10, decay: 0.9 }); + console.log('Reserved Top K.'); +} catch (e) { + if (e.message.endsWith('key already exists')) { + console.log('Top K already reserved.'); + } else { + console.log('Error, maybe RedisBloom is not installed?:'); + console.log(e); + } +} + +const teamMembers = [ + 'leibale', + 'simon', + 'guy', + 'suze', + 'brian', + 'steve', + 'kyleb', + 'kyleo', + 'josefin', + 'alex', + 'nava', + 'lance', + 'rachel', + 'kaitlyn' +]; + +// Add random counts for random team members with TOPK.INCRBY +// https://redis.io/commands/topk.incrby/ +for (let n = 0; n < 1000; n++) { + const teamMember = teamMembers[Math.floor(Math.random() * teamMembers.length)]; + const points = Math.floor(Math.random() * 1000) + 1; + await client.topK.incrBy('mytopk', { + item: teamMember, + incrementBy: points + }); + console.log(`Added ${points} points for ${teamMember}.`); +} + +// List out the top 10 with TOPK.LIST +// https://redis.io/commands/topk.list/ +const top10 = await client.topK.list('mytopk'); +console.log('The top 10:'); +// top10 looks like this: +// [ +// 'guy', 'nava', +// 'kaitlyn', 'brian', +// 'simon', 'suze', +// 'lance', 'alex', +// 'steve', 'kyleo' +// ] +console.log(top10); + +// List out the top 10 with their counts (requires RedisBloom >=2.2.9) +// https://redis.io/commands/topk.list/ +const top10WithCounts = await client.topK.listWithCount('mytopk'); +console.log('The top 10 with counts:'); +console.log(top10WithCounts); +// top10WithCounts looks like this: +// [ +// { item: 'suze', count: 42363 }, +// { item: 'lance', count: 41982 }, +// { item: 'simon', count: 41831 }, +// { item: 'steve', count: 39237 }, +// { item: 'guy', count: 39078 }, +// { item: 'kyleb', count: 37338 }, +// { item: 'leibale', count: 34230 }, +// { item: 'kyleo', count: 33812 }, +// { item: 'alex', count: 33679 }, +// { item: 'nava', count: 32663 } +// ] + +// Check if a 
few team members are in the top 10 with TOPK.QUERY: +// https://redis.io/commands/topk.query/ +const [ steve, suze, leibale, frederick ] = await client.topK.query('mytopk', [ + 'steve', + 'suze', + 'leibale', + 'frederick' +]); + +console.log(`steve ${steve ? 'is': 'is not'} in the top 10.`); +console.log(`suze ${suze ? 'is': 'is not'} in the top 10.`); +console.log(`leibale ${leibale ? 'is': 'is not'} in the top 10.`); +console.log(`frederick ${frederick ? 'is': 'is not'} in the top 10.`); + +// Get count estimate for some team members with TOPK.COUNT: +// https://redis.io/commands/topk.count/ +const [ simonCount, lanceCount ] = await client.topK.count('mytopk', [ + 'simon', + 'lance' +]); + +console.log(`Count estimate for simon: ${simonCount}.`); +console.log(`Count estimate for lance: ${lanceCount}.`); + +client.close(); diff --git a/examples/transaction-with-arbitrary-commands.js b/examples/transaction-with-arbitrary-commands.js new file mode 100644 index 00000000000..cc22a659678 --- /dev/null +++ b/examples/transaction-with-arbitrary-commands.js @@ -0,0 +1,51 @@ +// How to mix and match supported commands that have named functions with +// commands sent as arbitrary strings in the same transaction context. +// Use this when working with new Redis commands that haven't been added to +// node-redis yet, or when working with commands that have been added to Redis +// by modules other than those directly supported by node-redis. + +import { createClient } from 'redis'; + +const client = createClient(); + +await client.connect(); + +// Build some data fixtures. +await Promise.all([ + client.hSet('hash1', { name: 'hash1', number: 1}), + client.hSet('hash2', { name: 'hash2', number: 1}), + client.hSet('hash3', { name: 'hash3', number: 3}) +]); + +// Outside of a transaction, use sendCommand to send arbitrary commands. +await client.sendCommand(['hset', 'hash2', 'number', '3']); + +// In a transaction context, use addCommand to send arbitrary commands. 
+// addCommand can be mixed and matched with named command functions as +// shown. +const multi = client.multi() + .hGetAll('hash2') + .addCommand(['hset', 'hash3', 'number', '4']) + .hGet('hash3', 'number'); + +// exec() returns Array +const responses = await multi.exec(); + +// responses will be: +// [ [Object: null prototype] { name: 'hash2', number: '3' }, 0, '4' ] +console.log('Using exec():', responses); + +// This is equivalent to multi.exec<'typed'>() +const typedResponses = await multi + .hGetAll('hash2') + .addCommand(['hset', 'hash3', 'number', '4']) + .hGet('hash3', 'number') + .execTyped(); + +// typedResponses will have more specific types +console.log('Using execTyped():', typedResponses); + +// Clean up fixtures. +await client.del(['hash1', 'hash2', 'hash3']); + +client.close(); diff --git a/examples/transaction-with-watch.js b/examples/transaction-with-watch.js new file mode 100644 index 00000000000..752d0b6a4e3 --- /dev/null +++ b/examples/transaction-with-watch.js @@ -0,0 +1,42 @@ +import { createClient, WatchError } from 'redis'; + +const delay = (ms) => new Promise((resolve) => setTimeout(resolve, ms)); +const client = createClient(); +await client.connect(); + +function restrictFunctionCalls(fn, maxCalls) { + let count = 1; + return function (...args) { + return count++ < maxCalls ? fn(...args) : false; + }; +} + +const fn = restrictFunctionCalls(transaction, 4); + +const pool = await client.createPool(); + +async function transaction() { + try { + await pool.execute(async (isolatedClient) => { + await isolatedClient.watch('paymentId:1259'); + const multi = isolatedClient + .multi() + .set('paymentId:1259', 'Payment Successfully Completed!') + .set('paymentId:1260', 'Refund Processed Successfully!'); + await delay(5000); // Do some changes to the watched key during this time... 
+ await multi.exec(); + console.log('Transaction completed Successfully!'); + client.quit(); + }); + } catch (error) { + if (error instanceof WatchError) { + console.log('Transaction Failed Due To Concurrent Modification!'); + fn(); + } else { + console.log(`Error: ${error}`); + client.quit(); + } + } +} + +transaction(); diff --git a/examples/unix_socket.js b/examples/unix_socket.js deleted file mode 100644 index 4a5e0bb0e84..00000000000 --- a/examples/unix_socket.js +++ /dev/null @@ -1,29 +0,0 @@ -var redis = require("redis"), - client = redis.createClient("/tmp/redis.sock"), - profiler = require("v8-profiler"); - -client.on("connect", function () { - console.log("Got Unix socket connection.") -}); - -client.on("error", function (err) { - console.log(err.message); -}); - -client.set("space chars", "space value"); - -setInterval(function () { - client.get("space chars"); -}, 100); - -function done() { - client.info(function (err, reply) { - console.log(reply.toString()); - client.quit(); - }); -} - -setTimeout(function () { - console.log("Taking snapshot."); - var snap = profiler.takeSnapshot(); -}, 5000); diff --git a/examples/web_server.js b/examples/web_server.js deleted file mode 100644 index 9fd85923de1..00000000000 --- a/examples/web_server.js +++ /dev/null @@ -1,31 +0,0 @@ -// A simple web server that generates dyanmic content based on responses from Redis - -var http = require("http"), server, - redis_client = require("redis").createClient(); - -server = http.createServer(function (request, response) { - response.writeHead(200, { - "Content-Type": "text/plain" - }); - - var redis_info, total_requests; - - redis_client.info(function (err, reply) { - redis_info = reply; // stash response in outer scope - }); - redis_client.incr("requests", function (err, reply) { - total_requests = reply; // stash response in outer scope - }); - redis_client.hincrby("ip", request.connection.remoteAddress, 1); - redis_client.hgetall("ip", function (err, reply) { - // This is 
the last reply, so all of the previous replies must have completed already - response.write("This page was generated after talking to redis.\n\n" + - "Redis info:\n" + redis_info + "\n" + - "Total requests: " + total_requests + "\n\n" + - "IP count: \n"); - Object.keys(reply).forEach(function (ip) { - response.write(" " + ip + ": " + reply[ip] + "\n"); - }); - response.end(); - }); -}).listen(80); diff --git a/generate_commands.js b/generate_commands.js deleted file mode 100644 index e6949d3a141..00000000000 --- a/generate_commands.js +++ /dev/null @@ -1,39 +0,0 @@ -var http = require("http"), - fs = require("fs"); - -function prettyCurrentTime() { - var date = new Date(); - return date.toLocaleString(); -} - -function write_file(commands, path) { - var file_contents, out_commands; - - console.log("Writing " + Object.keys(commands).length + " commands to " + path); - - file_contents = "// This file was generated by ./generate_commands.js on " + prettyCurrentTime() + "\n"; - - out_commands = Object.keys(commands).map(function (key) { - return key.toLowerCase(); - }); - - file_contents += "module.exports = " + JSON.stringify(out_commands, null, " ") + ";\n"; - - fs.writeFile(path, file_contents); -} - -http.get({host: "redis.io", path: "/commands.json"}, function (res) { - var body = ""; - - console.log("Response from redis.io/commands.json: " + res.statusCode); - - res.on('data', function (chunk) { - body += chunk; - }); - - res.on('end', function () { - write_file(JSON.parse(body), "lib/commands.js"); - }); -}).on('error', function (e) { - console.log("Error fetching command list from redis.io: " + e.message); -}); diff --git a/index.js b/index.js deleted file mode 100644 index 5c7799a8f83..00000000000 --- a/index.js +++ /dev/null @@ -1,1027 +0,0 @@ -/*global Buffer require exports console setTimeout */ - -var net = require("net"), - util = require("./lib/util"), - Queue = require("./lib/queue"), - to_array = require("./lib/to_array"), - events = require("events"), 
- parsers = [], commands, - connection_id = 0, - default_port = 6379, - default_host = "127.0.0.1"; - -// can set this to true to enable for all connections -exports.debug_mode = false; - -// hiredis might not be installed -try { - require("./lib/parser/hiredis"); - parsers.push(require("./lib/parser/hiredis")); -} catch (err) { - if (exports.debug_mode) { - console.warn("hiredis parser not installed."); - } -} - -parsers.push(require("./lib/parser/javascript")); - -function RedisClient(stream, options) { - this.stream = stream; - this.options = options = options || {}; - - this.connection_id = ++connection_id; - this.connected = false; - this.ready = false; - this.connections = 0; - if (this.options.socket_nodelay === undefined) { - this.options.socket_nodelay = true; - } - this.should_buffer = false; - this.command_queue_high_water = this.options.command_queue_high_water || 1000; - this.command_queue_low_water = this.options.command_queue_low_water || 0; - this.max_attempts = null; - if (options.max_attempts && !isNaN(options.max_attempts) && options.max_attempts > 0) { - this.max_attempts = +options.max_attempts; - } - this.command_queue = new Queue(); // holds sent commands to de-pipeline them - this.offline_queue = new Queue(); // holds commands issued but not able to be sent - this.commands_sent = 0; - this.connect_timeout = false; - if (options.connect_timeout && !isNaN(options.connect_timeout) && options.connect_timeout > 0) { - this.connect_timeout = +options.connect_timeout; - } - this.initialize_retry_vars(); - this.pub_sub_mode = false; - this.subscription_set = {}; - this.monitoring = false; - this.closing = false; - this.server_info = {}; - this.auth_pass = null; - this.parser_module = null; - this.selected_db = null; // save the selected db here, used when reconnecting - - var self = this; - - this.stream.on("connect", function () { - self.on_connect(); - }); - - this.stream.on("data", function (buffer_from_socket) { - 
self.on_data(buffer_from_socket); - }); - - this.stream.on("error", function (msg) { - self.on_error(msg.message); - }); - - this.stream.on("close", function () { - self.connection_gone("close"); - }); - - this.stream.on("end", function () { - self.connection_gone("end"); - }); - - this.stream.on("drain", function () { - self.should_buffer = false; - self.emit("drain"); - }); - - events.EventEmitter.call(this); -} -util.inherits(RedisClient, events.EventEmitter); -exports.RedisClient = RedisClient; - -RedisClient.prototype.initialize_retry_vars = function () { - this.retry_timer = null; - this.retry_totaltime = 0; - this.retry_delay = 150; - this.retry_backoff = 1.7; - this.attempts = 1; -}; - -// flush offline_queue and command_queue, erroring any items with a callback first -RedisClient.prototype.flush_and_error = function (message) { - var command_obj; - while (this.offline_queue.length > 0) { - command_obj = this.offline_queue.shift(); - if (typeof command_obj.callback === "function") { - command_obj.callback(message); - } - } - this.offline_queue = new Queue(); - - while (this.command_queue.length > 0) { - command_obj = this.command_queue.shift(); - if (typeof command_obj.callback === "function") { - command_obj.callback(message); - } - } - this.command_queue = new Queue(); -}; - -RedisClient.prototype.on_error = function (msg) { - var message = "Redis connection to " + this.host + ":" + this.port + " failed - " + msg, - self = this, command_obj; - - if (this.closing) { - return; - } - - if (exports.debug_mode) { - console.warn(message); - } - - this.flush_and_error(message); - - this.connected = false; - this.ready = false; - - this.emit("error", new Error(message)); - // "error" events get turned into exceptions if they aren't listened for. If the user handled this error - // then we should try to reconnect. 
- this.connection_gone("error"); -}; - -RedisClient.prototype.do_auth = function () { - var self = this; - - if (exports.debug_mode) { - console.log("Sending auth to " + self.host + ":" + self.port + " id " + self.connection_id); - } - self.send_anyway = true; - self.send_command("auth", [this.auth_pass], function (err, res) { - if (err) { - if (err.toString().match("LOADING")) { - // if redis is still loading the db, it will not authenticate and everything else will fail - console.log("Redis still loading, trying to authenticate later"); - setTimeout(function () { - self.do_auth(); - }, 2000); // TODO - magic number alert - return; - } else { - return self.emit("error", "Auth error: " + err); - } - } - if (res.toString() !== "OK") { - return self.emit("error", "Auth failed: " + res.toString()); - } - if (exports.debug_mode) { - console.log("Auth succeeded " + self.host + ":" + self.port + " id " + self.connection_id); - } - if (self.auth_callback) { - self.auth_callback(err, res); - self.auth_callback = null; - } - - // now we are really connected - self.emit("connect"); - if (self.options.no_ready_check) { - self.on_ready(); - } else { - self.ready_check(); - } - }); - self.send_anyway = false; -}; - -RedisClient.prototype.on_connect = function () { - if (exports.debug_mode) { - console.log("Stream connected " + this.host + ":" + this.port + " id " + this.connection_id); - } - var self = this; - - this.connected = true; - this.ready = false; - this.attempts = 0; - this.connections += 1; - this.command_queue = new Queue(); - this.emitted_end = false; - this.initialize_retry_vars(); - if (this.options.socket_nodelay) { - this.stream.setNoDelay(); - } - this.stream.setTimeout(0); - - this.init_parser(); - - if (this.auth_pass) { - this.do_auth(); - } else { - this.emit("connect"); - - if (this.options.no_ready_check) { - this.on_ready(); - } else { - this.ready_check(); - } - } -}; - -RedisClient.prototype.init_parser = function () { - var self = this; - - if 
(this.options.parser) { - if (! parsers.some(function (parser) { - if (parser.name === self.options.parser) { - this.parser_module = parser; - if (exports.debug_mode) { - console.log("Using parser module: " + self.parser_module.name); - } - return true; - } - })) { - throw new Error("Couldn't find named parser " + self.options.parser + " on this system"); - } - } else { - if (exports.debug_mode) { - console.log("Using default parser module: " + parsers[0].name); - } - this.parser_module = parsers[0]; - } - - this.parser_module.debug_mode = exports.debug_mode; - - // return_buffers sends back Buffers from parser to callback. detect_buffers sends back Buffers from parser, but - // converts to Strings if the input arguments are not Buffers. - this.reply_parser = new this.parser_module.Parser({ - return_buffers: self.options.return_buffers || self.options.detect_buffers || false - }); - - // "reply error" is an error sent back by Redis - this.reply_parser.on("reply error", function (reply) { - self.return_error(new Error(reply)); - }); - this.reply_parser.on("reply", function (reply) { - self.return_reply(reply); - }); - // "error" is bad. Somehow the parser got confused. It'll try to reset and continue. 
- this.reply_parser.on("error", function (err) { - self.emit("error", new Error("Redis reply parser error: " + err.stack)); - }); -}; - -RedisClient.prototype.on_ready = function () { - var self = this; - - this.ready = true; - - // magically restore any modal commands from a previous connection - if (this.selected_db !== null) { - this.send_command('select', [this.selected_db]); - } - if (this.pub_sub_mode === true) { - Object.keys(this.subscription_set).forEach(function (key) { - var parts = key.split(" "); - if (exports.debug_mode) { - console.warn("sending pub/sub on_ready " + parts[0] + ", " + parts[1]); - } - self.send_command(parts[0], [parts[1]]); - }); - } else if (this.monitoring) { - this.send_command("monitor"); - } else { - this.send_offline_queue(); - } - this.emit("ready"); -}; - -RedisClient.prototype.on_info_cmd = function (err, res) { - var self = this, obj = {}, lines, retry_time; - - if (err) { - return self.emit("error", "Ready check failed: " + err); - } - - lines = res.toString().split("\r\n"); - - lines.forEach(function (line) { - var parts = line.split(':'); - if (parts[1]) { - obj[parts[0]] = parts[1]; - } - }); - - obj.versions = []; - obj.redis_version.split('.').forEach(function (num) { - obj.versions.push(+num); - }); - - // expose info key/vals to users - this.server_info = obj; - - if (!obj.loading || (obj.loading && obj.loading === "0")) { - if (exports.debug_mode) { - console.log("Redis server ready."); - } - this.on_ready(); - } else { - retry_time = obj.loading_eta_seconds * 1000; - if (retry_time > 1000) { - retry_time = 1000; - } - if (exports.debug_mode) { - console.log("Redis server still loading, trying again in " + retry_time); - } - setTimeout(function () { - self.ready_check(); - }, retry_time); - } -}; - -RedisClient.prototype.ready_check = function () { - var self = this; - - if (exports.debug_mode) { - console.log("checking server ready state..."); - } - - this.send_anyway = true; // secret flag to send_command to send 
something even if not "ready" - this.info(function (err, res) { - self.on_info_cmd(err, res); - }); - this.send_anyway = false; -}; - -RedisClient.prototype.send_offline_queue = function () { - var command_obj, buffered_writes = 0; - - while (this.offline_queue.length > 0) { - command_obj = this.offline_queue.shift(); - if (exports.debug_mode) { - console.log("Sending offline command: " + command_obj.command); - } - buffered_writes += !this.send_command(command_obj.command, command_obj.args, command_obj.callback); - } - this.offline_queue = new Queue(); - // Even though items were shifted off, Queue backing store still uses memory until next add, so just get a new Queue - - if (!buffered_writes) { - this.should_buffer = false; - this.emit("drain"); - } -}; - -RedisClient.prototype.connection_gone = function (why) { - var self = this, message; - - // If a retry is already in progress, just let that happen - if (this.retry_timer) { - return; - } - - if (exports.debug_mode) { - console.warn("Redis connection is gone from " + why + " event."); - } - this.connected = false; - this.ready = false; - - // since we are collapsing end and close, users don't expect to be called twice - if (! this.emitted_end) { - this.emit("end"); - this.emitted_end = true; - } - - this.flush_and_error("Redis connection gone from " + why + " event."); - - // If this is a requested shutdown, then don't retry - if (this.closing) { - this.retry_timer = null; - if (exports.debug_mode) { - console.warn("connection ended from quit command, not retrying."); - } - return; - } - - this.retry_delay = Math.floor(this.retry_delay * this.retry_backoff); - - if (exports.debug_mode) { - console.log("Retry connection in " + this.current_retry_delay + " ms"); - } - - if (this.max_attempts && this.attempts >= this.max_attempts) { - this.retry_timer = null; - // TODO - some people need a "Redis is Broken mode" for future commands that errors immediately, and others - // want the program to exit. 
Right now, we just log, which doesn't really help in either case. - console.error("node_redis: Couldn't get Redis connection after " + this.max_attempts + " attempts."); - return; - } - - this.attempts += 1; - this.emit("reconnecting", { - delay: self.retry_delay, - attempt: self.attempts - }); - this.retry_timer = setTimeout(function () { - if (exports.debug_mode) { - console.log("Retrying connection..."); - } - - self.retry_totaltime += self.current_retry_delay; - - if (self.connect_timeout && self.retry_totaltime >= self.connect_timeout) { - self.retry_timer = null; - // TODO - engage Redis is Broken mode for future commands, or whatever - console.error("node_redis: Couldn't get Redis connection after " + self.retry_totaltime + "ms."); - return; - } - - self.stream.connect(self.port, self.host); - self.retry_timer = null; - }, this.retry_delay); -}; - -RedisClient.prototype.on_data = function (data) { - if (exports.debug_mode) { - console.log("net read " + this.host + ":" + this.port + " id " + this.connection_id + ": " + data.toString()); - } - - try { - this.reply_parser.execute(data); - } catch (err) { - // This is an unexpected parser problem, an exception that came from the parser code itself. - // Parser should emit "error" events if it notices things are out of whack. - // Callbacks that throw exceptions will land in return_reply(), below. 
- // TODO - it might be nice to have a different "error" event for different types of errors - this.emit("error", err); - } -}; - -RedisClient.prototype.return_error = function (err) { - var command_obj = this.command_queue.shift(), queue_len = this.command_queue.getLength(); - - if (this.pub_sub_mode === false && queue_len === 0) { - this.emit("idle"); - this.command_queue = new Queue(); - } - if (this.should_buffer && queue_len <= this.command_queue_low_water) { - this.emit("drain"); - this.should_buffer = false; - } - - if (command_obj && typeof command_obj.callback === "function") { - try { - command_obj.callback(err); - } catch (callback_err) { - // if a callback throws an exception, re-throw it on a new stack so the parser can keep going - process.nextTick(function () { - throw callback_err; - }); - } - } else { - console.log("node_redis: no callback to send error: " + err.message); - // this will probably not make it anywhere useful, but we might as well throw - process.nextTick(function () { - throw err; - }); - } -}; - -// if a callback throws an exception, re-throw it on a new stack so the parser can keep going. -// put this try/catch in its own function because V8 doesn't optimize this well yet. -function try_callback(callback, reply) { - try { - callback(null, reply); - } catch (err) { - process.nextTick(function () { - throw err; - }); - } -} - -// hgetall converts its replies to an Object. If the reply is empty, null is returned. 
-function reply_to_object(reply) { - var obj = {}, j, jl, key, val; - - if (reply.length === 0) { - return null; - } - - for (j = 0, jl = reply.length; j < jl; j += 2) { - key = reply[j].toString(); - val = reply[j + 1]; - obj[key] = val; - } - - return obj; -} - -function reply_to_strings(reply) { - var i; - - if (Buffer.isBuffer(reply)) { - return reply.toString(); - } - - if (Array.isArray(reply)) { - for (i = 0; i < reply.length; i++) { - reply[i] = reply[i].toString(); - } - return reply; - } - - return reply; -} - -RedisClient.prototype.return_reply = function (reply) { - var command_obj, obj, i, len, type, timestamp, argindex, args, queue_len; - - queue_len = this.command_queue.getLength(); - - if (this.pub_sub_mode === false && queue_len === 0) { - this.emit("idle"); - this.command_queue = new Queue(); // explicitly reclaim storage from old Queue - } - if (this.should_buffer && queue_len <= this.command_queue_low_water) { - this.emit("drain"); - this.should_buffer = false; - } - - command_obj = this.command_queue.shift(); - - if (command_obj && !command_obj.sub_command) { - if (typeof command_obj.callback === "function") { - if (this.options.detect_buffers && command_obj.buffer_args === false) { - // If detect_buffers option was specified, then the reply from the parser will be Buffers. - // If this command did not use Buffer arguments, then convert the reply to Strings here. 
- reply = reply_to_strings(reply); - } - - // TODO - confusing and error-prone that hgetall is special cased in two places - if (reply && 'hgetall' === command_obj.command.toLowerCase()) { - reply = reply_to_object(reply); - } - - try_callback(command_obj.callback, reply); - } else if (exports.debug_mode) { - console.log("no callback for reply: " + (reply && reply.toString && reply.toString())); - } - } else if (this.pub_sub_mode || (command_obj && command_obj.sub_command)) { - if (Array.isArray(reply)) { - type = reply[0].toString(); - - if (type === "message") { - this.emit("message", reply[1].toString(), reply[2]); // channel, message - } else if (type === "pmessage") { - this.emit("pmessage", reply[1].toString(), reply[2].toString(), reply[3]); // pattern, channel, message - } else if (type === "subscribe" || type === "unsubscribe" || type === "psubscribe" || type === "punsubscribe") { - if (reply[2] === 0) { - this.pub_sub_mode = false; - if (this.debug_mode) { - console.log("All subscriptions removed, exiting pub/sub mode"); - } - } - // subscribe commands take an optional callback and also emit an event, but only the first response is included in the callback - // TODO - document this or fix it so it works in a more obvious way - if (command_obj && typeof command_obj.callback === "function") { - try_callback(command_obj.callback, reply[1].toString()); - } - this.emit(type, reply[1].toString(), reply[2]); // channel, count - } else { - throw new Error("subscriptions are active but got unknown reply type " + type); - } - } else if (! 
this.closing) { - throw new Error("subscriptions are active but got an invalid reply: " + reply); - } - } else if (this.monitoring) { - len = reply.indexOf(" "); - timestamp = reply.slice(0, len); - argindex = reply.indexOf('"'); - args = reply.slice(argindex + 1, -1).split('" "').map(function (elem) { - return elem.replace(/\\"/g, '"'); - }); - this.emit("monitor", timestamp, args); - } else { - throw new Error("node_redis command queue state error. If you can reproduce this, please report it."); - } -}; - -// This Command constructor is ever so slightly faster than using an object literal, but more importantly, using -// a named constructor helps it show up meaningfully in the V8 CPU profiler and in heap snapshots. -function Command(command, args, sub_command, buffer_args, callback) { - this.command = command; - this.args = args; - this.sub_command = sub_command; - this.buffer_args = buffer_args; - this.callback = callback; -} - -RedisClient.prototype.send_command = function (command, args, callback) { - var arg, this_args, command_obj, i, il, elem_count, buffer_args, stream = this.stream, command_str = "", buffered_writes = 0, last_arg_type; - - if (typeof command !== "string") { - throw new Error("First argument to send_command must be the command name string, not " + typeof command); - } - - if (Array.isArray(args)) { - if (typeof callback === "function") { - // probably the fastest way: - // client.command([arg1, arg2], cb); (straight passthrough) - // send_command(command, [arg1, arg2], cb); - } else if (! 
callback) { - // most people find this variable argument length form more convenient, but it uses arguments, which is slower - // client.command(arg1, arg2, cb); (wraps up arguments into an array) - // send_command(command, [arg1, arg2, cb]); - // client.command(arg1, arg2); (callback is optional) - // send_command(command, [arg1, arg2]); - // client.command(arg1, arg2, undefined); (callback is undefined) - // send_command(command, [arg1, arg2, undefined]); - last_arg_type = typeof args[args.length - 1]; - if (last_arg_type === "function" || last_arg_type === "undefined") { - callback = args.pop(); - } - } else { - throw new Error("send_command: last argument must be a callback or undefined"); - } - } else { - throw new Error("send_command: second argument must be an array"); - } - - // if the last argument is an array, expand it out. This allows commands like this: - // client.command(arg1, [arg2, arg3, arg4], cb); - // and converts to: - // client.command(arg1, arg2, arg3, arg4, cb); - // which is convenient for some things like sadd - if (args.length > 0 && Array.isArray(args[args.length - 1])) { - args = args.slice(0, -1).concat(args[args.length - 1]); - } - - buffer_args = false; - for (i = 0, il = args.length, arg; i < il; i += 1) { - if (Buffer.isBuffer(args[i])) { - buffer_args = true; - } - } - - command_obj = new Command(command, args, false, buffer_args, callback); - - if ((!this.ready && !this.send_anyway) || !stream.writable) { - if (exports.debug_mode) { - if (!stream.writable) { - console.log("send command: stream is not writeable."); - } - - console.log("Queueing " + command + " for next server connection."); - } - this.offline_queue.push(command_obj); - this.should_buffer = true; - return false; - } - - if (command === "subscribe" || command === "psubscribe" || command === "unsubscribe" || command === "punsubscribe") { - this.pub_sub_command(command_obj); - } else if (command === "monitor") { - this.monitoring = true; - } else if (command === 
"quit") { - this.closing = true; - } else if (this.pub_sub_mode === true) { - throw new Error("Connection in pub/sub mode, only pub/sub commands may be used"); - } - this.command_queue.push(command_obj); - this.commands_sent += 1; - - elem_count = args.length + 1; - - // Always use "Multi bulk commands", but if passed any Buffer args, then do multiple writes, one for each arg. - // This means that using Buffers in commands is going to be slower, so use Strings if you don't already have a Buffer. - - command_str = "*" + elem_count + "\r\n$" + command.length + "\r\n" + command + "\r\n"; - - if (! buffer_args) { // Build up a string and send entire command in one write - for (i = 0, il = args.length, arg; i < il; i += 1) { - arg = args[i]; - if (typeof arg !== "string") { - arg = String(arg); - } - command_str += "$" + Buffer.byteLength(arg) + "\r\n" + arg + "\r\n"; - } - if (exports.debug_mode) { - console.log("send " + this.host + ":" + this.port + " id " + this.connection_id + ": " + command_str); - } - buffered_writes += !stream.write(command_str); - } else { - if (exports.debug_mode) { - console.log("send command (" + command_str + ") has Buffer arguments"); - } - buffered_writes += !stream.write(command_str); - - for (i = 0, il = args.length, arg; i < il; i += 1) { - arg = args[i]; - if (!(Buffer.isBuffer(arg) || arg instanceof String)) { - arg = String(arg); - } - - if (Buffer.isBuffer(arg)) { - if (arg.length === 0) { - if (exports.debug_mode) { - console.log("send_command: using empty string for 0 length buffer"); - } - buffered_writes += !stream.write("$0\r\n\r\n"); - } else { - buffered_writes += !stream.write("$" + arg.length + "\r\n"); - buffered_writes += !stream.write(arg); - buffered_writes += !stream.write("\r\n"); - if (exports.debug_mode) { - console.log("send_command: buffer send " + arg.length + " bytes"); - } - } - } else { - if (exports.debug_mode) { - console.log("send_command: string send " + Buffer.byteLength(arg) + " bytes: " + arg); - } - 
buffered_writes += !stream.write("$" + Buffer.byteLength(arg) + "\r\n" + arg + "\r\n"); - } - } - } - if (exports.debug_mode) { - console.log("send_command buffered_writes: " + buffered_writes, " should_buffer: " + this.should_buffer); - } - if (buffered_writes || this.command_queue.getLength() >= this.command_queue_high_water) { - this.should_buffer = true; - } - return !this.should_buffer; -}; - -RedisClient.prototype.pub_sub_command = function (command_obj) { - var i, key, command, args; - - if (this.pub_sub_mode === false && exports.debug_mode) { - console.log("Entering pub/sub mode from " + command_obj.command); - } - this.pub_sub_mode = true; - command_obj.sub_command = true; - - command = command_obj.command; - args = command_obj.args; - if (command === "subscribe" || command === "psubscribe") { - if (command === "subscribe") { - key = "sub"; - } else { - key = "psub"; - } - for (i = 0; i < args.length; i++) { - this.subscription_set[key + " " + args[i]] = true; - } - } else { - if (command === "unsubscribe") { - key = "sub"; - } else { - key = "psub"; - } - for (i = 0; i < args.length; i++) { - delete this.subscription_set[key + " " + args[i]]; - } - } -}; - -RedisClient.prototype.end = function () { - this.stream._events = {}; - this.connected = false; - this.ready = false; - return this.stream.end(); -}; - -function Multi(client, args) { - this.client = client; - this.queue = [["MULTI"]]; - if (Array.isArray(args)) { - this.queue = this.queue.concat(args); - } -} - -exports.Multi = Multi; - -// take 2 arrays and return the union of their elements -function set_union(seta, setb) { - var obj = {}; - - seta.forEach(function (val) { - obj[val] = true; - }); - setb.forEach(function (val) { - obj[val] = true; - }); - return Object.keys(obj); -} - -// This static list of commands is updated from time to time. 
./lib/commands.js can be updated with generate_commands.js -commands = set_union(["get", "set", "setnx", "setex", "append", "strlen", "del", "exists", "setbit", "getbit", "setrange", "getrange", "substr", - "incr", "decr", "mget", "rpush", "lpush", "rpushx", "lpushx", "linsert", "rpop", "lpop", "brpop", "brpoplpush", "blpop", "llen", "lindex", - "lset", "lrange", "ltrim", "lrem", "rpoplpush", "sadd", "srem", "smove", "sismember", "scard", "spop", "srandmember", "sinter", "sinterstore", - "sunion", "sunionstore", "sdiff", "sdiffstore", "smembers", "zadd", "zincrby", "zrem", "zremrangebyscore", "zremrangebyrank", "zunionstore", - "zinterstore", "zrange", "zrangebyscore", "zrevrangebyscore", "zcount", "zrevrange", "zcard", "zscore", "zrank", "zrevrank", "hset", "hsetnx", - "hget", "hmset", "hmget", "hincrby", "hdel", "hlen", "hkeys", "hvals", "hgetall", "hexists", "incrby", "decrby", "getset", "mset", "msetnx", - "randomkey", "select", "move", "rename", "renamenx", "expire", "expireat", "keys", "dbsize", "auth", "ping", "echo", "save", "bgsave", - "bgrewriteaof", "shutdown", "lastsave", "type", "multi", "exec", "discard", "sync", "flushdb", "flushall", "sort", "info", "monitor", "ttl", - "persist", "slaveof", "debug", "config", "subscribe", "unsubscribe", "psubscribe", "punsubscribe", "publish", "watch", "unwatch", "cluster", - "restore", "migrate", "dump", "object", "client", "eval", "evalsha"], require("./lib/commands")); - -commands.forEach(function (command) { - RedisClient.prototype[command] = function (args, callback) { - if (Array.isArray(args) && typeof callback === "function") { - return this.send_command(command, args, callback); - } else { - return this.send_command(command, to_array(arguments)); - } - }; - RedisClient.prototype[command.toUpperCase()] = RedisClient.prototype[command]; - - Multi.prototype[command] = function () { - this.queue.push([command].concat(to_array(arguments))); - return this; - }; - Multi.prototype[command.toUpperCase()] = 
Multi.prototype[command]; -}); - -// store db in this.select_db to restore it on reconnect -RedisClient.prototype.select = function (db, callback) { - var self = this; - - this.send_command('select', [db], function (err, res) { - if (err === null) { - self.selected_db = db; - } - if (typeof(callback) === 'function') { - callback(err, res); - } - }); -}; -RedisClient.prototype.SELECT = RedisClient.prototype.select; - -// Stash auth for connect and reconnect. Send immediately if already connected. -RedisClient.prototype.auth = function () { - var args = to_array(arguments); - this.auth_pass = args[0]; - this.auth_callback = args[1]; - if (exports.debug_mode) { - console.log("Saving auth as " + this.auth_pass); - } - - if (this.connected) { - this.send_command("auth", args); - } -}; -RedisClient.prototype.AUTH = RedisClient.prototype.auth; - -RedisClient.prototype.hmget = function (arg1, arg2, arg3) { - if (Array.isArray(arg2) && typeof arg3 === "function") { - return this.send_command("hmget", [arg1].concat(arg2), arg3); - } else if (Array.isArray(arg1) && typeof arg2 === "function") { - return this.send_command("hmget", arg1, arg2); - } else { - return this.send_command("hmget", to_array(arguments)); - } -}; -RedisClient.prototype.HMGET = RedisClient.prototype.hmget; - -RedisClient.prototype.hmset = function (args, callback) { - var tmp_args, tmp_keys, i, il, key; - - if (Array.isArray(args) && typeof callback === "function") { - return this.send_command("hmset", args, callback); - } - - args = to_array(arguments); - if (typeof args[args.length - 1] === "function") { - callback = args[args.length - 1]; - args.length -= 1; - } else { - callback = null; - } - - if (args.length === 2 && typeof args[0] === "string" && typeof args[1] === "object") { - // User does: client.hmset(key, {key1: val1, key2: val2}) - tmp_args = [ args[0] ]; - tmp_keys = Object.keys(args[1]); - for (i = 0, il = tmp_keys.length; i < il ; i++) { - key = tmp_keys[i]; - tmp_args.push(key); - 
tmp_args.push(args[1][key]); - } - args = tmp_args; - } - - return this.send_command("hmset", args, callback); -}; -RedisClient.prototype.HMSET = RedisClient.prototype.hmset; - -Multi.prototype.hmset = function () { - var args = to_array(arguments), tmp_args; - if (args.length >= 2 && typeof args[0] === "string" && typeof args[1] === "object") { - tmp_args = [ "hmset", args[0] ]; - Object.keys(args[1]).map(function (key) { - tmp_args.push(key); - tmp_args.push(args[1][key]); - }); - if (args[2]) { - tmp_args.push(args[2]); - } - args = tmp_args; - } else { - args.unshift("hmset"); - } - - this.queue.push(args); - return this; -}; -Multi.prototype.HMSET = Multi.prototype.hmset; - -Multi.prototype.exec = function (callback) { - var self = this; - - // drain queue, callback will catch "QUEUED" or error - // TODO - get rid of all of these anonymous functions which are elegant but slow - this.queue.forEach(function (args, index) { - var command = args[0], obj; - if (typeof args[args.length - 1] === "function") { - args = args.slice(1, -1); - } else { - args = args.slice(1); - } - if (args.length === 1 && Array.isArray(args[0])) { - args = args[0]; - } - if (command === 'hmset' && typeof args[1] === 'object') { - obj = args.pop(); - Object.keys(obj).forEach(function (key) { - args.push(key); - args.push(obj[key]); - }); - } - this.client.send_command(command, args, function (err, reply) { - if (err) { - var cur = self.queue[index]; - if (typeof cur[cur.length - 1] === "function") { - cur[cur.length - 1](err); - } else { - throw new Error(err); - } - self.queue.splice(index, 1); - } - }); - }, this); - - // TODO - make this callback part of Multi.prototype instead of creating it each time - return this.client.send_command("EXEC", [], function (err, replies) { - if (err) { - if (callback) { - callback(new Error(err)); - return; - } else { - throw new Error(err); - } - } - - var i, il, j, jl, reply, args; - - if (replies) { - for (i = 1, il = self.queue.length; i < il; i += 
1) { - reply = replies[i - 1]; - args = self.queue[i]; - - // TODO - confusing and error-prone that hgetall is special cased in two places - if (reply && args[0].toLowerCase() === "hgetall") { - replies[i - 1] = reply = reply_to_object(reply); - } - - if (typeof args[args.length - 1] === "function") { - args[args.length - 1](null, reply); - } - } - } - - if (callback) { - callback(null, replies); - } - }); -}; - -RedisClient.prototype.multi = function (args) { - return new Multi(this, args); -}; -RedisClient.prototype.MULTI = function (args) { - return new Multi(this, args); -}; - -exports.createClient = function (port_arg, host_arg, options) { - var port = port_arg || default_port, - host = host_arg || default_host, - redis_client, net_client; - - net_client = net.createConnection(port, host); - - redis_client = new RedisClient(net_client, options); - - redis_client.port = port; - redis_client.host = host; - - return redis_client; -}; - -exports.print = function (err, reply) { - if (err) { - console.log("Error: " + err); - } else { - console.log("Reply: " + reply); - } -}; diff --git a/lib/commands.js b/lib/commands.js deleted file mode 100644 index 0293ae8d37b..00000000000 --- a/lib/commands.js +++ /dev/null @@ -1,126 +0,0 @@ -// This file was generated by ./generate_commands.js on Tue Jun 28 2011 22:37:02 GMT-0700 (PDT) -module.exports = [ - "append", - "auth", - "bgrewriteaof", - "bgsave", - "blpop", - "brpop", - "brpoplpush", - "config get", - "config set", - "config resetstat", - "dbsize", - "debug object", - "debug segfault", - "decr", - "decrby", - "del", - "discard", - "echo", - "exec", - "exists", - "expire", - "expireat", - "flushall", - "flushdb", - "get", - "getbit", - "getrange", - "getset", - "hdel", - "hexists", - "hget", - "hgetall", - "hincrby", - "hkeys", - "hlen", - "hmget", - "hmset", - "hset", - "hsetnx", - "hvals", - "incr", - "incrby", - "info", - "keys", - "lastsave", - "lindex", - "linsert", - "llen", - "lpop", - "lpush", - "lpushx", - 
"lrange", - "lrem", - "lset", - "ltrim", - "mget", - "monitor", - "move", - "mset", - "msetnx", - "multi", - "object", - "persist", - "ping", - "psubscribe", - "publish", - "punsubscribe", - "quit", - "randomkey", - "rename", - "renamenx", - "rpop", - "rpoplpush", - "rpush", - "rpushx", - "sadd", - "save", - "scard", - "sdiff", - "sdiffstore", - "select", - "set", - "setbit", - "setex", - "setnx", - "setrange", - "shutdown", - "sinter", - "sinterstore", - "sismember", - "slaveof", - "smembers", - "smove", - "sort", - "spop", - "srandmember", - "srem", - "strlen", - "subscribe", - "sunion", - "sunionstore", - "sync", - "ttl", - "type", - "unsubscribe", - "unwatch", - "watch", - "zadd", - "zcard", - "zcount", - "zincrby", - "zinterstore", - "zrange", - "zrangebyscore", - "zrank", - "zrem", - "zremrangebyrank", - "zremrangebyscore", - "zrevrange", - "zrevrangebyscore", - "zrevrank", - "zscore", - "zunionstore" -]; diff --git a/lib/parser/hiredis.js b/lib/parser/hiredis.js deleted file mode 100644 index cbb15ba387e..00000000000 --- a/lib/parser/hiredis.js +++ /dev/null @@ -1,46 +0,0 @@ -/*global Buffer require exports console setTimeout */ - -var events = require("events"), - util = require("../util"), - hiredis = require("hiredis"); - -exports.debug_mode = false; -exports.name = "hiredis"; - -function HiredisReplyParser(options) { - this.name = exports.name; - this.options = options || {}; - this.reset(); - events.EventEmitter.call(this); -} - -util.inherits(HiredisReplyParser, events.EventEmitter); - -exports.Parser = HiredisReplyParser; - -HiredisReplyParser.prototype.reset = function () { - this.reader = new hiredis.Reader({ - return_buffers: this.options.return_buffers || false - }); -}; - -HiredisReplyParser.prototype.execute = function (data) { - var reply; - this.reader.feed(data); - while (true) { - try { - reply = this.reader.get(); - } catch (err) { - this.emit("error", err); - break; - } - - if (reply === undefined) break; - - if (reply && reply.constructor 
=== Error) { - this.emit("reply error", reply); - } else { - this.emit("reply", reply); - } - } -}; diff --git a/lib/parser/javascript.js b/lib/parser/javascript.js deleted file mode 100644 index b8f5bc68298..00000000000 --- a/lib/parser/javascript.js +++ /dev/null @@ -1,317 +0,0 @@ -/*global Buffer require exports console setTimeout */ - -// TODO - incorporate these V8 pro tips: -// pre-allocate Arrays if length is known in advance -// do not use delete -// use numbers for parser state - -var events = require("events"), - util = require("../util"); - -exports.debug_mode = false; -exports.name = "javascript"; - -function RedisReplyParser(options) { - this.name = exports.name; - this.options = options || {}; - this.reset(); - events.EventEmitter.call(this); -} - -util.inherits(RedisReplyParser, events.EventEmitter); - -exports.Parser = RedisReplyParser; - -// Buffer.toString() is quite slow for small strings -function small_toString(buf, len) { - var tmp = "", i; - - for (i = 0; i < len; i += 1) { - tmp += String.fromCharCode(buf[i]); - } - - return tmp; -} - -// Reset parser to it's original state. 
-RedisReplyParser.prototype.reset = function () { - this.return_buffer = new Buffer(16384); // for holding replies, might grow - this.return_string = ""; - this.tmp_string = ""; // for holding size fields - - this.multi_bulk_length = 0; - this.multi_bulk_replies = null; - this.multi_bulk_pos = 0; - this.multi_bulk_nested_length = 0; - this.multi_bulk_nested_replies = null; - - this.states = { - TYPE: 1, - SINGLE_LINE: 2, - MULTI_BULK_COUNT: 3, - INTEGER_LINE: 4, - BULK_LENGTH: 5, - ERROR_LINE: 6, - BULK_DATA: 7, - UNKNOWN_TYPE: 8, - FINAL_CR: 9, - FINAL_LF: 10, - MULTI_BULK_COUNT_LF: 11, - BULK_LF: 12 - }; - - this.state = this.states.TYPE; -}; - -RedisReplyParser.prototype.parser_error = function (message) { - this.emit("error", message); - this.reset(); -}; - -RedisReplyParser.prototype.execute = function (incoming_buf) { - var pos = 0, bd_tmp, bd_str, i, il, states = this.states; - //, state_times = {}, start_execute = new Date(), start_switch, end_switch, old_state; - //start_switch = new Date(); - - while (pos < incoming_buf.length) { - // old_state = this.state; - // console.log("execute: " + this.state + ", " + pos + "/" + incoming_buf.length + ", " + String.fromCharCode(incoming_buf[pos])); - - switch (this.state) { - case 1: // states.TYPE - this.type = incoming_buf[pos]; - pos += 1; - - switch (this.type) { - case 43: // + - this.state = states.SINGLE_LINE; - this.return_buffer.end = 0; - this.return_string = ""; - break; - case 42: // * - this.state = states.MULTI_BULK_COUNT; - this.tmp_string = ""; - break; - case 58: // : - this.state = states.INTEGER_LINE; - this.return_buffer.end = 0; - this.return_string = ""; - break; - case 36: // $ - this.state = states.BULK_LENGTH; - this.tmp_string = ""; - break; - case 45: // - - this.state = states.ERROR_LINE; - this.return_buffer.end = 0; - this.return_string = ""; - break; - default: - this.state = states.UNKNOWN_TYPE; - } - break; - case 4: // states.INTEGER_LINE - if (incoming_buf[pos] === 13) { - 
this.send_reply(+small_toString(this.return_buffer, this.return_buffer.end)); - this.state = states.FINAL_LF; - } else { - this.return_buffer[this.return_buffer.end] = incoming_buf[pos]; - this.return_buffer.end += 1; - } - pos += 1; - break; - case 6: // states.ERROR_LINE - if (incoming_buf[pos] === 13) { - this.send_error(this.return_buffer.toString("ascii", 0, this.return_buffer.end)); - this.state = states.FINAL_LF; - } else { - this.return_buffer[this.return_buffer.end] = incoming_buf[pos]; - this.return_buffer.end += 1; - } - pos += 1; - break; - case 2: // states.SINGLE_LINE - if (incoming_buf[pos] === 13) { - this.send_reply(this.return_string); - this.state = states.FINAL_LF; - } else { - this.return_string += String.fromCharCode(incoming_buf[pos]); - } - pos += 1; - break; - case 3: // states.MULTI_BULK_COUNT - if (incoming_buf[pos] === 13) { // \r - this.state = states.MULTI_BULK_COUNT_LF; - } else { - this.tmp_string += String.fromCharCode(incoming_buf[pos]); - } - pos += 1; - break; - case 11: // states.MULTI_BULK_COUNT_LF - if (incoming_buf[pos] === 10) { // \n - if (this.multi_bulk_length) { // nested multi-bulk - this.multi_bulk_nested_length = this.multi_bulk_length; - this.multi_bulk_nested_replies = this.multi_bulk_replies; - this.multi_bulk_nested_pos = this.multi_bulk_pos; - } - this.multi_bulk_length = +this.tmp_string; - this.multi_bulk_pos = 0; - this.state = states.TYPE; - if (this.multi_bulk_length < 0) { - this.send_reply(null); - this.multi_bulk_length = 0; - } else if (this.multi_bulk_length === 0) { - this.multi_bulk_pos = 0; - this.multi_bulk_replies = null; - this.send_reply([]); - } else { - this.multi_bulk_replies = new Array(this.multi_bulk_length); - } - } else { - this.parser_error(new Error("didn't see LF after NL reading multi bulk count")); - return; - } - pos += 1; - break; - case 5: // states.BULK_LENGTH - if (incoming_buf[pos] === 13) { // \r - this.state = states.BULK_LF; - } else { - this.tmp_string += 
String.fromCharCode(incoming_buf[pos]); - } - pos += 1; - break; - case 12: // states.BULK_LF - if (incoming_buf[pos] === 10) { // \n - this.bulk_length = +this.tmp_string; - if (this.bulk_length === -1) { - this.send_reply(null); - this.state = states.TYPE; - } else if (this.bulk_length === 0) { - this.send_reply(new Buffer("")); - this.state = states.FINAL_CR; - } else { - this.state = states.BULK_DATA; - if (this.bulk_length > this.return_buffer.length) { - if (exports.debug_mode) { - console.log("Growing return_buffer from " + this.return_buffer.length + " to " + this.bulk_length); - } - this.return_buffer = new Buffer(this.bulk_length); - } - this.return_buffer.end = 0; - } - } else { - this.parser_error(new Error("didn't see LF after NL while reading bulk length")); - return; - } - pos += 1; - break; - case 7: // states.BULK_DATA - this.return_buffer[this.return_buffer.end] = incoming_buf[pos]; - this.return_buffer.end += 1; - pos += 1; - if (this.return_buffer.end === this.bulk_length) { - bd_tmp = new Buffer(this.bulk_length); - // When the response is small, Buffer.copy() is a lot slower. 
- if (this.bulk_length > 10) { - this.return_buffer.copy(bd_tmp, 0, 0, this.bulk_length); - } else { - for (i = 0, il = this.bulk_length; i < il; i += 1) { - bd_tmp[i] = this.return_buffer[i]; - } - } - this.send_reply(bd_tmp); - this.state = states.FINAL_CR; - } - break; - case 9: // states.FINAL_CR - if (incoming_buf[pos] === 13) { // \r - this.state = states.FINAL_LF; - pos += 1; - } else { - this.parser_error(new Error("saw " + incoming_buf[pos] + " when expecting final CR")); - return; - } - break; - case 10: // states.FINAL_LF - if (incoming_buf[pos] === 10) { // \n - this.state = states.TYPE; - pos += 1; - } else { - this.parser_error(new Error("saw " + incoming_buf[pos] + " when expecting final LF")); - return; - } - break; - default: - this.parser_error(new Error("invalid state " + this.state)); - } - // end_switch = new Date(); - // if (state_times[old_state] === undefined) { - // state_times[old_state] = 0; - // } - // state_times[old_state] += (end_switch - start_switch); - // start_switch = end_switch; - } - // console.log("execute ran for " + (Date.now() - start_execute) + " ms, on " + incoming_buf.length + " Bytes. "); - // Object.keys(state_times).forEach(function (state) { - // console.log(" " + state + ": " + state_times[state]); - // }); -}; - -RedisReplyParser.prototype.send_error = function (reply) { - if (this.multi_bulk_length > 0 || this.multi_bulk_nested_length > 0) { - // TODO - can this happen? Seems like maybe not. 
- this.add_multi_bulk_reply(reply); - } else { - this.emit("reply error", reply); - } -}; - -RedisReplyParser.prototype.send_reply = function (reply) { - if (this.multi_bulk_length > 0 || this.multi_bulk_nested_length > 0) { - if (!this.options.return_buffers && Buffer.isBuffer(reply)) { - this.add_multi_bulk_reply(reply.toString("utf8")); - } else { - this.add_multi_bulk_reply(reply); - } - } else { - if (!this.options.return_buffers && Buffer.isBuffer(reply)) { - this.emit("reply", reply.toString("utf8")); - } else { - this.emit("reply", reply); - } - } -}; - -RedisReplyParser.prototype.add_multi_bulk_reply = function (reply) { - if (this.multi_bulk_replies) { - this.multi_bulk_replies[this.multi_bulk_pos] = reply; - this.multi_bulk_pos += 1; - if (this.multi_bulk_pos < this.multi_bulk_length) { - return; - } - } else { - this.multi_bulk_replies = reply; - } - - if (this.multi_bulk_nested_length > 0) { - this.multi_bulk_nested_replies[this.multi_bulk_nested_pos] = this.multi_bulk_replies; - this.multi_bulk_nested_pos += 1; - - this.multi_bulk_length = 0; - this.multi_bulk_replies = null; - this.multi_bulk_pos = 0; - - if (this.multi_bulk_nested_length === this.multi_bulk_nested_pos) { - this.emit("reply", this.multi_bulk_nested_replies); - this.multi_bulk_nested_length = 0; - this.multi_bulk_nested_pos = 0; - this.multi_bulk_nested_replies = null; - } - } else { - this.emit("reply", this.multi_bulk_replies); - this.multi_bulk_length = 0; - this.multi_bulk_replies = null; - this.multi_bulk_pos = 0; - } -}; diff --git a/lib/queue.js b/lib/queue.js deleted file mode 100644 index 56254e1ca40..00000000000 --- a/lib/queue.js +++ /dev/null @@ -1,61 +0,0 @@ -var to_array = require("./to_array"); - -// Queue class adapted from Tim Caswell's pattern library -// http://github.com/creationix/pattern/blob/master/lib/pattern/queue.js - -function Queue() { - this.tail = []; - this.head = []; - this.offset = 0; -} - -Queue.prototype.shift = function () { - if 
(this.offset === this.head.length) { - var tmp = this.head; - tmp.length = 0; - this.head = this.tail; - this.tail = tmp; - this.offset = 0; - if (this.head.length === 0) { - return; - } - } - return this.head[this.offset++]; // sorry, JSLint -}; - -Queue.prototype.push = function (item) { - return this.tail.push(item); -}; - -Queue.prototype.forEach = function (fn, thisv) { - var array = this.head.slice(this.offset), i, il; - - array.push.apply(array, this.tail); - - if (thisv) { - for (i = 0, il = array.length; i < il; i += 1) { - fn.call(thisv, array[i], i, array); - } - } else { - for (i = 0, il = array.length; i < il; i += 1) { - fn(array[i], i, array); - } - } - - return array; -}; - -Queue.prototype.getLength = function () { - return this.head.length - this.offset + this.tail.length; -}; - -Object.defineProperty(Queue.prototype, 'length', { - get: function () { - return this.getLength(); - } -}); - - -if(typeof module !== 'undefined' && module.exports) { - module.exports = Queue; -} diff --git a/lib/to_array.js b/lib/to_array.js deleted file mode 100644 index 88a57e18a42..00000000000 --- a/lib/to_array.js +++ /dev/null @@ -1,12 +0,0 @@ -function to_array(args) { - var len = args.length, - arr = new Array(len), i; - - for (i = 0; i < len; i += 1) { - arr[i] = args[i]; - } - - return arr; -} - -module.exports = to_array; diff --git a/lib/util.js b/lib/util.js deleted file mode 100644 index fc255ae9536..00000000000 --- a/lib/util.js +++ /dev/null @@ -1,11 +0,0 @@ -// Support for very old versions of node where the module was called "sys". At some point, we should abandon this. 
- -var util; - -try { - util = require("util"); -} catch (err) { - util = require("sys"); -} - -module.exports = util; diff --git a/multi_bench.js b/multi_bench.js deleted file mode 100644 index 5be2e564fda..00000000000 --- a/multi_bench.js +++ /dev/null @@ -1,225 +0,0 @@ -var redis = require("./index"), - metrics = require("metrics"), - num_clients = parseInt(process.argv[2], 10) || 5, - num_requests = 20000, - tests = [], - versions_logged = false, - client_options = { - return_buffers: false - }, - small_str, large_str, small_buf, large_buf; - -redis.debug_mode = false; - -function lpad(input, len, chr) { - var str = input.toString(); - chr = chr || " "; - - while (str.length < len) { - str = chr + str; - } - return str; -} - -metrics.Histogram.prototype.print_line = function () { - var obj = this.printObj(); - - return lpad(obj.min, 4) + "/" + lpad(obj.max, 4) + "/" + lpad(obj.mean.toFixed(2), 7) + "/" + lpad(obj.p95.toFixed(2), 7); -}; - -function Test(args) { - var self = this; - - this.args = args; - - this.callback = null; - this.clients = []; - this.clients_ready = 0; - this.commands_sent = 0; - this.commands_completed = 0; - this.max_pipeline = this.args.pipeline || num_requests; - this.client_options = args.client_options || client_options; - - this.connect_latency = new metrics.Histogram(); - this.ready_latency = new metrics.Histogram(); - this.command_latency = new metrics.Histogram(); -} - -Test.prototype.run = function (callback) { - var self = this, i; - - this.callback = callback; - - for (i = 0; i < num_clients ; i++) { - this.new_client(i); - } -}; - -Test.prototype.new_client = function (id) { - var self = this, new_client; - - new_client = redis.createClient(6379, "127.0.0.1", this.client_options); - new_client.create_time = Date.now(); - - new_client.on("connect", function () { - self.connect_latency.update(Date.now() - new_client.create_time); - }); - - new_client.on("ready", function () { - if (! 
versions_logged) { - console.log("Client count: " + num_clients + ", node version: " + process.versions.node + ", server version: " + - new_client.server_info.redis_version + ", parser: " + new_client.reply_parser.name); - versions_logged = true; - } - self.ready_latency.update(Date.now() - new_client.create_time); - self.clients_ready++; - if (self.clients_ready === self.clients.length) { - self.on_clients_ready(); - } - }); - - self.clients[id] = new_client; -}; - -Test.prototype.on_clients_ready = function () { - process.stdout.write(lpad(this.args.descr, 13) + ", " + lpad(this.args.pipeline, 5) + "/" + this.clients_ready + " "); - this.test_start = Date.now(); - - this.fill_pipeline(); -}; - -Test.prototype.fill_pipeline = function () { - var pipeline = this.commands_sent - this.commands_completed; - - while (this.commands_sent < num_requests && pipeline < this.max_pipeline) { - this.commands_sent++; - pipeline++; - this.send_next(); - } - - if (this.commands_completed === num_requests) { - this.print_stats(); - this.stop_clients(); - } -}; - -Test.prototype.stop_clients = function () { - var self = this; - - this.clients.forEach(function (client, pos) { - if (pos === self.clients.length - 1) { - client.quit(function (err, res) { - self.callback(); - }); - } else { - client.quit(); - } - }); -}; - -Test.prototype.send_next = function () { - var self = this, - cur_client = this.commands_sent % this.clients.length, - command_num = this.commands_sent, - start = Date.now(); - - this.clients[cur_client][this.args.command](this.args.args, function (err, res) { - if (err) { - throw err; - } - self.commands_completed++; - self.command_latency.update(Date.now() - start); - self.fill_pipeline(); - }); -}; - -Test.prototype.print_stats = function () { - var duration = Date.now() - this.test_start; - - console.log("min/max/avg/p95: " + this.command_latency.print_line() + " " + lpad(duration, 6) + "ms total, " + - lpad((num_requests / (duration / 1000)).toFixed(2), 8) + " 
ops/sec"); -}; - -small_str = "1234"; -small_buf = new Buffer(small_str); -large_str = (new Array(4097).join("-")); -large_buf = new Buffer(large_str); - -tests.push(new Test({descr: "PING", command: "ping", args: [], pipeline: 1})); -tests.push(new Test({descr: "PING", command: "ping", args: [], pipeline: 50})); -tests.push(new Test({descr: "PING", command: "ping", args: [], pipeline: 200})); -tests.push(new Test({descr: "PING", command: "ping", args: [], pipeline: 20000})); - -tests.push(new Test({descr: "SET small str", command: "set", args: ["foo_rand000000000000", small_str], pipeline: 1})); -tests.push(new Test({descr: "SET small str", command: "set", args: ["foo_rand000000000000", small_str], pipeline: 50})); -tests.push(new Test({descr: "SET small str", command: "set", args: ["foo_rand000000000000", small_str], pipeline: 200})); -tests.push(new Test({descr: "SET small str", command: "set", args: ["foo_rand000000000000", small_str], pipeline: 20000})); - -tests.push(new Test({descr: "SET small buf", command: "set", args: ["foo_rand000000000000", small_buf], pipeline: 1})); -tests.push(new Test({descr: "SET small buf", command: "set", args: ["foo_rand000000000000", small_buf], pipeline: 50})); -tests.push(new Test({descr: "SET small buf", command: "set", args: ["foo_rand000000000000", small_buf], pipeline: 200})); -tests.push(new Test({descr: "SET small buf", command: "set", args: ["foo_rand000000000000", small_buf], pipeline: 20000})); - -tests.push(new Test({descr: "GET small str", command: "get", args: ["foo_rand000000000000"], pipeline: 1})); -tests.push(new Test({descr: "GET small str", command: "get", args: ["foo_rand000000000000"], pipeline: 50})); -tests.push(new Test({descr: "GET small str", command: "get", args: ["foo_rand000000000000"], pipeline: 200})); -tests.push(new Test({descr: "GET small str", command: "get", args: ["foo_rand000000000000"], pipeline: 20000})); - -tests.push(new Test({descr: "GET small buf", command: "get", args: 
["foo_rand000000000000"], pipeline: 1, client_opts: { return_buffers: true} })); -tests.push(new Test({descr: "GET small buf", command: "get", args: ["foo_rand000000000000"], pipeline: 50, client_opts: { return_buffers: true} })); -tests.push(new Test({descr: "GET small buf", command: "get", args: ["foo_rand000000000000"], pipeline: 200, client_opts: { return_buffers: true} })); -tests.push(new Test({descr: "GET small buf", command: "get", args: ["foo_rand000000000000"], pipeline: 20000, client_opts: { return_buffers: true} })); - -tests.push(new Test({descr: "SET large str", command: "set", args: ["foo_rand000000000001", large_str], pipeline: 1})); -tests.push(new Test({descr: "SET large str", command: "set", args: ["foo_rand000000000001", large_str], pipeline: 50})); -tests.push(new Test({descr: "SET large str", command: "set", args: ["foo_rand000000000001", large_str], pipeline: 200})); -tests.push(new Test({descr: "SET large str", command: "set", args: ["foo_rand000000000001", large_str], pipeline: 20000})); - -tests.push(new Test({descr: "SET large buf", command: "set", args: ["foo_rand000000000001", large_buf], pipeline: 1})); -tests.push(new Test({descr: "SET large buf", command: "set", args: ["foo_rand000000000001", large_buf], pipeline: 50})); -tests.push(new Test({descr: "SET large buf", command: "set", args: ["foo_rand000000000001", large_buf], pipeline: 200})); -tests.push(new Test({descr: "SET large buf", command: "set", args: ["foo_rand000000000001", large_buf], pipeline: 20000})); - -tests.push(new Test({descr: "GET large str", command: "get", args: ["foo_rand000000000001"], pipeline: 1})); -tests.push(new Test({descr: "GET large str", command: "get", args: ["foo_rand000000000001"], pipeline: 50})); -tests.push(new Test({descr: "GET large str", command: "get", args: ["foo_rand000000000001"], pipeline: 200})); -tests.push(new Test({descr: "GET large str", command: "get", args: ["foo_rand000000000001"], pipeline: 20000})); - -tests.push(new 
Test({descr: "GET large buf", command: "get", args: ["foo_rand000000000001"], pipeline: 1, client_opts: { return_buffers: true} })); -tests.push(new Test({descr: "GET large buf", command: "get", args: ["foo_rand000000000001"], pipeline: 50, client_opts: { return_buffers: true} })); -tests.push(new Test({descr: "GET large buf", command: "get", args: ["foo_rand000000000001"], pipeline: 200, client_opts: { return_buffers: true} })); -tests.push(new Test({descr: "GET large buf", command: "get", args: ["foo_rand000000000001"], pipeline: 20000, client_opts: { return_buffers: true} })); - -tests.push(new Test({descr: "INCR", command: "incr", args: ["counter_rand000000000000"], pipeline: 1})); -tests.push(new Test({descr: "INCR", command: "incr", args: ["counter_rand000000000000"], pipeline: 50})); -tests.push(new Test({descr: "INCR", command: "incr", args: ["counter_rand000000000000"], pipeline: 200})); -tests.push(new Test({descr: "INCR", command: "incr", args: ["counter_rand000000000000"], pipeline: 20000})); - -tests.push(new Test({descr: "LPUSH", command: "lpush", args: ["mylist", small_str], pipeline: 1})); -tests.push(new Test({descr: "LPUSH", command: "lpush", args: ["mylist", small_str], pipeline: 50})); -tests.push(new Test({descr: "LPUSH", command: "lpush", args: ["mylist", small_str], pipeline: 200})); -tests.push(new Test({descr: "LPUSH", command: "lpush", args: ["mylist", small_str], pipeline: 20000})); - -tests.push(new Test({descr: "LRANGE 10", command: "lrange", args: ["mylist", "0", "9"], pipeline: 1})); -tests.push(new Test({descr: "LRANGE 10", command: "lrange", args: ["mylist", "0", "9"], pipeline: 50})); -tests.push(new Test({descr: "LRANGE 10", command: "lrange", args: ["mylist", "0", "9"], pipeline: 200})); -tests.push(new Test({descr: "LRANGE 10", command: "lrange", args: ["mylist", "0", "9"], pipeline: 20000})); - -tests.push(new Test({descr: "LRANGE 100", command: "lrange", args: ["mylist", "0", "99"], pipeline: 1})); -tests.push(new Test({descr: 
"LRANGE 100", command: "lrange", args: ["mylist", "0", "99"], pipeline: 50})); -tests.push(new Test({descr: "LRANGE 100", command: "lrange", args: ["mylist", "0", "99"], pipeline: 200})); -tests.push(new Test({descr: "LRANGE 100", command: "lrange", args: ["mylist", "0", "99"], pipeline: 20000})); - -function next() { - var test = tests.shift(); - if (test) { - test.run(function () { - next(); - }); - } else { - console.log("End of tests."); - process.exit(0); - } -} - -next(); diff --git a/package-lock.json b/package-lock.json new file mode 100644 index 00000000000..a95a6d1bff9 --- /dev/null +++ b/package-lock.json @@ -0,0 +1,7529 @@ +{ + "name": "redis-monorepo", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "redis-monorepo", + "workspaces": [ + "./packages/client", + "./packages/test-utils", + "./packages/bloom", + "./packages/json", + "./packages/search", + "./packages/time-series", + "./packages/entraid", + "./packages/redis" + ], + "devDependencies": { + "@istanbuljs/nyc-config-typescript": "^1.0.2", + "@release-it/bumper": "^7.0.5", + "@types/mocha": "^10.0.6", + "@types/node": "^20.11.16", + "gh-pages": "^6.1.1", + "mocha": "^10.2.0", + "nyc": "^15.1.0", + "release-it": "^19.0.2", + "ts-node": "^10.9.2", + "tsx": "^4.7.0", + "typedoc": "^0.25.7", + "typescript": "^5.3.3" + } + }, + "node_modules/@ampproject/remapping": { + "version": "2.2.1", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.0", + "@jridgewell/trace-mapping": "^0.3.9" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@azure/abort-controller": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-2.1.2.tgz", + "integrity": "sha512-nBrLsEWm4J2u5LpAPjxADTlq3trDgVZZXHNKabeXZtpq3d3AbN/KGO82R87rdDz5/lYB024rtEf10/q0urNgsA==", + "license": "MIT", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + 
"node_modules/@azure/core-auth": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/@azure/core-auth/-/core-auth-1.9.0.tgz", + "integrity": "sha512-FPwHpZywuyasDSLMqJ6fhbOK3TqUdviZNF8OqRGA4W5Ewib2lEEZ+pBsYcBa88B2NGO/SEnYPGhyBqNlE8ilSw==", + "license": "MIT", + "dependencies": { + "@azure/abort-controller": "^2.0.0", + "@azure/core-util": "^1.11.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-client": { + "version": "1.9.2", + "resolved": "https://registry.npmjs.org/@azure/core-client/-/core-client-1.9.2.tgz", + "integrity": "sha512-kRdry/rav3fUKHl/aDLd/pDLcB+4pOFwPPTVEExuMyaI5r+JBbMWqRbCY1pn5BniDaU3lRxO9eaQ1AmSMehl/w==", + "license": "MIT", + "dependencies": { + "@azure/abort-controller": "^2.0.0", + "@azure/core-auth": "^1.4.0", + "@azure/core-rest-pipeline": "^1.9.1", + "@azure/core-tracing": "^1.0.0", + "@azure/core-util": "^1.6.1", + "@azure/logger": "^1.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-rest-pipeline": { + "version": "1.19.0", + "resolved": "https://registry.npmjs.org/@azure/core-rest-pipeline/-/core-rest-pipeline-1.19.0.tgz", + "integrity": "sha512-bM3308LRyg5g7r3Twprtqww0R/r7+GyVxj4BafcmVPo4WQoGt5JXuaqxHEFjw2o3rvFZcUPiqJMg6WuvEEeVUA==", + "license": "MIT", + "dependencies": { + "@azure/abort-controller": "^2.0.0", + "@azure/core-auth": "^1.8.0", + "@azure/core-tracing": "^1.0.1", + "@azure/core-util": "^1.11.0", + "@azure/logger": "^1.0.0", + "http-proxy-agent": "^7.0.0", + "https-proxy-agent": "^7.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-tracing": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@azure/core-tracing/-/core-tracing-1.2.0.tgz", + "integrity": "sha512-UKTiEJPkWcESPYJz3X5uKRYyOcJD+4nYph+KpfdPRnQJVrZfk0KJgdnaAWKfhsBBtAf/D58Az4AvCJEmWgIBAg==", + "license": "MIT", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": 
">=18.0.0" + } + }, + "node_modules/@azure/core-util": { + "version": "1.11.0", + "resolved": "https://registry.npmjs.org/@azure/core-util/-/core-util-1.11.0.tgz", + "integrity": "sha512-DxOSLua+NdpWoSqULhjDyAZTXFdP/LKkqtYuxxz1SCN289zk3OG8UOpnCQAz/tygyACBtWp/BoO72ptK7msY8g==", + "license": "MIT", + "dependencies": { + "@azure/abort-controller": "^2.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/identity": { + "version": "4.7.0", + "resolved": "https://registry.npmjs.org/@azure/identity/-/identity-4.7.0.tgz", + "integrity": "sha512-6z/S2KorkbKaZ0DgZFVRdu7RCuATmMSTjKpuhj7YpjxkJ0vnJ7kTM3cpNgzFgk9OPYfZ31wrBEtC/iwAS4jQDA==", + "license": "MIT", + "dependencies": { + "@azure/abort-controller": "^2.0.0", + "@azure/core-auth": "^1.9.0", + "@azure/core-client": "^1.9.2", + "@azure/core-rest-pipeline": "^1.17.0", + "@azure/core-tracing": "^1.0.0", + "@azure/core-util": "^1.11.0", + "@azure/logger": "^1.0.0", + "@azure/msal-browser": "^4.2.0", + "@azure/msal-node": "^3.2.1", + "events": "^3.0.0", + "jws": "^4.0.0", + "open": "^10.1.0", + "stoppable": "^1.1.0", + "tslib": "^2.2.0" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/identity/node_modules/@azure/msal-common": { + "version": "15.2.0", + "resolved": "https://registry.npmjs.org/@azure/msal-common/-/msal-common-15.2.0.tgz", + "integrity": "sha512-HiYfGAKthisUYqHG1nImCf/uzcyS31wng3o+CycWLIM9chnYJ9Lk6jZ30Y6YiYYpTQ9+z/FGUpiKKekd3Arc0A==", + "license": "MIT", + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/@azure/identity/node_modules/@azure/msal-node": { + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/@azure/msal-node/-/msal-node-3.2.3.tgz", + "integrity": "sha512-0eaPqBIWEAizeYiXdeHb09Iq0tvHJ17ztvNEaLdr/KcJJhJxbpkkEQf09DB+vKlFE0tzYi7j4rYLTXtES/InEQ==", + "license": "MIT", + "dependencies": { + "@azure/msal-common": "15.2.0", + "jsonwebtoken": "^9.0.0", + "uuid": "^8.3.0" + }, + "engines": { + "node": ">=16" + } + }, + 
"node_modules/@azure/identity/node_modules/jwa": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-2.0.0.tgz", + "integrity": "sha512-jrZ2Qx916EA+fq9cEAeCROWPTfCwi1IVHqT2tapuqLEVVDKFDENFw1oL+MwrTvH6msKxsd1YTDVw6uKEcsrLEA==", + "license": "MIT", + "dependencies": { + "buffer-equal-constant-time": "1.0.1", + "ecdsa-sig-formatter": "1.0.11", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/@azure/identity/node_modules/jws": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz", + "integrity": "sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==", + "license": "MIT", + "dependencies": { + "jwa": "^2.0.0", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/@azure/logger": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/@azure/logger/-/logger-1.1.4.tgz", + "integrity": "sha512-4IXXzcCdLdlXuCG+8UKEwLA1T1NHqUfanhXYHiQTn+6sfWCZXduqbtXDGceg3Ce5QxTGo7EqmbV6Bi+aqKuClQ==", + "license": "MIT", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/msal-browser": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/@azure/msal-browser/-/msal-browser-4.4.0.tgz", + "integrity": "sha512-rU6juYXk67CKQmpgi6fDgZoPQ9InZ1760z1BSAH7RbeIc4lHZM/Tu+H0CyRk7cnrfvTkexyYE4pjYhMghpzheA==", + "license": "MIT", + "dependencies": { + "@azure/msal-common": "15.2.0" + }, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/@azure/msal-browser/node_modules/@azure/msal-common": { + "version": "15.2.0", + "resolved": "https://registry.npmjs.org/@azure/msal-common/-/msal-common-15.2.0.tgz", + "integrity": "sha512-HiYfGAKthisUYqHG1nImCf/uzcyS31wng3o+CycWLIM9chnYJ9Lk6jZ30Y6YiYYpTQ9+z/FGUpiKKekd3Arc0A==", + "license": "MIT", + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/@azure/msal-common": { + "version": "14.16.0", + "resolved": 
"https://registry.npmjs.org/@azure/msal-common/-/msal-common-14.16.0.tgz", + "integrity": "sha512-1KOZj9IpcDSwpNiQNjt0jDYZpQvNZay7QAEi/5DLubay40iGYtLzya/jbjRPLyOTZhEKyL1MzPuw2HqBCjceYA==", + "license": "MIT", + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/@azure/msal-node": { + "version": "2.16.2", + "resolved": "https://registry.npmjs.org/@azure/msal-node/-/msal-node-2.16.2.tgz", + "integrity": "sha512-An7l1hEr0w1HMMh1LU+rtDtqL7/jw74ORlc9Wnh06v7TU/xpG39/Zdr1ZJu3QpjUfKJ+E0/OXMW8DRSWTlh7qQ==", + "license": "MIT", + "dependencies": { + "@azure/msal-common": "14.16.0", + "jsonwebtoken": "^9.0.0", + "uuid": "^8.3.0" + }, + "engines": { + "node": ">=16" + } + }, + "node_modules/@babel/code-frame": { + "version": "7.23.5", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/highlight": "^7.23.4", + "chalk": "^2.4.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/code-frame/node_modules/ansi-styles": { + "version": "3.2.1", + "dev": true, + "license": "MIT", + "dependencies": { + "color-convert": "^1.9.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/code-frame/node_modules/chalk": { + "version": "2.4.2", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/code-frame/node_modules/color-convert": { + "version": "1.9.3", + "dev": true, + "license": "MIT", + "dependencies": { + "color-name": "1.1.3" + } + }, + "node_modules/@babel/code-frame/node_modules/color-name": { + "version": "1.1.3", + "dev": true, + "license": "MIT" + }, + "node_modules/@babel/code-frame/node_modules/escape-string-regexp": { + "version": "1.0.5", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/@babel/code-frame/node_modules/has-flag": { + "version": "3.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" 
+ } + }, + "node_modules/@babel/code-frame/node_modules/supports-color": { + "version": "5.5.0", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/compat-data": { + "version": "7.23.5", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/core": { + "version": "7.23.9", + "dev": true, + "license": "MIT", + "dependencies": { + "@ampproject/remapping": "^2.2.0", + "@babel/code-frame": "^7.23.5", + "@babel/generator": "^7.23.6", + "@babel/helper-compilation-targets": "^7.23.6", + "@babel/helper-module-transforms": "^7.23.3", + "@babel/helpers": "^7.23.9", + "@babel/parser": "^7.23.9", + "@babel/template": "^7.23.9", + "@babel/traverse": "^7.23.9", + "@babel/types": "^7.23.9", + "convert-source-map": "^2.0.0", + "debug": "^4.1.0", + "gensync": "^1.0.0-beta.2", + "json5": "^2.2.3", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/babel" + } + }, + "node_modules/@babel/core/node_modules/convert-source-map": { + "version": "2.0.0", + "dev": true, + "license": "MIT" + }, + "node_modules/@babel/generator": { + "version": "7.23.6", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.23.6", + "@jridgewell/gen-mapping": "^0.3.2", + "@jridgewell/trace-mapping": "^0.3.17", + "jsesc": "^2.5.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-compilation-targets": { + "version": "7.23.6", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/compat-data": "^7.23.5", + "@babel/helper-validator-option": "^7.23.5", + "browserslist": "^4.22.2", + "lru-cache": "^5.1.1", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-environment-visitor": { + "version": "7.22.20", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" 
+ } + }, + "node_modules/@babel/helper-function-name": { + "version": "7.23.0", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/template": "^7.22.15", + "@babel/types": "^7.23.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-hoist-variables": { + "version": "7.22.5", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.22.5" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-imports": { + "version": "7.22.15", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.22.15" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-transforms": { + "version": "7.23.3", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-environment-visitor": "^7.22.20", + "@babel/helper-module-imports": "^7.22.15", + "@babel/helper-simple-access": "^7.22.5", + "@babel/helper-split-export-declaration": "^7.22.6", + "@babel/helper-validator-identifier": "^7.22.20" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-simple-access": { + "version": "7.22.5", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.22.5" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-split-export-declaration": { + "version": "7.22.6", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.22.5" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-string-parser": { + "version": "7.23.4", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.22.20", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-option": { + "version": "7.23.5", + "dev": true, + "license": "MIT", + "engines": { + "node": 
">=6.9.0" + } + }, + "node_modules/@babel/helpers": { + "version": "7.23.9", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/template": "^7.23.9", + "@babel/traverse": "^7.23.9", + "@babel/types": "^7.23.9" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/highlight": { + "version": "7.23.4", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-validator-identifier": "^7.22.20", + "chalk": "^2.4.2", + "js-tokens": "^4.0.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/highlight/node_modules/ansi-styles": { + "version": "3.2.1", + "dev": true, + "license": "MIT", + "dependencies": { + "color-convert": "^1.9.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/highlight/node_modules/chalk": { + "version": "2.4.2", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/highlight/node_modules/color-convert": { + "version": "1.9.3", + "dev": true, + "license": "MIT", + "dependencies": { + "color-name": "1.1.3" + } + }, + "node_modules/@babel/highlight/node_modules/color-name": { + "version": "1.1.3", + "dev": true, + "license": "MIT" + }, + "node_modules/@babel/highlight/node_modules/escape-string-regexp": { + "version": "1.0.5", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/@babel/highlight/node_modules/has-flag": { + "version": "3.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/highlight/node_modules/supports-color": { + "version": "5.5.0", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/parser": { + "version": "7.23.9", + "dev": true, + "license": "MIT", + "bin": { + "parser": "bin/babel-parser.js" + }, + 
"engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/template": { + "version": "7.23.9", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.23.5", + "@babel/parser": "^7.23.9", + "@babel/types": "^7.23.9" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/traverse": { + "version": "7.23.9", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.23.5", + "@babel/generator": "^7.23.6", + "@babel/helper-environment-visitor": "^7.22.20", + "@babel/helper-function-name": "^7.23.0", + "@babel/helper-hoist-variables": "^7.22.5", + "@babel/helper-split-export-declaration": "^7.22.6", + "@babel/parser": "^7.23.9", + "@babel/types": "^7.23.9", + "debug": "^4.3.1", + "globals": "^11.1.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/types": { + "version": "7.23.9", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-string-parser": "^7.23.4", + "@babel/helper-validator-identifier": "^7.22.20", + "to-fast-properties": "^2.0.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@cspotcode/source-map-support": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz", + "integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/trace-mapping": "0.3.9" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@cspotcode/source-map-support/node_modules/@jridgewell/trace-mapping": { + "version": "0.3.9", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz", + "integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.0.3", + "@jridgewell/sourcemap-codec": 
"^1.4.10" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.19.12", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@iarna/toml": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@iarna/toml/-/toml-3.0.0.tgz", + "integrity": "sha512-td6ZUkz2oS3VeleBcN+m//Q6HlCFCPrnI0FZhrt/h4XqLEdOyYp2u21nd8MdsR+WJy5r9PTDaHTDDfhf4H4l6Q==", + "dev": true, + "license": "ISC" + }, + "node_modules/@inquirer/checkbox": { + "version": "4.1.7", + "resolved": "https://registry.npmjs.org/@inquirer/checkbox/-/checkbox-4.1.7.tgz", + "integrity": "sha512-VEr2vnI4TSM2Q50fAck98mzWJGAoxbF0rb48tcSEjkJ2kn3mM6c/YsJwnyu45PlXd6aNWObMGWmQVleL2BJy6w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@inquirer/core": "^10.1.12", + "@inquirer/figures": "^1.0.12", + "@inquirer/type": "^3.0.7", + "ansi-escapes": "^4.3.2", + "yoctocolors-cjs": "^2.1.2" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/@inquirer/confirm": { + "version": "5.1.11", + "resolved": "https://registry.npmjs.org/@inquirer/confirm/-/confirm-5.1.11.tgz", + "integrity": "sha512-HgVha2B1lurfZ8u7cBWmu60HpkpnnIT/1IrreBx5g2oxQOVYU15WQDl6oZqjuXVbzteFKSpmMkLTMf2OmbUjaw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@inquirer/core": "^10.1.12", + "@inquirer/type": "^3.0.7" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/@inquirer/core": { + "version": "10.1.12", + "resolved": "https://registry.npmjs.org/@inquirer/core/-/core-10.1.12.tgz", + "integrity": "sha512-uoaDadeJCYSVKYCMPwJi3AjCF9w+l9aWbHYA4iskKX84cVW/A2M6bJlWBoy3k81GpFp6EX3IElV1Z5xKw0g1QQ==", + "dev": true, + "license": "MIT", + 
"dependencies": { + "@inquirer/figures": "^1.0.12", + "@inquirer/type": "^3.0.7", + "ansi-escapes": "^4.3.2", + "cli-width": "^4.1.0", + "mute-stream": "^2.0.0", + "signal-exit": "^4.1.0", + "wrap-ansi": "^6.2.0", + "yoctocolors-cjs": "^2.1.2" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/@inquirer/core/node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@inquirer/editor": { + "version": "4.2.12", + "resolved": "https://registry.npmjs.org/@inquirer/editor/-/editor-4.2.12.tgz", + "integrity": "sha512-YNOCY79iqI/ksWohdudGtnO02N/a2j82b6akK/+hy1/C6xoU07dsKFUBfQ36nLCxE98ICS74Uyandq7nBS31Mw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@inquirer/core": "^10.1.12", + "@inquirer/type": "^3.0.7", + "external-editor": "^3.1.0" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/@inquirer/expand": { + "version": "4.0.14", + "resolved": "https://registry.npmjs.org/@inquirer/expand/-/expand-4.0.14.tgz", + "integrity": "sha512-aon4yACMp4Qwc/2f6xafcC6jzAJ5vXBwL5+z4bS2y4YIOGF+QOe+Jzd5hLz1hOo+bhzVS7q07dNXTeBjaFAqRA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@inquirer/core": "^10.1.12", + "@inquirer/type": "^3.0.7", + "yoctocolors-cjs": "^2.1.2" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + 
}, + "node_modules/@inquirer/figures": { + "version": "1.0.12", + "resolved": "https://registry.npmjs.org/@inquirer/figures/-/figures-1.0.12.tgz", + "integrity": "sha512-MJttijd8rMFcKJC8NYmprWr6hD3r9Gd9qUC0XwPNwoEPWSMVJwA2MlXxF+nhZZNMY+HXsWa+o7KY2emWYIn0jQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + } + }, + "node_modules/@inquirer/input": { + "version": "4.1.11", + "resolved": "https://registry.npmjs.org/@inquirer/input/-/input-4.1.11.tgz", + "integrity": "sha512-gzcBWLWMiBaY507HFg4B1NJ18InnHhLjj4DTLfyoz9Rv7dSPpJ9JSj7Of8ea5QE2D+ms3ESTl/4MdzrC1//B0Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@inquirer/core": "^10.1.12", + "@inquirer/type": "^3.0.7" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/@inquirer/number": { + "version": "3.0.14", + "resolved": "https://registry.npmjs.org/@inquirer/number/-/number-3.0.14.tgz", + "integrity": "sha512-8B4jX8ArK9zvb8/tB04jGLja4XoFfjvrTLJ5YeLlFnJh3jPa9VTQt2kxJZubGKc8YHX68e1XQxv4Nu/WZUnXIw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@inquirer/core": "^10.1.12", + "@inquirer/type": "^3.0.7" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/@inquirer/password": { + "version": "4.0.14", + "resolved": "https://registry.npmjs.org/@inquirer/password/-/password-4.0.14.tgz", + "integrity": "sha512-/N/5PeI+QWE23dTn2D4erD9Y3yYeh0bUDkO9tt2d11mAVuCswiOKzoHrV9KYGQhoD6ae+Nff1G8TPqbfUUh8Ag==", + "dev": true, + "license": "MIT", + "dependencies": { + "@inquirer/core": "^10.1.12", + "@inquirer/type": "^3.0.7", + "ansi-escapes": "^4.3.2" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + 
}, + "node_modules/@inquirer/prompts": { + "version": "7.5.2", + "resolved": "https://registry.npmjs.org/@inquirer/prompts/-/prompts-7.5.2.tgz", + "integrity": "sha512-+jsUm6G9X5PUD97HkcGojzwyPsz5oSB2FUbj+D+NOYFQUj0XqvhDcDfk9mhMxFG/RDIgT9Kq4x0rm5pC5zVHUg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@inquirer/checkbox": "^4.1.7", + "@inquirer/confirm": "^5.1.11", + "@inquirer/editor": "^4.2.12", + "@inquirer/expand": "^4.0.14", + "@inquirer/input": "^4.1.11", + "@inquirer/number": "^3.0.14", + "@inquirer/password": "^4.0.14", + "@inquirer/rawlist": "^4.1.2", + "@inquirer/search": "^3.0.14", + "@inquirer/select": "^4.2.2" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/@inquirer/rawlist": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/@inquirer/rawlist/-/rawlist-4.1.2.tgz", + "integrity": "sha512-VDuhV58w3FuKNl24GR9ygdbu3NkGfuaK7D2gyMWeY79Lr4GVbj7ySxw1isAnelSzU1ecZC/TwICa5rCy0za2OA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@inquirer/core": "^10.1.12", + "@inquirer/type": "^3.0.7", + "yoctocolors-cjs": "^2.1.2" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/@inquirer/search": { + "version": "3.0.14", + "resolved": "https://registry.npmjs.org/@inquirer/search/-/search-3.0.14.tgz", + "integrity": "sha512-+VdtRD5nVR50K5fEMq/qbtHGH08vfqm69NJtojavlMXj6fsYymQZrNqjxEISPs2PDvtsemTJVFGs0uI6Zti6Dw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@inquirer/core": "^10.1.12", + "@inquirer/figures": "^1.0.12", + "@inquirer/type": "^3.0.7", + "yoctocolors-cjs": "^2.1.2" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + 
} + }, + "node_modules/@inquirer/select": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@inquirer/select/-/select-4.2.2.tgz", + "integrity": "sha512-3X8AAPE1WPUwY3IawT19BapD0kKpAUP7SVUu5mxmRjnl/f4q0MQz8CU8ToCC6Im0SzyOTWmSauE3GBgyOv1rBw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@inquirer/core": "^10.1.12", + "@inquirer/figures": "^1.0.12", + "@inquirer/type": "^3.0.7", + "ansi-escapes": "^4.3.2", + "yoctocolors-cjs": "^2.1.2" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/@inquirer/type": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/@inquirer/type/-/type-3.0.7.tgz", + "integrity": "sha512-PfunHQcjwnju84L+ycmcMKB/pTPIngjUJvfnRhKY6FKPuYXlM4aQCb/nIdTFR6BEhMjFvngzvng/vBAJMZpLSA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/@istanbuljs/load-nyc-config": { + "version": "1.1.0", + "dev": true, + "license": "ISC", + "dependencies": { + "camelcase": "^5.3.1", + "find-up": "^4.1.0", + "get-package-type": "^0.1.0", + "js-yaml": "^3.13.1", + "resolve-from": "^5.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/argparse": { + "version": "1.0.10", + "dev": true, + "license": "MIT", + "dependencies": { + "sprintf-js": "~1.0.2" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/find-up": { + "version": "4.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/js-yaml": { + "version": "3.14.1", + "dev": true, + "license": "MIT", + "dependencies": { + "argparse": "^1.0.7", + 
"esprima": "^4.0.0" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/locate-path": { + "version": "5.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "p-locate": "^4.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/p-limit": { + "version": "2.3.0", + "dev": true, + "license": "MIT", + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/p-locate": { + "version": "4.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@istanbuljs/nyc-config-typescript": { + "version": "1.0.2", + "dev": true, + "license": "ISC", + "dependencies": { + "@istanbuljs/schema": "^0.1.2" + }, + "engines": { + "node": ">=8" + }, + "peerDependencies": { + "nyc": ">=15" + } + }, + "node_modules/@istanbuljs/schema": { + "version": "0.1.3", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.3", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/set-array": "^1.0.1", + "@jridgewell/sourcemap-codec": "^1.4.10", + "@jridgewell/trace-mapping": "^0.3.9" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/set-array": { + "version": "1.1.2", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.4.15", + "dev": true, + "license": "MIT" + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.22", + "dev": true, + "license": "MIT", + 
"dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodeutils/defaults-deep": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@nodeutils/defaults-deep/-/defaults-deep-1.1.0.tgz", + "integrity": "sha512-gG44cwQovaOFdSR02jR9IhVRpnDP64VN6JdjYJTfNz4J4fWn7TQnmrf22nSjRqlwlxPcW8PL/L3KbJg3tdwvpg==", + "dev": true, + "license": "ISC", + "dependencies": { + "lodash": "^4.15.0" + } + }, + "node_modules/@octokit/auth-token": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-5.1.2.tgz", + "integrity": "sha512-JcQDsBdg49Yky2w2ld20IHAlwr8d/d8N6NiOXbtuoPCqzbsiJgF633mVUw3x4mo0H5ypataQIX7SFu3yy44Mpw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 18" + } + }, + 
"node_modules/@octokit/core": { + "version": "6.1.5", + "resolved": "https://registry.npmjs.org/@octokit/core/-/core-6.1.5.tgz", + "integrity": "sha512-vvmsN0r7rguA+FySiCsbaTTobSftpIDIpPW81trAmsv9TGxg3YCujAxRYp/Uy8xmDgYCzzgulG62H7KYUFmeIg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@octokit/auth-token": "^5.0.0", + "@octokit/graphql": "^8.2.2", + "@octokit/request": "^9.2.3", + "@octokit/request-error": "^6.1.8", + "@octokit/types": "^14.0.0", + "before-after-hook": "^3.0.2", + "universal-user-agent": "^7.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/endpoint": { + "version": "10.1.4", + "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-10.1.4.tgz", + "integrity": "sha512-OlYOlZIsfEVZm5HCSR8aSg02T2lbUWOsCQoPKfTXJwDzcHQBrVBGdGXb89dv2Kw2ToZaRtudp8O3ZIYoaOjKlA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@octokit/types": "^14.0.0", + "universal-user-agent": "^7.0.2" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/graphql": { + "version": "8.2.2", + "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-8.2.2.tgz", + "integrity": "sha512-Yi8hcoqsrXGdt0yObxbebHXFOiUA+2v3n53epuOg1QUgOB6c4XzvisBNVXJSl8RYA5KrDuSL2yq9Qmqe5N0ryA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@octokit/request": "^9.2.3", + "@octokit/types": "^14.0.0", + "universal-user-agent": "^7.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/openapi-types": { + "version": "25.0.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-25.0.0.tgz", + "integrity": "sha512-FZvktFu7HfOIJf2BScLKIEYjDsw6RKc7rBJCdvCTfKsVnx2GEB/Nbzjr29DUdb7vQhlzS/j8qDzdditP0OC6aw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@octokit/plugin-paginate-rest": { + "version": "11.6.0", + "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-11.6.0.tgz", + "integrity": 
"sha512-n5KPteiF7pWKgBIBJSk8qzoZWcUkza2O6A0za97pMGVrGfPdltxrfmfF5GucHYvHGZD8BdaZmmHGz5cX/3gdpw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@octokit/types": "^13.10.0" + }, + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "@octokit/core": ">=6" + } + }, + "node_modules/@octokit/plugin-paginate-rest/node_modules/@octokit/openapi-types": { + "version": "24.2.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-24.2.0.tgz", + "integrity": "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==", + "dev": true, + "license": "MIT" + }, + "node_modules/@octokit/plugin-paginate-rest/node_modules/@octokit/types": { + "version": "13.10.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.10.0.tgz", + "integrity": "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@octokit/openapi-types": "^24.2.0" + } + }, + "node_modules/@octokit/plugin-request-log": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/@octokit/plugin-request-log/-/plugin-request-log-5.3.1.tgz", + "integrity": "sha512-n/lNeCtq+9ofhC15xzmJCNKP2BWTv8Ih2TTy+jatNCCq/gQP/V7rK3fjIfuz0pDWDALO/o/4QY4hyOF6TQQFUw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "@octokit/core": ">=6" + } + }, + "node_modules/@octokit/plugin-rest-endpoint-methods": { + "version": "13.5.0", + "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-13.5.0.tgz", + "integrity": "sha512-9Pas60Iv9ejO3WlAX3maE1+38c5nqbJXV5GrncEfkndIpZrJ/WPMRd2xYDcPPEt5yzpxcjw9fWNoPhsSGzqKqw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@octokit/types": "^13.10.0" + }, + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "@octokit/core": ">=6" + } + }, + 
"node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/openapi-types": { + "version": "24.2.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-24.2.0.tgz", + "integrity": "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==", + "dev": true, + "license": "MIT" + }, + "node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types": { + "version": "13.10.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.10.0.tgz", + "integrity": "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@octokit/openapi-types": "^24.2.0" + } + }, + "node_modules/@octokit/request": { + "version": "9.2.3", + "resolved": "https://registry.npmjs.org/@octokit/request/-/request-9.2.3.tgz", + "integrity": "sha512-Ma+pZU8PXLOEYzsWf0cn/gY+ME57Wq8f49WTXA8FMHp2Ps9djKw//xYJ1je8Hm0pR2lU9FUGeJRWOtxq6olt4w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@octokit/endpoint": "^10.1.4", + "@octokit/request-error": "^6.1.8", + "@octokit/types": "^14.0.0", + "fast-content-type-parse": "^2.0.0", + "universal-user-agent": "^7.0.2" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/request-error": { + "version": "6.1.8", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-6.1.8.tgz", + "integrity": "sha512-WEi/R0Jmq+IJKydWlKDmryPcmdYSVjL3ekaiEL1L9eo1sUnqMJ+grqmC9cjk7CA7+b2/T397tO5d8YLOH3qYpQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@octokit/types": "^14.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/rest": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/@octokit/rest/-/rest-21.1.1.tgz", + "integrity": "sha512-sTQV7va0IUVZcntzy1q3QqPm/r8rWtDCqpRAmb8eXXnKkjoQEtFe3Nt5GTVsHft+R6jJoHeSiVLcgcvhtue/rg==", + "dev": true, + "license": "MIT", + 
"dependencies": { + "@octokit/core": "^6.1.4", + "@octokit/plugin-paginate-rest": "^11.4.2", + "@octokit/plugin-request-log": "^5.3.1", + "@octokit/plugin-rest-endpoint-methods": "^13.3.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/types": { + "version": "14.0.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-14.0.0.tgz", + "integrity": "sha512-VVmZP0lEhbo2O1pdq63gZFiGCKkm8PPp8AUOijlwPO6hojEVjspA0MWKP7E4hbvGxzFKNqKr6p0IYtOH/Wf/zA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@octokit/openapi-types": "^25.0.0" + } + }, + "node_modules/@phun-ky/typeof": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@phun-ky/typeof/-/typeof-1.2.8.tgz", + "integrity": "sha512-7J6ca1tK0duM2BgVB+CuFMh3idlIVASOP2QvOCbNWDc6JnvjtKa9nufPoJQQ4xrwBonwgT1TIhRRcEtzdVgWsA==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^20.9.0 || >=22.0.0", + "npm": ">=10.8.2" + }, + "funding": { + "url": "https://github.com/phun-ky/typeof?sponsor=1" + } + }, + "node_modules/@redis/bloom": { + "resolved": "packages/bloom", + "link": true + }, + "node_modules/@redis/client": { + "resolved": "packages/client", + "link": true + }, + "node_modules/@redis/entraid": { + "resolved": "packages/entraid", + "link": true + }, + "node_modules/@redis/json": { + "resolved": "packages/json", + "link": true + }, + "node_modules/@redis/search": { + "resolved": "packages/search", + "link": true + }, + "node_modules/@redis/test-utils": { + "resolved": "packages/test-utils", + "link": true + }, + "node_modules/@redis/time-series": { + "resolved": "packages/time-series", + "link": true + }, + "node_modules/@release-it/bumper": { + "version": "7.0.5", + "resolved": "https://registry.npmjs.org/@release-it/bumper/-/bumper-7.0.5.tgz", + "integrity": "sha512-HCFMqDHreLYg4jjTWL//pW1GzZZMn3p7HDbwS2y7y5m0L6p8hEaOEixC3tEzwyVV7VP1VGjqxMvxfa360q8+Tg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@iarna/toml": 
"^3.0.0", + "cheerio": "^1.0.0", + "detect-indent": "7.0.1", + "fast-glob": "^3.3.3", + "ini": "^5.0.0", + "js-yaml": "^4.1.0", + "lodash-es": "^4.17.21", + "semver": "^7.7.1" + }, + "engines": { + "node": "^20.9.0 || >=22.0.0" + }, + "peerDependencies": { + "release-it": ">=18.0.0 || >=19.0.0" + } + }, + "node_modules/@release-it/bumper/node_modules/semver": { + "version": "7.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", + "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@sinonjs/commons": { + "version": "3.0.1", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "type-detect": "4.0.8" + } + }, + "node_modules/@sinonjs/fake-timers": { + "version": "11.2.2", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "@sinonjs/commons": "^3.0.0" + } + }, + "node_modules/@sinonjs/samsam": { + "version": "8.0.0", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "@sinonjs/commons": "^2.0.0", + "lodash.get": "^4.4.2", + "type-detect": "^4.0.8" + } + }, + "node_modules/@sinonjs/samsam/node_modules/@sinonjs/commons": { + "version": "2.0.0", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "type-detect": "4.0.8" + } + }, + "node_modules/@sinonjs/text-encoding": { + "version": "0.7.2", + "dev": true, + "license": "(Unlicense OR Apache-2.0)" + }, + "node_modules/@tootallnate/quickjs-emscripten": { + "version": "0.23.0", + "resolved": "https://registry.npmjs.org/@tootallnate/quickjs-emscripten/-/quickjs-emscripten-0.23.0.tgz", + "integrity": "sha512-C5Mc6rdnsaJDjO3UpGW/CQTHtCKaYlScZTly4JIu97Jxo/odCiH0ITnDXSJPTOrEKk/ycSZ0AOgTmkDtkOsvIA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@tsconfig/node10": { + "version": "1.0.11", + "resolved": 
"https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.11.tgz", + "integrity": "sha512-DcRjDCujK/kCk/cUe8Xz8ZSpm8mS3mNNpta+jGCA6USEDfktlNvm1+IuZ9eTcDbNk41BHwpHHeW+N1lKCz4zOw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@tsconfig/node12": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/@tsconfig/node12/-/node12-1.0.11.tgz", + "integrity": "sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==", + "dev": true, + "license": "MIT" + }, + "node_modules/@tsconfig/node14": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@tsconfig/node14/-/node14-1.0.3.tgz", + "integrity": "sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==", + "dev": true, + "license": "MIT" + }, + "node_modules/@tsconfig/node16": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.4.tgz", + "integrity": "sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/body-parser": { + "version": "1.19.5", + "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.5.tgz", + "integrity": "sha512-fB3Zu92ucau0iQ0JMCFQE7b/dv8Ot07NI3KaZIkIUNXq82k4eBAqUaneXfleGY9JWskeS9y+u0nXMyspcuQrCg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/connect": "*", + "@types/node": "*" + } + }, + "node_modules/@types/connect": { + "version": "3.4.38", + "resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.38.tgz", + "integrity": "sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/express": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.21.tgz", + "integrity": 
"sha512-ejlPM315qwLpaQlQDTjPdsUFSc6ZsP4AN6AlWnogPjQ7CVi7PYF3YVz+CY3jE2pwYf7E/7HlDAN0rV2GxTG0HQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/body-parser": "*", + "@types/express-serve-static-core": "^4.17.33", + "@types/qs": "*", + "@types/serve-static": "*" + } + }, + "node_modules/@types/express-serve-static-core": { + "version": "4.19.6", + "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.19.6.tgz", + "integrity": "sha512-N4LZ2xG7DatVqhCZzOGb1Yi5lMbXSZcmdLDe9EzSndPV2HpWYWzRbaerl2n27irrm94EPpprqa8KpskPT085+A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*", + "@types/qs": "*", + "@types/range-parser": "*", + "@types/send": "*" + } + }, + "node_modules/@types/express-session": { + "version": "1.18.1", + "resolved": "https://registry.npmjs.org/@types/express-session/-/express-session-1.18.1.tgz", + "integrity": "sha512-S6TkD/lljxDlQ2u/4A70luD8/ZxZcrU5pQwI1rVXCiaVIywoFgbA+PIUNDjPhQpPdK0dGleLtYc/y7XWBfclBg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/express": "*" + } + }, + "node_modules/@types/http-errors": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@types/http-errors/-/http-errors-2.0.4.tgz", + "integrity": "sha512-D0CFMMtydbJAegzOyHjtiKPLlvnm3iTZyZRSZoLq2mRhDdmLfIWOCYPfQJ4cu2erKghU++QvjcUjp/5h7hESpA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/mime": { + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.5.tgz", + "integrity": "sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/mocha": { + "version": "10.0.6", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/node": { + "version": "20.11.16", + "dev": true, + "license": "MIT", + "dependencies": { + "undici-types": "~5.26.4" + } + }, + "node_modules/@types/parse-path": { + "version": 
"7.0.3", + "resolved": "https://registry.npmjs.org/@types/parse-path/-/parse-path-7.0.3.tgz", + "integrity": "sha512-LriObC2+KYZD3FzCrgWGv/qufdUy4eXrxcLgQMfYXgPbLIecKIsVBaQgUPmxSSLcjmYbDTQbMgr6qr6l/eb7Bg==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/qs": { + "version": "6.9.17", + "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.17.tgz", + "integrity": "sha512-rX4/bPcfmvxHDv0XjfJELTTr+iB+tn032nPILqHm5wbthUUUuVtNGGqzhya9XUxjTP8Fpr0qYgSZZKxGY++svQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/range-parser": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.7.tgz", + "integrity": "sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/send": { + "version": "0.17.4", + "resolved": "https://registry.npmjs.org/@types/send/-/send-0.17.4.tgz", + "integrity": "sha512-x2EM6TJOybec7c52BX0ZspPodMsQUd5L6PRwOunVyVUhXiBSKf3AezDL8Dgvgt5o0UfKNfuA0eMLr2wLT4AiBA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/mime": "^1", + "@types/node": "*" + } + }, + "node_modules/@types/serve-static": { + "version": "1.15.7", + "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.15.7.tgz", + "integrity": "sha512-W8Ym+h8nhuRwaKPaDw34QUkwsGi6Rc4yYqvKFo5rm2FUEhCFbzVWrxXUxuKK8TASjWsysJY0nsmNCGhCOIsrOw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/http-errors": "*", + "@types/node": "*", + "@types/send": "*" + } + }, + "node_modules/@types/sinon": { + "version": "17.0.3", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/sinonjs__fake-timers": "*" + } + }, + "node_modules/@types/sinonjs__fake-timers": { + "version": "8.1.5", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/yargs": { + "version": "17.0.32", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/yargs-parser": "*" + } + }, + 
"node_modules/@types/yargs-parser": { + "version": "21.0.3", + "dev": true, + "license": "MIT" + }, + "node_modules/accepts": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", + "integrity": "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==", + "dev": true, + "license": "MIT", + "dependencies": { + "mime-types": "~2.1.34", + "negotiator": "0.6.3" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/acorn": { + "version": "8.14.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.1.tgz", + "integrity": "sha512-OvQ/2pUDKmgfCg++xsTX1wGxfTaszcHVcTctW4UJB4hibJx2HXxxO5UmVgyjMa+ZDsiaf5wWLXYpRWMmBI0QHg==", + "dev": true, + "license": "MIT", + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-walk": { + "version": "8.3.4", + "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.4.tgz", + "integrity": "sha512-ueEepnujpqee2o5aIYnvHU6C0A42MNdsIDeqy5BydrkuC5R1ZuUFnm27EeFJGoEHJQgn3uleRvmTXaJgfXbt4g==", + "dev": true, + "license": "MIT", + "dependencies": { + "acorn": "^8.11.0" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/agent-base": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.3.tgz", + "integrity": "sha512-jRR5wdylq8CkOe6hei19GGZnxM6rBGwFl3Bg0YItGDimvjGtAvdZk4Pu6Cl4u4Igsws4a1fd1Vq3ezrhn4KmFw==", + "license": "MIT", + "engines": { + "node": ">= 14" + } + }, + "node_modules/aggregate-error": { + "version": "3.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "clean-stack": "^2.0.0", + "indent-string": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-colors": { + "version": "4.1.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/ansi-escapes": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", + 
"integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "type-fest": "^0.21.3" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ansi-escapes/node_modules/type-fest": { + "version": "0.21.3", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", + "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", + "dev": true, + "license": "(MIT OR CC0-1.0)", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-sequence-parser": { + "version": "1.1.1", + "dev": true, + "license": "MIT" + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "dev": true, + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/anymatch": { + "version": "3.1.3", + "dev": true, + "license": "ISC", + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/append-transform": { + "version": "2.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "default-require-extensions": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/archy": { + "version": "1.0.0", + "dev": true, + "license": "MIT" + }, + "node_modules/arg": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz", + "integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==", + "dev": 
true, + "license": "MIT" + }, + "node_modules/argparse": { + "version": "2.0.1", + "dev": true, + "license": "Python-2.0" + }, + "node_modules/array-flatten": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", + "integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==", + "dev": true, + "license": "MIT" + }, + "node_modules/array-union": { + "version": "1.0.2", + "dev": true, + "license": "MIT", + "dependencies": { + "array-uniq": "^1.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/array-uniq": { + "version": "1.0.3", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ast-types": { + "version": "0.13.4", + "resolved": "https://registry.npmjs.org/ast-types/-/ast-types-0.13.4.tgz", + "integrity": "sha512-x1FCFnFifvYDDzTaLII71vG5uvDwgtmDTEVWAxrgeiR8VjMONcCXJx7E+USjDtHlwFmt9MysbqgF9b9Vjr6w+w==", + "dev": true, + "license": "MIT", + "dependencies": { + "tslib": "^2.0.1" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/async": { + "version": "3.2.5", + "dev": true, + "license": "MIT" + }, + "node_modules/async-retry": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/async-retry/-/async-retry-1.3.3.tgz", + "integrity": "sha512-wfr/jstw9xNi/0teMHrRW7dsz3Lt5ARhYNZ2ewpadnhaIp5mbALhOAP+EAdsC7t4Z6wqsDVv9+W6gm1Dk9mEyw==", + "dev": true, + "license": "MIT", + "dependencies": { + "retry": "0.13.1" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "dev": true, + "license": "MIT" + }, + "node_modules/basic-ftp": { + "version": "5.0.5", + "resolved": "https://registry.npmjs.org/basic-ftp/-/basic-ftp-5.0.5.tgz", + "integrity": "sha512-4Bcg1P8xhUuqcii/S0Z9wiHIrQVPMermM1any+MX5GeGD7faD3/msQUDGLol9wOcz4/jbg/WJnGqoJF6LiBdtg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/before-after-hook": { + "version": "3.0.2", + 
"resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-3.0.2.tgz", + "integrity": "sha512-Nik3Sc0ncrMK4UUdXQmAnRtzmNQTAAXmXIopizwZ1W1t8QmfJj+zL4OA2I7XPTPW5z5TDqv4hRo/JzouDJnX3A==", + "dev": true, + "license": "Apache-2.0" + }, + "node_modules/binary-extensions": { + "version": "2.2.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/body-parser": { + "version": "1.20.3", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz", + "integrity": "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==", + "dev": true, + "license": "MIT", + "dependencies": { + "bytes": "3.1.2", + "content-type": "~1.0.5", + "debug": "2.6.9", + "depd": "2.0.0", + "destroy": "1.2.0", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "on-finished": "2.4.1", + "qs": "6.13.0", + "raw-body": "2.5.2", + "type-is": "~1.6.18", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8", + "npm": "1.2.8000 || >= 1.4.16" + } + }, + "node_modules/body-parser/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/body-parser/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", + "dev": true, + "license": "MIT" + }, + "node_modules/boolbase": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz", + "integrity": "sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==", + "dev": true, + "license": "ISC" + }, + "node_modules/brace-expansion": { + "version": 
"1.1.11", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "dev": true, + "license": "MIT", + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/browser-stdout": { + "version": "1.3.1", + "dev": true, + "license": "ISC" + }, + "node_modules/browserslist": { + "version": "4.22.3", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "caniuse-lite": "^1.0.30001580", + "electron-to-chromium": "^1.4.648", + "node-releases": "^2.0.14", + "update-browserslist-db": "^1.0.13" + }, + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + } + }, + "node_modules/buffer-equal-constant-time": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz", + "integrity": "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==", + "license": "BSD-3-Clause" + }, + "node_modules/bundle-name": { + "version": "4.1.0", + "license": "MIT", + "dependencies": { + "run-applescript": "^7.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/bytes": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", + "integrity": 
"sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/c12": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/c12/-/c12-3.0.3.tgz", + "integrity": "sha512-uC3MacKBb0Z15o5QWCHvHWj5Zv34pGQj9P+iXKSpTuSGFS0KKhUWf4t9AJ+gWjYOdmWCPEGpEzm8sS0iqbpo1w==", + "dev": true, + "license": "MIT", + "dependencies": { + "chokidar": "^4.0.3", + "confbox": "^0.2.2", + "defu": "^6.1.4", + "dotenv": "^16.4.7", + "exsolve": "^1.0.4", + "giget": "^2.0.0", + "jiti": "^2.4.2", + "ohash": "^2.0.11", + "pathe": "^2.0.3", + "perfect-debounce": "^1.0.0", + "pkg-types": "^2.1.0", + "rc9": "^2.1.2" + }, + "peerDependencies": { + "magicast": "^0.3.5" + }, + "peerDependenciesMeta": { + "magicast": { + "optional": true + } + } + }, + "node_modules/c12/node_modules/chokidar": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-4.0.3.tgz", + "integrity": "sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "readdirp": "^4.0.1" + }, + "engines": { + "node": ">= 14.16.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + } + }, + "node_modules/c12/node_modules/readdirp": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-4.1.2.tgz", + "integrity": "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 14.18.0" + }, + "funding": { + "type": "individual", + "url": "https://paulmillr.com/funding/" + } + }, + "node_modules/caching-transform": { + "version": "4.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "hasha": "^5.0.0", + "make-dir": "^3.0.0", + "package-hash": "^4.0.0", + "write-file-atomic": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + 
"node_modules/call-bind": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.8.tgz", + "integrity": "sha512-oKlSFMcMwpUg2ednkhQ454wfWiU/ul3CkJe/PEHcTKuiX6RpbehUiFMXu13HalGZxfUwCQzZG747YXBn1im9ww==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.0", + "es-define-property": "^1.0.0", + "get-intrinsic": "^1.2.4", + "set-function-length": "^1.2.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.1.tgz", + "integrity": "sha512-BhYE+WDaywFg2TBWYNXAE+8B1ATnThNBqXHP5nQu0jWJdVvY2hvkpyB3qOmtmDePiS5/BDQ8wASEWGMWRG148g==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/camelcase": { + "version": "5.3.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/caniuse-lite": { + "version": "1.0.30001584", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "CC-BY-4.0" + }, + "node_modules/chalk": { + "version": "4.1.2", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/chalk/node_modules/supports-color": { + "version": "7.2.0", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": 
">=8" + } + }, + "node_modules/chardet": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/chardet/-/chardet-0.7.0.tgz", + "integrity": "sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==", + "dev": true, + "license": "MIT" + }, + "node_modules/cheerio": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/cheerio/-/cheerio-1.0.0.tgz", + "integrity": "sha512-quS9HgjQpdaXOvsZz82Oz7uxtXiy6UIsIQcpBj7HRw2M63Skasm9qlDocAM7jNuaxdhpPU7c4kJN+gA5MCu4ww==", + "dev": true, + "license": "MIT", + "dependencies": { + "cheerio-select": "^2.1.0", + "dom-serializer": "^2.0.0", + "domhandler": "^5.0.3", + "domutils": "^3.1.0", + "encoding-sniffer": "^0.2.0", + "htmlparser2": "^9.1.0", + "parse5": "^7.1.2", + "parse5-htmlparser2-tree-adapter": "^7.0.0", + "parse5-parser-stream": "^7.1.2", + "undici": "^6.19.5", + "whatwg-mimetype": "^4.0.0" + }, + "engines": { + "node": ">=18.17" + }, + "funding": { + "url": "https://github.com/cheeriojs/cheerio?sponsor=1" + } + }, + "node_modules/cheerio-select": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/cheerio-select/-/cheerio-select-2.1.0.tgz", + "integrity": "sha512-9v9kG0LvzrlcungtnJtpGNxY+fzECQKhK4EGJX2vByejiMX84MFNQw4UxPJl3bFbTMw+Dfs37XaIkCwTZfLh4g==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "boolbase": "^1.0.0", + "css-select": "^5.1.0", + "css-what": "^6.1.0", + "domelementtype": "^2.3.0", + "domhandler": "^5.0.3", + "domutils": "^3.0.1" + }, + "funding": { + "url": "https://github.com/sponsors/fb55" + } + }, + "node_modules/chokidar": { + "version": "3.5.3", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://paulmillr.com/funding/" + } + ], + "license": "MIT", + "dependencies": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + }, + 
"engines": { + "node": ">= 8.10.0" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + } + }, + "node_modules/ci-info": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-4.2.0.tgz", + "integrity": "sha512-cYY9mypksY8NRqgDB1XD1RiJL338v/551niynFTGkZOO2LHuB2OmOYxDIe/ttN9AHwrqdum1360G3ald0W9kCg==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/citty": { + "version": "0.1.6", + "resolved": "https://registry.npmjs.org/citty/-/citty-0.1.6.tgz", + "integrity": "sha512-tskPPKEs8D2KPafUypv2gxwJP8h/OaJmC82QQGGDQcHvXX43xF2VDACcJVmZ0EuSxkpO9Kc4MlrA3q0+FG58AQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "consola": "^3.2.3" + } + }, + "node_modules/clean-stack": { + "version": "2.2.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/cli-cursor": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-5.0.0.tgz", + "integrity": "sha512-aCj4O5wKyszjMmDT4tZj93kxyydN/K5zPWSCe6/0AV/AA1pqe5ZBIw0a2ZfPQV7lL5/yb5HsUreJ6UFAF1tEQw==", + "dev": true, + "license": "MIT", + "dependencies": { + "restore-cursor": "^5.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/cli-spinners": { + "version": "2.9.2", + "resolved": "https://registry.npmjs.org/cli-spinners/-/cli-spinners-2.9.2.tgz", + "integrity": "sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/cli-width": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-4.1.0.tgz", + "integrity": 
"sha512-ouuZd4/dm2Sw5Gmqy6bGyNNNe1qt9RpmxveLSO7KcgsTnU7RXfsw+/bukWGo1abgBiMAic068rclZsO4IWmmxQ==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">= 12" + } + }, + "node_modules/cliui": { + "version": "7.0.4", + "dev": true, + "license": "ISC", + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^7.0.0" + } + }, + "node_modules/cliui/node_modules/wrap-ansi": { + "version": "7.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/cluster-key-slot": { + "version": "1.1.2", + "license": "Apache-2.0", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "dev": true, + "license": "MIT" + }, + "node_modules/commander": { + "version": "11.1.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=16" + } + }, + "node_modules/commondir": { + "version": "1.0.1", + "dev": true, + "license": "MIT" + }, + "node_modules/concat-map": { + "version": "0.0.1", + "dev": true, + "license": "MIT" + }, + "node_modules/confbox": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/confbox/-/confbox-0.2.2.tgz", + "integrity": "sha512-1NB+BKqhtNipMsov4xI/NnhCKp9XG9NamYp5PVm9klAT0fsrNPjaFICsCFhNhwZJKNh7zB/3q8qXz0E9oaMNtQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/consola": { + "version": "3.4.2", + "resolved": "https://registry.npmjs.org/consola/-/consola-3.4.2.tgz", + "integrity": "sha512-5IKcdX0nnYavi6G7TtOhwkYzyjfJlatbjMjuLSfE2kYT5pMDOilZ4OvMhi637CcDICTmz3wARPoyhqyX1Y+XvA==", + "dev": true, + "license": "MIT", + "engines": { + "node": 
"^14.18.0 || >=16.10.0" + } + }, + "node_modules/content-disposition": { + "version": "0.5.4", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", + "integrity": "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "safe-buffer": "5.2.1" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/content-type": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz", + "integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/convert-source-map": { + "version": "1.9.0", + "dev": true, + "license": "MIT" + }, + "node_modules/cookie": { + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.1.tgz", + "integrity": "sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/cookie-signature": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", + "integrity": "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/create-require": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz", + "integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/cross-spawn": { + "version": "7.0.3", + "dev": true, + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + 
}, + "node_modules/css-select": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/css-select/-/css-select-5.1.0.tgz", + "integrity": "sha512-nwoRF1rvRRnnCqqY7updORDsuqKzqYJ28+oSMaJMMgOauh3fvwHqMS7EZpIPqK8GL+g9mKxF1vP/ZjSeNjEVHg==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "boolbase": "^1.0.0", + "css-what": "^6.1.0", + "domhandler": "^5.0.2", + "domutils": "^3.0.1", + "nth-check": "^2.0.1" + }, + "funding": { + "url": "https://github.com/sponsors/fb55" + } + }, + "node_modules/css-what": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/css-what/-/css-what-6.1.0.tgz", + "integrity": "sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">= 6" + }, + "funding": { + "url": "https://github.com/sponsors/fb55" + } + }, + "node_modules/data-uri-to-buffer": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-6.0.2.tgz", + "integrity": "sha512-7hvf7/GW8e86rW0ptuwS3OcBGDjIi6SZva7hCyWC0yYry2cOPmLIjXAUHI6DK2HsnwJd9ifmt57i8eV2n4YNpw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 14" + } + }, + "node_modules/debug": { + "version": "4.3.4", + "license": "MIT", + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/debug/node_modules/ms": { + "version": "2.1.2", + "license": "MIT" + }, + "node_modules/decamelize": { + "version": "1.2.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/default-browser": { + "version": "5.2.1", + "license": "MIT", + "dependencies": { + "bundle-name": "^4.1.0", + "default-browser-id": "^5.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" 
+ } + }, + "node_modules/default-browser-id": { + "version": "5.0.0", + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/default-require-extensions": { + "version": "3.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "strip-bom": "^4.0.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/define-data-property": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", + "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "gopd": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/define-lazy-prop": { + "version": "3.0.0", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/defu": { + "version": "6.1.4", + "resolved": "https://registry.npmjs.org/defu/-/defu-6.1.4.tgz", + "integrity": "sha512-mEQCMmwJu317oSz8CwdIOdwf3xMif1ttiM8LTufzc3g6kR+9Pe236twL8j3IYT1F7GfRgGcW6MWxzZjLIkuHIg==", + "dev": true, + "license": "MIT" + }, + "node_modules/degenerator": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/degenerator/-/degenerator-5.0.1.tgz", + "integrity": "sha512-TllpMR/t0M5sqCXfj85i4XaAzxmS5tVA16dqvdkMwGmzI+dXLXnw3J+3Vdv7VKw+ThlTMboK6i9rnZ6Nntj5CQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ast-types": "^0.13.4", + "escodegen": "^2.1.0", + "esprima": "^4.0.1" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/depd": { + "version": "2.0.0", + "resolved": 
"https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/destr": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/destr/-/destr-2.0.5.tgz", + "integrity": "sha512-ugFTXCtDZunbzasqBxrK93Ik/DRYsO6S/fedkWEMKqt04xZ4csmnmwGDBAb07QWNaGMAmnTIemsYZCksjATwsA==", + "dev": true, + "license": "MIT" + }, + "node_modules/destroy": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz", + "integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.8", + "npm": "1.2.8000 || >= 1.4.16" + } + }, + "node_modules/detect-indent": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/detect-indent/-/detect-indent-7.0.1.tgz", + "integrity": "sha512-Mc7QhQ8s+cLrnUfU/Ji94vG/r8M26m8f++vyres4ZoojaRDpZ1eSIh/EpzLNwlWuvzSZ3UbDFspjFvTDXe6e/g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.20" + } + }, + "node_modules/diff": { + "version": "5.0.0", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.3.1" + } + }, + "node_modules/dom-serializer": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-2.0.0.tgz", + "integrity": "sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==", + "dev": true, + "license": "MIT", + "dependencies": { + "domelementtype": "^2.3.0", + "domhandler": "^5.0.2", + "entities": "^4.2.0" + }, + "funding": { + "url": "https://github.com/cheeriojs/dom-serializer?sponsor=1" + } + }, + "node_modules/domelementtype": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz", + "integrity": 
"sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fb55" + } + ], + "license": "BSD-2-Clause" + }, + "node_modules/domhandler": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz", + "integrity": "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "domelementtype": "^2.3.0" + }, + "engines": { + "node": ">= 4" + }, + "funding": { + "url": "https://github.com/fb55/domhandler?sponsor=1" + } + }, + "node_modules/domutils": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/domutils/-/domutils-3.2.2.tgz", + "integrity": "sha512-6kZKyUajlDuqlHKVX1w7gyslj9MPIXzIFiz/rGu35uC1wMi+kMhQwGhl4lt9unC9Vb9INnY9Z3/ZA3+FhASLaw==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "dom-serializer": "^2.0.0", + "domelementtype": "^2.3.0", + "domhandler": "^5.0.3" + }, + "funding": { + "url": "https://github.com/fb55/domutils?sponsor=1" + } + }, + "node_modules/dotenv": { + "version": "16.4.7", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.4.7.tgz", + "integrity": "sha512-47qPchRCykZC03FhkYAhrvwU4xDBFIj1QPqaarj6mdM/hgUzfPHcpkHJOn3mJAufFeeAxAzeGsr5X0M4k6fLZQ==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://dotenvx.com" + } + }, + "node_modules/dunder-proto": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.0.tgz", + "integrity": "sha512-9+Sj30DIu+4KvHqMfLUGLFYL2PkURSYMVXJyXe92nFRvlYq5hBjLEhblKB+vkd/WVlUYMWigiY07T91Fkk0+4A==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.0", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + 
"node": ">= 0.4" + } + }, + "node_modules/ecdsa-sig-formatter": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz", + "integrity": "sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==", + "license": "Apache-2.0", + "dependencies": { + "safe-buffer": "^5.0.1" + } + }, + "node_modules/ee-first": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==", + "dev": true, + "license": "MIT" + }, + "node_modules/electron-to-chromium": { + "version": "1.4.656", + "dev": true, + "license": "ISC" + }, + "node_modules/email-addresses": { + "version": "5.0.0", + "dev": true, + "license": "MIT" + }, + "node_modules/emoji-regex": { + "version": "8.0.0", + "dev": true, + "license": "MIT" + }, + "node_modules/encodeurl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", + "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/encoding-sniffer": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/encoding-sniffer/-/encoding-sniffer-0.2.0.tgz", + "integrity": "sha512-ju7Wq1kg04I3HtiYIOrUrdfdDvkyO9s5XM8QAj/bN61Yo/Vb4vgJxy5vi4Yxk01gWHbrofpPtpxM8bKger9jhg==", + "dev": true, + "license": "MIT", + "dependencies": { + "iconv-lite": "^0.6.3", + "whatwg-encoding": "^3.1.1" + }, + "funding": { + "url": "https://github.com/fb55/encoding-sniffer?sponsor=1" + } + }, + "node_modules/encoding-sniffer/node_modules/iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": 
"sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "dev": true, + "license": "MIT", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/entities": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", + "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=0.12" + }, + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" + } + }, + "node_modules/es-define-property": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es6-error": { + "version": "4.1.1", + "dev": true, + "license": "MIT" + }, + "node_modules/esbuild": { + "version": "0.19.12", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=12" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.19.12", + "@esbuild/android-arm": "0.19.12", + "@esbuild/android-arm64": "0.19.12", + "@esbuild/android-x64": "0.19.12", + "@esbuild/darwin-arm64": "0.19.12", + "@esbuild/darwin-x64": "0.19.12", + "@esbuild/freebsd-arm64": "0.19.12", + "@esbuild/freebsd-x64": "0.19.12", + "@esbuild/linux-arm": "0.19.12", + "@esbuild/linux-arm64": "0.19.12", + "@esbuild/linux-ia32": "0.19.12", + "@esbuild/linux-loong64": "0.19.12", + "@esbuild/linux-mips64el": "0.19.12", + "@esbuild/linux-ppc64": "0.19.12", + "@esbuild/linux-riscv64": 
"0.19.12", + "@esbuild/linux-s390x": "0.19.12", + "@esbuild/linux-x64": "0.19.12", + "@esbuild/netbsd-x64": "0.19.12", + "@esbuild/openbsd-x64": "0.19.12", + "@esbuild/sunos-x64": "0.19.12", + "@esbuild/win32-arm64": "0.19.12", + "@esbuild/win32-ia32": "0.19.12", + "@esbuild/win32-x64": "0.19.12" + } + }, + "node_modules/escalade": { + "version": "3.1.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-html": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", + "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==", + "dev": true, + "license": "MIT" + }, + "node_modules/escape-string-regexp": { + "version": "4.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/escodegen": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-2.1.0.tgz", + "integrity": "sha512-2NlIDTwUWJN0mRPQOdtQBzbUHvdGY2P1VXSyU83Q3xKxM7WHX2Ql8dKq782Q9TgQUNOLEzEYu9bzLNj1q88I5w==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "esprima": "^4.0.1", + "estraverse": "^5.2.0", + "esutils": "^2.0.2" + }, + "bin": { + "escodegen": "bin/escodegen.js", + "esgenerate": "bin/esgenerate.js" + }, + "engines": { + "node": ">=6.0" + }, + "optionalDependencies": { + "source-map": "~0.6.1" + } + }, + "node_modules/esprima": { + "version": "4.0.1", + "dev": true, + "license": "BSD-2-Clause", + "bin": { + "esparse": "bin/esparse.js", + "esvalidate": "bin/esvalidate.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + 
"license": "BSD-2-Clause", + "engines": { + "node": ">=4.0" + } + }, + "node_modules/esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/eta": { + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/eta/-/eta-3.5.0.tgz", + "integrity": "sha512-e3x3FBvGzeCIHhF+zhK8FZA2vC5uFn6b4HJjegUbIWrDb4mJ7JjTGMJY9VGIbRVpmSwHopNiaJibhjIr+HfLug==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0.0" + }, + "funding": { + "url": "https://github.com/eta-dev/eta?sponsor=1" + } + }, + "node_modules/etag": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", + "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/events": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", + "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==", + "license": "MIT", + "engines": { + "node": ">=0.8.x" + } + }, + "node_modules/execa": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-8.0.1.tgz", + "integrity": "sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg==", + "dev": true, + "license": "MIT", + "dependencies": { + "cross-spawn": "^7.0.3", + "get-stream": "^8.0.1", + "human-signals": "^5.0.0", + "is-stream": "^3.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^5.1.0", + "onetime": "^6.0.0", + "signal-exit": "^4.1.0", + "strip-final-newline": "^3.0.0" + }, + "engines": { + "node": ">=16.17" + }, + "funding": { + "url": 
"https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/execa/node_modules/is-stream": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-3.0.0.tgz", + "integrity": "sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/execa/node_modules/onetime": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-6.0.0.tgz", + "integrity": "sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "mimic-fn": "^4.0.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/execa/node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/express": { + "version": "4.21.2", + "resolved": "https://registry.npmjs.org/express/-/express-4.21.2.tgz", + "integrity": "sha512-28HqgMZAmih1Czt9ny7qr6ek2qddF4FclbMzwhCREB6OFfH+rXAnuNCwo1/wFvrtbgsQDb4kSbX9de9lFbrXnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "accepts": "~1.3.8", + "array-flatten": "1.1.1", + "body-parser": "1.20.3", + "content-disposition": "0.5.4", + "content-type": "~1.0.4", + "cookie": "0.7.1", + "cookie-signature": "1.0.6", + "debug": "2.6.9", + "depd": "2.0.0", + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + "etag": 
"~1.8.1", + "finalhandler": "1.3.1", + "fresh": "0.5.2", + "http-errors": "2.0.0", + "merge-descriptors": "1.0.3", + "methods": "~1.1.2", + "on-finished": "2.4.1", + "parseurl": "~1.3.3", + "path-to-regexp": "0.1.12", + "proxy-addr": "~2.0.7", + "qs": "6.13.0", + "range-parser": "~1.2.1", + "safe-buffer": "5.2.1", + "send": "0.19.0", + "serve-static": "1.16.2", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "type-is": "~1.6.18", + "utils-merge": "1.0.1", + "vary": "~1.1.2" + }, + "engines": { + "node": ">= 0.10.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/express-session": { + "version": "1.18.1", + "resolved": "https://registry.npmjs.org/express-session/-/express-session-1.18.1.tgz", + "integrity": "sha512-a5mtTqEaZvBCL9A9aqkrtfz+3SMDhOVUnjafjo+s7A9Txkq+SVX2DLvSp1Zrv4uCXa3lMSK3viWnh9Gg07PBUA==", + "dev": true, + "license": "MIT", + "dependencies": { + "cookie": "0.7.2", + "cookie-signature": "1.0.7", + "debug": "2.6.9", + "depd": "~2.0.0", + "on-headers": "~1.0.2", + "parseurl": "~1.3.3", + "safe-buffer": "5.2.1", + "uid-safe": "~2.1.5" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/express-session/node_modules/cookie": { + "version": "0.7.2", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.2.tgz", + "integrity": "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/express-session/node_modules/cookie-signature": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.7.tgz", + "integrity": "sha512-NXdYc3dLr47pBkpUCHtKSwIOQXLVn8dZEuywboCOJY/osA0wFSLlSawr3KN8qXJEyX66FcONTH8EIlVuK0yyFA==", + "dev": true, + "license": "MIT" + }, + "node_modules/express-session/node_modules/debug": { + "version": "2.6.9", + "resolved": 
"https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/express-session/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", + "dev": true, + "license": "MIT" + }, + "node_modules/express/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/express/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", + "dev": true, + "license": "MIT" + }, + "node_modules/express/node_modules/path-to-regexp": { + "version": "0.1.12", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.12.tgz", + "integrity": "sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/exsolve": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/exsolve/-/exsolve-1.0.5.tgz", + "integrity": "sha512-pz5dvkYYKQ1AHVrgOzBKWeP4u4FRb3a6DNK2ucr0OoNwYIU4QWsJ+NM36LLzORT+z845MzKHHhpXiUF5nvQoJg==", + "dev": true, + "license": "MIT" + }, + "node_modules/external-editor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/external-editor/-/external-editor-3.1.0.tgz", + "integrity": 
"sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew==", + "dev": true, + "license": "MIT", + "dependencies": { + "chardet": "^0.7.0", + "iconv-lite": "^0.4.24", + "tmp": "^0.0.33" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/fast-content-type-parse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/fast-content-type-parse/-/fast-content-type-parse-2.0.1.tgz", + "integrity": "sha512-nGqtvLrj5w0naR6tDPfB4cUmYCqouzyQiz6C5y/LtcDllJdrcc6WaWW6iXyIIOErTa/XRybj28aasdn4LkVk6Q==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "MIT" + }, + "node_modules/fast-glob": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz", + "integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.8" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fastq": { + "version": "1.19.1", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.19.1.tgz", + "integrity": "sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/filename-reserved-regex": { + "version": "2.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/filenamify": { + "version": "4.3.0", + "dev": true, + "license": "MIT", + "dependencies": { + "filename-reserved-regex": "^2.0.0", + "strip-outer": "^1.0.1", + "trim-repeated": "^1.0.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": 
"https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "dev": true, + "license": "MIT", + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/finalhandler": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.3.1.tgz", + "integrity": "sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "debug": "2.6.9", + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + "on-finished": "2.4.1", + "parseurl": "~1.3.3", + "statuses": "2.0.1", + "unpipe": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/finalhandler/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/finalhandler/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", + "dev": true, + "license": "MIT" + }, + "node_modules/find-cache-dir": { + "version": "3.3.2", + "dev": true, + "license": "MIT", + "dependencies": { + "commondir": "^1.0.1", + "make-dir": "^3.0.2", + "pkg-dir": "^4.1.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/avajs/find-cache-dir?sponsor=1" + } + }, + "node_modules/find-up": { + "version": "5.0.0", + "dev": true, + "license": "MIT", + 
"dependencies": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/flat": { + "version": "5.0.2", + "dev": true, + "license": "BSD-3-Clause", + "bin": { + "flat": "cli.js" + } + }, + "node_modules/foreground-child": { + "version": "2.0.0", + "dev": true, + "license": "ISC", + "dependencies": { + "cross-spawn": "^7.0.0", + "signal-exit": "^3.0.2" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/forwarded": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", + "integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/fresh": { + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", + "integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/fromentries": { + "version": "1.3.2", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/fs-extra": { + "version": "11.2.0", + "dev": true, + "license": "MIT", + "dependencies": { + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + }, + "engines": { + "node": ">=14.14" + } + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "dev": true, + "license": "ISC" + }, + "node_modules/function-bind": { + "version": "1.1.2", + "dev": true, + "license": "MIT", + "funding": { + "url": 
"https://github.com/sponsors/ljharb" + } + }, + "node_modules/gensync": { + "version": "1.0.0-beta.2", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "dev": true, + "license": "ISC", + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/get-east-asian-width": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-east-asian-width/-/get-east-asian-width-1.3.0.tgz", + "integrity": "sha512-vpeMIQKxczTD/0s2CdEWHcb0eeJe6TFjxb+J5xgX7hScxqrGuyjmv4c1D4A/gelKfyox0gJJwIHF+fLjeaM8kQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/get-intrinsic": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.5.tgz", + "integrity": "sha512-Y4+pKa7XeRUPWFNvOOYHkRYrfzW07oraURSvjDmRVOJ748OrVmeXtpE4+GCEHncjCjkTxPNRt8kEbxDhsn6VTg==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.0", + "dunder-proto": "^1.0.0", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-package-type": { + "version": "0.1.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/get-stream": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-8.0.1.tgz", + "integrity": "sha512-VaUJspBffn/LMCJVoMvSAdmscJyS1auj5Zulnn5UoYcY531UWmdwhRWkcGKnGU93m5HSXP9LP2usOryrBtQowA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + 
"node_modules/get-tsconfig": { + "version": "4.7.2", + "dev": true, + "license": "MIT", + "dependencies": { + "resolve-pkg-maps": "^1.0.0" + }, + "funding": { + "url": "https://github.com/privatenumber/get-tsconfig?sponsor=1" + } + }, + "node_modules/get-uri": { + "version": "6.0.4", + "resolved": "https://registry.npmjs.org/get-uri/-/get-uri-6.0.4.tgz", + "integrity": "sha512-E1b1lFFLvLgak2whF2xDBcOy6NLVGZBqqjJjsIhvopKfWWEi64pLVTWWehV8KlLerZkfNTA95sTe2OdJKm1OzQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "basic-ftp": "^5.0.2", + "data-uri-to-buffer": "^6.0.2", + "debug": "^4.3.4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/gh-pages": { + "version": "6.1.1", + "dev": true, + "license": "MIT", + "dependencies": { + "async": "^3.2.4", + "commander": "^11.0.0", + "email-addresses": "^5.0.0", + "filenamify": "^4.3.0", + "find-cache-dir": "^3.3.1", + "fs-extra": "^11.1.1", + "globby": "^6.1.0" + }, + "bin": { + "gh-pages": "bin/gh-pages.js", + "gh-pages-clean": "bin/gh-pages-clean.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/giget": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/giget/-/giget-2.0.0.tgz", + "integrity": "sha512-L5bGsVkxJbJgdnwyuheIunkGatUF/zssUoxxjACCseZYAVbaqdh9Tsmmlkl8vYan09H7sbvKt4pS8GqKLBrEzA==", + "dev": true, + "license": "MIT", + "dependencies": { + "citty": "^0.1.6", + "consola": "^3.4.0", + "defu": "^6.1.4", + "node-fetch-native": "^1.6.6", + "nypm": "^0.6.0", + "pathe": "^2.0.3" + }, + "bin": { + "giget": "dist/cli.mjs" + } + }, + "node_modules/git-up": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/git-up/-/git-up-8.1.1.tgz", + "integrity": "sha512-FDenSF3fVqBYSaJoYy1KSc2wosx0gCvKP+c+PRBht7cAaiCeQlBtfBDX9vgnNOHmdePlSFITVcn4pFfcgNvx3g==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-ssh": "^1.4.0", + "parse-url": "^9.2.0" + } + }, + "node_modules/git-url-parse": { + "version": "16.1.0", + "resolved": 
"https://registry.npmjs.org/git-url-parse/-/git-url-parse-16.1.0.tgz", + "integrity": "sha512-cPLz4HuK86wClEW7iDdeAKcCVlWXmrLpb2L+G9goW0Z1dtpNS6BXXSOckUTlJT/LDQViE1QZKstNORzHsLnobw==", + "dev": true, + "license": "MIT", + "dependencies": { + "git-up": "^8.1.0" + } + }, + "node_modules/glob": { + "version": "7.2.3", + "dev": true, + "license": "ISC", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob-parent": { + "version": "5.1.2", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/globals": { + "version": "11.12.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/globby": { + "version": "6.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "array-union": "^1.0.1", + "glob": "^7.0.3", + "object-assign": "^4.0.1", + "pify": "^2.0.0", + "pinkie-promise": "^2.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "dev": true, + "license": "ISC" + }, + "node_modules/has-flag": { + "version": "4.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/has-property-descriptors": { + "version": "1.0.2", + "resolved": 
"https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", + "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-define-property": "^1.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-symbols": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hasha": { + "version": "5.2.2", + "dev": true, + "license": "MIT", + "dependencies": { + "is-stream": "^2.0.0", + "type-fest": "^0.8.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/he": { + "version": "1.2.0", + "dev": true, + "license": "MIT", + "bin": { + "he": "bin/he" + } + }, + "node_modules/html-escaper": { + "version": "2.0.2", + "dev": true, + "license": "MIT" + }, + "node_modules/htmlparser2": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-9.1.0.tgz", + "integrity": "sha512-5zfg6mHUoaer/97TxnGpxmbR7zJtPwIYFMZ/H5ucTlPZhKvtum05yiPK3Mgai3a0DyVxv7qYqoweaEd2nrYQzQ==", + "dev": true, + "funding": [ + "https://github.com/fb55/htmlparser2?sponsor=1", + { + 
"type": "github", + "url": "https://github.com/sponsors/fb55" + } + ], + "license": "MIT", + "dependencies": { + "domelementtype": "^2.3.0", + "domhandler": "^5.0.3", + "domutils": "^3.1.0", + "entities": "^4.5.0" + } + }, + "node_modules/http-errors": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", + "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "depd": "2.0.0", + "inherits": "2.0.4", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "toidentifier": "1.0.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/http-proxy-agent": { + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-7.0.2.tgz", + "integrity": "sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==", + "license": "MIT", + "dependencies": { + "agent-base": "^7.1.0", + "debug": "^4.3.4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/https-proxy-agent": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz", + "integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==", + "license": "MIT", + "dependencies": { + "agent-base": "^7.1.2", + "debug": "4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/human-signals": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-5.0.0.tgz", + "integrity": "sha512-AXcZb6vzzrFAUE61HnN4mpLqd/cSIwNQjtNWR0euPm6y0iqx3G4gOXaIDdtdDwZmhwe82LA6+zinmW4UBWVePQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=16.17.0" + } + }, + "node_modules/iconv-lite": { + "version": "0.4.24", + "dev": true, + "license": "MIT", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3" + }, + "engines": 
{ + "node": ">=0.10.0" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/indent-string": { + "version": "4.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/inflight": { + "version": "1.0.6", + "dev": true, + "license": "ISC", + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "dev": true, + "license": "ISC" + }, + "node_modules/ini": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/ini/-/ini-5.0.0.tgz", + "integrity": "sha512-+N0ngpO3e7cRUWOJAS7qw0IZIVc6XPrW4MlFBdD066F2L4k1L6ker3hLqSq7iXxU5tgS4WGkIUElWn5vogAEnw==", + "dev": true, + "license": "ISC", + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/inquirer": { + "version": "12.6.0", + "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-12.6.0.tgz", + "integrity": "sha512-3zmmccQd/8o65nPOZJZ+2wqt76Ghw3+LaMrmc6JE/IzcvQhJ1st+QLCOo/iLS85/tILU0myG31a2TAZX0ysAvg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@inquirer/core": "^10.1.10", + "@inquirer/prompts": "^7.5.0", + "@inquirer/type": "^3.0.6", + "ansi-escapes": "^4.3.2", + "mute-stream": "^2.0.0", + "run-async": "^3.0.0", + "rxjs": "^7.8.2" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/ip-address": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/ip-address/-/ip-address-9.0.5.tgz", + "integrity": "sha512-zHtQzGojZXTwZTHQqra+ETKd4Sn3vgi7uBmlPoXVWZqYvuKmtI0l/VZTjqGmJY9x88GGOaZ9+G9ES8hC4T4X8g==", + "dev": true, + "license": "MIT", + "dependencies": { + "jsbn": "1.1.0", + "sprintf-js": "^1.1.3" + }, + "engines": { + "node": ">= 12" + } + }, + "node_modules/ip-address/node_modules/sprintf-js": { + "version": "1.1.3", + "resolved": 
"https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.1.3.tgz", + "integrity": "sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA==", + "dev": true, + "license": "BSD-3-Clause" + }, + "node_modules/ipaddr.js": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", + "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/is-binary-path": { + "version": "2.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "binary-extensions": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-docker": { + "version": "3.0.0", + "license": "MIT", + "bin": { + "is-docker": "cli.js" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "dev": true, + "license": "MIT", + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-inside-container": { + "version": "1.0.0", + "license": "MIT", + "dependencies": { + "is-docker": "^3.0.0" + }, + "bin": { + "is-inside-container": "cli.js" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-interactive": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-interactive/-/is-interactive-2.0.0.tgz", + "integrity": 
"sha512-qP1vozQRI+BMOPcjFzrjXuQvdak2pHNUMZoeG2eRbiSqyvbEf/wQtEOTOX1guk6E3t36RkaqiSt8A/6YElNxLQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-plain-obj": { + "version": "2.1.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-ssh": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/is-ssh/-/is-ssh-1.4.1.tgz", + "integrity": "sha512-JNeu1wQsHjyHgn9NcWTaXq6zWSR6hqE0++zhfZlkFBbScNkyvxCdeV8sRkSBaeLKxmbpR21brail63ACNxJ0Tg==", + "dev": true, + "license": "MIT", + "dependencies": { + "protocols": "^2.0.1" + } + }, + "node_modules/is-stream": { + "version": "2.0.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-typedarray": { + "version": "1.0.0", + "dev": true, + "license": "MIT" + }, + "node_modules/is-unicode-supported": { + "version": "0.1.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-windows": { + "version": "1.0.2", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-wsl": { + "version": "3.1.0", + "license": "MIT", + "dependencies": { + "is-inside-container": "^1.0.0" + }, + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/isexe": { + "version": 
"2.0.0", + "dev": true, + "license": "ISC" + }, + "node_modules/issue-parser": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/issue-parser/-/issue-parser-7.0.1.tgz", + "integrity": "sha512-3YZcUUR2Wt1WsapF+S/WiA2WmlW0cWAoPccMqne7AxEBhCdFeTPjfv/Axb8V2gyCgY3nRw+ksZ3xSUX+R47iAg==", + "dev": true, + "license": "MIT", + "dependencies": { + "lodash.capitalize": "^4.2.1", + "lodash.escaperegexp": "^4.1.2", + "lodash.isplainobject": "^4.0.6", + "lodash.isstring": "^4.0.1", + "lodash.uniqby": "^4.7.0" + }, + "engines": { + "node": "^18.17 || >=20.6.1" + } + }, + "node_modules/istanbul-lib-coverage": { + "version": "3.2.2", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-hook": { + "version": "3.0.0", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "append-transform": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-instrument": { + "version": "4.0.3", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "@babel/core": "^7.7.5", + "@istanbuljs/schema": "^0.1.2", + "istanbul-lib-coverage": "^3.0.0", + "semver": "^6.3.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-processinfo": { + "version": "2.0.3", + "dev": true, + "license": "ISC", + "dependencies": { + "archy": "^1.0.0", + "cross-spawn": "^7.0.3", + "istanbul-lib-coverage": "^3.2.0", + "p-map": "^3.0.0", + "rimraf": "^3.0.0", + "uuid": "^8.3.2" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-report": { + "version": "3.0.1", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "istanbul-lib-coverage": "^3.0.0", + "make-dir": "^4.0.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/istanbul-lib-report/node_modules/lru-cache": { + "version": "6.0.0", + "dev": true, + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + 
}, + "node_modules/istanbul-lib-report/node_modules/make-dir": { + "version": "4.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "semver": "^7.5.3" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/istanbul-lib-report/node_modules/semver": { + "version": "7.5.4", + "dev": true, + "license": "ISC", + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/istanbul-lib-report/node_modules/supports-color": { + "version": "7.2.0", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-report/node_modules/yallist": { + "version": "4.0.0", + "dev": true, + "license": "ISC" + }, + "node_modules/istanbul-lib-source-maps": { + "version": "4.0.1", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "debug": "^4.1.1", + "istanbul-lib-coverage": "^3.0.0", + "source-map": "^0.6.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/istanbul-reports": { + "version": "3.1.6", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "html-escaper": "^2.0.0", + "istanbul-lib-report": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/jiti": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/jiti/-/jiti-2.4.2.tgz", + "integrity": "sha512-rg9zJN+G4n2nfJl5MW3BMygZX56zKPNVEYYqq7adpmMh4Jn2QNEwhvQlFy6jPVdcod7txZtKHWnyZiA3a0zP7A==", + "dev": true, + "license": "MIT", + "bin": { + "jiti": "lib/jiti-cli.mjs" + } + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "dev": true, + "license": "MIT" + }, + "node_modules/js-yaml": { + "version": "4.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/jsbn": { + "version": 
"1.1.0", + "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-1.1.0.tgz", + "integrity": "sha512-4bYVV3aAMtDTTu4+xsDYa6sy9GyJ69/amsu9sYF2zqjiEoZA5xJi3BrfX3uY+/IekIu7MwdObdbDWpoZdBv3/A==", + "dev": true, + "license": "MIT" + }, + "node_modules/jsesc": { + "version": "2.5.2", + "dev": true, + "license": "MIT", + "bin": { + "jsesc": "bin/jsesc" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/json5": { + "version": "2.2.3", + "dev": true, + "license": "MIT", + "bin": { + "json5": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/jsonc-parser": { + "version": "3.2.1", + "dev": true, + "license": "MIT" + }, + "node_modules/jsonfile": { + "version": "6.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "universalify": "^2.0.0" + }, + "optionalDependencies": { + "graceful-fs": "^4.1.6" + } + }, + "node_modules/jsonwebtoken": { + "version": "9.0.2", + "resolved": "https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-9.0.2.tgz", + "integrity": "sha512-PRp66vJ865SSqOlgqS8hujT5U4AOgMfhrwYIuIhfKaoSCZcirrmASQr8CX7cUg+RMih+hgznrjp99o+W4pJLHQ==", + "license": "MIT", + "dependencies": { + "jws": "^3.2.2", + "lodash.includes": "^4.3.0", + "lodash.isboolean": "^3.0.3", + "lodash.isinteger": "^4.0.4", + "lodash.isnumber": "^3.0.3", + "lodash.isplainobject": "^4.0.6", + "lodash.isstring": "^4.0.1", + "lodash.once": "^4.0.0", + "ms": "^2.1.1", + "semver": "^7.5.4" + }, + "engines": { + "node": ">=12", + "npm": ">=6" + } + }, + "node_modules/jsonwebtoken/node_modules/semver": { + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/just-extend": { + "version": "6.2.0", + "dev": true, + "license": "MIT" + }, + "node_modules/jwa": { + "version": "1.4.1", + "resolved": 
"https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz", + "integrity": "sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==", + "license": "MIT", + "dependencies": { + "buffer-equal-constant-time": "1.0.1", + "ecdsa-sig-formatter": "1.0.11", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/jws": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz", + "integrity": "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==", + "license": "MIT", + "dependencies": { + "jwa": "^1.4.1", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/locate-path": { + "version": "6.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "p-locate": "^5.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", + "dev": true, + "license": "MIT" + }, + "node_modules/lodash-es": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.21.tgz", + "integrity": "sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==", + "dev": true, + "license": "MIT" + }, + "node_modules/lodash.capitalize": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/lodash.capitalize/-/lodash.capitalize-4.2.1.tgz", + "integrity": "sha512-kZzYOKspf8XVX5AvmQF94gQW0lejFVgb80G85bU4ZWzoJ6C03PQg3coYAUpSTpQWelrZELd3XWgHzw4Ck5kaIw==", + "dev": true, + "license": "MIT" + }, + "node_modules/lodash.escaperegexp": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/lodash.escaperegexp/-/lodash.escaperegexp-4.1.2.tgz", + "integrity": 
"sha512-TM9YBvyC84ZxE3rgfefxUWiQKLilstD6k7PTGt6wfbtXF8ixIJLOL3VYyV/z+ZiPLsVxAsKAFVwWlWeb2Y8Yyw==", + "dev": true, + "license": "MIT" + }, + "node_modules/lodash.flattendeep": { + "version": "4.4.0", + "dev": true, + "license": "MIT" + }, + "node_modules/lodash.get": { + "version": "4.4.2", + "dev": true, + "license": "MIT" + }, + "node_modules/lodash.includes": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/lodash.includes/-/lodash.includes-4.3.0.tgz", + "integrity": "sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w==", + "license": "MIT" + }, + "node_modules/lodash.isboolean": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz", + "integrity": "sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg==", + "license": "MIT" + }, + "node_modules/lodash.isinteger": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/lodash.isinteger/-/lodash.isinteger-4.0.4.tgz", + "integrity": "sha512-DBwtEWN2caHQ9/imiNeEA5ys1JoRtRfY3d7V9wkqtbycnAmTvRRmbHKDV4a0EYc678/dia0jrte4tjYwVBaZUA==", + "license": "MIT" + }, + "node_modules/lodash.isnumber": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/lodash.isnumber/-/lodash.isnumber-3.0.3.tgz", + "integrity": "sha512-QYqzpfwO3/CWf3XP+Z+tkQsfaLL/EnUlXWVkIk5FUPc4sBdTehEqZONuyRt2P67PXAk+NXmTBcc97zw9t1FQrw==", + "license": "MIT" + }, + "node_modules/lodash.isplainobject": { + "version": "4.0.6", + "license": "MIT" + }, + "node_modules/lodash.isstring": { + "version": "4.0.1", + "license": "MIT" + }, + "node_modules/lodash.merge": { + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", + "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/lodash.once": { + "version": "4.1.1", + "resolved": 
"https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz", + "integrity": "sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg==", + "license": "MIT" + }, + "node_modules/lodash.uniqby": { + "version": "4.7.0", + "resolved": "https://registry.npmjs.org/lodash.uniqby/-/lodash.uniqby-4.7.0.tgz", + "integrity": "sha512-e/zcLx6CSbmaEgFHCA7BnoQKyCtKMxnuWrJygbwPs/AIn+IMKl66L8/s+wBUn5LRw2pZx3bUHibiV1b6aTWIww==", + "dev": true, + "license": "MIT" + }, + "node_modules/log-symbols": { + "version": "4.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "chalk": "^4.1.0", + "is-unicode-supported": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/lru-cache": { + "version": "5.1.1", + "dev": true, + "license": "ISC", + "dependencies": { + "yallist": "^3.0.2" + } + }, + "node_modules/lunr": { + "version": "2.3.9", + "dev": true, + "license": "MIT" + }, + "node_modules/macos-release": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/macos-release/-/macos-release-3.3.0.tgz", + "integrity": "sha512-tPJQ1HeyiU2vRruNGhZ+VleWuMQRro8iFtJxYgnS4NQe+EukKF6aGiIT+7flZhISAt2iaXBCfFGvAyif7/f8nQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/make-dir": { + "version": "3.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "semver": "^6.0.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/make-error": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", + "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==", + "dev": true, + "license": "ISC" 
+ }, + "node_modules/marked": { + "version": "4.3.0", + "dev": true, + "license": "MIT", + "bin": { + "marked": "bin/marked.js" + }, + "engines": { + "node": ">= 12" + } + }, + "node_modules/media-typer": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", + "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/merge-descriptors": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.3.tgz", + "integrity": "sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/merge-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", + "dev": true, + "license": "MIT" + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/methods": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", + "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/micromatch": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": 
"sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + "dev": true, + "license": "MIT", + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/mime": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", + "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", + "dev": true, + "license": "MIT", + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/mime-db": { + "version": "1.52.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "dev": true, + "license": "MIT", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mimic-fn": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-4.0.0.tgz", + "integrity": "sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/mimic-function": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/mimic-function/-/mimic-function-5.0.1.tgz", + "integrity": "sha512-VP79XUPxV2CigYP3jWwAUFSku2aKqBH7uTAapFWCBqutsbmDo96KY5o8uh6U+/YSIn5OxJnXp73beVkpqMIGhA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/minimatch": { + "version": "3.1.2", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/mocha": { + "version": "10.2.0", + "dev": true, + "license": "MIT", + "dependencies": { + 
"ansi-colors": "4.1.1", + "browser-stdout": "1.3.1", + "chokidar": "3.5.3", + "debug": "4.3.4", + "diff": "5.0.0", + "escape-string-regexp": "4.0.0", + "find-up": "5.0.0", + "glob": "7.2.0", + "he": "1.2.0", + "js-yaml": "4.1.0", + "log-symbols": "4.1.0", + "minimatch": "5.0.1", + "ms": "2.1.3", + "nanoid": "3.3.3", + "serialize-javascript": "6.0.0", + "strip-json-comments": "3.1.1", + "supports-color": "8.1.1", + "workerpool": "6.2.1", + "yargs": "16.2.0", + "yargs-parser": "20.2.4", + "yargs-unparser": "2.0.0" + }, + "bin": { + "_mocha": "bin/_mocha", + "mocha": "bin/mocha.js" + }, + "engines": { + "node": ">= 14.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/mochajs" + } + }, + "node_modules/mocha/node_modules/glob": { + "version": "7.2.0", + "dev": true, + "license": "ISC", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/mocha/node_modules/glob/node_modules/minimatch": { + "version": "3.1.2", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/mocha/node_modules/minimatch": { + "version": "5.0.1", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/mocha/node_modules/minimatch/node_modules/brace-expansion": { + "version": "2.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "license": "MIT" + }, + "node_modules/mute-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-2.0.0.tgz", + "integrity": 
"sha512-WWdIxpyjEn+FhQJQQv9aQAYlHoNVdzIzUySNV1gHUPDSdZJ3yZn7pAAbQcV7B56Mvu881q9FZV+0Vx2xC44VWA==", + "dev": true, + "license": "ISC", + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/nanoid": { + "version": "3.3.3", + "dev": true, + "license": "MIT", + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/negotiator": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", + "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/netmask": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/netmask/-/netmask-2.0.2.tgz", + "integrity": "sha512-dBpDMdxv9Irdq66304OLfEmQ9tbNRFnFTuZiLo+bD+r332bBmMJ8GBLXklIXXgxd3+v9+KUnZaUR5PJMa75Gsg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/new-github-release-url": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/new-github-release-url/-/new-github-release-url-2.0.0.tgz", + "integrity": "sha512-NHDDGYudnvRutt/VhKFlX26IotXe1w0cmkDm6JGquh5bz/bDTw0LufSmH/GxTjEdpHEO+bVKFTwdrcGa/9XlKQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "type-fest": "^2.5.1" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/new-github-release-url/node_modules/type-fest": { + "version": "2.19.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-2.19.0.tgz", + "integrity": "sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA==", + "dev": true, + "license": "(MIT OR CC0-1.0)", + "engines": { + "node": ">=12.20" + }, + "funding": { + "url": 
"https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/nise": { + "version": "5.1.9", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "@sinonjs/commons": "^3.0.0", + "@sinonjs/fake-timers": "^11.2.2", + "@sinonjs/text-encoding": "^0.7.2", + "just-extend": "^6.2.0", + "path-to-regexp": "^6.2.1" + } + }, + "node_modules/node-fetch-native": { + "version": "1.6.6", + "resolved": "https://registry.npmjs.org/node-fetch-native/-/node-fetch-native-1.6.6.tgz", + "integrity": "sha512-8Mc2HhqPdlIfedsuZoc3yioPuzp6b+L5jRCRY1QzuWZh2EGJVQrGppC6V6cF0bLdbW0+O2YpqCA25aF/1lvipQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/node-preload": { + "version": "0.2.1", + "dev": true, + "license": "MIT", + "dependencies": { + "process-on-spawn": "^1.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/node-releases": { + "version": "2.0.14", + "dev": true, + "license": "MIT" + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/npm-run-path": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-5.3.0.tgz", + "integrity": "sha512-ppwTtiJZq0O/ai0z7yfudtBpWIoxM8yE6nHi1X47eFR2EWORqfbu6CnPlNsjeN683eT0qG6H/Pyf9fCcvjnnnQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "path-key": "^4.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/npm-run-path/node_modules/path-key": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-4.0.0.tgz", + "integrity": "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + 
"node_modules/nth-check": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.1.1.tgz", + "integrity": "sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "boolbase": "^1.0.0" + }, + "funding": { + "url": "https://github.com/fb55/nth-check?sponsor=1" + } + }, + "node_modules/nyc": { + "version": "15.1.0", + "dev": true, + "license": "ISC", + "dependencies": { + "@istanbuljs/load-nyc-config": "^1.0.0", + "@istanbuljs/schema": "^0.1.2", + "caching-transform": "^4.0.0", + "convert-source-map": "^1.7.0", + "decamelize": "^1.2.0", + "find-cache-dir": "^3.2.0", + "find-up": "^4.1.0", + "foreground-child": "^2.0.0", + "get-package-type": "^0.1.0", + "glob": "^7.1.6", + "istanbul-lib-coverage": "^3.0.0", + "istanbul-lib-hook": "^3.0.0", + "istanbul-lib-instrument": "^4.0.0", + "istanbul-lib-processinfo": "^2.0.2", + "istanbul-lib-report": "^3.0.0", + "istanbul-lib-source-maps": "^4.0.0", + "istanbul-reports": "^3.0.2", + "make-dir": "^3.0.0", + "node-preload": "^0.2.1", + "p-map": "^3.0.0", + "process-on-spawn": "^1.0.0", + "resolve-from": "^5.0.0", + "rimraf": "^3.0.0", + "signal-exit": "^3.0.2", + "spawn-wrap": "^2.0.0", + "test-exclude": "^6.0.0", + "yargs": "^15.0.2" + }, + "bin": { + "nyc": "bin/nyc.js" + }, + "engines": { + "node": ">=8.9" + } + }, + "node_modules/nyc/node_modules/cliui": { + "version": "6.0.0", + "dev": true, + "license": "ISC", + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^6.2.0" + } + }, + "node_modules/nyc/node_modules/find-up": { + "version": "4.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/nyc/node_modules/locate-path": { + "version": "5.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "p-locate": 
"^4.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/nyc/node_modules/p-limit": { + "version": "2.3.0", + "dev": true, + "license": "MIT", + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/nyc/node_modules/p-locate": { + "version": "4.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/nyc/node_modules/y18n": { + "version": "4.0.3", + "dev": true, + "license": "ISC" + }, + "node_modules/nyc/node_modules/yargs": { + "version": "15.4.1", + "dev": true, + "license": "MIT", + "dependencies": { + "cliui": "^6.0.0", + "decamelize": "^1.2.0", + "find-up": "^4.1.0", + "get-caller-file": "^2.0.1", + "require-directory": "^2.1.1", + "require-main-filename": "^2.0.0", + "set-blocking": "^2.0.0", + "string-width": "^4.2.0", + "which-module": "^2.0.0", + "y18n": "^4.0.0", + "yargs-parser": "^18.1.2" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/nyc/node_modules/yargs-parser": { + "version": "18.1.3", + "dev": true, + "license": "ISC", + "dependencies": { + "camelcase": "^5.0.0", + "decamelize": "^1.2.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/nypm": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/nypm/-/nypm-0.6.0.tgz", + "integrity": "sha512-mn8wBFV9G9+UFHIrq+pZ2r2zL4aPau/by3kJb3cM7+5tQHMt6HGQB8FDIeKFYp8o0D2pnH6nVsO88N4AmUxIWg==", + "dev": true, + "license": "MIT", + "dependencies": { + "citty": "^0.1.6", + "consola": "^3.4.0", + "pathe": "^2.0.3", + "pkg-types": "^2.0.0", + "tinyexec": "^0.3.2" + }, + "bin": { + "nypm": "dist/cli.mjs" + }, + "engines": { + "node": "^14.16.0 || >=16.10.0" + } + }, + "node_modules/nypm/node_modules/tinyexec": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-0.3.2.tgz", + "integrity": 
"sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA==", + "dev": true, + "license": "MIT" + }, + "node_modules/object-assign": { + "version": "4.1.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-inspect": { + "version": "1.13.1", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/ohash": { + "version": "2.0.11", + "resolved": "https://registry.npmjs.org/ohash/-/ohash-2.0.11.tgz", + "integrity": "sha512-RdR9FQrFwNBNXAr4GixM8YaRZRJ5PUWbKYbE5eOsrwAjJW0q2REGcf79oYPsLyskQCZG1PLN+S/K1V00joZAoQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/on-finished": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", + "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", + "dev": true, + "license": "MIT", + "dependencies": { + "ee-first": "1.1.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/on-headers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.0.2.tgz", + "integrity": "sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/once": { + "version": "1.4.0", + "dev": true, + "license": "ISC", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/onetime": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-7.0.0.tgz", + "integrity": "sha512-VXJjc87FScF88uafS3JllDgvAm+c/Slfz06lorj2uAY34rlUu0Nt+v8wreiImcrgAjjIHp1rXpTDlLOGw29WwQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "mimic-function": "^5.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + 
"node_modules/open": { + "version": "10.1.2", + "resolved": "https://registry.npmjs.org/open/-/open-10.1.2.tgz", + "integrity": "sha512-cxN6aIDPz6rm8hbebcP7vrQNhvRcveZoJU72Y7vskh4oIm+BZwBECnx5nTmrlres1Qapvx27Qo1Auukpf8PKXw==", + "license": "MIT", + "dependencies": { + "default-browser": "^5.2.1", + "define-lazy-prop": "^3.0.0", + "is-inside-container": "^1.0.0", + "is-wsl": "^3.1.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ora": { + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/ora/-/ora-8.2.0.tgz", + "integrity": "sha512-weP+BZ8MVNnlCm8c0Qdc1WSWq4Qn7I+9CJGm7Qali6g44e/PUzbjNqJX5NJ9ljlNMosfJvg1fKEGILklK9cwnw==", + "dev": true, + "license": "MIT", + "dependencies": { + "chalk": "^5.3.0", + "cli-cursor": "^5.0.0", + "cli-spinners": "^2.9.2", + "is-interactive": "^2.0.0", + "is-unicode-supported": "^2.0.0", + "log-symbols": "^6.0.0", + "stdin-discarder": "^0.2.2", + "string-width": "^7.2.0", + "strip-ansi": "^7.1.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ora/node_modules/ansi-regex": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz", + "integrity": "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/ora/node_modules/chalk": { + "version": "5.4.1", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.4.1.tgz", + "integrity": "sha512-zgVZuo2WcZgfUEmsn6eO3kINexW8RAE4maiQ8QNs8CtpPCSyMiYsULR3HQYkm3w8FIA3SberyMJMSldGsW+U3w==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^12.17.0 || ^14.13 || >=16.0.0" + }, + "funding": { + "url": 
"https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/ora/node_modules/emoji-regex": { + "version": "10.4.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.4.0.tgz", + "integrity": "sha512-EC+0oUMY1Rqm4O6LLrgjtYDvcVYTy7chDnM4Q7030tP4Kwj3u/pR6gP9ygnp2CJMK5Gq+9Q2oqmrFJAz01DXjw==", + "dev": true, + "license": "MIT" + }, + "node_modules/ora/node_modules/is-unicode-supported": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-2.1.0.tgz", + "integrity": "sha512-mE00Gnza5EEB3Ds0HfMyllZzbBrmLOX3vfWoj9A9PEnTfratQ/BcaJOuMhnkhjXvb2+FkY3VuHqtAGpTPmglFQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ora/node_modules/log-symbols": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-6.0.0.tgz", + "integrity": "sha512-i24m8rpwhmPIS4zscNzK6MSEhk0DUWa/8iYQWxhffV8jkI4Phvs3F+quL5xvS0gdQR0FyTCMMH33Y78dDTzzIw==", + "dev": true, + "license": "MIT", + "dependencies": { + "chalk": "^5.3.0", + "is-unicode-supported": "^1.3.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ora/node_modules/log-symbols/node_modules/is-unicode-supported": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-1.3.0.tgz", + "integrity": "sha512-43r2mRvz+8JRIKnWJ+3j8JtjRKZ6GmjzfaE/qiBJnikNnYv/6bagRJ1kUhNk8R5EX/GkobD+r+sfxCPJsiKBLQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ora/node_modules/string-width": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-7.2.0.tgz", + "integrity": 
"sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^10.3.0", + "get-east-asian-width": "^1.0.0", + "strip-ansi": "^7.1.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ora/node_modules/strip-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/os-name": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/os-name/-/os-name-6.0.0.tgz", + "integrity": "sha512-bv608E0UX86atYi2GMGjDe0vF/X1TJjemNS8oEW6z22YW1Rc3QykSYoGfkQbX0zZX9H0ZB6CQP/3GTf1I5hURg==", + "dev": true, + "license": "MIT", + "dependencies": { + "macos-release": "^3.2.0", + "windows-release": "^6.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/os-tmpdir": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", + "integrity": "sha512-D2FR03Vir7FIu45XBY20mTb+/ZSWB00sjU9jdQXt83gDrI4Ztz5Fs7/yy74g2N5SVQY4xY1qDr4rNddwYRVX0g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/p-limit": { + "version": "3.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "5.0.0", + "dev": true, + "license": 
"MIT", + "dependencies": { + "p-limit": "^3.0.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-map": { + "version": "3.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "aggregate-error": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/p-try": { + "version": "2.2.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/pac-proxy-agent": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/pac-proxy-agent/-/pac-proxy-agent-7.2.0.tgz", + "integrity": "sha512-TEB8ESquiLMc0lV8vcd5Ql/JAKAoyzHFXaStwjkzpOpC5Yv+pIzLfHvjTSdf3vpa2bMiUQrg9i6276yn8666aA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@tootallnate/quickjs-emscripten": "^0.23.0", + "agent-base": "^7.1.2", + "debug": "^4.3.4", + "get-uri": "^6.0.1", + "http-proxy-agent": "^7.0.0", + "https-proxy-agent": "^7.0.6", + "pac-resolver": "^7.0.1", + "socks-proxy-agent": "^8.0.5" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/pac-resolver": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/pac-resolver/-/pac-resolver-7.0.1.tgz", + "integrity": "sha512-5NPgf87AT2STgwa2ntRMr45jTKrYBGkVU36yT0ig/n/GMAa3oPqhZfIQ2kMEimReg0+t9kZViDVZ83qfVUlckg==", + "dev": true, + "license": "MIT", + "dependencies": { + "degenerator": "^5.0.0", + "netmask": "^2.0.2" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/package-hash": { + "version": "4.0.0", + "dev": true, + "license": "ISC", + "dependencies": { + "graceful-fs": "^4.1.15", + "hasha": "^5.0.0", + "lodash.flattendeep": "^4.4.0", + "release-zalgo": "^1.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/parse-path": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/parse-path/-/parse-path-7.1.0.tgz", + "integrity": "sha512-EuCycjZtfPcjWk7KTksnJ5xPMvWGA/6i4zrLYhRG0hGvC3GPU/jGUj3Cy+ZR0v30duV3e23R95T1lE2+lsndSw==", 
+ "dev": true, + "license": "MIT", + "dependencies": { + "protocols": "^2.0.0" + } + }, + "node_modules/parse-url": { + "version": "9.2.0", + "resolved": "https://registry.npmjs.org/parse-url/-/parse-url-9.2.0.tgz", + "integrity": "sha512-bCgsFI+GeGWPAvAiUv63ZorMeif3/U0zaXABGJbOWt5OH2KCaPHF6S+0ok4aqM9RuIPGyZdx9tR9l13PsW4AYQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/parse-path": "^7.0.0", + "parse-path": "^7.0.0" + }, + "engines": { + "node": ">=14.13.0" + } + }, + "node_modules/parse5": { + "version": "7.3.0", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-7.3.0.tgz", + "integrity": "sha512-IInvU7fabl34qmi9gY8XOVxhYyMyuH2xUNpb2q8/Y+7552KlejkRvqvD19nMoUW/uQGGbqNpA6Tufu5FL5BZgw==", + "dev": true, + "license": "MIT", + "dependencies": { + "entities": "^6.0.0" + }, + "funding": { + "url": "https://github.com/inikulin/parse5?sponsor=1" + } + }, + "node_modules/parse5-htmlparser2-tree-adapter": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/parse5-htmlparser2-tree-adapter/-/parse5-htmlparser2-tree-adapter-7.1.0.tgz", + "integrity": "sha512-ruw5xyKs6lrpo9x9rCZqZZnIUntICjQAd0Wsmp396Ul9lN/h+ifgVV1x1gZHi8euej6wTfpqX8j+BFQxF0NS/g==", + "dev": true, + "license": "MIT", + "dependencies": { + "domhandler": "^5.0.3", + "parse5": "^7.0.0" + }, + "funding": { + "url": "https://github.com/inikulin/parse5?sponsor=1" + } + }, + "node_modules/parse5-parser-stream": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/parse5-parser-stream/-/parse5-parser-stream-7.1.2.tgz", + "integrity": "sha512-JyeQc9iwFLn5TbvvqACIF/VXG6abODeB3Fwmv/TGdLk2LfbWkaySGY72at4+Ty7EkPZj854u4CrICqNk2qIbow==", + "dev": true, + "license": "MIT", + "dependencies": { + "parse5": "^7.0.0" + }, + "funding": { + "url": "https://github.com/inikulin/parse5?sponsor=1" + } + }, + "node_modules/parse5/node_modules/entities": { + "version": "6.0.0", + "resolved": 
"https://registry.npmjs.org/entities/-/entities-6.0.0.tgz", + "integrity": "sha512-aKstq2TDOndCn4diEyp9Uq/Flu2i1GlLkc6XIDQSDMuaFE3OPW5OphLCyQ5SpSJZTb4reN+kTcYru5yIfXoRPw==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=0.12" + }, + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" + } + }, + "node_modules/parseurl": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", + "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/path-exists": { + "version": "4.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-to-regexp": { + "version": "6.2.1", + "dev": true, + "license": "MIT" + }, + "node_modules/pathe": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", + "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", + "dev": true, + "license": "MIT" + }, + "node_modules/perfect-debounce": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/perfect-debounce/-/perfect-debounce-1.0.0.tgz", + "integrity": "sha512-xCy9V055GLEqoFaHoC1SoLIaLmWctgCUaBaWxDZ7/Zx4CTyX7cJQLJOok/orfjZAh9kEYpjJa4d0KcJmCbctZA==", + "dev": true, + "license": "MIT" + }, + "node_modules/picocolors": { + "version": "1.0.0", + "dev": true, + "license": "ISC" + }, + "node_modules/picomatch": { + "version": "2.3.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": 
"https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/pify": { + "version": "2.3.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/pinkie": { + "version": "2.0.4", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/pinkie-promise": { + "version": "2.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "pinkie": "^2.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/pkg-dir": { + "version": "4.2.0", + "dev": true, + "license": "MIT", + "dependencies": { + "find-up": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/pkg-dir/node_modules/find-up": { + "version": "4.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/pkg-dir/node_modules/locate-path": { + "version": "5.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "p-locate": "^4.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/pkg-dir/node_modules/p-limit": { + "version": "2.3.0", + "dev": true, + "license": "MIT", + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/pkg-dir/node_modules/p-locate": { + "version": "4.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/pkg-types": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/pkg-types/-/pkg-types-2.1.0.tgz", + "integrity": "sha512-wmJwA+8ihJixSoHKxZJRBQG1oY8Yr9pGLzRmSsNms0iNWyHHAlZCa7mmKiFR10YPZuz/2k169JiS/inOjBCZ2A==", + "dev": true, + "license": "MIT", + "dependencies": { + "confbox": "^0.2.1", + "exsolve": "^1.0.1", + "pathe": "^2.0.3" + } + }, + "node_modules/process-on-spawn": { + "version": 
"1.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "fromentries": "^1.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/protocols": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/protocols/-/protocols-2.0.2.tgz", + "integrity": "sha512-hHVTzba3wboROl0/aWRRG9dMytgH6ow//STBZh43l/wQgmMhYhOFi0EHWAPtoCz9IAUymsyP0TSBHkhgMEGNnQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/proxy-addr": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", + "integrity": "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==", + "dev": true, + "license": "MIT", + "dependencies": { + "forwarded": "0.2.0", + "ipaddr.js": "1.9.1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/proxy-agent": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/proxy-agent/-/proxy-agent-6.5.0.tgz", + "integrity": "sha512-TmatMXdr2KlRiA2CyDu8GqR8EjahTG3aY3nXjdzFyoZbmB8hrBsTyMezhULIXKnC0jpfjlmiZ3+EaCzoInSu/A==", + "dev": true, + "license": "MIT", + "dependencies": { + "agent-base": "^7.1.2", + "debug": "^4.3.4", + "http-proxy-agent": "^7.0.1", + "https-proxy-agent": "^7.0.6", + "lru-cache": "^7.14.1", + "pac-proxy-agent": "^7.1.0", + "proxy-from-env": "^1.1.0", + "socks-proxy-agent": "^8.0.5" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/proxy-agent/node_modules/lru-cache": { + "version": "7.18.3", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", + "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/proxy-from-env": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", + "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==", + 
"dev": true, + "license": "MIT" + }, + "node_modules/qs": { + "version": "6.13.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz", + "integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "side-channel": "^1.0.6" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/random-bytes": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/random-bytes/-/random-bytes-1.0.0.tgz", + "integrity": "sha512-iv7LhNVO047HzYR3InF6pUcUsPQiHTM1Qal51DcGSuZFBil1aBBWG5eHPNek7bvILMaYJ/8RU1e8w1AMdHmLQQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/randombytes": { + "version": "2.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "safe-buffer": "^5.1.0" + } + }, + "node_modules/range-parser": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", + "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/raw-body": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz", + "integrity": 
"sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==", + "dev": true, + "license": "MIT", + "dependencies": { + "bytes": "3.1.2", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/rc9": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/rc9/-/rc9-2.1.2.tgz", + "integrity": "sha512-btXCnMmRIBINM2LDZoEmOogIZU7Qe7zn4BpomSKZ/ykbLObuBdvG+mFq11DL6fjH1DRwHhrlgtYWG96bJiC7Cg==", + "dev": true, + "license": "MIT", + "dependencies": { + "defu": "^6.1.4", + "destr": "^2.0.3" + } + }, + "node_modules/readdirp": { + "version": "3.6.0", + "dev": true, + "license": "MIT", + "dependencies": { + "picomatch": "^2.2.1" + }, + "engines": { + "node": ">=8.10.0" + } + }, + "node_modules/redis": { + "resolved": "packages/redis", + "link": true + }, + "node_modules/release-it": { + "version": "19.0.2", + "resolved": "https://registry.npmjs.org/release-it/-/release-it-19.0.2.tgz", + "integrity": "sha512-tGRCcKeXNOMrK9Qe+ZIgQiMlQgjV8PLxZjTq1XGlCk5u1qPgx+Pps0i8HIt667FDt0wLjFtvn5o9ItpitKnVUA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/webpro" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/webpro" + } + ], + "license": "MIT", + "dependencies": { + "@nodeutils/defaults-deep": "1.1.0", + "@octokit/rest": "21.1.1", + "@phun-ky/typeof": "1.2.8", + "async-retry": "1.3.3", + "c12": "3.0.3", + "ci-info": "^4.2.0", + "eta": "3.5.0", + "git-url-parse": "16.1.0", + "inquirer": "12.6.0", + "issue-parser": "7.0.1", + "lodash.get": "4.4.2", + "lodash.merge": "4.6.2", + "mime-types": "3.0.1", + "new-github-release-url": "2.0.0", + "open": "10.1.2", + "ora": "8.2.0", + "os-name": "6.0.0", + "proxy-agent": "6.5.0", + "semver": "7.7.1", + "tinyexec": "1.0.1", + "tinyglobby": "0.2.13", + "undici": "6.21.2", + "url-join": "5.0.0", + "wildcard-match": "5.1.4", + "yargs-parser": 
"21.1.1" + }, + "bin": { + "release-it": "bin/release-it.js" + }, + "engines": { + "node": "^20.12.0 || >=22.0.0" + } + }, + "node_modules/release-it/node_modules/mime-db": { + "version": "1.54.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.54.0.tgz", + "integrity": "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/release-it/node_modules/mime-types": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.1.tgz", + "integrity": "sha512-xRc4oEhT6eaBpU1XF7AjpOFD+xQmXNB5OVKwp4tqCuBpHLS/ZbBDrc07mYTDqVMg6PfxUjjNp85O6Cd2Z/5HWA==", + "dev": true, + "license": "MIT", + "dependencies": { + "mime-db": "^1.54.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/release-it/node_modules/semver": { + "version": "7.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.1.tgz", + "integrity": "sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/release-it/node_modules/undici": { + "version": "6.21.2", + "resolved": "https://registry.npmjs.org/undici/-/undici-6.21.2.tgz", + "integrity": "sha512-uROZWze0R0itiAKVPsYhFov9LxrPMHLMEQFszeI2gCN6bnIIZ8twzBCJcN2LJrBBLfrP0t1FW0g+JmKVl8Vk1g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18.17" + } + }, + "node_modules/release-it/node_modules/yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/release-zalgo": { + "version": "1.0.0", + "dev": true, + 
"license": "ISC", + "dependencies": { + "es6-error": "^4.0.1" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/require-directory": { + "version": "2.1.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/require-main-filename": { + "version": "2.0.0", + "dev": true, + "license": "ISC" + }, + "node_modules/resolve-from": { + "version": "5.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/resolve-pkg-maps": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/privatenumber/resolve-pkg-maps?sponsor=1" + } + }, + "node_modules/restore-cursor": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-5.1.0.tgz", + "integrity": "sha512-oMA2dcrw6u0YfxJQXm342bFKX/E4sG9rbTzO9ptUcR/e8A33cHuvStiYOwH7fszkZlZ1z/ta9AAoPk2F4qIOHA==", + "dev": true, + "license": "MIT", + "dependencies": { + "onetime": "^7.0.0", + "signal-exit": "^4.1.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/restore-cursor/node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/retry": { + "version": "0.13.1", + "resolved": "https://registry.npmjs.org/retry/-/retry-0.13.1.tgz", + "integrity": "sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/reusify": { + "version": "1.1.0", + "resolved": 
"https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz", + "integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==", + "dev": true, + "license": "MIT", + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/rimraf": { + "version": "3.0.2", + "dev": true, + "license": "ISC", + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/run-applescript": { + "version": "7.0.0", + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/run-async": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/run-async/-/run-async-3.0.0.tgz", + "integrity": "sha512-540WwVDOMxA6dN6We19EcT9sc3hkXPw5mzRNGM3FkdN/vtE9NFvj5lFAPNwUDmJjXidm3v7TC1cTE7t17Ulm1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/rxjs": { + "version": "7.8.2", + "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.8.2.tgz", + "integrity": "sha512-dhKf903U/PQZY6boNNtAGdWbG85WAbjT/1xYoZIC7FAY0yWapOBQVsVrDl58W86//e1VpMNBtRV4MaXfdMySFA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.1.0" + } + }, + "node_modules/safe-buffer": { + 
"version": "5.2.1", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "dev": true, + "license": "MIT" + }, + "node_modules/semver": { + "version": "6.3.1", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/send": { + "version": "0.19.0", + "resolved": "https://registry.npmjs.org/send/-/send-0.19.0.tgz", + "integrity": "sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw==", + "dev": true, + "license": "MIT", + "dependencies": { + "debug": "2.6.9", + "depd": "2.0.0", + "destroy": "1.2.0", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "fresh": "0.5.2", + "http-errors": "2.0.0", + "mime": "1.6.0", + "ms": "2.1.3", + "on-finished": "2.4.1", + "range-parser": "~1.2.1", + "statuses": "2.0.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/send/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/send/node_modules/debug/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", + "dev": true, + "license": "MIT" + }, + "node_modules/send/node_modules/encodeurl": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", + "integrity": 
"sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/serialize-javascript": { + "version": "6.0.0", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "randombytes": "^2.1.0" + } + }, + "node_modules/serve-static": { + "version": "1.16.2", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.16.2.tgz", + "integrity": "sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw==", + "dev": true, + "license": "MIT", + "dependencies": { + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + "parseurl": "~1.3.3", + "send": "0.19.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/set-blocking": { + "version": "2.0.0", + "dev": true, + "license": "ISC" + }, + "node_modules/set-function-length": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz", + "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==", + "dev": true, + "license": "MIT", + "dependencies": { + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", + "gopd": "^1.0.1", + "has-property-descriptors": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/setprototypeof": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", + "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==", + "dev": true, + "license": "ISC" + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "dev": true, + 
"license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/shiki": { + "version": "0.14.7", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-sequence-parser": "^1.1.0", + "jsonc-parser": "^3.2.0", + "vscode-oniguruma": "^1.7.0", + "vscode-textmate": "^8.0.0" + } + }, + "node_modules/side-channel": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz", + "integrity": "sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.4", + "object-inspect": "^1.13.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/signal-exit": { + "version": "3.0.7", + "dev": true, + "license": "ISC" + }, + "node_modules/sinon": { + "version": "17.0.1", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "@sinonjs/commons": "^3.0.0", + "@sinonjs/fake-timers": "^11.2.2", + "@sinonjs/samsam": "^8.0.0", + "diff": "^5.1.0", + "nise": "^5.1.5", + "supports-color": "^7.2.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/sinon" + } + }, + "node_modules/sinon/node_modules/diff": { + "version": "5.1.0", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.3.1" + } + }, + "node_modules/sinon/node_modules/supports-color": { + "version": "7.2.0", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/smart-buffer": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-4.2.0.tgz", + "integrity": "sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6.0.0", + 
"npm": ">= 3.0.0" + } + }, + "node_modules/socks": { + "version": "2.8.4", + "resolved": "https://registry.npmjs.org/socks/-/socks-2.8.4.tgz", + "integrity": "sha512-D3YaD0aRxR3mEcqnidIs7ReYJFVzWdd6fXJYUM8ixcQcJRGTka/b3saV0KflYhyVJXKhb947GndU35SxYNResQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ip-address": "^9.0.5", + "smart-buffer": "^4.2.0" + }, + "engines": { + "node": ">= 10.0.0", + "npm": ">= 3.0.0" + } + }, + "node_modules/socks-proxy-agent": { + "version": "8.0.5", + "resolved": "https://registry.npmjs.org/socks-proxy-agent/-/socks-proxy-agent-8.0.5.tgz", + "integrity": "sha512-HehCEsotFqbPW9sJ8WVYB6UbmIMv7kUUORIF2Nncq4VQvBfNBLibW9YZR5dlYCSUhwcD628pRllm7n+E+YTzJw==", + "dev": true, + "license": "MIT", + "dependencies": { + "agent-base": "^7.1.2", + "debug": "^4.3.4", + "socks": "^2.8.3" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/source-map": { + "version": "0.6.1", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/spawn-wrap": { + "version": "2.0.0", + "dev": true, + "license": "ISC", + "dependencies": { + "foreground-child": "^2.0.0", + "is-windows": "^1.0.2", + "make-dir": "^3.0.0", + "rimraf": "^3.0.0", + "signal-exit": "^3.0.2", + "which": "^2.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/sprintf-js": { + "version": "1.0.3", + "dev": true, + "license": "BSD-3-Clause" + }, + "node_modules/statuses": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", + "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/stdin-discarder": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/stdin-discarder/-/stdin-discarder-0.2.2.tgz", + "integrity": "sha512-UhDfHmA92YAlNnCfhmq0VeNL5bDbiZGg7sZ2IvPsXubGkiNa9EC+tUTsjBRsYUAz87btI6/1wf4XoVvQ3uRnmQ==", + 
"dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/stoppable": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/stoppable/-/stoppable-1.1.0.tgz", + "integrity": "sha512-KXDYZ9dszj6bzvnEMRYvxgeTHU74QBFL54XKtP3nyMuJ81CFYtABZ3bAzL2EdFUaEwJOBOgENyFj3R7oTzDyyw==", + "license": "MIT", + "engines": { + "node": ">=4", + "npm": ">=6" + } + }, + "node_modules/string-width": { + "version": "4.2.3", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-bom": { + "version": "4.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-final-newline": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-3.0.0.tgz", + "integrity": "sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/strip-json-comments": { + "version": "3.1.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/strip-outer": { + "version": "1.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "escape-string-regexp": "^1.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/strip-outer/node_modules/escape-string-regexp": { + "version": "1.0.5", + "dev": true, + "license": 
"MIT", + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/supports-color": { + "version": "8.1.1", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, + "node_modules/test-exclude": { + "version": "6.0.0", + "dev": true, + "license": "ISC", + "dependencies": { + "@istanbuljs/schema": "^0.1.2", + "glob": "^7.1.4", + "minimatch": "^3.0.4" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/tinyexec": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-1.0.1.tgz", + "integrity": "sha512-5uC6DDlmeqiOwCPmK9jMSdOuZTh8bU39Ys6yidB+UTt5hfZUPGAypSgFRiEp+jbi9qH40BLDvy85jIU88wKSqw==", + "dev": true, + "license": "MIT" + }, + "node_modules/tinyglobby": { + "version": "0.2.13", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.13.tgz", + "integrity": "sha512-mEwzpUgrLySlveBwEVDMKk5B57bhLPYovRfPAXD5gA/98Opn0rCDj3GtLwFvCvH5RK9uPCExUROW5NjDwvqkxw==", + "dev": true, + "license": "MIT", + "dependencies": { + "fdir": "^6.4.4", + "picomatch": "^4.0.2" + }, + "engines": { + "node": ">=12.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/SuperchupuDev" + } + }, + "node_modules/tinyglobby/node_modules/fdir": { + "version": "6.4.4", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.4.4.tgz", + "integrity": "sha512-1NZP+GK4GfuAv3PqKvxQRDMjdSRZjnkq7KfhlNrCNNlZ0ygQFpebfrnfnq/W7fpUnAv9aGWmY1zKx7FYL3gwhg==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + "node_modules/tinyglobby/node_modules/picomatch": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.2.tgz", + "integrity": 
"sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/tmp": { + "version": "0.0.33", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz", + "integrity": "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==", + "dev": true, + "license": "MIT", + "dependencies": { + "os-tmpdir": "~1.0.2" + }, + "engines": { + "node": ">=0.6.0" + } + }, + "node_modules/to-fast-properties": { + "version": "2.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/toidentifier": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", + "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.6" + } + }, + "node_modules/trim-repeated": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "escape-string-regexp": "^1.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/trim-repeated/node_modules/escape-string-regexp": { + "version": "1.0.5", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/ts-node": { + "version": "10.9.2", + "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.2.tgz", + "integrity": 
"sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@cspotcode/source-map-support": "^0.8.0", + "@tsconfig/node10": "^1.0.7", + "@tsconfig/node12": "^1.0.7", + "@tsconfig/node14": "^1.0.0", + "@tsconfig/node16": "^1.0.2", + "acorn": "^8.4.1", + "acorn-walk": "^8.1.1", + "arg": "^4.1.0", + "create-require": "^1.1.0", + "diff": "^4.0.1", + "make-error": "^1.1.1", + "v8-compile-cache-lib": "^3.0.1", + "yn": "3.1.1" + }, + "bin": { + "ts-node": "dist/bin.js", + "ts-node-cwd": "dist/bin-cwd.js", + "ts-node-esm": "dist/bin-esm.js", + "ts-node-script": "dist/bin-script.js", + "ts-node-transpile-only": "dist/bin-transpile.js", + "ts-script": "dist/bin-script-deprecated.js" + }, + "peerDependencies": { + "@swc/core": ">=1.2.50", + "@swc/wasm": ">=1.2.50", + "@types/node": "*", + "typescript": ">=2.7" + }, + "peerDependenciesMeta": { + "@swc/core": { + "optional": true + }, + "@swc/wasm": { + "optional": true + } + } + }, + "node_modules/ts-node/node_modules/diff": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", + "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.3.1" + } + }, + "node_modules/tslib": { + "version": "2.6.2", + "license": "0BSD" + }, + "node_modules/tsx": { + "version": "4.7.0", + "dev": true, + "license": "MIT", + "dependencies": { + "esbuild": "~0.19.10", + "get-tsconfig": "^4.7.2" + }, + "bin": { + "tsx": "dist/cli.mjs" + }, + "engines": { + "node": ">=18.0.0" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + } + }, + "node_modules/type-detect": { + "version": "4.0.8", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/type-fest": { + "version": "0.8.1", + "dev": true, + "license": "(MIT OR CC0-1.0)", + "engines": { + "node": ">=8" 
+ } + }, + "node_modules/type-is": { + "version": "1.6.18", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", + "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", + "dev": true, + "license": "MIT", + "dependencies": { + "media-typer": "0.3.0", + "mime-types": "~2.1.24" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/typedarray-to-buffer": { + "version": "3.1.5", + "dev": true, + "license": "MIT", + "dependencies": { + "is-typedarray": "^1.0.0" + } + }, + "node_modules/typedoc": { + "version": "0.25.7", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "lunr": "^2.3.9", + "marked": "^4.3.0", + "minimatch": "^9.0.3", + "shiki": "^0.14.7" + }, + "bin": { + "typedoc": "bin/typedoc" + }, + "engines": { + "node": ">= 16" + }, + "peerDependencies": { + "typescript": "4.6.x || 4.7.x || 4.8.x || 4.9.x || 5.0.x || 5.1.x || 5.2.x || 5.3.x" + } + }, + "node_modules/typedoc/node_modules/brace-expansion": { + "version": "2.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/typedoc/node_modules/minimatch": { + "version": "9.0.3", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/typescript": { + "version": "5.3.3", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/uid-safe": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/uid-safe/-/uid-safe-2.1.5.tgz", + "integrity": "sha512-KPHm4VL5dDXKz01UuEd88Df+KzynaohSL9fBh096KWAxSKZQDI2uBrVqtvRM4rwrIrRRKsdLNML/lnaaVSRioA==", + "dev": true, + "license": "MIT", + "dependencies": { + "random-bytes": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + 
} + }, + "node_modules/undici": { + "version": "6.21.3", + "resolved": "https://registry.npmjs.org/undici/-/undici-6.21.3.tgz", + "integrity": "sha512-gBLkYIlEnSp8pFbT64yFgGE6UIB9tAkhukC23PmMDCe5Nd+cRqKxSjw5y54MK2AZMgZfJWMaNE4nYUHgi1XEOw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18.17" + } + }, + "node_modules/undici-types": { + "version": "5.26.5", + "dev": true, + "license": "MIT" + }, + "node_modules/universal-user-agent": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-7.0.3.tgz", + "integrity": "sha512-TmnEAEAsBJVZM/AADELsK76llnwcf9vMKuPz8JflO1frO8Lchitr0fNaN9d+Ap0BjKtqWqd/J17qeDnXh8CL2A==", + "dev": true, + "license": "ISC" + }, + "node_modules/universalify": { + "version": "2.0.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 10.0.0" + } + }, + "node_modules/unpipe": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", + "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/update-browserslist-db": { + "version": "1.0.13", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "escalade": "^3.1.1", + "picocolors": "^1.0.0" + }, + "bin": { + "update-browserslist-db": "cli.js" + }, + "peerDependencies": { + "browserslist": ">= 4.21.0" + } + }, + "node_modules/url-join": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/url-join/-/url-join-5.0.0.tgz", + "integrity": "sha512-n2huDr9h9yzd6exQVnH/jU5mr+Pfx08LRXXZhkLLetAMESRj+anQsTAh940iMrIetKAmry9coFuZQ2jY8/p3WA==", + 
"dev": true, + "license": "MIT", + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + } + }, + "node_modules/utils-merge": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", + "integrity": "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/uuid": { + "version": "8.3.2", + "license": "MIT", + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/v8-compile-cache-lib": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz", + "integrity": "sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==", + "dev": true, + "license": "MIT" + }, + "node_modules/vary": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", + "integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/vscode-oniguruma": { + "version": "1.7.0", + "dev": true, + "license": "MIT" + }, + "node_modules/vscode-textmate": { + "version": "8.0.0", + "dev": true, + "license": "MIT" + }, + "node_modules/whatwg-encoding": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/whatwg-encoding/-/whatwg-encoding-3.1.1.tgz", + "integrity": "sha512-6qN4hJdMwfYBtE3YBTTHhoeuUrDBPZmbQaxWAqSALV/MeEnR5z1xd8UKud2RAkFoPkmB+hli1TZSnyi84xz1vQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "iconv-lite": "0.6.3" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/whatwg-encoding/node_modules/iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": 
"sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "dev": true, + "license": "MIT", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/whatwg-mimetype": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-4.0.0.tgz", + "integrity": "sha512-QaKxh0eNIi2mE9p2vEdzfagOKHCcj1pJ56EEHGQOVxp8r9/iszLUUV7v89x9O1p/T+NlTM5W7jW6+cz4Fq1YVg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + } + }, + "node_modules/which": { + "version": "2.0.2", + "dev": true, + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/which-module": { + "version": "2.0.1", + "dev": true, + "license": "ISC" + }, + "node_modules/wildcard-match": { + "version": "5.1.4", + "resolved": "https://registry.npmjs.org/wildcard-match/-/wildcard-match-5.1.4.tgz", + "integrity": "sha512-wldeCaczs8XXq7hj+5d/F38JE2r7EXgb6WQDM84RVwxy81T/sxB5e9+uZLK9Q9oNz1mlvjut+QtvgaOQFPVq/g==", + "dev": true, + "license": "ISC" + }, + "node_modules/windows-release": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/windows-release/-/windows-release-6.1.0.tgz", + "integrity": "sha512-1lOb3qdzw6OFmOzoY0nauhLG72TpWtb5qgYPiSh/62rjc1XidBSDio2qw0pwHh17VINF217ebIkZJdFLZFn9SA==", + "dev": true, + "license": "MIT", + "dependencies": { + "execa": "^8.0.1" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/workerpool": { + "version": "6.2.1", + "dev": true, + "license": "Apache-2.0" + }, + "node_modules/wrap-ansi": { + "version": "6.2.0", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=8" + } + }, + 
"node_modules/wrappy": { + "version": "1.0.2", + "dev": true, + "license": "ISC" + }, + "node_modules/write-file-atomic": { + "version": "3.0.3", + "dev": true, + "license": "ISC", + "dependencies": { + "imurmurhash": "^0.1.4", + "is-typedarray": "^1.0.0", + "signal-exit": "^3.0.2", + "typedarray-to-buffer": "^3.1.5" + } + }, + "node_modules/y18n": { + "version": "5.0.8", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=10" + } + }, + "node_modules/yallist": { + "version": "3.1.1", + "dev": true, + "license": "ISC" + }, + "node_modules/yargs": { + "version": "16.2.0", + "dev": true, + "license": "MIT", + "dependencies": { + "cliui": "^7.0.2", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.0", + "y18n": "^5.0.5", + "yargs-parser": "^20.2.2" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/yargs-parser": { + "version": "20.2.4", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=10" + } + }, + "node_modules/yargs-unparser": { + "version": "2.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "camelcase": "^6.0.0", + "decamelize": "^4.0.0", + "flat": "^5.0.2", + "is-plain-obj": "^2.1.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/yargs-unparser/node_modules/camelcase": { + "version": "6.3.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/yargs-unparser/node_modules/decamelize": { + "version": "4.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/yn": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", + "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==", + "dev": true, + "license": 
"MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/yoctocolors-cjs": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/yoctocolors-cjs/-/yoctocolors-cjs-2.1.2.tgz", + "integrity": "sha512-cYVsTjKl8b+FrnidjibDWskAv7UKOfcwaVZdp/it9n1s9fU3IkgDbhdIRKCW4JDsAlECJY0ytoVPT3sK6kideA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "packages/bloom": { + "name": "@redis/bloom", + "version": "5.9.0-beta.2", + "license": "MIT", + "devDependencies": { + "@redis/test-utils": "*" + }, + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "@redis/client": "^5.9.0-beta.2" + } + }, + "packages/client": { + "name": "@redis/client", + "version": "5.9.0-beta.2", + "license": "MIT", + "dependencies": { + "cluster-key-slot": "1.1.2" + }, + "devDependencies": { + "@redis/test-utils": "*", + "@types/sinon": "^17.0.3", + "sinon": "^17.0.1" + }, + "engines": { + "node": ">= 18" + } + }, + "packages/entraid": { + "name": "@redis/entraid", + "version": "5.9.0-beta.2", + "license": "MIT", + "dependencies": { + "@azure/identity": "^4.7.0", + "@azure/msal-node": "^2.16.1" + }, + "devDependencies": { + "@redis/test-utils": "*", + "@types/express": "^4.17.21", + "@types/express-session": "^1.18.0", + "@types/node": "^22.9.0", + "dotenv": "^16.3.1", + "express": "^4.21.1", + "express-session": "^1.18.1" + }, + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "@redis/client": "^5.9.0-beta.2" + } + }, + "packages/entraid/node_modules/@types/node": { + "version": "22.10.2", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.10.2.tgz", + "integrity": 
"sha512-Xxr6BBRCAOQixvonOye19wnzyDiUtTeqldOOmj3CkeblonbccA12PFwlufvRdrpjXxqnmUaeiU5EOA+7s5diUQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "undici-types": "~6.20.0" + } + }, + "packages/entraid/node_modules/undici-types": { + "version": "6.20.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.20.0.tgz", + "integrity": "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg==", + "dev": true, + "license": "MIT" + }, + "packages/graph": { + "name": "@redis/graph", + "version": "5.0.0-next.6", + "extraneous": true, + "license": "MIT", + "devDependencies": { + "@redis/test-utils": "*" + }, + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "@redis/client": "^5.0.0-next.6" + } + }, + "packages/json": { + "name": "@redis/json", + "version": "5.9.0-beta.2", + "license": "MIT", + "devDependencies": { + "@redis/test-utils": "*" + }, + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "@redis/client": "^5.9.0-beta.2" + } + }, + "packages/redis": { + "version": "5.9.0-beta.2", + "license": "MIT", + "dependencies": { + "@redis/bloom": "5.9.0-beta.2", + "@redis/client": "5.9.0-beta.2", + "@redis/json": "5.9.0-beta.2", + "@redis/search": "5.9.0-beta.2", + "@redis/time-series": "5.9.0-beta.2" + }, + "engines": { + "node": ">= 18" + } + }, + "packages/search": { + "name": "@redis/search", + "version": "5.9.0-beta.2", + "license": "MIT", + "devDependencies": { + "@redis/test-utils": "*" + }, + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "@redis/client": "^5.9.0-beta.2" + } + }, + "packages/test-utils": { + "name": "@redis/test-utils", + "devDependencies": { + "@types/yargs": "^17.0.32", + "yargs": "^17.7.2" + }, + "peerDependencies": { + "@redis/client": "*" + } + }, + "packages/test-utils/node_modules/cliui": { + "version": "8.0.1", + "dev": true, + "license": "ISC", + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": 
"^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "packages/test-utils/node_modules/wrap-ansi": { + "version": "7.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "packages/test-utils/node_modules/yargs": { + "version": "17.7.2", + "dev": true, + "license": "MIT", + "dependencies": { + "cliui": "^8.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.1.1" + }, + "engines": { + "node": ">=12" + } + }, + "packages/test-utils/node_modules/yargs-parser": { + "version": "21.1.1", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "packages/time-series": { + "name": "@redis/time-series", + "version": "5.9.0-beta.2", + "license": "MIT", + "devDependencies": { + "@redis/test-utils": "*" + }, + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "@redis/client": "^5.9.0-beta.2" + } + } + } +} diff --git a/package.json b/package.json index cf1248cd18e..e192e69d55e 100644 --- a/package.json +++ b/package.json @@ -1,29 +1,36 @@ -{ "name" : "redis", - "version" : "0.7.1", - "description" : "Redis client library", - "author": "Matt Ranney ", - "contributors": [ - "Rick Olson", - "Tim-Smart", - "TJ Holowaychuk", - "Orion Henry", - "Hank Sims", - "Aivo Paas", - "Paul Carey", - "Pieter Noordhuis", - "Andy Ray", - "Vladimir Dronnikov", - "Dave Hoover" - ], - "main": "./index.js", - "scripts": { - "test": "node ./test.js" - }, - "devDependencies": { - "metrics": ">=0.1.5" - }, - "repository": { - "type": "git", - "url": "git://github.com/mranney/node_redis.git" - } +{ + "name": "redis-monorepo", + "private": true, + "workspaces": [ + "./packages/client", + "./packages/test-utils", + 
"./packages/bloom", + "./packages/json", + "./packages/search", + "./packages/time-series", + "./packages/entraid", + "./packages/redis" + ], + "scripts": { + "test-single": "TS_NODE_PROJECT='./packages/test-utils/tsconfig.json' mocha --require ts-node/register/transpile-only ", + "test": "npm run test -ws --if-present", + "build": "tsc --build", + "documentation": "typedoc --out ./documentation", + "gh-pages": "gh-pages -d ./documentation -e ./documentation -u 'documentation-bot '", + "release": "npm run release --workspaces --if-present --" + }, + "devDependencies": { + "@istanbuljs/nyc-config-typescript": "^1.0.2", + "@release-it/bumper": "^7.0.5", + "@types/mocha": "^10.0.6", + "@types/node": "^20.11.16", + "gh-pages": "^6.1.1", + "mocha": "^10.2.0", + "nyc": "^15.1.0", + "release-it": "^19.0.2", + "ts-node": "^10.9.2", + "tsx": "^4.7.0", + "typedoc": "^0.25.7", + "typescript": "^5.3.3" + } } diff --git a/packages/bloom/.nycrc.json b/packages/bloom/.nycrc.json new file mode 100644 index 00000000000..367a89ad32c --- /dev/null +++ b/packages/bloom/.nycrc.json @@ -0,0 +1,4 @@ +{ + "extends": "@istanbuljs/nyc-config-typescript", + "exclude": ["dist", "**/*.spec.ts", "lib/test-utils.ts"] +} diff --git a/packages/bloom/.release-it.json b/packages/bloom/.release-it.json new file mode 100644 index 00000000000..23e1cf09078 --- /dev/null +++ b/packages/bloom/.release-it.json @@ -0,0 +1,22 @@ +{ + "npm": { + "publish": true, + "publishArgs": ["--access", "public"] + }, + "git": { + "tagName": "bloom@${version}", + "tagMatch": "bloom@*", + "commitMessage": "Release ${tagName}", + "tagAnnotation": "Release ${tagName}", + "commitArgs": "--all" + }, + "plugins": { + "@release-it/bumper": { + "out": { + "file": "package.json", + "path": ["peerDependencies.@redis/client"], + "versionPrefix": "^" + } + } + } +} diff --git a/packages/bloom/README.md b/packages/bloom/README.md new file mode 100644 index 00000000000..e527ff5552c --- /dev/null +++ b/packages/bloom/README.md @@ -0,0 
+1,17 @@ +# @redis/bloom + +This package provides support for the [RedisBloom](https://redis.io/docs/data-types/probabilistic/) module, which adds additional probabilistic data structures to Redis. + +Should be used with [`redis`/`@redis/client`](https://github.com/redis/node-redis). + +:warning: To use these extra commands, your Redis server must have the RedisBloom module installed. + +RedisBloom provides the following probabilistic data structures: + +* Bloom Filter: for checking set membership with a high degree of certainty. +* Cuckoo Filter: for checking set membership with a high degree of certainty. +* T-Digest: for estimating the quantiles of a stream of data. +* Top-K: Maintain a list of k most frequently seen items. +* Count-Min Sketch: Determine the frequency of events in a stream. + +For some examples, see [`bloom-filter.js`](https://github.com/redis/node-redis/tree/master/examples/bloom-filter.js), [`cuckoo-filter.js`](https://github.com/redis/node-redis/tree/master/examples/cuckoo-filter.js), [`count-min-sketch.js`](https://github.com/redis/node-redis/tree/master/examples/count-min-sketch.js) and [`topk.js`](https://github.com/redis/node-redis/tree/master/examples/topk.js) in the [examples folder](https://github.com/redis/node-redis/tree/master/examples). 
diff --git a/packages/bloom/lib/commands/bloom/ADD.spec.ts b/packages/bloom/lib/commands/bloom/ADD.spec.ts new file mode 100644 index 00000000000..a229936c7df --- /dev/null +++ b/packages/bloom/lib/commands/bloom/ADD.spec.ts @@ -0,0 +1,20 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../../test-utils'; +import ADD from './ADD'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('BF.ADD', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(ADD, 'key', 'item'), + ['BF.ADD', 'key', 'item'] + ); + }); + + testUtils.testWithClient('client.bf.add', async client => { + assert.equal( + await client.bf.add('key', 'item'), + true + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/bloom/lib/commands/bloom/ADD.ts b/packages/bloom/lib/commands/bloom/ADD.ts new file mode 100644 index 00000000000..bf976606997 --- /dev/null +++ b/packages/bloom/lib/commands/bloom/ADD.ts @@ -0,0 +1,19 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, Command } from '@redis/client/dist/lib/RESP/types'; +import { transformBooleanReply } from '@redis/client/dist/lib/commands/generic-transformers'; + +export default { + IS_READ_ONLY: false, + /** + * Adds an item to a Bloom Filter + * @param parser - The command parser + * @param key - The name of the Bloom filter + * @param item - The item to add to the filter + */ + parseCommand(parser: CommandParser, key: RedisArgument, item: RedisArgument) { + parser.push('BF.ADD'); + parser.pushKey(key); + parser.push(item); + }, + transformReply: transformBooleanReply +} as const satisfies Command; diff --git a/packages/bloom/lib/commands/bloom/CARD.spec.ts b/packages/bloom/lib/commands/bloom/CARD.spec.ts new file mode 100644 index 00000000000..32a28cdf6f7 --- /dev/null +++ b/packages/bloom/lib/commands/bloom/CARD.spec.ts @@ -0,0 +1,20 @@ +import { strict as assert } from 'node:assert'; +import testUtils, 
{ GLOBAL } from '../../test-utils'; +import CARD from './CARD'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('BF.CARD', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(CARD, 'bloom'), + ['BF.CARD', 'bloom'] + ); + }); + + testUtils.testWithClient('client.bf.card', async client => { + assert.equal( + await client.bf.card('key'), + 0 + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/bloom/lib/commands/bloom/CARD.ts b/packages/bloom/lib/commands/bloom/CARD.ts new file mode 100644 index 00000000000..e1873e1faf1 --- /dev/null +++ b/packages/bloom/lib/commands/bloom/CARD.ts @@ -0,0 +1,16 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, NumberReply, Command } from '@redis/client/dist/lib/RESP/types'; + +export default { + IS_READ_ONLY: true, + /** + * Returns the cardinality (number of items) in a Bloom Filter + * @param parser - The command parser + * @param key - The name of the Bloom filter to query + */ + parseCommand(parser: CommandParser, key: RedisArgument) { + parser.push('BF.CARD'); + parser.pushKey(key); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/bloom/lib/commands/bloom/EXISTS.spec.ts b/packages/bloom/lib/commands/bloom/EXISTS.spec.ts new file mode 100644 index 00000000000..4d2cc70074a --- /dev/null +++ b/packages/bloom/lib/commands/bloom/EXISTS.spec.ts @@ -0,0 +1,20 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../../test-utils'; +import EXISTS from './EXISTS'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('BF.EXISTS', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(EXISTS, 'key', 'item'), + ['BF.EXISTS', 'key', 'item'] + ); + }); + + testUtils.testWithClient('client.bf.exists', async client => { + assert.equal( + await client.bf.exists('key', 
'item'), + false + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/bloom/lib/commands/bloom/EXISTS.ts b/packages/bloom/lib/commands/bloom/EXISTS.ts new file mode 100644 index 00000000000..db16253e2c7 --- /dev/null +++ b/packages/bloom/lib/commands/bloom/EXISTS.ts @@ -0,0 +1,19 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, Command } from '@redis/client/dist/lib/RESP/types'; +import { transformBooleanReply } from '@redis/client/dist/lib/commands/generic-transformers'; + +export default { + IS_READ_ONLY: true, + /** + * Checks if an item exists in a Bloom Filter + * @param parser - The command parser + * @param key - The name of the Bloom filter + * @param item - The item to check for existence + */ + parseCommand(parser: CommandParser, key: RedisArgument, item: RedisArgument) { + parser.push('BF.EXISTS'); + parser.pushKey(key); + parser.push(item); + }, + transformReply: transformBooleanReply +} as const satisfies Command; diff --git a/packages/bloom/lib/commands/bloom/INFO.spec.ts b/packages/bloom/lib/commands/bloom/INFO.spec.ts new file mode 100644 index 00000000000..0dbe5cb1f43 --- /dev/null +++ b/packages/bloom/lib/commands/bloom/INFO.spec.ts @@ -0,0 +1,27 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../../test-utils'; +import INFO from './INFO'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('BF.INFO', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(INFO, 'bloom'), + ['BF.INFO', 'bloom'] + ); + }); + + testUtils.testWithClient('client.bf.info', async client => { + const [, reply] = await Promise.all([ + client.bf.reserve('key', 0.01, 100), + client.bf.info('key') + ]); + + assert.equal(typeof reply, 'object'); + assert.equal(reply['Capacity'], 100); + assert.equal(typeof reply['Size'], 'number'); + assert.equal(typeof reply['Number of filters'], 'number'); + assert.equal(typeof 
reply['Number of items inserted'], 'number'); + assert.equal(typeof reply['Expansion rate'], 'number'); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/bloom/lib/commands/bloom/INFO.ts b/packages/bloom/lib/commands/bloom/INFO.ts new file mode 100644 index 00000000000..bdf7d0fda9b --- /dev/null +++ b/packages/bloom/lib/commands/bloom/INFO.ts @@ -0,0 +1,30 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, Command, UnwrapReply, NullReply, NumberReply, TuplesToMapReply, Resp2Reply, SimpleStringReply, TypeMapping } from '@redis/client/dist/lib/RESP/types'; +import { transformInfoV2Reply } from './helpers'; + +export type BfInfoReplyMap = TuplesToMapReply<[ + [SimpleStringReply<'Capacity'>, NumberReply], + [SimpleStringReply<'Size'>, NumberReply], + [SimpleStringReply<'Number of filters'>, NumberReply], + [SimpleStringReply<'Number of items inserted'>, NumberReply], + [SimpleStringReply<'Expansion rate'>, NullReply | NumberReply] +]>; + +export default { + IS_READ_ONLY: true, + /** + * Returns information about a Bloom Filter, including capacity, size, number of filters, items inserted, and expansion rate + * @param parser - The command parser + * @param key - The name of the Bloom filter to get information about + */ + parseCommand(parser: CommandParser, key: RedisArgument) { + parser.push('BF.INFO'); + parser.pushKey(key); + }, + transformReply: { + 2: (reply: UnwrapReply>, _, typeMapping?: TypeMapping): BfInfoReplyMap => { + return transformInfoV2Reply(reply, typeMapping); + }, + 3: undefined as unknown as () => BfInfoReplyMap + } +} as const satisfies Command; diff --git a/packages/bloom/lib/commands/bloom/INSERT.spec.ts b/packages/bloom/lib/commands/bloom/INSERT.spec.ts new file mode 100644 index 00000000000..a9b544a51ae --- /dev/null +++ b/packages/bloom/lib/commands/bloom/INSERT.spec.ts @@ -0,0 +1,70 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../../test-utils'; 
+import INSERT from './INSERT'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('BF.INSERT', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(INSERT, 'key', 'item'), + ['BF.INSERT', 'key', 'ITEMS', 'item'] + ); + }); + + it('with CAPACITY', () => { + assert.deepEqual( + parseArgs(INSERT, 'key', 'item', { CAPACITY: 100 }), + ['BF.INSERT', 'key', 'CAPACITY', '100', 'ITEMS', 'item'] + ); + }); + + it('with ERROR', () => { + assert.deepEqual( + parseArgs(INSERT, 'key', 'item', { ERROR: 0.01 }), + ['BF.INSERT', 'key', 'ERROR', '0.01', 'ITEMS', 'item'] + ); + }); + + it('with EXPANSION', () => { + assert.deepEqual( + parseArgs(INSERT, 'key', 'item', { EXPANSION: 1 }), + ['BF.INSERT', 'key', 'EXPANSION', '1', 'ITEMS', 'item'] + ); + }); + + it('with NOCREATE', () => { + assert.deepEqual( + parseArgs(INSERT, 'key', 'item', { NOCREATE: true }), + ['BF.INSERT', 'key', 'NOCREATE', 'ITEMS', 'item'] + ); + }); + + it('with NONSCALING', () => { + assert.deepEqual( + parseArgs(INSERT, 'key', 'item', { NONSCALING: true }), + ['BF.INSERT', 'key', 'NONSCALING', 'ITEMS', 'item'] + ); + }); + + it('with CAPACITY, ERROR, EXPANSION, NOCREATE and NONSCALING', () => { + assert.deepEqual( + parseArgs(INSERT, 'key', 'item', { + CAPACITY: 100, + ERROR: 0.01, + EXPANSION: 1, + NOCREATE: true, + NONSCALING: true + }), + ['BF.INSERT', 'key', 'CAPACITY', '100', 'ERROR', '0.01', 'EXPANSION', '1', 'NOCREATE', 'NONSCALING', 'ITEMS', 'item'] + ); + }); + }); + + testUtils.testWithClient('client.bf.insert', async client => { + assert.deepEqual( + await client.bf.insert('key', 'item'), + [true] + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/bloom/lib/commands/bloom/INSERT.ts b/packages/bloom/lib/commands/bloom/INSERT.ts new file mode 100644 index 00000000000..c607e015694 --- /dev/null +++ b/packages/bloom/lib/commands/bloom/INSERT.ts @@ -0,0 +1,61 @@ +import { CommandParser } from 
'@redis/client/dist/lib/client/parser'; +import { RedisArgument, Command } from '@redis/client/dist/lib/RESP/types'; +import { RedisVariadicArgument } from '@redis/client/dist/lib/commands/generic-transformers'; +import { transformBooleanArrayReply } from '@redis/client/dist/lib/commands/generic-transformers'; + +export interface BfInsertOptions { + CAPACITY?: number; + ERROR?: number; + EXPANSION?: number; + NOCREATE?: boolean; + NONSCALING?: boolean; +} + +export default { + IS_READ_ONLY: false, + /** + * Adds one or more items to a Bloom Filter, creating it if it does not exist + * @param parser - The command parser + * @param key - The name of the Bloom filter + * @param items - One or more items to add to the filter + * @param options - Optional parameters for filter creation + * @param options.CAPACITY - Desired capacity for a new filter + * @param options.ERROR - Desired error rate for a new filter + * @param options.EXPANSION - Expansion rate for a new filter + * @param options.NOCREATE - If true, prevents automatic filter creation + * @param options.NONSCALING - Prevents the filter from creating additional sub-filters + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + items: RedisVariadicArgument, + options?: BfInsertOptions + ) { + parser.push('BF.INSERT'); + parser.pushKey(key); + + if (options?.CAPACITY !== undefined) { + parser.push('CAPACITY', options.CAPACITY.toString()); + } + + if (options?.ERROR !== undefined) { + parser.push('ERROR', options.ERROR.toString()); + } + + if (options?.EXPANSION !== undefined) { + parser.push('EXPANSION', options.EXPANSION.toString()); + } + + if (options?.NOCREATE) { + parser.push('NOCREATE'); + } + + if (options?.NONSCALING) { + parser.push('NONSCALING'); + } + + parser.push('ITEMS'); + parser.pushVariadic(items); + }, + transformReply: transformBooleanArrayReply +} as const satisfies Command; diff --git a/packages/bloom/lib/commands/bloom/LOADCHUNK.spec.ts 
b/packages/bloom/lib/commands/bloom/LOADCHUNK.spec.ts new file mode 100644 index 00000000000..40e24f96c39 --- /dev/null +++ b/packages/bloom/lib/commands/bloom/LOADCHUNK.spec.ts @@ -0,0 +1,36 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../../test-utils'; +import LOADCHUNK from './LOADCHUNK'; +import { RESP_TYPES } from '@redis/client'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('BF.LOADCHUNK', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(LOADCHUNK, 'key', 0, ''), + ['BF.LOADCHUNK', 'key', '0', ''] + ); + }); + + testUtils.testWithClient('client.bf.loadChunk', async client => { + const [, { iterator, chunk }] = await Promise.all([ + client.bf.reserve('source', 0.01, 100), + client.bf.scanDump('source', 0) + ]); + + assert.equal( + await client.bf.loadChunk('destination', iterator, chunk), + 'OK' + ); + }, { + ...GLOBAL.SERVERS.OPEN, + clientOptions: { + ...GLOBAL.SERVERS.OPEN.clientOptions, + commandOptions: { + typeMapping: { + [RESP_TYPES.BLOB_STRING]: Buffer + } + } + } + }); +}); diff --git a/packages/bloom/lib/commands/bloom/LOADCHUNK.ts b/packages/bloom/lib/commands/bloom/LOADCHUNK.ts new file mode 100644 index 00000000000..d0af9d7a644 --- /dev/null +++ b/packages/bloom/lib/commands/bloom/LOADCHUNK.ts @@ -0,0 +1,19 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, SimpleStringReply, Command } from '@redis/client/dist/lib/RESP/types'; + +export default { + IS_READ_ONLY: false, + /** + * Restores a Bloom Filter chunk previously saved using SCANDUMP + * @param parser - The command parser + * @param key - The name of the Bloom filter to restore + * @param iterator - Iterator value from the SCANDUMP command + * @param chunk - Data chunk from the SCANDUMP command + */ + parseCommand(parser: CommandParser, key: RedisArgument, iterator: number, chunk: RedisArgument) { + parser.push('BF.LOADCHUNK'); + 
parser.pushKey(key); + parser.push(iterator.toString(), chunk); + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/bloom/lib/commands/bloom/MADD.spec.ts b/packages/bloom/lib/commands/bloom/MADD.spec.ts new file mode 100644 index 00000000000..5eb39ee73d4 --- /dev/null +++ b/packages/bloom/lib/commands/bloom/MADD.spec.ts @@ -0,0 +1,20 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../../test-utils'; +import MADD from './MADD'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('BF.MADD', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(MADD, 'key', ['1', '2']), + ['BF.MADD', 'key', '1', '2'] + ); + }); + + testUtils.testWithClient('client.bf.mAdd', async client => { + assert.deepEqual( + await client.bf.mAdd('key', ['1', '2']), + [true, true] + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/bloom/lib/commands/bloom/MADD.ts b/packages/bloom/lib/commands/bloom/MADD.ts new file mode 100644 index 00000000000..adef0aee3e2 --- /dev/null +++ b/packages/bloom/lib/commands/bloom/MADD.ts @@ -0,0 +1,20 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, Command } from '@redis/client/dist/lib/RESP/types'; +import { RedisVariadicArgument } from '@redis/client/dist/lib/commands/generic-transformers'; +import { transformBooleanArrayReply } from '@redis/client/dist/lib/commands/generic-transformers'; + +export default { + IS_READ_ONLY: false, + /** + * Adds multiple items to a Bloom Filter in a single call + * @param parser - The command parser + * @param key - The name of the Bloom filter + * @param items - One or more items to add to the filter + */ + parseCommand(parser: CommandParser, key: RedisArgument, items: RedisVariadicArgument) { + parser.push('BF.MADD'); + parser.pushKey(key); + parser.pushVariadic(items); + }, + transformReply: 
transformBooleanArrayReply +} as const satisfies Command; diff --git a/packages/bloom/lib/commands/bloom/MEXISTS.spec.ts b/packages/bloom/lib/commands/bloom/MEXISTS.spec.ts new file mode 100644 index 00000000000..60c09b00f17 --- /dev/null +++ b/packages/bloom/lib/commands/bloom/MEXISTS.spec.ts @@ -0,0 +1,20 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../../test-utils'; +import MEXISTS from './MEXISTS'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('BF.MEXISTS', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(MEXISTS, 'key', ['1', '2']), + ['BF.MEXISTS', 'key', '1', '2'] + ); + }); + + testUtils.testWithClient('client.bf.mExists', async client => { + assert.deepEqual( + await client.bf.mExists('key', ['1', '2']), + [false, false] + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/bloom/lib/commands/bloom/MEXISTS.ts b/packages/bloom/lib/commands/bloom/MEXISTS.ts new file mode 100644 index 00000000000..658f9b01229 --- /dev/null +++ b/packages/bloom/lib/commands/bloom/MEXISTS.ts @@ -0,0 +1,20 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, Command } from '@redis/client/dist/lib/RESP/types'; +import { RedisVariadicArgument } from '@redis/client/dist/lib/commands/generic-transformers'; +import { transformBooleanArrayReply } from '@redis/client/dist/lib/commands/generic-transformers'; + +export default { + IS_READ_ONLY: true, + /** + * Checks if multiple items exist in a Bloom Filter in a single call + * @param parser - The command parser + * @param key - The name of the Bloom filter + * @param items - One or more items to check for existence + */ + parseCommand(parser: CommandParser, key: RedisArgument, items: RedisVariadicArgument) { + parser.push('BF.MEXISTS'); + parser.pushKey(key); + parser.pushVariadic(items); + }, + transformReply: transformBooleanArrayReply +} as const satisfies Command; diff 
--git a/packages/bloom/lib/commands/bloom/RESERVE.spec.ts b/packages/bloom/lib/commands/bloom/RESERVE.spec.ts new file mode 100644 index 00000000000..803577b350b --- /dev/null +++ b/packages/bloom/lib/commands/bloom/RESERVE.spec.ts @@ -0,0 +1,50 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../../test-utils'; +import RESERVE from './RESERVE'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('BF.RESERVE', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(RESERVE, 'key', 0.01, 100), + ['BF.RESERVE', 'key', '0.01', '100'] + ); + }); + + it('with EXPANSION', () => { + assert.deepEqual( + parseArgs(RESERVE, 'key', 0.01, 100, { + EXPANSION: 1 + }), + ['BF.RESERVE', 'key', '0.01', '100', 'EXPANSION', '1'] + ); + }); + + it('with NONSCALING', () => { + assert.deepEqual( + parseArgs(RESERVE, 'key', 0.01, 100, { + NONSCALING: true + }), + ['BF.RESERVE', 'key', '0.01', '100', 'NONSCALING'] + ); + }); + + it('with EXPANSION and NONSCALING', () => { + assert.deepEqual( + parseArgs(RESERVE, 'key', 0.01, 100, { + EXPANSION: 1, + NONSCALING: true + }), + ['BF.RESERVE', 'key', '0.01', '100', 'EXPANSION', '1', 'NONSCALING'] + ); + }); + }); + + testUtils.testWithClient('client.bf.reserve', async client => { + assert.equal( + await client.bf.reserve('bloom', 0.01, 100), + 'OK' + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/bloom/lib/commands/bloom/RESERVE.ts b/packages/bloom/lib/commands/bloom/RESERVE.ts new file mode 100644 index 00000000000..d0b3fb906a9 --- /dev/null +++ b/packages/bloom/lib/commands/bloom/RESERVE.ts @@ -0,0 +1,41 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, SimpleStringReply, Command } from '@redis/client/dist/lib/RESP/types'; + +export interface BfReserveOptions { + EXPANSION?: number; + NONSCALING?: boolean; +} + +export default { + IS_READ_ONLY: true, + /** + * 
Creates an empty Bloom Filter with a given desired error ratio and initial capacity + * @param parser - The command parser + * @param key - The name of the Bloom filter to create + * @param errorRate - The desired probability for false positives (between 0 and 1) + * @param capacity - The number of entries intended to be added to the filter + * @param options - Optional parameters to tune the filter + * @param options.EXPANSION - Expansion rate for the filter + * @param options.NONSCALING - Prevents the filter from creating additional sub-filters + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + errorRate: number, + capacity: number, + options?: BfReserveOptions + ) { + parser.push('BF.RESERVE'); + parser.pushKey(key); + parser.push(errorRate.toString(), capacity.toString()); + + if (options?.EXPANSION) { + parser.push('EXPANSION', options.EXPANSION.toString()); + } + + if (options?.NONSCALING) { + parser.push('NONSCALING'); + } + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/bloom/lib/commands/bloom/SCANDUMP.spec.ts b/packages/bloom/lib/commands/bloom/SCANDUMP.spec.ts new file mode 100644 index 00000000000..a41a6e8e466 --- /dev/null +++ b/packages/bloom/lib/commands/bloom/SCANDUMP.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../../test-utils'; +import SCANDUMP from './SCANDUMP'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('BF.SCANDUMP', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(SCANDUMP, 'key', 0), + ['BF.SCANDUMP', 'key', '0'] + ); + }); + + testUtils.testWithClient('client.bf.scanDump', async client => { + const [, dump] = await Promise.all([ + client.bf.reserve('key', 0.01, 100), + client.bf.scanDump('key', 0) + ]); + assert.equal(typeof dump, 'object'); + assert.equal(typeof dump.iterator, 'number'); + 
assert.equal(typeof dump.chunk, 'string'); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/bloom/lib/commands/bloom/SCANDUMP.ts b/packages/bloom/lib/commands/bloom/SCANDUMP.ts new file mode 100644 index 00000000000..4aebc5c9fc3 --- /dev/null +++ b/packages/bloom/lib/commands/bloom/SCANDUMP.ts @@ -0,0 +1,23 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, TuplesReply, NumberReply, BlobStringReply, UnwrapReply, Command } from '@redis/client/dist/lib/RESP/types'; + +export default { + IS_READ_ONLY: true, + /** + * Begins an incremental save of a Bloom Filter. This is useful for large filters that can't be saved at once + * @param parser - The command parser + * @param key - The name of the Bloom filter to save + * @param iterator - Iterator value; Start at 0, and use the iterator from the response for the next chunk + */ + parseCommand(parser: CommandParser, key: RedisArgument, iterator: number) { + parser.push('BF.SCANDUMP'); + parser.pushKey(key); + parser.push(iterator.toString()); + }, + transformReply(reply: UnwrapReply<TuplesReply<[NumberReply, BlobStringReply]>>) { + return { + iterator: reply[0], + chunk: reply[1] + }; + } +} as const satisfies Command; diff --git a/packages/bloom/lib/commands/bloom/helpers.ts b/packages/bloom/lib/commands/bloom/helpers.ts new file mode 100644 index 00000000000..f5b39c71aa8 --- /dev/null +++ b/packages/bloom/lib/commands/bloom/helpers.ts @@ -0,0 +1,29 @@ +import { RESP_TYPES, TypeMapping } from "@redis/client"; + +export function transformInfoV2Reply<T>(reply: Array<any>, typeMapping?: TypeMapping): T { + const mapType = typeMapping ? 
typeMapping[RESP_TYPES.MAP] : undefined; + + switch (mapType) { + case Array: { + return reply as unknown as T; + } + case Map: { + const ret = new Map(); + + for (let i = 0; i < reply.length; i += 2) { + ret.set(reply[i].toString(), reply[i + 1]); + } + + return ret as unknown as T; + } + default: { + const ret = Object.create(null); + + for (let i = 0; i < reply.length; i += 2) { + ret[reply[i].toString()] = reply[i + 1]; + } + + return ret as unknown as T; + } + } +} \ No newline at end of file diff --git a/packages/bloom/lib/commands/bloom/index.ts b/packages/bloom/lib/commands/bloom/index.ts new file mode 100644 index 00000000000..d49ac63b2ea --- /dev/null +++ b/packages/bloom/lib/commands/bloom/index.ts @@ -0,0 +1,37 @@ +import type { RedisCommands } from '@redis/client/dist/lib/RESP/types'; + +import ADD from './ADD'; +import CARD from './CARD'; +import EXISTS from './EXISTS'; +import INFO from './INFO'; +import INSERT from './INSERT'; +import LOADCHUNK from './LOADCHUNK'; +import MADD from './MADD'; +import MEXISTS from './MEXISTS'; +import RESERVE from './RESERVE'; +import SCANDUMP from './SCANDUMP'; + +export * from './helpers'; + +export default { + ADD, + add: ADD, + CARD, + card: CARD, + EXISTS, + exists: EXISTS, + INFO, + info: INFO, + INSERT, + insert: INSERT, + LOADCHUNK, + loadChunk: LOADCHUNK, + MADD, + mAdd: MADD, + MEXISTS, + mExists: MEXISTS, + RESERVE, + reserve: RESERVE, + SCANDUMP, + scanDump: SCANDUMP +} as const satisfies RedisCommands; diff --git a/packages/bloom/lib/commands/count-min-sketch/INCRBY.spec.ts b/packages/bloom/lib/commands/count-min-sketch/INCRBY.spec.ts new file mode 100644 index 00000000000..44ccaf6046d --- /dev/null +++ b/packages/bloom/lib/commands/count-min-sketch/INCRBY.spec.ts @@ -0,0 +1,43 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../../test-utils'; +import INCRBY from './INCRBY'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + 
+describe('CMS.INCRBY', () => { + describe('transformArguments', () => { + it('single item', () => { + assert.deepEqual( + parseArgs(INCRBY, 'key', { + item: 'item', + incrementBy: 1 + }), + ['CMS.INCRBY', 'key', 'item', '1'] + ); + }); + + it('multiple items', () => { + assert.deepEqual( + parseArgs(INCRBY, 'key', [{ + item: 'a', + incrementBy: 1 + }, { + item: 'b', + incrementBy: 2 + }]), + ['CMS.INCRBY', 'key', 'a', '1', 'b', '2'] + ); + }); + }); + + testUtils.testWithClient('client.cms.incrBy', async client => { + const [, reply] = await Promise.all([ + client.cms.initByDim('key', 1000, 5), + client.cms.incrBy('key', { + item: 'item', + incrementBy: 1 + }) + ]); + + assert.deepEqual(reply, [1]); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/bloom/lib/commands/count-min-sketch/INCRBY.ts b/packages/bloom/lib/commands/count-min-sketch/INCRBY.ts new file mode 100644 index 00000000000..145047b207a --- /dev/null +++ b/packages/bloom/lib/commands/count-min-sketch/INCRBY.ts @@ -0,0 +1,38 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, ArrayReply, NumberReply, Command } from '@redis/client/dist/lib/RESP/types'; + +export interface BfIncrByItem { + item: RedisArgument; + incrementBy: number; +} + +export default { + IS_READ_ONLY: false, + /** + * Increases the count of one or more items in a Count-Min Sketch + * @param parser - The command parser + * @param key - The name of the sketch + * @param items - A single item or array of items to increment, each with an item and increment value + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + items: BfIncrByItem | Array<BfIncrByItem> + ) { + parser.push('CMS.INCRBY'); + parser.pushKey(key); + + if (Array.isArray(items)) { + for (const item of items) { + pushIncrByItem(parser, item); + } + } else { + pushIncrByItem(parser, items); + } + }, + transformReply: undefined as unknown as () => ArrayReply<NumberReply> +} as const satisfies Command; + +function pushIncrByItem(parser: 
CommandParser, { item, incrementBy }: BfIncrByItem): void { + parser.push(item, incrementBy.toString()); +} diff --git a/packages/bloom/lib/commands/count-min-sketch/INFO.spec.ts b/packages/bloom/lib/commands/count-min-sketch/INFO.spec.ts new file mode 100644 index 00000000000..cbc8065016a --- /dev/null +++ b/packages/bloom/lib/commands/count-min-sketch/INFO.spec.ts @@ -0,0 +1,29 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../../test-utils'; +import INFO from './INFO'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('CMS.INFO', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(INFO, 'key'), + ['CMS.INFO', 'key'] + ); + }); + + testUtils.testWithClient('client.cms.info', async client => { + const width = 1000, + depth = 5, + [, reply] = await Promise.all([ + client.cms.initByDim('key', width, depth), + client.cms.info('key') + ]); + + const expected = Object.create(null); + expected['width'] = width; + expected['depth'] = depth; + expected['count'] = 0; + + assert.deepEqual(reply, expected); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/bloom/lib/commands/count-min-sketch/INFO.ts b/packages/bloom/lib/commands/count-min-sketch/INFO.ts new file mode 100644 index 00000000000..1f188bda013 --- /dev/null +++ b/packages/bloom/lib/commands/count-min-sketch/INFO.ts @@ -0,0 +1,34 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, TuplesToMapReply, NumberReply, UnwrapReply, Resp2Reply, Command, SimpleStringReply, TypeMapping } from '@redis/client/dist/lib/RESP/types'; +import { transformInfoV2Reply } from '../bloom'; + +export type CmsInfoReplyMap = TuplesToMapReply<[ + [SimpleStringReply<'width'>, NumberReply], + [SimpleStringReply<'depth'>, NumberReply], + [SimpleStringReply<'count'>, NumberReply] +]>; + +export interface CmsInfoReply { + width: NumberReply; + depth: NumberReply; + count: NumberReply; +} + 
+export default { + IS_READ_ONLY: true, + /** + * Returns width, depth, and total count of items in a Count-Min Sketch + * @param parser - The command parser + * @param key - The name of the sketch to get information about + */ + parseCommand(parser: CommandParser, key: RedisArgument) { + parser.push('CMS.INFO'); + parser.pushKey(key); + }, + transformReply: { + 2: (reply: UnwrapReply<Resp2Reply<CmsInfoReplyMap>>, _, typeMapping?: TypeMapping): CmsInfoReply => { + return transformInfoV2Reply<CmsInfoReply>(reply, typeMapping); + }, + 3: undefined as unknown as () => CmsInfoReply + } +} as const satisfies Command; diff --git a/packages/bloom/lib/commands/count-min-sketch/INITBYDIM.spec.ts b/packages/bloom/lib/commands/count-min-sketch/INITBYDIM.spec.ts new file mode 100644 index 00000000000..9fa1652a2e8 --- /dev/null +++ b/packages/bloom/lib/commands/count-min-sketch/INITBYDIM.spec.ts @@ -0,0 +1,20 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../../test-utils'; +import INITBYDIM from './INITBYDIM'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('CMS.INITBYDIM', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(INITBYDIM, 'key', 1000, 5), + ['CMS.INITBYDIM', 'key', '1000', '5'] + ); + }); + + testUtils.testWithClient('client.cms.initByDim', async client => { + assert.equal( + await client.cms.initByDim('key', 1000, 5), + 'OK' + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/bloom/lib/commands/count-min-sketch/INITBYDIM.ts b/packages/bloom/lib/commands/count-min-sketch/INITBYDIM.ts new file mode 100644 index 00000000000..2bf9d97f208 --- /dev/null +++ b/packages/bloom/lib/commands/count-min-sketch/INITBYDIM.ts @@ -0,0 +1,19 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, SimpleStringReply, Command } from '@redis/client/dist/lib/RESP/types'; + +export default { + IS_READ_ONLY: false, + /** + * Initialize a Count-Min Sketch using width and 
depth parameters + * @param parser - The command parser + * @param key - The name of the sketch + * @param width - Number of counters in each array (must be a multiple of 2) + * @param depth - Number of counter arrays (determines accuracy of estimates) + */ + parseCommand(parser: CommandParser, key: RedisArgument, width: number, depth: number) { + parser.push('CMS.INITBYDIM'); + parser.pushKey(key); + parser.push(width.toString(), depth.toString()); + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/bloom/lib/commands/count-min-sketch/INITBYPROB.spec.ts b/packages/bloom/lib/commands/count-min-sketch/INITBYPROB.spec.ts new file mode 100644 index 00000000000..b59bc14494f --- /dev/null +++ b/packages/bloom/lib/commands/count-min-sketch/INITBYPROB.spec.ts @@ -0,0 +1,20 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../../test-utils'; +import INITBYPROB from './INITBYPROB'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('CMS.INITBYPROB', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(INITBYPROB, 'key', 0.001, 0.01), + ['CMS.INITBYPROB', 'key', '0.001', '0.01'] + ); + }); + + testUtils.testWithClient('client.cms.initByProb', async client => { + assert.equal( + await client.cms.initByProb('key', 0.001, 0.01), + 'OK' + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/bloom/lib/commands/count-min-sketch/INITBYPROB.ts b/packages/bloom/lib/commands/count-min-sketch/INITBYPROB.ts new file mode 100644 index 00000000000..180781d91e8 --- /dev/null +++ b/packages/bloom/lib/commands/count-min-sketch/INITBYPROB.ts @@ -0,0 +1,19 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, SimpleStringReply, Command } from '@redis/client/dist/lib/RESP/types'; + +export default { + IS_READ_ONLY: false, + /** + * Initialize a Count-Min Sketch using error 
rate and probability parameters + * @param parser - The command parser + * @param key - The name of the sketch + * @param error - Estimate error, as a decimal between 0 and 1 + * @param probability - The desired probability for inflated count, as a decimal between 0 and 1 + */ + parseCommand(parser: CommandParser, key: RedisArgument, error: number, probability: number) { + parser.push('CMS.INITBYPROB'); + parser.pushKey(key); + parser.push(error.toString(), probability.toString()); + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/bloom/lib/commands/count-min-sketch/MERGE.spec.ts b/packages/bloom/lib/commands/count-min-sketch/MERGE.spec.ts new file mode 100644 index 00000000000..03e3d5c6364 --- /dev/null +++ b/packages/bloom/lib/commands/count-min-sketch/MERGE.spec.ts @@ -0,0 +1,35 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../../test-utils'; +import MERGE from './MERGE'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('CMS.MERGE', () => { + describe('transformArguments', () => { + it('without WEIGHTS', () => { + assert.deepEqual( + parseArgs(MERGE, 'destination', ['source']), + ['CMS.MERGE', 'destination', '1', 'source'] + ); + }); + + it('with WEIGHTS', () => { + assert.deepEqual( + parseArgs(MERGE, 'destination', [{ + name: 'source', + weight: 1 + }]), + ['CMS.MERGE', 'destination', '1', 'source', 'WEIGHTS', '1'] + ); + }); + }); + + testUtils.testWithClient('client.cms.merge', async client => { + const [, , reply] = await Promise.all([ + client.cms.initByDim('source', 1000, 5), + client.cms.initByDim('destination', 1000, 5), + client.cms.merge('destination', ['source']) + ]); + + assert.equal(reply, 'OK'); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/bloom/lib/commands/count-min-sketch/MERGE.ts b/packages/bloom/lib/commands/count-min-sketch/MERGE.ts new file mode 100644 index 
00000000000..e4921c7975f --- /dev/null +++ b/packages/bloom/lib/commands/count-min-sketch/MERGE.ts @@ -0,0 +1,45 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, SimpleStringReply, Command } from '@redis/client/dist/lib/RESP/types'; + +interface BfMergeSketch { + name: RedisArgument; + weight: number; +} + +export type BfMergeSketches = Array<RedisArgument> | Array<BfMergeSketch>; + +export default { + IS_READ_ONLY: false, + /** + * Merges multiple Count-Min Sketches into a single sketch, with optional weights + * @param parser - The command parser + * @param destination - The name of the destination sketch + * @param source - Array of sketch names or array of sketches with weights + */ + parseCommand( + parser: CommandParser, + destination: RedisArgument, + source: BfMergeSketches + ) { + parser.push('CMS.MERGE'); + parser.pushKey(destination); + parser.push(source.length.toString()); + + if (isPlainSketches(source)) { + parser.pushVariadic(source); + } else { + for (let i = 0; i < source.length; i++) { + parser.push(source[i].name); + } + parser.push('WEIGHTS'); + for (let i = 0; i < source.length; i++) { + parser.push(source[i].weight.toString()) + } + } + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; + +function isPlainSketches(src: BfMergeSketches): src is Array<RedisArgument> { + return typeof src[0] === 'string' || src[0] instanceof Buffer; +} diff --git a/packages/bloom/lib/commands/count-min-sketch/QUERY.spec.ts b/packages/bloom/lib/commands/count-min-sketch/QUERY.spec.ts new file mode 100644 index 00000000000..e12a519e962 --- /dev/null +++ b/packages/bloom/lib/commands/count-min-sketch/QUERY.spec.ts @@ -0,0 +1,22 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../../test-utils'; +import QUERY from './QUERY'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('CMS.QUERY', () => { + it('transformArguments', () => { 
+ assert.deepEqual( + parseArgs(QUERY, 'key', 'item'), + ['CMS.QUERY', 'key', 'item'] + ); + }); + + testUtils.testWithClient('client.cms.query', async client => { + const [, reply] = await Promise.all([ + client.cms.initByDim('key', 1000, 5), + client.cms.query('key', 'item') + ]); + + assert.deepEqual(reply, [0]); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/bloom/lib/commands/count-min-sketch/QUERY.ts b/packages/bloom/lib/commands/count-min-sketch/QUERY.ts new file mode 100644 index 00000000000..4322b0470c0 --- /dev/null +++ b/packages/bloom/lib/commands/count-min-sketch/QUERY.ts @@ -0,0 +1,19 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { ArrayReply, NumberReply, Command, RedisArgument } from '@redis/client/dist/lib/RESP/types'; +import { RedisVariadicArgument } from '@redis/client/dist/lib/commands/generic-transformers'; + +export default { + IS_READ_ONLY: true, + /** + * Returns the count for one or more items in a Count-Min Sketch + * @param parser - The command parser + * @param key - The name of the sketch + * @param items - One or more items to get counts for + */ + parseCommand(parser: CommandParser, key: RedisArgument, items: RedisVariadicArgument) { + parser.push('CMS.QUERY'); + parser.pushKey(key); + parser.pushVariadic(items); + }, + transformReply: undefined as unknown as () => ArrayReply<NumberReply> +} as const satisfies Command; diff --git a/packages/bloom/lib/commands/count-min-sketch/index.ts b/packages/bloom/lib/commands/count-min-sketch/index.ts new file mode 100644 index 00000000000..4f0f395ca3d --- /dev/null +++ b/packages/bloom/lib/commands/count-min-sketch/index.ts @@ -0,0 +1,22 @@ +import type { RedisCommands } from '@redis/client/dist/lib/RESP/types'; +import INCRBY from './INCRBY'; +import INFO from './INFO'; +import INITBYDIM from './INITBYDIM'; +import INITBYPROB from './INITBYPROB'; +import MERGE from './MERGE'; +import QUERY from './QUERY'; + +export default { + INCRBY, + incrBy: INCRBY, + INFO, + 
info: INFO, + INITBYDIM, + initByDim: INITBYDIM, + INITBYPROB, + initByProb: INITBYPROB, + MERGE, + merge: MERGE, + QUERY, + query: QUERY +} as const satisfies RedisCommands; diff --git a/packages/bloom/lib/commands/cuckoo/ADD.spec.ts b/packages/bloom/lib/commands/cuckoo/ADD.spec.ts new file mode 100644 index 00000000000..7fa518fea84 --- /dev/null +++ b/packages/bloom/lib/commands/cuckoo/ADD.spec.ts @@ -0,0 +1,20 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../../test-utils'; +import ADD from './ADD'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('CF.ADD', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(ADD, 'key', 'item'), + ['CF.ADD', 'key', 'item'] + ); + }); + + testUtils.testWithClient('client.cf.add', async client => { + assert.equal( + await client.cf.add('key', 'item'), + true + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/bloom/lib/commands/cuckoo/ADD.ts b/packages/bloom/lib/commands/cuckoo/ADD.ts new file mode 100644 index 00000000000..db16acdea5a --- /dev/null +++ b/packages/bloom/lib/commands/cuckoo/ADD.ts @@ -0,0 +1,19 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, Command } from '@redis/client/dist/lib/RESP/types'; +import { transformBooleanReply } from '@redis/client/dist/lib/commands/generic-transformers'; + +export default { + IS_READ_ONLY: false, + /** + * Adds an item to a Cuckoo Filter, creating the filter if it does not exist + * @param parser - The command parser + * @param key - The name of the Cuckoo filter + * @param item - The item to add to the filter + */ + parseCommand(parser: CommandParser, key: RedisArgument, item: RedisArgument) { + parser.push('CF.ADD'); + parser.pushKey(key); + parser.push(item); + }, + transformReply: transformBooleanReply +} as const satisfies Command; diff --git a/packages/bloom/lib/commands/cuckoo/ADDNX.spec.ts 
b/packages/bloom/lib/commands/cuckoo/ADDNX.spec.ts new file mode 100644 index 00000000000..c142733ce40 --- /dev/null +++ b/packages/bloom/lib/commands/cuckoo/ADDNX.spec.ts @@ -0,0 +1,20 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../../test-utils'; +import ADDNX from './ADDNX'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('CF.ADDNX', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(ADDNX, 'key', 'item'), + ['CF.ADDNX', 'key', 'item'] + ); + }); + + testUtils.testWithClient('client.cf.addNX', async client => { + assert.equal( + await client.cf.addNX('key', 'item'), + true + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/bloom/lib/commands/cuckoo/ADDNX.ts b/packages/bloom/lib/commands/cuckoo/ADDNX.ts new file mode 100644 index 00000000000..ef6e1222e7f --- /dev/null +++ b/packages/bloom/lib/commands/cuckoo/ADDNX.ts @@ -0,0 +1,19 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, Command } from '@redis/client/dist/lib/RESP/types'; +import { transformBooleanReply } from '@redis/client/dist/lib/commands/generic-transformers'; + +export default { + IS_READ_ONLY: false, + /** + * Adds an item to a Cuckoo Filter only if it does not exist + * @param parser - The command parser + * @param key - The name of the Cuckoo filter + * @param item - The item to add to the filter if it doesn't exist + */ + parseCommand(parser: CommandParser, key: RedisArgument, item: RedisArgument) { + parser.push('CF.ADDNX'); + parser.pushKey(key); + parser.push(item); + }, + transformReply: transformBooleanReply +} as const satisfies Command; diff --git a/packages/bloom/lib/commands/cuckoo/COUNT.spec.ts b/packages/bloom/lib/commands/cuckoo/COUNT.spec.ts new file mode 100644 index 00000000000..9393494d852 --- /dev/null +++ b/packages/bloom/lib/commands/cuckoo/COUNT.spec.ts @@ -0,0 +1,20 @@ +import { strict as assert } from 
'node:assert'; +import testUtils, { GLOBAL } from '../../test-utils'; +import COUNT from './COUNT'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('CF.COUNT', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(COUNT, 'key', 'item'), + ['CF.COUNT', 'key', 'item'] + ); + }); + + testUtils.testWithClient('client.cf.count', async client => { + assert.equal( + await client.cf.count('key', 'item'), + 0 + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/bloom/lib/commands/cuckoo/COUNT.ts b/packages/bloom/lib/commands/cuckoo/COUNT.ts new file mode 100644 index 00000000000..e06d71f73e9 --- /dev/null +++ b/packages/bloom/lib/commands/cuckoo/COUNT.ts @@ -0,0 +1,18 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, NumberReply, Command } from '@redis/client/dist/lib/RESP/types'; + +export default { + IS_READ_ONLY: true, + /** + * Returns the number of times an item appears in a Cuckoo Filter + * @param parser - The command parser + * @param key - The name of the Cuckoo filter + * @param item - The item to count occurrences of + */ + parseCommand(parser: CommandParser, key: RedisArgument, item: RedisArgument) { + parser.push('CF.COUNT'); + parser.pushKey(key); + parser.push(item); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/bloom/lib/commands/cuckoo/DEL.spec.ts b/packages/bloom/lib/commands/cuckoo/DEL.spec.ts new file mode 100644 index 00000000000..41ed653bfc9 --- /dev/null +++ b/packages/bloom/lib/commands/cuckoo/DEL.spec.ts @@ -0,0 +1,22 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../../test-utils'; +import DEL from './DEL'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('CF.DEL', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(DEL, 'key', 'item'), + ['CF.DEL', 'key', 
'item'] + ); + }); + + testUtils.testWithClient('client.cf.del', async client => { + const [, reply] = await Promise.all([ + client.cf.reserve('key', 4), + client.cf.del('key', 'item') + ]); + + assert.equal(reply, false); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/bloom/lib/commands/cuckoo/DEL.ts b/packages/bloom/lib/commands/cuckoo/DEL.ts new file mode 100644 index 00000000000..651a5bd8a8a --- /dev/null +++ b/packages/bloom/lib/commands/cuckoo/DEL.ts @@ -0,0 +1,19 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, Command } from '@redis/client/dist/lib/RESP/types'; +import { transformBooleanReply } from '@redis/client/dist/lib/commands/generic-transformers'; + +export default { + IS_READ_ONLY: false, + /** + * Removes an item from a Cuckoo Filter if it exists + * @param parser - The command parser + * @param key - The name of the Cuckoo filter + * @param item - The item to remove from the filter + */ + parseCommand(parser: CommandParser, key: RedisArgument, item: RedisArgument) { + parser.push('CF.DEL'); + parser.pushKey(key); + parser.push(item); + }, + transformReply: transformBooleanReply +} as const satisfies Command; diff --git a/packages/bloom/lib/commands/cuckoo/EXISTS.spec.ts b/packages/bloom/lib/commands/cuckoo/EXISTS.spec.ts new file mode 100644 index 00000000000..f77a9d69eff --- /dev/null +++ b/packages/bloom/lib/commands/cuckoo/EXISTS.spec.ts @@ -0,0 +1,20 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../../test-utils'; +import EXISTS from './EXISTS'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('CF.EXISTS', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(EXISTS, 'key', 'item'), + ['CF.EXISTS', 'key', 'item'] + ); + }); + + testUtils.testWithClient('client.cf.exists', async client => { + assert.equal( + await client.cf.exists('key', 'item'), + false + ); + }, GLOBAL.SERVERS.OPEN); 
+}); diff --git a/packages/bloom/lib/commands/cuckoo/EXISTS.ts b/packages/bloom/lib/commands/cuckoo/EXISTS.ts new file mode 100644 index 00000000000..820143ae886 --- /dev/null +++ b/packages/bloom/lib/commands/cuckoo/EXISTS.ts @@ -0,0 +1,19 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, Command } from '@redis/client/dist/lib/RESP/types'; +import { transformBooleanReply } from '@redis/client/dist/lib/commands/generic-transformers'; + +export default { + IS_READ_ONLY: false, + /** + * Checks if an item exists in a Cuckoo Filter + * @param parser - The command parser + * @param key - The name of the Cuckoo filter + * @param item - The item to check for existence + */ + parseCommand(parser: CommandParser, key: RedisArgument, item: RedisArgument) { + parser.push('CF.EXISTS'); + parser.pushKey(key); + parser.push(item); + }, + transformReply: transformBooleanReply +} as const satisfies Command; diff --git a/packages/bloom/lib/commands/cuckoo/INFO.spec.ts b/packages/bloom/lib/commands/cuckoo/INFO.spec.ts new file mode 100644 index 00000000000..c5503ed113b --- /dev/null +++ b/packages/bloom/lib/commands/cuckoo/INFO.spec.ts @@ -0,0 +1,30 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../../test-utils'; +import INFO from './INFO'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('CF.INFO', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(INFO, 'cuckoo'), + ['CF.INFO', 'cuckoo'] + ); + }); + + testUtils.testWithClient('client.cf.info', async client => { + const [, reply] = await Promise.all([ + client.cf.reserve('key', 4), + client.cf.info('key') + ]); + + assert.equal(typeof reply, 'object'); + assert.equal(typeof reply['Size'], 'number'); + assert.equal(typeof reply['Number of buckets'], 'number'); + assert.equal(typeof reply['Number of filters'], 'number'); + assert.equal(typeof reply['Number of items inserted'], 
'number'); + assert.equal(typeof reply['Number of items deleted'], 'number'); + assert.equal(typeof reply['Bucket size'], 'number'); + assert.equal(typeof reply['Expansion rate'], 'number'); + assert.equal(typeof reply['Max iterations'], 'number'); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/bloom/lib/commands/cuckoo/INFO.ts b/packages/bloom/lib/commands/cuckoo/INFO.ts new file mode 100644 index 00000000000..88622b5cb19 --- /dev/null +++ b/packages/bloom/lib/commands/cuckoo/INFO.ts @@ -0,0 +1,33 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, Command, NumberReply, TuplesToMapReply, UnwrapReply, Resp2Reply, SimpleStringReply, TypeMapping } from '@redis/client/dist/lib/RESP/types'; +import { transformInfoV2Reply } from '../bloom'; + +export type CfInfoReplyMap = TuplesToMapReply<[ + [SimpleStringReply<'Size'>, NumberReply], + [SimpleStringReply<'Number of buckets'>, NumberReply], + [SimpleStringReply<'Number of filters'>, NumberReply], + [SimpleStringReply<'Number of items inserted'>, NumberReply], + [SimpleStringReply<'Number of items deleted'>, NumberReply], + [SimpleStringReply<'Bucket size'>, NumberReply], + [SimpleStringReply<'Expansion rate'>, NumberReply], + [SimpleStringReply<'Max iterations'>, NumberReply] +]>; + +export default { + IS_READ_ONLY: true, + /** + * Returns detailed information about a Cuckoo Filter including size, buckets, filters count, items statistics and configuration + * @param parser - The command parser + * @param key - The name of the Cuckoo filter to get information about + */ + parseCommand(parser: CommandParser, key: RedisArgument) { + parser.push('CF.INFO'); + parser.pushKey(key); + }, + transformReply: { + 2: (reply: UnwrapReply>, _, typeMapping?: TypeMapping): CfInfoReplyMap => { + return transformInfoV2Reply(reply, typeMapping); + }, + 3: undefined as unknown as () => CfInfoReplyMap + } +} as const satisfies Command; diff --git 
a/packages/bloom/lib/commands/cuckoo/INSERT.spec.ts b/packages/bloom/lib/commands/cuckoo/INSERT.spec.ts new file mode 100644 index 00000000000..dc2bd574517 --- /dev/null +++ b/packages/bloom/lib/commands/cuckoo/INSERT.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../../test-utils'; +import INSERT from './INSERT'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('CF.INSERT', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(INSERT, 'key', 'item', { + CAPACITY: 100, + NOCREATE: true + }), + ['CF.INSERT', 'key', 'CAPACITY', '100', 'NOCREATE', 'ITEMS', 'item'] + ); + }); + + testUtils.testWithClient('client.cf.insert', async client => { + assert.deepEqual( + await client.cf.insert('key', 'item'), + [true] + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/bloom/lib/commands/cuckoo/INSERT.ts b/packages/bloom/lib/commands/cuckoo/INSERT.ts new file mode 100644 index 00000000000..277c820cbcd --- /dev/null +++ b/packages/bloom/lib/commands/cuckoo/INSERT.ts @@ -0,0 +1,46 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { Command, RedisArgument } from '@redis/client/dist/lib/RESP/types'; +import { RedisVariadicArgument, transformBooleanArrayReply } from '@redis/client/dist/lib/commands/generic-transformers'; + +export interface CfInsertOptions { + CAPACITY?: number; + NOCREATE?: boolean; +} + +export function parseCfInsertArguments( + parser: CommandParser, + key: RedisArgument, + items: RedisVariadicArgument, + options?: CfInsertOptions +) { + parser.pushKey(key); + + if (options?.CAPACITY !== undefined) { + parser.push('CAPACITY', options.CAPACITY.toString()); + } + + if (options?.NOCREATE) { + parser.push('NOCREATE'); + } + + parser.push('ITEMS'); + parser.pushVariadic(items); +} + +export default { + IS_READ_ONLY: false, + /** + * Adds one or more items to a Cuckoo Filter, creating it if it does not 
exist + * @param parser - The command parser + * @param key - The name of the Cuckoo filter + * @param items - One or more items to add to the filter + * @param options - Optional parameters for filter creation + * @param options.CAPACITY - The number of entries intended to be added to the filter + * @param options.NOCREATE - If true, prevents automatic filter creation + */ + parseCommand(...args: Parameters) { + args[0].push('CF.INSERT'); + parseCfInsertArguments(...args); + }, + transformReply: transformBooleanArrayReply +} as const satisfies Command; diff --git a/packages/bloom/lib/commands/cuckoo/INSERTNX.spec.ts b/packages/bloom/lib/commands/cuckoo/INSERTNX.spec.ts new file mode 100644 index 00000000000..648d9be7ac8 --- /dev/null +++ b/packages/bloom/lib/commands/cuckoo/INSERTNX.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../../test-utils'; +import INSERTNX from './INSERTNX'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('CF.INSERTNX', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(INSERTNX, 'key', 'item', { + CAPACITY: 100, + NOCREATE: true + }), + ['CF.INSERTNX', 'key', 'CAPACITY', '100', 'NOCREATE', 'ITEMS', 'item'] + ); + }); + + testUtils.testWithClient('client.cf.insertnx', async client => { + assert.deepEqual( + await client.cf.insertNX('key', 'item'), + [true] + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/bloom/lib/commands/cuckoo/INSERTNX.ts b/packages/bloom/lib/commands/cuckoo/INSERTNX.ts new file mode 100644 index 00000000000..bf99db6c3f7 --- /dev/null +++ b/packages/bloom/lib/commands/cuckoo/INSERTNX.ts @@ -0,0 +1,20 @@ +import { Command } from '@redis/client/dist/lib/RESP/types'; +import INSERT, { parseCfInsertArguments } from './INSERT'; + +/** + * Adds one or more items to a Cuckoo Filter only if they do not exist yet, creating the filter if needed + * @param parser - The command parser + * @param 
key - The name of the Cuckoo filter + * @param items - One or more items to add to the filter + * @param options - Optional parameters for filter creation + * @param options.CAPACITY - The number of entries intended to be added to the filter + * @param options.NOCREATE - If true, prevents automatic filter creation + */ +export default { + IS_READ_ONLY: INSERT.IS_READ_ONLY, + parseCommand(...args: Parameters) { + args[0].push('CF.INSERTNX'); + parseCfInsertArguments(...args); + }, + transformReply: INSERT.transformReply +} as const satisfies Command; diff --git a/packages/bloom/lib/commands/cuckoo/LOADCHUNK.spec.ts b/packages/bloom/lib/commands/cuckoo/LOADCHUNK.spec.ts new file mode 100644 index 00000000000..5415c787dda --- /dev/null +++ b/packages/bloom/lib/commands/cuckoo/LOADCHUNK.spec.ts @@ -0,0 +1,37 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../../test-utils'; +import LOADCHUNK from './LOADCHUNK'; +import { RESP_TYPES } from '@redis/client'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('CF.LOADCHUNK', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(LOADCHUNK, 'item', 0, ''), + ['CF.LOADCHUNK', 'item', '0', ''] + ); + }); + + testUtils.testWithClient('client.cf.loadChunk', async client => { + const [, , { iterator, chunk }] = await Promise.all([ + client.cf.reserve('source', 4), + client.cf.add('source', 'item'), + client.cf.scanDump('source', 0) + ]); + + assert.equal( + await client.cf.loadChunk('destination', iterator, chunk!), + 'OK' + ); + }, { + ...GLOBAL.SERVERS.OPEN, + clientOptions: { + ...GLOBAL.SERVERS.OPEN.clientOptions, + commandOptions: { + typeMapping: { + [RESP_TYPES.BLOB_STRING]: Buffer + } + } + } + }); +}); diff --git a/packages/bloom/lib/commands/cuckoo/LOADCHUNK.ts b/packages/bloom/lib/commands/cuckoo/LOADCHUNK.ts new file mode 100644 index 00000000000..3a966e5145a --- /dev/null +++ 
b/packages/bloom/lib/commands/cuckoo/LOADCHUNK.ts @@ -0,0 +1,19 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { SimpleStringReply, Command, RedisArgument } from '@redis/client/dist/lib/RESP/types'; + +export default { + IS_READ_ONLY: false, + /** + * Restores a Cuckoo Filter chunk previously saved using SCANDUMP + * @param parser - The command parser + * @param key - The name of the Cuckoo filter to restore + * @param iterator - Iterator value from the SCANDUMP command + * @param chunk - Data chunk from the SCANDUMP command + */ + parseCommand(parser: CommandParser, key: RedisArgument, iterator: number, chunk: RedisArgument) { + parser.push('CF.LOADCHUNK'); + parser.pushKey(key); + parser.push(iterator.toString(), chunk); + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/bloom/lib/commands/cuckoo/RESERVE.spec.ts b/packages/bloom/lib/commands/cuckoo/RESERVE.spec.ts new file mode 100644 index 00000000000..53546e4156e --- /dev/null +++ b/packages/bloom/lib/commands/cuckoo/RESERVE.spec.ts @@ -0,0 +1,49 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../../test-utils'; +import RESERVE from './RESERVE'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('CF.RESERVE', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(RESERVE, 'key', 4), + ['CF.RESERVE', 'key', '4'] + ); + }); + + it('with EXPANSION', () => { + assert.deepEqual( + parseArgs(RESERVE, 'key', 4, { + EXPANSION: 1 + }), + ['CF.RESERVE', 'key', '4', 'EXPANSION', '1'] + ); + }); + + it('with BUCKETSIZE', () => { + assert.deepEqual( + parseArgs(RESERVE, 'key', 4, { + BUCKETSIZE: 2 + }), + ['CF.RESERVE', 'key', '4', 'BUCKETSIZE', '2'] + ); + }); + + it('with MAXITERATIONS', () => { + assert.deepEqual( + parseArgs(RESERVE, 'key', 4, { + MAXITERATIONS: 1 + }), + 
['CF.RESERVE', 'key', '4', 'MAXITERATIONS', '1'] + ); + }); + }); + + testUtils.testWithClient('client.cf.reserve', async client => { + assert.equal( + await client.cf.reserve('key', 4), + 'OK' + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/bloom/lib/commands/cuckoo/RESERVE.ts b/packages/bloom/lib/commands/cuckoo/RESERVE.ts new file mode 100644 index 00000000000..26e31a1c645 --- /dev/null +++ b/packages/bloom/lib/commands/cuckoo/RESERVE.ts @@ -0,0 +1,45 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, SimpleStringReply, Command } from '@redis/client/dist/lib/RESP/types'; + +export interface CfReserveOptions { + BUCKETSIZE?: number; + MAXITERATIONS?: number; + EXPANSION?: number; +} + +export default { + IS_READ_ONLY: false, + /** + * Creates an empty Cuckoo Filter with specified capacity and parameters + * @param parser - The command parser + * @param key - The name of the Cuckoo filter to create + * @param capacity - The number of entries intended to be added to the filter + * @param options - Optional parameters to tune the filter + * @param options.BUCKETSIZE - Number of items in each bucket + * @param options.MAXITERATIONS - Maximum number of iterations before declaring filter full + * @param options.EXPANSION - Number of additional buckets per expansion + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + capacity: number, + options?: CfReserveOptions + ) { + parser.push('CF.RESERVE'); + parser.pushKey(key); + parser.push(capacity.toString()); + + if (options?.BUCKETSIZE !== undefined) { + parser.push('BUCKETSIZE', options.BUCKETSIZE.toString()); + } + + if (options?.MAXITERATIONS !== undefined) { + parser.push('MAXITERATIONS', options.MAXITERATIONS.toString()); + } + + if (options?.EXPANSION !== undefined) { + parser.push('EXPANSION', options.EXPANSION.toString()); + } + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff 
--git a/packages/bloom/lib/commands/cuckoo/SCANDUMP.spec.ts b/packages/bloom/lib/commands/cuckoo/SCANDUMP.spec.ts new file mode 100644 index 00000000000..60a57ac46ab --- /dev/null +++ b/packages/bloom/lib/commands/cuckoo/SCANDUMP.spec.ts @@ -0,0 +1,25 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../../test-utils'; +import SCANDUMP from './SCANDUMP'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('CF.SCANDUMP', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(SCANDUMP, 'key', 0), + ['CF.SCANDUMP', 'key', '0'] + ); + }); + + testUtils.testWithClient('client.cf.scanDump', async client => { + const [, reply] = await Promise.all([ + client.cf.reserve('key', 4), + client.cf.scanDump('key', 0) + ]); + + assert.deepEqual(reply, { + iterator: 0, + chunk: null + }); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/bloom/lib/commands/cuckoo/SCANDUMP.ts b/packages/bloom/lib/commands/cuckoo/SCANDUMP.ts new file mode 100644 index 00000000000..96a036671f4 --- /dev/null +++ b/packages/bloom/lib/commands/cuckoo/SCANDUMP.ts @@ -0,0 +1,23 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, TuplesReply, NumberReply, BlobStringReply, NullReply, UnwrapReply, Command } from '@redis/client/dist/lib/RESP/types'; + +export default { + IS_READ_ONLY: true, + /** + * Begins an incremental save of a Cuckoo Filter. 
This is useful for large filters that can't be saved at once + * @param parser - The command parser + * @param key - The name of the Cuckoo filter to save + * @param iterator - Iterator value; Start at 0, and use the iterator from the response for the next chunk + */ + parseCommand(parser: CommandParser, key: RedisArgument, iterator: number) { + parser.push('CF.SCANDUMP'); + parser.pushKey(key); + parser.push(iterator.toString()); + }, + transformReply(reply: UnwrapReply>) { + return { + iterator: reply[0], + chunk: reply[1] + }; + } +} as const satisfies Command; diff --git a/packages/bloom/lib/commands/cuckoo/index.ts b/packages/bloom/lib/commands/cuckoo/index.ts new file mode 100644 index 00000000000..62c63fe8d19 --- /dev/null +++ b/packages/bloom/lib/commands/cuckoo/index.ts @@ -0,0 +1,37 @@ +import type { RedisCommands } from '@redis/client/dist/lib/RESP/types'; +import ADD from './ADD'; +import ADDNX from './ADDNX'; +import COUNT from './COUNT'; +import DEL from './DEL'; +import EXISTS from './EXISTS'; +import INFO from './INFO'; +import INSERT from './INSERT'; +import INSERTNX from './INSERTNX'; +import LOADCHUNK from './LOADCHUNK'; +import RESERVE from './RESERVE'; +import SCANDUMP from './SCANDUMP'; + +export default { + ADD, + add: ADD, + ADDNX, + addNX: ADDNX, + COUNT, + count: COUNT, + DEL, + del: DEL, + EXISTS, + exists: EXISTS, + INFO, + info: INFO, + INSERT, + insert: INSERT, + INSERTNX, + insertNX: INSERTNX, + LOADCHUNK, + loadChunk: LOADCHUNK, + RESERVE, + reserve: RESERVE, + SCANDUMP, + scanDump: SCANDUMP +} as const satisfies RedisCommands; diff --git a/packages/bloom/lib/commands/index.ts b/packages/bloom/lib/commands/index.ts new file mode 100644 index 00000000000..6f91089460a --- /dev/null +++ b/packages/bloom/lib/commands/index.ts @@ -0,0 +1,14 @@ +import { RedisModules } from '@redis/client'; +import bf from './bloom'; +import cms from './count-min-sketch'; +import cf from './cuckoo'; +import tDigest from './t-digest'; +import topK from 
'./top-k'; + +export default { + bf, + cms, + cf, + tDigest, + topK +} as const satisfies RedisModules; diff --git a/packages/bloom/lib/commands/t-digest/ADD.spec.ts b/packages/bloom/lib/commands/t-digest/ADD.spec.ts new file mode 100644 index 00000000000..7578fb9378b --- /dev/null +++ b/packages/bloom/lib/commands/t-digest/ADD.spec.ts @@ -0,0 +1,22 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../../test-utils'; +import ADD from './ADD'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('TDIGEST.ADD', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(ADD, 'key', [1, 2]), + ['TDIGEST.ADD', 'key', '1', '2'] + ); + }); + + testUtils.testWithClient('client.tDigest.add', async client => { + const [, reply] = await Promise.all([ + client.tDigest.create('key'), + client.tDigest.add('key', [1]) + ]); + + assert.equal(reply, 'OK'); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/bloom/lib/commands/t-digest/ADD.ts b/packages/bloom/lib/commands/t-digest/ADD.ts new file mode 100644 index 00000000000..30e745f8f47 --- /dev/null +++ b/packages/bloom/lib/commands/t-digest/ADD.ts @@ -0,0 +1,21 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { SimpleStringReply, Command, RedisArgument } from '@redis/client/dist/lib/RESP/types'; + +export default { + IS_READ_ONLY: false, + /** + * Adds one or more observations to a t-digest sketch + * @param parser - The command parser + * @param key - The name of the t-digest sketch + * @param values - Array of numeric values to add to the sketch + */ + parseCommand(parser: CommandParser, key: RedisArgument, values: Array) { + parser.push('TDIGEST.ADD'); + parser.pushKey(key); + + for (const value of values) { + parser.push(value.toString()); + } + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git 
a/packages/bloom/lib/commands/t-digest/BYRANK.spec.ts b/packages/bloom/lib/commands/t-digest/BYRANK.spec.ts new file mode 100644 index 00000000000..81a2c75dff5 --- /dev/null +++ b/packages/bloom/lib/commands/t-digest/BYRANK.spec.ts @@ -0,0 +1,22 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../../test-utils'; +import BYRANK from './BYRANK'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('TDIGEST.BYRANK', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(BYRANK, 'key', [1, 2]), + ['TDIGEST.BYRANK', 'key', '1', '2'] + ); + }); + + testUtils.testWithClient('client.tDigest.byRank', async client => { + const [, reply] = await Promise.all([ + client.tDigest.create('key'), + client.tDigest.byRank('key', [1]) + ]); + + assert.deepEqual(reply, [NaN]); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/bloom/lib/commands/t-digest/BYRANK.ts b/packages/bloom/lib/commands/t-digest/BYRANK.ts new file mode 100644 index 00000000000..9ac855bfeab --- /dev/null +++ b/packages/bloom/lib/commands/t-digest/BYRANK.ts @@ -0,0 +1,31 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, Command } from '@redis/client/dist/lib/RESP/types'; +import { transformDoubleArrayReply } from '@redis/client/dist/lib/commands/generic-transformers'; + +export function transformByRankArguments( + parser: CommandParser, + key: RedisArgument, + ranks: Array +) { + parser.pushKey(key); + + for (const rank of ranks) { + parser.push(rank.toString()); + } +} + +export default { + IS_READ_ONLY: true, + /** + * Returns value estimates for one or more ranks in a t-digest sketch + * @param parser - The command parser + * @param key - The name of the t-digest sketch + * @param ranks - Array of ranks to get value estimates for (ascending order) + */ + parseCommand(...args: Parameters) { + args[0].push('TDIGEST.BYRANK'); + transformByRankArguments(...args); + }, + 
transformReply: transformDoubleArrayReply +} as const satisfies Command; + diff --git a/packages/bloom/lib/commands/t-digest/BYREVRANK.spec.ts b/packages/bloom/lib/commands/t-digest/BYREVRANK.spec.ts new file mode 100644 index 00000000000..c8f794bef57 --- /dev/null +++ b/packages/bloom/lib/commands/t-digest/BYREVRANK.spec.ts @@ -0,0 +1,22 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../../test-utils'; +import BYREVRANK from './BYREVRANK'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('TDIGEST.BYREVRANK', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(BYREVRANK, 'key', [1, 2]), + ['TDIGEST.BYREVRANK', 'key', '1', '2'] + ); + }); + + testUtils.testWithClient('client.tDigest.byRevRank', async client => { + const [, reply] = await Promise.all([ + client.tDigest.create('key'), + client.tDigest.byRevRank('key', [1]) + ]); + + assert.deepEqual(reply, [NaN]); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/bloom/lib/commands/t-digest/BYREVRANK.ts b/packages/bloom/lib/commands/t-digest/BYREVRANK.ts new file mode 100644 index 00000000000..a94e5566bb1 --- /dev/null +++ b/packages/bloom/lib/commands/t-digest/BYREVRANK.ts @@ -0,0 +1,17 @@ +import { Command } from '@redis/client/dist/lib/RESP/types'; +import BYRANK, { transformByRankArguments } from './BYRANK'; + +/** + * Returns value estimates for one or more ranks in a t-digest sketch, starting from highest rank + * @param parser - The command parser + * @param key - The name of the t-digest sketch + * @param ranks - Array of ranks to get value estimates for (descending order) + */ +export default { + IS_READ_ONLY: BYRANK.IS_READ_ONLY, + parseCommand(...args: Parameters) { + args[0].push('TDIGEST.BYREVRANK'); + transformByRankArguments(...args); + }, + transformReply: BYRANK.transformReply +} as const satisfies Command; diff --git a/packages/bloom/lib/commands/t-digest/CDF.spec.ts 
b/packages/bloom/lib/commands/t-digest/CDF.spec.ts new file mode 100644 index 00000000000..2689bf2fc9a --- /dev/null +++ b/packages/bloom/lib/commands/t-digest/CDF.spec.ts @@ -0,0 +1,22 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../../test-utils'; +import CDF from './CDF'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('TDIGEST.CDF', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(CDF, 'key', [1, 2]), + ['TDIGEST.CDF', 'key', '1', '2'] + ); + }); + + testUtils.testWithClient('client.tDigest.cdf', async client => { + const [, reply] = await Promise.all([ + client.tDigest.create('key'), + client.tDigest.cdf('key', [1]) + ]); + + assert.deepEqual(reply, [NaN]); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/bloom/lib/commands/t-digest/CDF.ts b/packages/bloom/lib/commands/t-digest/CDF.ts new file mode 100644 index 00000000000..a3d3b884a34 --- /dev/null +++ b/packages/bloom/lib/commands/t-digest/CDF.ts @@ -0,0 +1,22 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, Command } from '@redis/client/dist/lib/RESP/types'; +import { transformDoubleArrayReply } from '@redis/client/dist/lib/commands/generic-transformers'; + +export default { + IS_READ_ONLY: true, + /** + * Estimates the cumulative distribution function for values in a t-digest sketch + * @param parser - The command parser + * @param key - The name of the t-digest sketch + * @param values - Array of values to get CDF estimates for + */ + parseCommand(parser: CommandParser, key: RedisArgument, values: Array) { + parser.push('TDIGEST.CDF'); + parser.pushKey(key); + + for (const item of values) { + parser.push(item.toString()); + } + }, + transformReply: transformDoubleArrayReply +} as const satisfies Command; diff --git a/packages/bloom/lib/commands/t-digest/CREATE.spec.ts b/packages/bloom/lib/commands/t-digest/CREATE.spec.ts new file mode 100644 
index 00000000000..0f218e07ab8 --- /dev/null +++ b/packages/bloom/lib/commands/t-digest/CREATE.spec.ts @@ -0,0 +1,31 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../../test-utils'; +import CREATE from './CREATE'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('TDIGEST.CREATE', () => { + describe('transformArguments', () => { + it('without options', () => { + assert.deepEqual( + parseArgs(CREATE, 'key'), + ['TDIGEST.CREATE', 'key'] + ); + }); + + it('with COMPRESSION', () => { + assert.deepEqual( + parseArgs(CREATE, 'key', { + COMPRESSION: 100 + }), + ['TDIGEST.CREATE', 'key', 'COMPRESSION', '100'] + ); + }); + }); + + testUtils.testWithClient('client.tDigest.create', async client => { + assert.equal( + await client.tDigest.create('key'), + 'OK' + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/bloom/lib/commands/t-digest/CREATE.ts b/packages/bloom/lib/commands/t-digest/CREATE.ts new file mode 100644 index 00000000000..25eb9c4f425 --- /dev/null +++ b/packages/bloom/lib/commands/t-digest/CREATE.ts @@ -0,0 +1,26 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, SimpleStringReply, Command } from '@redis/client/dist/lib/RESP/types'; + +export interface TDigestCreateOptions { + COMPRESSION?: number; +} + +export default { + IS_READ_ONLY: false, + /** + * Creates a new t-digest sketch for storing distributions + * @param parser - The command parser + * @param key - The name of the t-digest sketch + * @param options - Optional parameters for sketch creation + * @param options.COMPRESSION - Compression parameter that affects performance and accuracy + */ + parseCommand(parser: CommandParser, key: RedisArgument, options?: TDigestCreateOptions) { + parser.push('TDIGEST.CREATE'); + parser.pushKey(key); + + if (options?.COMPRESSION !== undefined) { + parser.push('COMPRESSION', options.COMPRESSION.toString()); + } + }, + transformReply: 
undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/bloom/lib/commands/t-digest/INFO.spec.ts b/packages/bloom/lib/commands/t-digest/INFO.spec.ts new file mode 100644 index 00000000000..d5b8b3e13ed --- /dev/null +++ b/packages/bloom/lib/commands/t-digest/INFO.spec.ts @@ -0,0 +1,31 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../../test-utils'; +import INFO from './INFO'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('TDIGEST.INFO', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(INFO, 'key'), + ['TDIGEST.INFO', 'key'] + ); + }); + + testUtils.testWithClient('client.tDigest.info', async client => { + const [, reply] = await Promise.all([ + client.tDigest.create('key'), + client.tDigest.info('key') + ]); + + assert(typeof reply, 'object'); + assert(typeof reply['Compression'], 'number'); + assert(typeof reply['Capacity'], 'number'); + assert(typeof reply['Merged nodes'], 'number'); + assert(typeof reply['Unmerged nodes'], 'number'); + assert(typeof reply['Merged weight'], 'number'); + assert(typeof reply['Unmerged weight'], 'number'); + assert(typeof reply['Observations'], 'number'); + assert(typeof reply['Total compressions'], 'number'); + assert(typeof reply['Memory usage'], 'number'); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/bloom/lib/commands/t-digest/INFO.ts b/packages/bloom/lib/commands/t-digest/INFO.ts new file mode 100644 index 00000000000..96ae420886c --- /dev/null +++ b/packages/bloom/lib/commands/t-digest/INFO.ts @@ -0,0 +1,34 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, Command, NumberReply, TuplesToMapReply, UnwrapReply, Resp2Reply, SimpleStringReply, TypeMapping } from '@redis/client/dist/lib/RESP/types'; +import { transformInfoV2Reply } from '../bloom'; + +export type TdInfoReplyMap = TuplesToMapReply<[ + 
[SimpleStringReply<'Compression'>, NumberReply], + [SimpleStringReply<'Capacity'>, NumberReply], + [SimpleStringReply<'Merged nodes'>, NumberReply], + [SimpleStringReply<'Unmerged nodes'>, NumberReply], + [SimpleStringReply<'Merged weight'>, NumberReply], + [SimpleStringReply<'Unmerged weight'>, NumberReply], + [SimpleStringReply<'Observations'>, NumberReply], + [SimpleStringReply<'Total compressions'>, NumberReply], + [SimpleStringReply<'Memory usage'>, NumberReply] +]>; + +export default { + IS_READ_ONLY: true, + /** + * Returns information about a t-digest sketch including compression, capacity, nodes, weights, observations and memory usage + * @param parser - The command parser + * @param key - The name of the t-digest sketch to get information about + */ + parseCommand(parser: CommandParser, key: RedisArgument) { + parser.push('TDIGEST.INFO'); + parser.pushKey(key); + }, + transformReply: { + 2: (reply: UnwrapReply>, _, typeMapping?: TypeMapping): TdInfoReplyMap => { + return transformInfoV2Reply(reply, typeMapping); + }, + 3: undefined as unknown as () => TdInfoReplyMap + } +} as const satisfies Command; diff --git a/packages/bloom/lib/commands/t-digest/MAX.spec.ts b/packages/bloom/lib/commands/t-digest/MAX.spec.ts new file mode 100644 index 00000000000..920c9d11391 --- /dev/null +++ b/packages/bloom/lib/commands/t-digest/MAX.spec.ts @@ -0,0 +1,22 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../../test-utils'; +import MAX from './MAX'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('TDIGEST.MAX', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(MAX, 'key'), + ['TDIGEST.MAX', 'key'] + ); + }); + + testUtils.testWithClient('client.tDigest.max', async client => { + const [, reply] = await Promise.all([ + client.tDigest.create('key'), + client.tDigest.max('key') + ]); + + assert.deepEqual(reply, NaN); + }, GLOBAL.SERVERS.OPEN); +}); diff --git 
a/packages/bloom/lib/commands/t-digest/MAX.ts b/packages/bloom/lib/commands/t-digest/MAX.ts new file mode 100644 index 00000000000..2353c60cdc6 --- /dev/null +++ b/packages/bloom/lib/commands/t-digest/MAX.ts @@ -0,0 +1,17 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, Command } from '@redis/client/dist/lib/RESP/types'; +import { transformDoubleReply } from '@redis/client/dist/lib/commands/generic-transformers'; + +export default { + IS_READ_ONLY: true, + /** + * Returns the maximum value from a t-digest sketch + * @param parser - The command parser + * @param key - The name of the t-digest sketch + */ + parseCommand(parser: CommandParser, key: RedisArgument) { + parser.push('TDIGEST.MAX'); + parser.pushKey(key); + }, + transformReply: transformDoubleReply +} as const satisfies Command; diff --git a/packages/bloom/lib/commands/t-digest/MERGE.spec.ts b/packages/bloom/lib/commands/t-digest/MERGE.spec.ts new file mode 100644 index 00000000000..f2a7c1a1192 --- /dev/null +++ b/packages/bloom/lib/commands/t-digest/MERGE.spec.ts @@ -0,0 +1,51 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../../test-utils'; +import MERGE from './MERGE'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('TDIGEST.MERGE', () => { + describe('transformArguments', () => { + describe('source', () => { + it('string', () => { + assert.deepEqual( + parseArgs(MERGE, 'destination', 'source'), + ['TDIGEST.MERGE', 'destination', '1', 'source'] + ); + }); + + it('Array', () => { + assert.deepEqual( + parseArgs(MERGE, 'destination', ['1', '2']), + ['TDIGEST.MERGE', 'destination', '2', '1', '2'] + ); + }); + }); + + it('with COMPRESSION', () => { + assert.deepEqual( + parseArgs(MERGE, 'destination', 'source', { + COMPRESSION: 100 + }), + ['TDIGEST.MERGE', 'destination', '1', 'source', 'COMPRESSION', '100'] + ); + }); + + it('with OVERRIDE', () => { + assert.deepEqual( + 
parseArgs(MERGE, 'destination', 'source', { + OVERRIDE: true + }), + ['TDIGEST.MERGE', 'destination', '1', 'source', 'OVERRIDE'] + ); + }); + }); + + testUtils.testWithClient('client.tDigest.merge', async client => { + const [, reply] = await Promise.all([ + client.tDigest.create('source'), + client.tDigest.merge('destination', 'source') + ]); + + assert.equal(reply, 'OK'); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/bloom/lib/commands/t-digest/MERGE.ts b/packages/bloom/lib/commands/t-digest/MERGE.ts new file mode 100644 index 00000000000..f34d30a7cef --- /dev/null +++ b/packages/bloom/lib/commands/t-digest/MERGE.ts @@ -0,0 +1,40 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, SimpleStringReply, Command } from '@redis/client/dist/lib/RESP/types'; +import { RedisVariadicArgument } from '@redis/client/dist/lib/commands/generic-transformers'; + +export interface TDigestMergeOptions { + COMPRESSION?: number; + OVERRIDE?: boolean; +} + +export default { + IS_READ_ONLY: false, + /** + * Merges multiple t-digest sketches into one, with optional compression and override settings + * @param parser - The command parser + * @param destination - The name of the destination t-digest sketch + * @param source - One or more source sketch names to merge from + * @param options - Optional parameters for merge operation + * @param options.COMPRESSION - New compression value for merged sketch + * @param options.OVERRIDE - If true, override destination sketch if it exists + */ + parseCommand( + parser: CommandParser, + destination: RedisArgument, + source: RedisVariadicArgument, + options?: TDigestMergeOptions + ) { + parser.push('TDIGEST.MERGE'); + parser.pushKey(destination); + parser.pushKeysLength(source); + + if (options?.COMPRESSION !== undefined) { + parser.push('COMPRESSION', options.COMPRESSION.toString()); + } + + if (options?.OVERRIDE) { + parser.push('OVERRIDE'); + } + }, + transformReply: undefined as unknown as 
() => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/bloom/lib/commands/t-digest/MIN.spec.ts b/packages/bloom/lib/commands/t-digest/MIN.spec.ts new file mode 100644 index 00000000000..278248ea465 --- /dev/null +++ b/packages/bloom/lib/commands/t-digest/MIN.spec.ts @@ -0,0 +1,22 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../../test-utils'; +import MIN from './MIN'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('TDIGEST.MIN', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(MIN, 'key'), + ['TDIGEST.MIN', 'key'] + ); + }); + + testUtils.testWithClient('client.tDigest.min', async client => { + const [, reply] = await Promise.all([ + client.tDigest.create('key'), + client.tDigest.min('key') + ]); + + assert.equal(reply, NaN); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/bloom/lib/commands/t-digest/MIN.ts b/packages/bloom/lib/commands/t-digest/MIN.ts new file mode 100644 index 00000000000..9b8ea2a0cfc --- /dev/null +++ b/packages/bloom/lib/commands/t-digest/MIN.ts @@ -0,0 +1,17 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, Command } from '@redis/client/dist/lib/RESP/types'; +import { transformDoubleReply } from '@redis/client/dist/lib/commands/generic-transformers'; + +export default { + IS_READ_ONLY: true, + /** + * Returns the minimum value from a t-digest sketch + * @param parser - The command parser + * @param key - The name of the t-digest sketch + */ + parseCommand(parser: CommandParser, key: RedisArgument) { + parser.push('TDIGEST.MIN'); + parser.pushKey(key); + }, + transformReply: transformDoubleReply +} as const satisfies Command; diff --git a/packages/bloom/lib/commands/t-digest/QUANTILE.spec.ts b/packages/bloom/lib/commands/t-digest/QUANTILE.spec.ts new file mode 100644 index 00000000000..ac7249d12d9 --- /dev/null +++ 
b/packages/bloom/lib/commands/t-digest/QUANTILE.spec.ts @@ -0,0 +1,25 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../../test-utils'; +import QUANTILE from './QUANTILE'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('TDIGEST.QUANTILE', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(QUANTILE, 'key', [1, 2]), + ['TDIGEST.QUANTILE', 'key', '1', '2'] + ); + }); + + testUtils.testWithClient('client.tDigest.quantile', async client => { + const [, reply] = await Promise.all([ + client.tDigest.create('key'), + client.tDigest.quantile('key', [1]) + ]); + + assert.deepEqual( + reply, + [NaN] + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/bloom/lib/commands/t-digest/QUANTILE.ts b/packages/bloom/lib/commands/t-digest/QUANTILE.ts new file mode 100644 index 00000000000..772172b84d5 --- /dev/null +++ b/packages/bloom/lib/commands/t-digest/QUANTILE.ts @@ -0,0 +1,22 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, Command } from '@redis/client/dist/lib/RESP/types'; +import { transformDoubleArrayReply } from '@redis/client/dist/lib/commands/generic-transformers'; + +export default { + IS_READ_ONLY: true, + /** + * Returns value estimates at requested quantiles from a t-digest sketch + * @param parser - The command parser + * @param key - The name of the t-digest sketch + * @param quantiles - Array of quantiles (between 0 and 1) to get value estimates for + */ + parseCommand(parser: CommandParser, key: RedisArgument, quantiles: Array) { + parser.push('TDIGEST.QUANTILE'); + parser.pushKey(key); + + for (const quantile of quantiles) { + parser.push(quantile.toString()); + } + }, + transformReply: transformDoubleArrayReply +} as const satisfies Command; diff --git a/packages/bloom/lib/commands/t-digest/RANK.spec.ts b/packages/bloom/lib/commands/t-digest/RANK.spec.ts new file mode 100644 index 
00000000000..f1747662f0a --- /dev/null +++ b/packages/bloom/lib/commands/t-digest/RANK.spec.ts @@ -0,0 +1,22 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../../test-utils'; +import RANK from './RANK'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('TDIGEST.RANK', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(RANK, 'key', [1, 2]), + ['TDIGEST.RANK', 'key', '1', '2'] + ); + }); + + testUtils.testWithClient('client.tDigest.rank', async client => { + const [, reply] = await Promise.all([ + client.tDigest.create('key'), + client.tDigest.rank('key', [1]) + ]); + + assert.deepEqual(reply, [-2]); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/bloom/lib/commands/t-digest/RANK.ts b/packages/bloom/lib/commands/t-digest/RANK.ts new file mode 100644 index 00000000000..55dd3c636ce --- /dev/null +++ b/packages/bloom/lib/commands/t-digest/RANK.ts @@ -0,0 +1,29 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, ArrayReply, NumberReply, Command } from '@redis/client/dist/lib/RESP/types'; + +export function transformRankArguments( + parser: CommandParser, + key: RedisArgument, + values: Array +) { + parser.pushKey(key); + + for (const value of values) { + parser.push(value.toString()); + } +} + +export default { + IS_READ_ONLY: true, + /** + * Returns the rank of one or more values in a t-digest sketch (number of values that are lower than each value) + * @param parser - The command parser + * @param key - The name of the t-digest sketch + * @param values - Array of values to get ranks for + */ + parseCommand(...args: Parameters) { + args[0].push('TDIGEST.RANK'); + transformRankArguments(...args); + }, + transformReply: undefined as unknown as () => ArrayReply +} as const satisfies Command; diff --git a/packages/bloom/lib/commands/t-digest/RESET.spec.ts b/packages/bloom/lib/commands/t-digest/RESET.spec.ts new file 
mode 100644 index 00000000000..8e1fc12e6e3 --- /dev/null +++ b/packages/bloom/lib/commands/t-digest/RESET.spec.ts @@ -0,0 +1,22 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../../test-utils'; +import RESET from './RESET'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('TDIGEST.RESET', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(RESET, 'key'), + ['TDIGEST.RESET', 'key'] + ); + }); + + testUtils.testWithClient('client.tDigest.reset', async client => { + const [, reply] = await Promise.all([ + client.tDigest.create('key'), + client.tDigest.reset('key') + ]); + + assert.equal(reply, 'OK'); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/bloom/lib/commands/t-digest/RESET.ts b/packages/bloom/lib/commands/t-digest/RESET.ts new file mode 100644 index 00000000000..8e1cefe9ff8 --- /dev/null +++ b/packages/bloom/lib/commands/t-digest/RESET.ts @@ -0,0 +1,16 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, SimpleStringReply, Command } from '@redis/client/dist/lib/RESP/types'; + +export default { + IS_READ_ONLY: false, + /** + * Resets a t-digest sketch, clearing all previously added observations + * @param parser - The command parser + * @param key - The name of the t-digest sketch to reset + */ + parseCommand(parser: CommandParser, key: RedisArgument) { + parser.push('TDIGEST.RESET'); + parser.pushKey(key); + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/bloom/lib/commands/t-digest/REVRANK.spec.ts b/packages/bloom/lib/commands/t-digest/REVRANK.spec.ts new file mode 100644 index 00000000000..be7b23b2238 --- /dev/null +++ b/packages/bloom/lib/commands/t-digest/REVRANK.spec.ts @@ -0,0 +1,22 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../../test-utils'; +import REVRANK from './REVRANK'; 
+import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('TDIGEST.REVRANK', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(REVRANK, 'key', [1, 2]), + ['TDIGEST.REVRANK', 'key', '1', '2'] + ); + }); + + testUtils.testWithClient('client.tDigest.revRank', async client => { + const [, reply] = await Promise.all([ + client.tDigest.create('key'), + client.tDigest.revRank('key', [1]) + ]); + + assert.deepEqual(reply, [-2]); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/bloom/lib/commands/t-digest/REVRANK.ts b/packages/bloom/lib/commands/t-digest/REVRANK.ts new file mode 100644 index 00000000000..e323e10190a --- /dev/null +++ b/packages/bloom/lib/commands/t-digest/REVRANK.ts @@ -0,0 +1,17 @@ +import { Command } from '@redis/client/dist/lib/RESP/types'; +import RANK, { transformRankArguments } from './RANK'; + +/** + * Returns the reverse rank of one or more values in a t-digest sketch (number of values that are higher than each value) + * @param parser - The command parser + * @param key - The name of the t-digest sketch + * @param values - Array of values to get reverse ranks for + */ +export default { + IS_READ_ONLY: RANK.IS_READ_ONLY, + parseCommand(...args: Parameters) { + args[0].push('TDIGEST.REVRANK'); + transformRankArguments(...args); + }, + transformReply: RANK.transformReply +} as const satisfies Command; diff --git a/packages/bloom/lib/commands/t-digest/TRIMMED_MEAN.spec.ts b/packages/bloom/lib/commands/t-digest/TRIMMED_MEAN.spec.ts new file mode 100644 index 00000000000..8e83c736476 --- /dev/null +++ b/packages/bloom/lib/commands/t-digest/TRIMMED_MEAN.spec.ts @@ -0,0 +1,22 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../../test-utils'; +import TRIMMED_MEAN from './TRIMMED_MEAN'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('TDIGEST.TRIMMED_MEAN', () => { + it('transformArguments', () => { + 
assert.deepEqual( + parseArgs(TRIMMED_MEAN, 'key', 0, 1), + ['TDIGEST.TRIMMED_MEAN', 'key', '0', '1'] + ); + }); + + testUtils.testWithClient('client.tDigest.trimmedMean', async client => { + const [, reply] = await Promise.all([ + client.tDigest.create('key'), + client.tDigest.trimmedMean('key', 0, 1) + ]); + + assert.equal(reply, NaN); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/bloom/lib/commands/t-digest/TRIMMED_MEAN.ts b/packages/bloom/lib/commands/t-digest/TRIMMED_MEAN.ts new file mode 100644 index 00000000000..f65f37a95de --- /dev/null +++ b/packages/bloom/lib/commands/t-digest/TRIMMED_MEAN.ts @@ -0,0 +1,25 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, Command } from '@redis/client/dist/lib/RESP/types'; +import { transformDoubleReply } from '@redis/client/dist/lib/commands/generic-transformers'; + +export default { + IS_READ_ONLY: true, + /** + * Returns the mean value from a t-digest sketch after trimming values at specified percentiles + * @param parser - The command parser + * @param key - The name of the t-digest sketch + * @param lowCutPercentile - Lower percentile cutoff (between 0 and 100) + * @param highCutPercentile - Higher percentile cutoff (between 0 and 100) + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + lowCutPercentile: number, + highCutPercentile: number + ) { + parser.push('TDIGEST.TRIMMED_MEAN'); + parser.pushKey(key); + parser.push(lowCutPercentile.toString(), highCutPercentile.toString()); + }, + transformReply: transformDoubleReply +} as const satisfies Command; diff --git a/packages/bloom/lib/commands/t-digest/index.ts b/packages/bloom/lib/commands/t-digest/index.ts new file mode 100644 index 00000000000..d180911dbf9 --- /dev/null +++ b/packages/bloom/lib/commands/t-digest/index.ts @@ -0,0 +1,46 @@ +import type { RedisCommands } from '@redis/client/dist/lib/RESP/types'; +import ADD from './ADD'; +import BYRANK from './BYRANK'; +import BYREVRANK from 
'./BYREVRANK'; +import CDF from './CDF'; +import CREATE from './CREATE'; +import INFO from './INFO'; +import MAX from './MAX'; +import MERGE from './MERGE'; +import MIN from './MIN'; +import QUANTILE from './QUANTILE'; +import RANK from './RANK'; +import RESET from './RESET'; +import REVRANK from './REVRANK'; +import TRIMMED_MEAN from './TRIMMED_MEAN'; + +export default { + ADD, + add: ADD, + BYRANK, + byRank: BYRANK, + BYREVRANK, + byRevRank: BYREVRANK, + CDF, + cdf: CDF, + CREATE, + create: CREATE, + INFO, + info: INFO, + MAX, + max: MAX, + MERGE, + merge: MERGE, + MIN, + min: MIN, + QUANTILE, + quantile: QUANTILE, + RANK, + rank: RANK, + RESET, + reset: RESET, + REVRANK, + revRank: REVRANK, + TRIMMED_MEAN, + trimmedMean: TRIMMED_MEAN +} as const satisfies RedisCommands; diff --git a/packages/bloom/lib/commands/top-k/ADD.spec.ts b/packages/bloom/lib/commands/top-k/ADD.spec.ts new file mode 100644 index 00000000000..15a7a9ce1dd --- /dev/null +++ b/packages/bloom/lib/commands/top-k/ADD.spec.ts @@ -0,0 +1,22 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../../test-utils'; +import ADD from './ADD'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('TOPK.ADD', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(ADD, 'key', 'item'), + ['TOPK.ADD', 'key', 'item'] + ); + }); + + testUtils.testWithClient('client.topK.add', async client => { + const [, reply] = await Promise.all([ + client.topK.reserve('topK', 3), + client.topK.add('topK', 'item') + ]); + + assert.deepEqual(reply, [null]); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/bloom/lib/commands/top-k/ADD.ts b/packages/bloom/lib/commands/top-k/ADD.ts new file mode 100644 index 00000000000..8eab7dd5241 --- /dev/null +++ b/packages/bloom/lib/commands/top-k/ADD.ts @@ -0,0 +1,19 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, ArrayReply, BlobStringReply, 
Command } from '@redis/client/dist/lib/RESP/types'; +import { RedisVariadicArgument } from '@redis/client/dist/lib/commands/generic-transformers'; + +export default { + IS_READ_ONLY: false, + /** + * Adds one or more items to a Top-K filter and returns items dropped from the top-K list + * @param parser - The command parser + * @param key - The name of the Top-K filter + * @param items - One or more items to add to the filter + */ + parseCommand(parser: CommandParser, key: RedisArgument, items: RedisVariadicArgument) { + parser.push('TOPK.ADD'); + parser.pushKey(key); + parser.pushVariadic(items); + }, + transformReply: undefined as unknown as () => ArrayReply +} as const satisfies Command; diff --git a/packages/bloom/lib/commands/top-k/COUNT.spec.ts b/packages/bloom/lib/commands/top-k/COUNT.spec.ts new file mode 100644 index 00000000000..a242edfef8a --- /dev/null +++ b/packages/bloom/lib/commands/top-k/COUNT.spec.ts @@ -0,0 +1,22 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../../test-utils'; +import COUNT from './COUNT'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('TOPK.COUNT', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(COUNT, 'key', 'item'), + ['TOPK.COUNT', 'key', 'item'] + ); + }); + + testUtils.testWithClient('client.topK.count', async client => { + const [, reply] = await Promise.all([ + client.topK.reserve('key', 3), + client.topK.count('key', 'item') + ]); + + assert.deepEqual(reply, [0]); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/bloom/lib/commands/top-k/COUNT.ts b/packages/bloom/lib/commands/top-k/COUNT.ts new file mode 100644 index 00000000000..b72641471cd --- /dev/null +++ b/packages/bloom/lib/commands/top-k/COUNT.ts @@ -0,0 +1,19 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, ArrayReply, NumberReply, Command } from '@redis/client/dist/lib/RESP/types'; +import { 
RedisVariadicArgument } from '@redis/client/dist/lib/commands/generic-transformers'; + +export default { + IS_READ_ONLY: true, + /** + * Returns the count of occurrences for one or more items in a Top-K filter + * @param parser - The command parser + * @param key - The name of the Top-K filter + * @param items - One or more items to get counts for + */ + parseCommand(parser: CommandParser, key: RedisArgument, items: RedisVariadicArgument) { + parser.push('TOPK.COUNT'); + parser.pushKey(key); + parser.pushVariadic(items); + }, + transformReply: undefined as unknown as () => ArrayReply +} as const satisfies Command; diff --git a/packages/bloom/lib/commands/top-k/INCRBY.spec.ts b/packages/bloom/lib/commands/top-k/INCRBY.spec.ts new file mode 100644 index 00000000000..94e5b1d7058 --- /dev/null +++ b/packages/bloom/lib/commands/top-k/INCRBY.spec.ts @@ -0,0 +1,43 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../../test-utils'; +import INCRBY from './INCRBY'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('TOPK.INCRBY', () => { + describe('transformArguments', () => { + it('single item', () => { + assert.deepEqual( + parseArgs(INCRBY, 'key', { + item: 'item', + incrementBy: 1 + }), + ['TOPK.INCRBY', 'key', 'item', '1'] + ); + }); + + it('multiple items', () => { + assert.deepEqual( + parseArgs(INCRBY, 'key', [{ + item: 'a', + incrementBy: 1 + }, { + item: 'b', + incrementBy: 2 + }]), + ['TOPK.INCRBY', 'key', 'a', '1', 'b', '2'] + ); + }); + }); + + testUtils.testWithClient('client.topK.incrby', async client => { + const [, reply] = await Promise.all([ + client.topK.reserve('key', 5), + client.topK.incrBy('key', { + item: 'item', + incrementBy: 1 + }) + ]); + + assert.deepEqual(reply, [null]); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/bloom/lib/commands/top-k/INCRBY.ts b/packages/bloom/lib/commands/top-k/INCRBY.ts new file mode 100644 index 00000000000..ef93653bd3c --- 
/dev/null +++ b/packages/bloom/lib/commands/top-k/INCRBY.ts @@ -0,0 +1,38 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, ArrayReply, SimpleStringReply, NullReply, Command } from '@redis/client/dist/lib/RESP/types'; + +export interface TopKIncrByItem { + item: string; + incrementBy: number; +} + +function pushIncrByItem(parser: CommandParser, { item, incrementBy }: TopKIncrByItem) { + parser.push(item, incrementBy.toString()); +} + +export default { + IS_READ_ONLY: false, + /** + * Increases the score of one or more items in a Top-K filter by specified increments + * @param parser - The command parser + * @param key - The name of the Top-K filter + * @param items - A single item or array of items to increment, each with an item name and increment value + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + items: TopKIncrByItem | Array + ) { + parser.push('TOPK.INCRBY'); + parser.pushKey(key); + + if (Array.isArray(items)) { + for (const item of items) { + pushIncrByItem(parser, item); + } + } else { + pushIncrByItem(parser, items); + } + }, + transformReply: undefined as unknown as () => ArrayReply +} as const satisfies Command; diff --git a/packages/bloom/lib/commands/top-k/INFO.spec.ts b/packages/bloom/lib/commands/top-k/INFO.spec.ts new file mode 100644 index 00000000000..2efbf0bdbef --- /dev/null +++ b/packages/bloom/lib/commands/top-k/INFO.spec.ts @@ -0,0 +1,27 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../../test-utils'; +import INFO from './INFO'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('TOPK INFO', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(INFO, 'key'), + ['TOPK.INFO', 'key'] + ); + }); + + testUtils.testWithClient('client.topK.info', async client => { + const k = 3, + [, reply] = await Promise.all([ + client.topK.reserve('key', 3), + client.topK.info('key') + ]); + 
+ assert.equal(typeof reply, 'object'); + assert.equal(reply.k, k); + assert.equal(typeof reply.width, 'number'); + assert.equal(typeof reply.depth, 'number'); + assert.equal(typeof reply.decay, 'number'); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/bloom/lib/commands/top-k/INFO.ts b/packages/bloom/lib/commands/top-k/INFO.ts new file mode 100644 index 00000000000..60ee3939324 --- /dev/null +++ b/packages/bloom/lib/commands/top-k/INFO.ts @@ -0,0 +1,32 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, TuplesToMapReply, NumberReply, DoubleReply, UnwrapReply, Resp2Reply, Command, SimpleStringReply, TypeMapping } from '@redis/client/dist/lib/RESP/types'; +import { transformDoubleReply } from '@redis/client/dist/lib/commands/generic-transformers'; +import { transformInfoV2Reply } from '../bloom'; + +export type TopKInfoReplyMap = TuplesToMapReply<[ + [SimpleStringReply<'k'>, NumberReply], + [SimpleStringReply<'width'>, NumberReply], + [SimpleStringReply<'depth'>, NumberReply], + [SimpleStringReply<'decay'>, DoubleReply] +]>; + +export default { + IS_READ_ONLY: true, + /** + * Returns configuration and statistics of a Top-K filter, including k, width, depth, and decay parameters + * @param parser - The command parser + * @param key - The name of the Top-K filter to get information about + */ + parseCommand(parser: CommandParser, key: RedisArgument) { + parser.push('TOPK.INFO'); + parser.pushKey(key); + }, + transformReply: { + 2: (reply: UnwrapReply>, preserve?: any, typeMapping?: TypeMapping): TopKInfoReplyMap => { + reply[7] = transformDoubleReply[2](reply[7], preserve, typeMapping) as any; + + return transformInfoV2Reply(reply, typeMapping); + }, + 3: undefined as unknown as () => TopKInfoReplyMap + } +} as const satisfies Command diff --git a/packages/bloom/lib/commands/top-k/LIST.spec.ts b/packages/bloom/lib/commands/top-k/LIST.spec.ts new file mode 100644 index 00000000000..8f5d0efa4db --- /dev/null +++ 
b/packages/bloom/lib/commands/top-k/LIST.spec.ts @@ -0,0 +1,22 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../../test-utils'; +import LIST from './LIST'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('TOPK.LIST', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(LIST, 'key'), + ['TOPK.LIST', 'key'] + ); + }); + + testUtils.testWithClient('client.topK.list', async client => { + const [, reply] = await Promise.all([ + client.topK.reserve('key', 3), + client.topK.list('key') + ]); + + assert.deepEqual(reply, []); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/bloom/lib/commands/top-k/LIST.ts b/packages/bloom/lib/commands/top-k/LIST.ts new file mode 100644 index 00000000000..e030ff02efa --- /dev/null +++ b/packages/bloom/lib/commands/top-k/LIST.ts @@ -0,0 +1,16 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, ArrayReply, BlobStringReply, Command } from '@redis/client/dist/lib/RESP/types'; + +export default { + IS_READ_ONLY: true, + /** + * Returns all items in a Top-K filter + * @param parser - The command parser + * @param key - The name of the Top-K filter + */ + parseCommand(parser: CommandParser, key: RedisArgument) { + parser.push('TOPK.LIST'); + parser.pushKey(key); + }, + transformReply: undefined as unknown as () => ArrayReply +} as const satisfies Command; diff --git a/packages/bloom/lib/commands/top-k/LIST_WITHCOUNT.spec.ts b/packages/bloom/lib/commands/top-k/LIST_WITHCOUNT.spec.ts new file mode 100644 index 00000000000..852170e8cd3 --- /dev/null +++ b/packages/bloom/lib/commands/top-k/LIST_WITHCOUNT.spec.ts @@ -0,0 +1,28 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../../test-utils'; +import LIST_WITHCOUNT from './LIST_WITHCOUNT'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('TOPK.LIST WITHCOUNT', () => { 
+ testUtils.isVersionGreaterThanHook([2, 2, 9]); + + it('transformArguments', () => { + assert.deepEqual( + parseArgs(LIST_WITHCOUNT, 'key'), + ['TOPK.LIST', 'key', 'WITHCOUNT'] + ); + }); + + testUtils.testWithClient('client.topK.listWithCount', async client => { + const [, , list] = await Promise.all([ + client.topK.reserve('key', 3), + client.topK.add('key', 'item'), + client.topK.listWithCount('key') + ]); + + assert.deepEqual(list, [{ + item: 'item', + count: 1 + }]); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/bloom/lib/commands/top-k/LIST_WITHCOUNT.ts b/packages/bloom/lib/commands/top-k/LIST_WITHCOUNT.ts new file mode 100644 index 00000000000..bed58a36b70 --- /dev/null +++ b/packages/bloom/lib/commands/top-k/LIST_WITHCOUNT.ts @@ -0,0 +1,31 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, ArrayReply, BlobStringReply, NumberReply, UnwrapReply, Command } from '@redis/client/dist/lib/RESP/types'; + +export default { + IS_READ_ONLY: true, + /** + * Returns all items in a Top-K filter with their respective counts + * @param parser - The command parser + * @param key - The name of the Top-K filter + */ + parseCommand(parser: CommandParser, key: RedisArgument) { + parser.push('TOPK.LIST'); + parser.pushKey(key); + parser.push('WITHCOUNT'); + }, + transformReply(rawReply: UnwrapReply>) { + const reply: Array<{ + item: BlobStringReply; + count: NumberReply; + }> = []; + + for (let i = 0; i < rawReply.length; i++) { + reply.push({ + item: rawReply[i] as BlobStringReply, + count: rawReply[++i] as NumberReply + }); + } + + return reply; + } +} as const satisfies Command; diff --git a/packages/bloom/lib/commands/top-k/QUERY.spec.ts b/packages/bloom/lib/commands/top-k/QUERY.spec.ts new file mode 100644 index 00000000000..3651ec5d37b --- /dev/null +++ b/packages/bloom/lib/commands/top-k/QUERY.spec.ts @@ -0,0 +1,22 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from 
'../../test-utils'; +import QUERY from './QUERY'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('TOPK.QUERY', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(QUERY, 'key', 'item'), + ['TOPK.QUERY', 'key', 'item'] + ); + }); + + testUtils.testWithClient('client.topK.query', async client => { + const [, reply] = await Promise.all([ + client.topK.reserve('key', 3), + client.topK.query('key', 'item') + ]); + + assert.deepEqual(reply, [false]); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/bloom/lib/commands/top-k/QUERY.ts b/packages/bloom/lib/commands/top-k/QUERY.ts new file mode 100644 index 00000000000..8976e211979 --- /dev/null +++ b/packages/bloom/lib/commands/top-k/QUERY.ts @@ -0,0 +1,19 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, Command } from '@redis/client/dist/lib/RESP/types'; +import { RedisVariadicArgument, transformBooleanArrayReply } from '@redis/client/dist/lib/commands/generic-transformers'; + +export default { + IS_READ_ONLY: true, + /** + * Checks if one or more items are in the Top-K list + * @param parser - The command parser + * @param key - The name of the Top-K filter + * @param items - One or more items to check in the filter + */ + parseCommand(parser: CommandParser, key: RedisArgument, items: RedisVariadicArgument) { + parser.push('TOPK.QUERY'); + parser.pushKey(key); + parser.pushVariadic(items); + }, + transformReply: transformBooleanArrayReply +} as const satisfies Command; diff --git a/packages/bloom/lib/commands/top-k/RESERVE.spec.ts b/packages/bloom/lib/commands/top-k/RESERVE.spec.ts new file mode 100644 index 00000000000..aa8d194f940 --- /dev/null +++ b/packages/bloom/lib/commands/top-k/RESERVE.spec.ts @@ -0,0 +1,33 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../../test-utils'; +import RESERVE from './RESERVE'; +import { parseArgs } from 
'@redis/client/lib/commands/generic-transformers'; + +describe('TOPK.RESERVE', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(RESERVE, 'topK', 3), + ['TOPK.RESERVE', 'topK', '3'] + ); + }); + + it('with options', () => { + assert.deepEqual( + parseArgs(RESERVE, 'topK', 3, { + width: 8, + depth: 7, + decay: 0.9 + }), + ['TOPK.RESERVE', 'topK', '3', '8', '7', '0.9'] + ); + }); + }); + + testUtils.testWithClient('client.topK.reserve', async client => { + assert.equal( + await client.topK.reserve('topK', 3), + 'OK' + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/bloom/lib/commands/top-k/RESERVE.ts b/packages/bloom/lib/commands/top-k/RESERVE.ts new file mode 100644 index 00000000000..e31c2ceb99c --- /dev/null +++ b/packages/bloom/lib/commands/top-k/RESERVE.ts @@ -0,0 +1,36 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { SimpleStringReply, Command, RedisArgument } from '@redis/client/dist/lib/RESP/types'; + +export interface TopKReserveOptions { + width: number; + depth: number; + decay: number; +} + +export default { + IS_READ_ONLY: false, + /** + * Creates a new Top-K filter with specified parameters + * @param parser - The command parser + * @param key - The name of the Top-K filter + * @param topK - Number of top occurring items to keep + * @param options - Optional parameters for filter configuration + * @param options.width - Number of counters in each array + * @param options.depth - Number of counter-arrays + * @param options.decay - Counter decay factor + */ + parseCommand(parser: CommandParser, key: RedisArgument, topK: number, options?: TopKReserveOptions) { + parser.push('TOPK.RESERVE'); + parser.pushKey(key); + parser.push(topK.toString()); + + if (options) { + parser.push( + options.width.toString(), + options.depth.toString(), + options.decay.toString() + ); + } + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const 
satisfies Command; diff --git a/packages/bloom/lib/commands/top-k/index.ts b/packages/bloom/lib/commands/top-k/index.ts new file mode 100644 index 00000000000..fb5de543cab --- /dev/null +++ b/packages/bloom/lib/commands/top-k/index.ts @@ -0,0 +1,28 @@ +import type { RedisCommands } from '@redis/client/dist/lib/RESP/types'; +import ADD from './ADD'; +import COUNT from './COUNT'; +import INCRBY from './INCRBY'; +import INFO from './INFO'; +import LIST_WITHCOUNT from './LIST_WITHCOUNT'; +import LIST from './LIST'; +import QUERY from './QUERY'; +import RESERVE from './RESERVE'; + +export default { + ADD, + add: ADD, + COUNT, + count: COUNT, + INCRBY, + incrBy: INCRBY, + INFO, + info: INFO, + LIST_WITHCOUNT, + listWithCount: LIST_WITHCOUNT, + LIST, + list: LIST, + QUERY, + query: QUERY, + RESERVE, + reserve: RESERVE +} as const satisfies RedisCommands; diff --git a/packages/bloom/lib/index.ts b/packages/bloom/lib/index.ts new file mode 100644 index 00000000000..bc0e103e8c8 --- /dev/null +++ b/packages/bloom/lib/index.ts @@ -0,0 +1 @@ +export { default } from './commands'; diff --git a/packages/bloom/lib/test-utils.ts b/packages/bloom/lib/test-utils.ts new file mode 100644 index 00000000000..2bad3e07617 --- /dev/null +++ b/packages/bloom/lib/test-utils.ts @@ -0,0 +1,19 @@ +import TestUtils from '@redis/test-utils'; +import RedisBloomModules from '.'; + +export default TestUtils.createFromConfig({ + dockerImageName: 'redislabs/client-libs-test', + dockerImageVersionArgument: 'redis-version', + defaultDockerVersion: '8.4-M01-pre' +}); + +export const GLOBAL = { + SERVERS: { + OPEN: { + serverArguments: [], + clientOptions: { + modules: RedisBloomModules + } + } + } +}; diff --git a/packages/bloom/package.json b/packages/bloom/package.json new file mode 100644 index 00000000000..d6e6eb39364 --- /dev/null +++ b/packages/bloom/package.json @@ -0,0 +1,36 @@ +{ + "name": "@redis/bloom", + "version": "5.9.0-beta.2", + "license": "MIT", + "main": "./dist/lib/index.js", + "types": 
"./dist/lib/index.d.ts", + "files": [ + "dist/", + "!dist/tsconfig.tsbuildinfo" + ], + "scripts": { + "test": "nyc -r text-summary -r lcov mocha -r tsx './lib/**/*.spec.ts'", + "release": "release-it" + }, + "peerDependencies": { + "@redis/client": "^5.9.0-beta.2" + }, + "devDependencies": { + "@redis/test-utils": "*" + }, + "engines": { + "node": ">= 18" + }, + "repository": { + "type": "git", + "url": "git://github.com/redis/node-redis.git" + }, + "bugs": { + "url": "https://github.com/redis/node-redis/issues" + }, + "homepage": "https://github.com/redis/node-redis/tree/master/packages/bloom", + "keywords": [ + "redis", + "RedisBloom" + ] +} diff --git a/packages/bloom/tsconfig.json b/packages/bloom/tsconfig.json new file mode 100644 index 00000000000..da6b76e89be --- /dev/null +++ b/packages/bloom/tsconfig.json @@ -0,0 +1,20 @@ +{ + "extends": "../../tsconfig.base.json", + "compilerOptions": { + "outDir": "./dist" + }, + "include": [ + "./lib/**/*.ts" + ], + "exclude": [ + "./lib/test-utils.ts", + "./lib/**/*.spec.ts" + ], + "typedocOptions": { + "entryPoints": [ + "./lib" + ], + "entryPointStrategy": "expand", + "out": "../../documentation/bloom" + } +} diff --git a/packages/client/.nycrc.json b/packages/client/.nycrc.json new file mode 100644 index 00000000000..51e807f9cef --- /dev/null +++ b/packages/client/.nycrc.json @@ -0,0 +1,4 @@ +{ + "extends": "@istanbuljs/nyc-config-typescript", + "exclude": ["dist", "**/*.spec.ts", "lib/test-utils.ts", "examples/*"] +} diff --git a/packages/client/.release-it.json b/packages/client/.release-it.json new file mode 100644 index 00000000000..fe1a6ad0d0d --- /dev/null +++ b/packages/client/.release-it.json @@ -0,0 +1,13 @@ +{ + "npm": { + "publish": true, + "publishArgs": ["--access", "public"] + }, + "git": { + "tagName": "client@${version}", + "tagMatch": "client@*", + "commitMessage": "Release ${tagName}", + "tagAnnotation": "Release ${tagName}", + "commitArgs": "--all" + } +} 
diff --git a/packages/client/README.md b/packages/client/README.md new file mode 100644 index 00000000000..4b5d15088fa --- /dev/null +++ b/packages/client/README.md @@ -0,0 +1,3 @@ +# @redis/client + +The source code and documentation for this package are in the main [node-redis](https://github.com/redis/node-redis) repo. diff --git a/packages/client/index.ts b/packages/client/index.ts new file mode 100644 index 00000000000..09426cb50a3 --- /dev/null +++ b/packages/client/index.ts @@ -0,0 +1,37 @@ +export { + /* CommandPolicies, */ + RedisArgument, + RedisFunctions, + RedisModules, + RedisScripts, + RespVersions, + TypeMapping, +} from './lib/RESP/types'; +export { RESP_TYPES } from './lib/RESP/decoder'; +export { VerbatimString } from './lib/RESP/verbatim-string'; +export { defineScript } from './lib/lua-script'; +export * from './lib/errors'; + +import RedisClient, { RedisClientOptions, RedisClientType } from './lib/client'; +export { RedisClientOptions, RedisClientType }; +export const createClient = RedisClient.create; +export { CommandParser } from './lib/client/parser'; + +import { RedisClientPool, RedisPoolOptions, RedisClientPoolType } from './lib/client/pool'; +export { RedisClientPoolType, RedisPoolOptions }; +export const createClientPool = RedisClientPool.create; + +import RedisCluster, { RedisClusterOptions, RedisClusterType } from './lib/cluster'; +export { RedisClusterType, RedisClusterOptions }; +export const createCluster = RedisCluster.create; + +import RedisSentinel from './lib/sentinel'; +export { RedisSentinelOptions, RedisSentinelType } from './lib/sentinel/types'; +export const createSentinel = RedisSentinel.create; + +export { GEO_REPLY_WITH, GeoReplyWith } from './lib/commands/GEOSEARCH_WITH'; + + +export { SetOptions, CLIENT_KILL_FILTERS, FAILOVER_MODES, CLUSTER_SLOT_STATES, COMMAND_LIST_FILTER_BY, REDIS_FLUSH_MODES } from './lib/commands' + +export { BasicClientSideCache, BasicPooledClientSideCache } from 
'./lib/client/cache'; diff --git a/packages/client/lib/RESP/decoder.spec.ts b/packages/client/lib/RESP/decoder.spec.ts new file mode 100644 index 00000000000..c034815c9cd --- /dev/null +++ b/packages/client/lib/RESP/decoder.spec.ts @@ -0,0 +1,426 @@ +import { strict as assert } from 'node:assert'; +import { SinonSpy, spy } from 'sinon'; +import { Decoder, RESP_TYPES } from './decoder'; +import { BlobError, SimpleError } from '../errors'; +import { TypeMapping } from './types'; +import { VerbatimString } from './verbatim-string'; + +interface Test { + toWrite: Buffer; + typeMapping?: TypeMapping; + replies?: Array; + errorReplies?: Array; + pushReplies?: Array; +} + +function test(name: string, config: Test) { + describe(name, () => { + it('single chunk', () => { + const setup = setupTest(config); + setup.decoder.write(config.toWrite); + assertSpiesCalls(config, setup); + }); + + it('byte by byte', () => { + const setup = setupTest(config); + for (let i = 0; i < config.toWrite.length; i++) { + setup.decoder.write(config.toWrite.subarray(i, i + 1)); + } + assertSpiesCalls(config, setup); + }); + }) +} + +function setupTest(config: Test) { + const onReplySpy = spy(), + onErrorReplySpy = spy(), + onPushSpy = spy(); + + return { + decoder: new Decoder({ + getTypeMapping: () => config.typeMapping ?? 
{}, + onReply: onReplySpy, + onErrorReply: onErrorReplySpy, + onPush: onPushSpy + }), + onReplySpy, + onErrorReplySpy, + onPushSpy + }; +} + +function assertSpiesCalls(config: Test, spies: ReturnType) { + assertSpyCalls(spies.onReplySpy, config.replies); + assertSpyCalls(spies.onErrorReplySpy, config.errorReplies); + assertSpyCalls(spies.onPushSpy, config.pushReplies); +} + +function assertSpyCalls(spy: SinonSpy, replies?: Array) { + if (!replies) { + assert.equal(spy.callCount, 0); + return; + } + + assert.equal(spy.callCount, replies.length); + for (const [i, reply] of replies.entries()) { + assert.deepEqual( + spy.getCall(i).args, + [reply] + ); + } +} + +describe('RESP Decoder', () => { + test('Null', { + toWrite: Buffer.from('_\r\n'), + replies: [null] + }); + + describe('Boolean', () => { + test('true', { + toWrite: Buffer.from('#t\r\n'), + replies: [true] + }); + + test('false', { + toWrite: Buffer.from('#f\r\n'), + replies: [false] + }); + }); + + describe('Number', () => { + test('0', { + toWrite: Buffer.from(':0\r\n'), + replies: [0] + }); + + test('1', { + toWrite: Buffer.from(':+1\r\n'), + replies: [1] + }); + + test('+1', { + toWrite: Buffer.from(':+1\r\n'), + replies: [1] + }); + + test('-1', { + toWrite: Buffer.from(':-1\r\n'), + replies: [-1] + }); + + test('1 as string', { + typeMapping: { + [RESP_TYPES.NUMBER]: String + }, + toWrite: Buffer.from(':1\r\n'), + replies: ['1'] + }); + }); + + describe('BigNumber', () => { + test('0', { + toWrite: Buffer.from('(0\r\n'), + replies: [0n] + }); + + test('1', { + toWrite: Buffer.from('(1\r\n'), + replies: [1n] + }); + + test('+1', { + toWrite: Buffer.from('(+1\r\n'), + replies: [1n] + }); + + test('-1', { + toWrite: Buffer.from('(-1\r\n'), + replies: [-1n] + }); + + test('1 as string', { + typeMapping: { + [RESP_TYPES.BIG_NUMBER]: String + }, + toWrite: Buffer.from('(1\r\n'), + replies: ['1'] + }); + }); + + describe('Double', () => { + test('0', { + toWrite: Buffer.from(',0\r\n'), + replies: [0] + }); + + 
test('1', { + toWrite: Buffer.from(',1\r\n'), + replies: [1] + }); + + test('+1', { + toWrite: Buffer.from(',+1\r\n'), + replies: [1] + }); + + test('-1', { + toWrite: Buffer.from(',-1\r\n'), + replies: [-1] + }); + + test('1.1', { + toWrite: Buffer.from(',1.1\r\n'), + replies: [1.1] + }); + + test('nan', { + toWrite: Buffer.from(',nan\r\n'), + replies: [NaN] + }); + + test('inf', { + toWrite: Buffer.from(',inf\r\n'), + replies: [Infinity] + }); + + test('+inf', { + toWrite: Buffer.from(',+inf\r\n'), + replies: [Infinity] + }); + + test('-inf', { + toWrite: Buffer.from(',-inf\r\n'), + replies: [-Infinity] + }); + + test('1e1', { + toWrite: Buffer.from(',1e1\r\n'), + replies: [1e1] + }); + + test('-1.1E+1', { + toWrite: Buffer.from(',-1.1E+1\r\n'), + replies: [-1.1E+1] + }); + + test('1 as string', { + typeMapping: { + [RESP_TYPES.DOUBLE]: String + }, + toWrite: Buffer.from(',1\r\n'), + replies: ['1'] + }); + }); + + describe('SimpleString', () => { + test("'OK'", { + toWrite: Buffer.from('+OK\r\n'), + replies: ['OK'] + }); + + test("'OK' as Buffer", { + typeMapping: { + [RESP_TYPES.SIMPLE_STRING]: Buffer + }, + toWrite: Buffer.from('+OK\r\n'), + replies: [Buffer.from('OK')] + }); + }); + + describe('BlobString', () => { + test("''", { + toWrite: Buffer.from('$0\r\n\r\n'), + replies: [''] + }); + + test("'1234567890'", { + toWrite: Buffer.from('$10\r\n1234567890\r\n'), + replies: ['1234567890'] + }); + + test('null (RESP2 backwards compatibility)', { + toWrite: Buffer.from('$-1\r\n'), + replies: [null] + }); + + test("'OK' as Buffer", { + typeMapping: { + [RESP_TYPES.BLOB_STRING]: Buffer + }, + toWrite: Buffer.from('$2\r\nOK\r\n'), + replies: [Buffer.from('OK')] + }); + }); + + describe('VerbatimString', () => { + test("''", { + toWrite: Buffer.from('=4\r\ntxt:\r\n'), + replies: [''] + }); + + test("'123456'", { + toWrite: Buffer.from('=10\r\ntxt:123456\r\n'), + replies: ['123456'] + }); + + test("'OK' as VerbatimString", { + typeMapping: { + 
[RESP_TYPES.VERBATIM_STRING]: VerbatimString + }, + toWrite: Buffer.from('=6\r\ntxt:OK\r\n'), + replies: [new VerbatimString('txt', 'OK')] + }); + + test("'OK' as Buffer", { + typeMapping: { + [RESP_TYPES.VERBATIM_STRING]: Buffer + }, + toWrite: Buffer.from('=6\r\ntxt:OK\r\n'), + replies: [Buffer.from('OK')] + }); + }); + + test('SimpleError', { + toWrite: Buffer.from('-ERROR\r\n'), + errorReplies: [new SimpleError('ERROR')] + }); + + test('BlobError', { + toWrite: Buffer.from('!5\r\nERROR\r\n'), + errorReplies: [new BlobError('ERROR')] + }); + + describe('Array', () => { + test('[]', { + toWrite: Buffer.from('*0\r\n'), + replies: [[]] + }); + + test('[0..9]', { + toWrite: Buffer.from(`*10\r\n:0\r\n:1\r\n:2\r\n:3\r\n:4\r\n:5\r\n:6\r\n:7\r\n:8\r\n:9\r\n`), + replies: [[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]] + }); + + test('with all types', { + toWrite: Buffer.from([ + '*13\r\n', + '_\r\n', + '#f\r\n', + ':0\r\n', + '(0\r\n', + ',0\r\n', + '+\r\n', + '$0\r\n\r\n', + '=4\r\ntxt:\r\n', + '-\r\n', + '!0\r\n\r\n', + '*0\r\n', + '~0\r\n', + '%0\r\n' + ].join('')), + replies: [[ + null, + false, + 0, + 0n, + 0, + '', + '', + '', + new SimpleError(''), + new BlobError(''), + [], + [], + Object.create(null) + ]] + }); + + test('null (RESP2 backwards compatibility)', { + toWrite: Buffer.from('*-1\r\n'), + replies: [null] + }); + }); + + describe('Set', () => { + test('empty', { + toWrite: Buffer.from('~0\r\n'), + replies: [[]] + }); + + test('of 0..9', { + toWrite: Buffer.from(`~10\r\n:0\r\n:1\r\n:2\r\n:3\r\n:4\r\n:5\r\n:6\r\n:7\r\n:8\r\n:9\r\n`), + replies: [[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]] + }); + + test('0..9 as Set', { + typeMapping: { + [RESP_TYPES.SET]: Set + }, + toWrite: Buffer.from(`~10\r\n:0\r\n:1\r\n:2\r\n:3\r\n:4\r\n:5\r\n:6\r\n:7\r\n:8\r\n:9\r\n`), + replies: [new Set([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])] + }); + }); + + describe('Map', () => { + test('{}', { + toWrite: Buffer.from('%0\r\n'), + replies: [Object.create(null)] + }); + + test("{ '0'..'9': }", { + toWrite: 
Buffer.from(`%10\r\n+0\r\n+0\r\n+1\r\n+1\r\n+2\r\n+2\r\n+3\r\n+3\r\n+4\r\n+4\r\n+5\r\n+5\r\n+6\r\n+6\r\n+7\r\n+7\r\n+8\r\n+8\r\n+9\r\n+9\r\n`), + replies: [Object.create(null, { + 0: { value: '0', enumerable: true }, + 1: { value: '1', enumerable: true }, + 2: { value: '2', enumerable: true }, + 3: { value: '3', enumerable: true }, + 4: { value: '4', enumerable: true }, + 5: { value: '5', enumerable: true }, + 6: { value: '6', enumerable: true }, + 7: { value: '7', enumerable: true }, + 8: { value: '8', enumerable: true }, + 9: { value: '9', enumerable: true } + })] + }); + + test("{ '0'..'9': } as Map", { + typeMapping: { + [RESP_TYPES.MAP]: Map + }, + toWrite: Buffer.from(`%10\r\n+0\r\n+0\r\n+1\r\n+1\r\n+2\r\n+2\r\n+3\r\n+3\r\n+4\r\n+4\r\n+5\r\n+5\r\n+6\r\n+6\r\n+7\r\n+7\r\n+8\r\n+8\r\n+9\r\n+9\r\n`), + replies: [new Map([ + ['0', '0'], + ['1', '1'], + ['2', '2'], + ['3', '3'], + ['4', '4'], + ['5', '5'], + ['6', '6'], + ['7', '7'], + ['8', '8'], + ['9', '9'] + ])] + }); + + test("{ '0'..'9': } as Array", { + typeMapping: { + [RESP_TYPES.MAP]: Array + }, + toWrite: Buffer.from(`%10\r\n+0\r\n+0\r\n+1\r\n+1\r\n+2\r\n+2\r\n+3\r\n+3\r\n+4\r\n+4\r\n+5\r\n+5\r\n+6\r\n+6\r\n+7\r\n+7\r\n+8\r\n+8\r\n+9\r\n+9\r\n`), + replies: [['0', '0', '1', '1', '2', '2', '3', '3', '4', '4', '5', '5', '6', '6', '7', '7', '8', '8', '9', '9']] + }); + }); + + describe('Push', () => { + test('[]', { + toWrite: Buffer.from('>0\r\n'), + pushReplies: [[]] + }); + + test('[0..9]', { + toWrite: Buffer.from(`>10\r\n:0\r\n:1\r\n:2\r\n:3\r\n:4\r\n:5\r\n:6\r\n:7\r\n:8\r\n:9\r\n`), + pushReplies: [[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]] + }); + }); +}); diff --git a/packages/client/lib/RESP/decoder.ts b/packages/client/lib/RESP/decoder.ts new file mode 100644 index 00000000000..2485ea23b37 --- /dev/null +++ b/packages/client/lib/RESP/decoder.ts @@ -0,0 +1,1178 @@ +// @ts-nocheck +import { VerbatimString } from './verbatim-string'; +import { SimpleError, BlobError, ErrorReply } from '../errors'; +import { 
TypeMapping } from './types'; + +// https://github.com/redis/redis-specifications/blob/master/protocol/RESP3.md +export const RESP_TYPES = { + NULL: 95, // _ + BOOLEAN: 35, // # + NUMBER: 58, // : + BIG_NUMBER: 40, // ( + DOUBLE: 44, // , + SIMPLE_STRING: 43, // + + BLOB_STRING: 36, // $ + VERBATIM_STRING: 61, // = + SIMPLE_ERROR: 45, // - + BLOB_ERROR: 33, // ! + ARRAY: 42, // * + SET: 126, // ~ + MAP: 37, // % + PUSH: 62 // > +} as const; + +const ASCII = { + '\r': 13, + 't': 116, + '+': 43, + '-': 45, + '0': 48, + '.': 46, + 'i': 105, + 'n': 110, + 'E': 69, + 'e': 101 +} as const; + +export const PUSH_TYPE_MAPPING = { + [RESP_TYPES.BLOB_STRING]: Buffer +}; + +// this was written with performance in mind, so it's not very readable... sorry :( + +interface DecoderOptions { + onReply(reply: any): unknown; + onErrorReply(err: ErrorReply): unknown; + onPush(push: Array): unknown; + getTypeMapping(): TypeMapping; +} + +export class Decoder { + onReply; + onErrorReply; + onPush; + getTypeMapping; + #cursor = 0; + #next; + + constructor(config: DecoderOptions) { + this.onReply = config.onReply; + this.onErrorReply = config.onErrorReply; + this.onPush = config.onPush; + this.getTypeMapping = config.getTypeMapping; + } + + reset() { + this.#cursor = 0; + this.#next = undefined; + } + + write(chunk) { + if (this.#cursor >= chunk.length) { + this.#cursor -= chunk.length; + return; + } + + if (this.#next) { + if (this.#next(chunk) || this.#cursor >= chunk.length) { + this.#cursor -= chunk.length; + return; + } + } + + do { + const type = chunk[this.#cursor]; + if (++this.#cursor === chunk.length) { + this.#next = this.#continueDecodeTypeValue.bind(this, type); + break; + } + + if (this.#decodeTypeValue(type, chunk)) { + break; + } + } while (this.#cursor < chunk.length); + this.#cursor -= chunk.length; + } + + #continueDecodeTypeValue(type, chunk) { + this.#next = undefined; + return this.#decodeTypeValue(type, chunk); + } + + #decodeTypeValue(type, chunk) { + 
switch (type) { + case RESP_TYPES.NULL: + this.onReply(this.#decodeNull()); + return false; + + case RESP_TYPES.BOOLEAN: + return this.#handleDecodedValue( + this.onReply, + this.#decodeBoolean(chunk) + ); + + case RESP_TYPES.NUMBER: + return this.#handleDecodedValue( + this.onReply, + this.#decodeNumber( + this.getTypeMapping()[RESP_TYPES.NUMBER], + chunk + ) + ); + + case RESP_TYPES.BIG_NUMBER: + return this.#handleDecodedValue( + this.onReply, + this.#decodeBigNumber( + this.getTypeMapping()[RESP_TYPES.BIG_NUMBER], + chunk + ) + ); + + case RESP_TYPES.DOUBLE: + return this.#handleDecodedValue( + this.onReply, + this.#decodeDouble( + this.getTypeMapping()[RESP_TYPES.DOUBLE], + chunk + ) + ); + + case RESP_TYPES.SIMPLE_STRING: + return this.#handleDecodedValue( + this.onReply, + this.#decodeSimpleString( + this.getTypeMapping()[RESP_TYPES.SIMPLE_STRING], + chunk + ) + ); + + case RESP_TYPES.BLOB_STRING: + return this.#handleDecodedValue( + this.onReply, + this.#decodeBlobString( + this.getTypeMapping()[RESP_TYPES.BLOB_STRING], + chunk + ) + ); + + case RESP_TYPES.VERBATIM_STRING: + return this.#handleDecodedValue( + this.onReply, + this.#decodeVerbatimString( + this.getTypeMapping()[RESP_TYPES.VERBATIM_STRING], + chunk + ) + ); + + case RESP_TYPES.SIMPLE_ERROR: + return this.#handleDecodedValue( + this.onErrorReply, + this.#decodeSimpleError(chunk) + ); + + case RESP_TYPES.BLOB_ERROR: + return this.#handleDecodedValue( + this.onErrorReply, + this.#decodeBlobError(chunk) + ); + + case RESP_TYPES.ARRAY: + return this.#handleDecodedValue( + this.onReply, + this.#decodeArray(this.getTypeMapping(), chunk) + ); + + case RESP_TYPES.SET: + return this.#handleDecodedValue( + this.onReply, + this.#decodeSet(this.getTypeMapping(), chunk) + ); + + case RESP_TYPES.MAP: + return this.#handleDecodedValue( + this.onReply, + this.#decodeMap(this.getTypeMapping(), chunk) + ); + + case RESP_TYPES.PUSH: + return this.#handleDecodedValue( + this.onPush, + 
this.#decodeArray(PUSH_TYPE_MAPPING, chunk) + ); + + default: + throw new Error(`Unknown RESP type ${type} "${String.fromCharCode(type)}"`); + } + } + + #handleDecodedValue(cb, value) { + if (typeof value === 'function') { + this.#next = this.#continueDecodeValue.bind(this, cb, value); + return true; + } + + cb(value); + return false; + } + + #continueDecodeValue(cb, next, chunk) { + this.#next = undefined; + return this.#handleDecodedValue(cb, next(chunk)); + } + + #decodeNull() { + this.#cursor += 2; // skip \r\n + return null; + } + + #decodeBoolean(chunk) { + const boolean = chunk[this.#cursor] === ASCII.t; + this.#cursor += 3; // skip {t | f}\r\n + return boolean; + } + + #decodeNumber(type, chunk) { + if (type === String) { + return this.#decodeSimpleString(String, chunk); + } + + switch (chunk[this.#cursor]) { + case ASCII['+']: + return this.#maybeDecodeNumberValue(false, chunk); + + case ASCII['-']: + return this.#maybeDecodeNumberValue(true, chunk); + + default: + return this.#decodeNumberValue( + false, + this.#decodeUnsingedNumber.bind(this, 0), + chunk + ); + } + } + + #maybeDecodeNumberValue(isNegative, chunk) { + const cb = this.#decodeUnsingedNumber.bind(this, 0); + return ++this.#cursor === chunk.length ? + this.#decodeNumberValue.bind(this, isNegative, cb) : + this.#decodeNumberValue(isNegative, cb, chunk); + } + + #decodeNumberValue(isNegative, numberCb, chunk) { + const number = numberCb(chunk); + return typeof number === 'function' ? + this.#decodeNumberValue.bind(this, isNegative, number) : + isNegative ? 
-number : number; + } + + #decodeUnsingedNumber(number, chunk) { + let cursor = this.#cursor; + do { + const byte = chunk[cursor]; + if (byte === ASCII['\r']) { + this.#cursor = cursor + 2; // skip \r\n + return number; + } + number = number * 10 + byte - ASCII['0']; + } while (++cursor < chunk.length); + + this.#cursor = cursor; + return this.#decodeUnsingedNumber.bind(this, number); + } + + #decodeBigNumber(type, chunk) { + if (type === String) { + return this.#decodeSimpleString(String, chunk); + } + + switch (chunk[this.#cursor]) { + case ASCII['+']: + return this.#maybeDecodeBigNumberValue(false, chunk); + + case ASCII['-']: + return this.#maybeDecodeBigNumberValue(true, chunk); + + default: + return this.#decodeBigNumberValue( + false, + this.#decodeUnsingedBigNumber.bind(this, 0n), + chunk + ); + } + } + + #maybeDecodeBigNumberValue(isNegative, chunk) { + const cb = this.#decodeUnsingedBigNumber.bind(this, 0n); + return ++this.#cursor === chunk.length ? + this.#decodeBigNumberValue.bind(this, isNegative, cb) : + this.#decodeBigNumberValue(isNegative, cb, chunk); + } + + #decodeBigNumberValue(isNegative, bigNumberCb, chunk) { + const bigNumber = bigNumberCb(chunk); + return typeof bigNumber === 'function' ? + this.#decodeBigNumberValue.bind(this, isNegative, bigNumber) : + isNegative ? 
-bigNumber : bigNumber; + } + + #decodeUnsingedBigNumber(bigNumber, chunk) { + let cursor = this.#cursor; + do { + const byte = chunk[cursor]; + if (byte === ASCII['\r']) { + this.#cursor = cursor + 2; // skip \r\n + return bigNumber; + } + bigNumber = bigNumber * 10n + BigInt(byte - ASCII['0']); + } while (++cursor < chunk.length); + + this.#cursor = cursor; + return this.#decodeUnsingedBigNumber.bind(this, bigNumber); + } + + #decodeDouble(type, chunk) { + if (type === String) { + return this.#decodeSimpleString(String, chunk); + } + + switch (chunk[this.#cursor]) { + case ASCII.n: + this.#cursor += 5; // skip nan\r\n + return NaN; + + case ASCII['+']: + return this.#maybeDecodeDoubleInteger(false, chunk); + + case ASCII['-']: + return this.#maybeDecodeDoubleInteger(true, chunk); + + default: + return this.#decodeDoubleInteger(false, 0, chunk); + } + } + + #maybeDecodeDoubleInteger(isNegative, chunk) { + return ++this.#cursor === chunk.length ? + this.#decodeDoubleInteger.bind(this, isNegative, 0) : + this.#decodeDoubleInteger(isNegative, 0, chunk); + } + + #decodeDoubleInteger(isNegative, integer, chunk) { + if (chunk[this.#cursor] === ASCII.i) { + this.#cursor += 5; // skip inf\r\n + return isNegative ? -Infinity : Infinity; + } + + return this.#continueDecodeDoubleInteger(isNegative, integer, chunk); + } + + #continueDecodeDoubleInteger(isNegative, integer, chunk) { + let cursor = this.#cursor; + do { + const byte = chunk[cursor]; + switch (byte) { + case ASCII['.']: + this.#cursor = cursor + 1; // skip . + return this.#cursor < chunk.length ? + this.#decodeDoubleDecimal(isNegative, 0, integer, chunk) : + this.#decodeDoubleDecimal.bind(this, isNegative, 0, integer); + + case ASCII.E: + case ASCII.e: + this.#cursor = cursor + 1; // skip E/e + const i = isNegative ? -integer : integer; + return this.#cursor < chunk.length ? 
+ this.#decodeDoubleExponent(i, chunk) : + this.#decodeDoubleExponent.bind(this, i); + + case ASCII['\r']: + this.#cursor = cursor + 2; // skip \r\n + return isNegative ? -integer : integer; + + default: + integer = integer * 10 + byte - ASCII['0']; + } + } while (++cursor < chunk.length); + + this.#cursor = cursor; + return this.#continueDecodeDoubleInteger.bind(this, isNegative, integer); + } + + // Precalculated multipliers for decimal points to improve performance + // "... about 15 to 17 decimal places ..." + // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number#:~:text=about%2015%20to%2017%20decimal%20places + static #DOUBLE_DECIMAL_MULTIPLIERS = [ + 1e-1, 1e-2, 1e-3, 1e-4, 1e-5, 1e-6, + 1e-7, 1e-8, 1e-9, 1e-10, 1e-11, 1e-12, + 1e-13, 1e-14, 1e-15, 1e-16, 1e-17 + ]; + + #decodeDoubleDecimal(isNegative, decimalIndex, double, chunk) { + let cursor = this.#cursor; + do { + const byte = chunk[cursor]; + switch (byte) { + case ASCII.E: + case ASCII.e: + this.#cursor = cursor + 1; // skip E/e + const d = isNegative ? -double : double; + return this.#cursor === chunk.length ? + this.#decodeDoubleExponent.bind(this, d) : + this.#decodeDoubleExponent(d, chunk); + + case ASCII['\r']: + this.#cursor = cursor + 2; // skip \r\n + return isNegative ? -double : double; + } + + if (decimalIndex < Decoder.#DOUBLE_DECIMAL_MULTIPLIERS.length) { + double += (byte - ASCII['0']) * Decoder.#DOUBLE_DECIMAL_MULTIPLIERS[decimalIndex++]; + } + } while (++cursor < chunk.length); + + this.#cursor = cursor; + return this.#decodeDoubleDecimal.bind(this, isNegative, decimalIndex, double); + } + + #decodeDoubleExponent(double, chunk) { + switch (chunk[this.#cursor]) { + case ASCII['+']: + return ++this.#cursor === chunk.length ? + this.#continueDecodeDoubleExponent.bind(this, false, double, 0) : + this.#continueDecodeDoubleExponent(false, double, 0, chunk); + + case ASCII['-']: + return ++this.#cursor === chunk.length ? 
+ this.#continueDecodeDoubleExponent.bind(this, true, double, 0) : + this.#continueDecodeDoubleExponent(true, double, 0, chunk); + } + + return this.#continueDecodeDoubleExponent(false, double, 0, chunk); + } + + #continueDecodeDoubleExponent(isNegative, double, exponent, chunk) { + let cursor = this.#cursor; + do { + const byte = chunk[cursor]; + if (byte === ASCII['\r']) { + this.#cursor = cursor + 2; // skip \r\n + return double * 10 ** (isNegative ? -exponent : exponent); + } + + exponent = exponent * 10 + byte - ASCII['0']; + } while (++cursor < chunk.length); + + this.#cursor = cursor; + return this.#continueDecodeDoubleExponent.bind(this, isNegative, double, exponent); + } + + #findCRLF(chunk, cursor) { + while (chunk[cursor] !== ASCII['\r']) { + if (++cursor === chunk.length) { + this.#cursor = chunk.length; + return -1; + } + } + + this.#cursor = cursor + 2; // skip \r\n + return cursor; + } + + #decodeSimpleString(type, chunk) { + const start = this.#cursor, + crlfIndex = this.#findCRLF(chunk, start); + if (crlfIndex === -1) { + return this.#continueDecodeSimpleString.bind( + this, + [chunk.subarray(start)], + type + ); + } + + const slice = chunk.subarray(start, crlfIndex); + return type === Buffer ? + slice : + slice.toString(); + } + + #continueDecodeSimpleString(chunks, type, chunk) { + const start = this.#cursor, + crlfIndex = this.#findCRLF(chunk, start); + if (crlfIndex === -1) { + chunks.push(chunk.subarray(start)); + return this.#continueDecodeSimpleString.bind(this, chunks, type); + } + + chunks.push(chunk.subarray(start, crlfIndex)); + return type === Buffer ? 
+ Buffer.concat(chunks) : + chunks.join(''); + } + + #decodeBlobString(type, chunk) { + // RESP 2 bulk string null + // https://github.com/redis/redis-specifications/blob/master/protocol/RESP2.md#resp-bulk-strings + if (chunk[this.#cursor] === ASCII['-']) { + this.#cursor += 4; // skip -1\r\n + return null; + } + + const length = this.#decodeUnsingedNumber(0, chunk); + if (typeof length === 'function') { + return this.#continueDecodeBlobStringLength.bind(this, length, type); + } else if (this.#cursor >= chunk.length) { + return this.#decodeBlobStringWithLength.bind(this, length, type); + } + + return this.#decodeBlobStringWithLength(length, type, chunk); + } + + #continueDecodeBlobStringLength(lengthCb, type, chunk) { + const length = lengthCb(chunk); + if (typeof length === 'function') { + return this.#continueDecodeBlobStringLength.bind(this, length, type); + } else if (this.#cursor >= chunk.length) { + return this.#decodeBlobStringWithLength.bind(this, length, type); + } + + return this.#decodeBlobStringWithLength(length, type, chunk); + } + + #decodeStringWithLength(length, skip, type, chunk) { + const end = this.#cursor + length; + if (end >= chunk.length) { + const slice = chunk.subarray(this.#cursor); + this.#cursor = chunk.length; + return this.#continueDecodeStringWithLength.bind( + this, + length - slice.length, + [slice], + skip, + type + ); + } + + const slice = chunk.subarray(this.#cursor, end); + this.#cursor = end + skip; + return type === Buffer ? 
+ slice : + slice.toString(); + } + + #continueDecodeStringWithLength(length, chunks, skip, type, chunk) { + const end = this.#cursor + length; + if (end >= chunk.length) { + const slice = chunk.subarray(this.#cursor); + chunks.push(slice); + this.#cursor = chunk.length; + return this.#continueDecodeStringWithLength.bind( + this, + length - slice.length, + chunks, + skip, + type + ); + } + + chunks.push(chunk.subarray(this.#cursor, end)); + this.#cursor = end + skip; + return type === Buffer ? + Buffer.concat(chunks) : + chunks.join(''); + } + + #decodeBlobStringWithLength(length, type, chunk) { + return this.#decodeStringWithLength(length, 2, type, chunk); + } + + #decodeVerbatimString(type, chunk) { + return this.#continueDecodeVerbatimStringLength( + this.#decodeUnsingedNumber.bind(this, 0), + type, + chunk + ); + } + + #continueDecodeVerbatimStringLength(lengthCb, type, chunk) { + const length = lengthCb(chunk); + return typeof length === 'function' ? + this.#continueDecodeVerbatimStringLength.bind(this, length, type) : + this.#decodeVerbatimStringWithLength(length, type, chunk); + } + + #decodeVerbatimStringWithLength(length, type, chunk) { + const stringLength = length - 4; // skip : + if (type === VerbatimString) { + return this.#decodeVerbatimStringFormat(stringLength, chunk); + } + + this.#cursor += 4; // skip : + return this.#cursor >= chunk.length ? + this.#decodeBlobStringWithLength.bind(this, stringLength, type) : + this.#decodeBlobStringWithLength(stringLength, type, chunk); + } + + #decodeVerbatimStringFormat(stringLength, chunk) { + const formatCb = this.#decodeStringWithLength.bind(this, 3, 1, String); + return this.#cursor >= chunk.length ? + this.#continueDecodeVerbatimStringFormat.bind(this, stringLength, formatCb) : + this.#continueDecodeVerbatimStringFormat(stringLength, formatCb, chunk); + } + + #continueDecodeVerbatimStringFormat(stringLength, formatCb, chunk) { + const format = formatCb(chunk); + return typeof format === 'function' ? 
+ this.#continueDecodeVerbatimStringFormat.bind(this, stringLength, format) : + this.#decodeVerbatimStringWithFormat(stringLength, format, chunk); + } + + #decodeVerbatimStringWithFormat(stringLength, format, chunk) { + return this.#continueDecodeVerbatimStringWithFormat( + format, + this.#decodeBlobStringWithLength.bind(this, stringLength, String), + chunk + ); + } + + #continueDecodeVerbatimStringWithFormat(format, stringCb, chunk) { + const string = stringCb(chunk); + return typeof string === 'function' ? + this.#continueDecodeVerbatimStringWithFormat.bind(this, format, string) : + new VerbatimString(format, string); + } + + #decodeSimpleError(chunk) { + const string = this.#decodeSimpleString(String, chunk); + return typeof string === 'function' ? + this.#continueDecodeSimpleError.bind(this, string) : + new SimpleError(string); + } + + #continueDecodeSimpleError(stringCb, chunk) { + const string = stringCb(chunk); + return typeof string === 'function' ? + this.#continueDecodeSimpleError.bind(this, string) : + new SimpleError(string); + } + + #decodeBlobError(chunk) { + const string = this.#decodeBlobString(String, chunk); + return typeof string === 'function' ? + this.#continueDecodeBlobError.bind(this, string) : + new BlobError(string); + } + + #continueDecodeBlobError(stringCb, chunk) { + const string = stringCb(chunk); + return typeof string === 'function' ? + this.#continueDecodeBlobError.bind(this, string) : + new BlobError(string); + } + + #decodeNestedType(typeMapping, chunk) { + const type = chunk[this.#cursor]; + return ++this.#cursor === chunk.length ? 
+ this.#decodeNestedTypeValue.bind(this, type, typeMapping) : + this.#decodeNestedTypeValue(type, typeMapping, chunk); + } + + #decodeNestedTypeValue(type, typeMapping, chunk) { + switch (type) { + case RESP_TYPES.NULL: + return this.#decodeNull(); + + case RESP_TYPES.BOOLEAN: + return this.#decodeBoolean(chunk); + + case RESP_TYPES.NUMBER: + return this.#decodeNumber(typeMapping[RESP_TYPES.NUMBER], chunk); + + case RESP_TYPES.BIG_NUMBER: + return this.#decodeBigNumber(typeMapping[RESP_TYPES.BIG_NUMBER], chunk); + + case RESP_TYPES.DOUBLE: + return this.#decodeDouble(typeMapping[RESP_TYPES.DOUBLE], chunk); + + case RESP_TYPES.SIMPLE_STRING: + return this.#decodeSimpleString(typeMapping[RESP_TYPES.SIMPLE_STRING], chunk); + + case RESP_TYPES.BLOB_STRING: + return this.#decodeBlobString(typeMapping[RESP_TYPES.BLOB_STRING], chunk); + + case RESP_TYPES.VERBATIM_STRING: + return this.#decodeVerbatimString(typeMapping[RESP_TYPES.VERBATIM_STRING], chunk); + + case RESP_TYPES.SIMPLE_ERROR: + return this.#decodeSimpleError(chunk); + + case RESP_TYPES.BLOB_ERROR: + return this.#decodeBlobError(chunk); + + case RESP_TYPES.ARRAY: + return this.#decodeArray(typeMapping, chunk); + + case RESP_TYPES.SET: + return this.#decodeSet(typeMapping, chunk); + + case RESP_TYPES.MAP: + return this.#decodeMap(typeMapping, chunk); + + default: + throw new Error(`Unknown RESP type ${type} "${String.fromCharCode(type)}"`); + } + } + + #decodeArray(typeMapping, chunk) { + // RESP 2 null + // https://github.com/redis/redis-specifications/blob/master/protocol/RESP2.md#resp-arrays + if (chunk[this.#cursor] === ASCII['-']) { + this.#cursor += 4; // skip -1\r\n + return null; + } + + return this.#decodeArrayWithLength( + this.#decodeUnsingedNumber(0, chunk), + typeMapping, + chunk + ); + } + + #decodeArrayWithLength(length, typeMapping, chunk) { + return typeof length === 'function' ? 
+ this.#continueDecodeArrayLength.bind(this, length, typeMapping) : + this.#decodeArrayItems( + new Array(length), + 0, + typeMapping, + chunk + ); + } + + #continueDecodeArrayLength(lengthCb, typeMapping, chunk) { + return this.#decodeArrayWithLength( + lengthCb(chunk), + typeMapping, + chunk + ); + } + + #decodeArrayItems(array, filled, typeMapping, chunk) { + for (let i = filled; i < array.length; i++) { + if (this.#cursor >= chunk.length) { + return this.#decodeArrayItems.bind( + this, + array, + i, + typeMapping + ); + } + + const item = this.#decodeNestedType(typeMapping, chunk); + if (typeof item === 'function') { + return this.#continueDecodeArrayItems.bind( + this, + array, + i, + item, + typeMapping + ); + } + + array[i] = item; + } + + return array; + } + + #continueDecodeArrayItems(array, filled, itemCb, typeMapping, chunk) { + const item = itemCb(chunk); + if (typeof item === 'function') { + return this.#continueDecodeArrayItems.bind( + this, + array, + filled, + item, + typeMapping + ); + } + + array[filled++] = item; + + return this.#decodeArrayItems(array, filled, typeMapping, chunk); + } + + #decodeSet(typeMapping, chunk) { + const length = this.#decodeUnsingedNumber(0, chunk); + if (typeof length === 'function') { + return this.#continueDecodeSetLength.bind(this, length, typeMapping); + } + + return this.#decodeSetItems( + length, + typeMapping, + chunk + ); + } + + #continueDecodeSetLength(lengthCb, typeMapping, chunk) { + const length = lengthCb(chunk); + return typeof length === 'function' ? + this.#continueDecodeSetLength.bind(this, length, typeMapping) : + this.#decodeSetItems(length, typeMapping, chunk); + } + + #decodeSetItems(length, typeMapping, chunk) { + return typeMapping[RESP_TYPES.SET] === Set ? 
+ this.#decodeSetAsSet( + new Set(), + length, + typeMapping, + chunk + ) : + this.#decodeArrayItems( + new Array(length), + 0, + typeMapping, + chunk + ); + } + + #decodeSetAsSet(set, remaining, typeMapping, chunk) { + // using `remaining` instead of `length` & `set.size` to make it work even if the set contains duplicates + while (remaining > 0) { + if (this.#cursor >= chunk.length) { + return this.#decodeSetAsSet.bind( + this, + set, + remaining, + typeMapping + ); + } + + const item = this.#decodeNestedType(typeMapping, chunk); + if (typeof item === 'function') { + return this.#continueDecodeSetAsSet.bind( + this, + set, + remaining, + item, + typeMapping + ); + } + + set.add(item); + --remaining; + } + + return set; + } + + #continueDecodeSetAsSet(set, remaining, itemCb, typeMapping, chunk) { + const item = itemCb(chunk); + if (typeof item === 'function') { + return this.#continueDecodeSetAsSet.bind( + this, + set, + remaining, + item, + typeMapping + ); + } + + set.add(item); + + return this.#decodeSetAsSet(set, remaining - 1, typeMapping, chunk); + } + + #decodeMap(typeMapping, chunk) { + const length = this.#decodeUnsingedNumber(0, chunk); + if (typeof length === 'function') { + return this.#continueDecodeMapLength.bind(this, length, typeMapping); + } + + return this.#decodeMapItems( + length, + typeMapping, + chunk + ); + } + + #continueDecodeMapLength(lengthCb, typeMapping, chunk) { + const length = lengthCb(chunk); + return typeof length === 'function' ? 
+ this.#continueDecodeMapLength.bind(this, length, typeMapping) : + this.#decodeMapItems(length, typeMapping, chunk); + } + + #decodeMapItems(length, typeMapping, chunk) { + switch (typeMapping[RESP_TYPES.MAP]) { + case Map: + return this.#decodeMapAsMap( + new Map(), + length, + typeMapping, + chunk + ); + + case Array: + return this.#decodeArrayItems( + new Array(length * 2), + 0, + typeMapping, + chunk + ); + + default: + return this.#decodeMapAsObject( + Object.create(null), + length, + typeMapping, + chunk + ); + } + } + + #decodeMapAsMap(map, remaining, typeMapping, chunk) { + // using `remaining` instead of `length` & `map.size` to make it work even if the map contains duplicate keys + while (remaining > 0) { + if (this.#cursor >= chunk.length) { + return this.#decodeMapAsMap.bind( + this, + map, + remaining, + typeMapping + ); + } + + const key = this.#decodeMapKey(typeMapping, chunk); + if (typeof key === 'function') { + return this.#continueDecodeMapKey.bind( + this, + map, + remaining, + key, + typeMapping + ); + } + + if (this.#cursor >= chunk.length) { + return this.#continueDecodeMapValue.bind( + this, + map, + remaining, + key, + this.#decodeNestedType.bind(this, typeMapping), + typeMapping + ); + } + + const value = this.#decodeNestedType(typeMapping, chunk); + if (typeof value === 'function') { + return this.#continueDecodeMapValue.bind( + this, + map, + remaining, + key, + value, + typeMapping + ); + } + + map.set(key, value); + --remaining; + } + + return map; + } + + #decodeMapKey(typeMapping, chunk) { + const type = chunk[this.#cursor]; + return ++this.#cursor === chunk.length ? 
+ this.#decodeMapKeyValue.bind(this, type, typeMapping) : + this.#decodeMapKeyValue(type, typeMapping, chunk); + } + + #decodeMapKeyValue(type, typeMapping, chunk) { + switch (type) { + // decode simple string map key as string (and not as buffer) + case RESP_TYPES.SIMPLE_STRING: + return this.#decodeSimpleString(String, chunk); + + // decode blob string map key as string (and not as buffer) + case RESP_TYPES.BLOB_STRING: + return this.#decodeBlobString(String, chunk); + + default: + return this.#decodeNestedTypeValue(type, typeMapping, chunk); + } + } + + #continueDecodeMapKey(map, remaining, keyCb, typeMapping, chunk) { + const key = keyCb(chunk); + if (typeof key === 'function') { + return this.#continueDecodeMapKey.bind( + this, + map, + remaining, + key, + typeMapping + ); + } + + if (this.#cursor >= chunk.length) { + return this.#continueDecodeMapValue.bind( + this, + map, + remaining, + key, + this.#decodeNestedType.bind(this, typeMapping), + typeMapping + ); + } + + const value = this.#decodeNestedType(typeMapping, chunk); + if (typeof value === 'function') { + return this.#continueDecodeMapValue.bind( + this, + map, + remaining, + key, + value, + typeMapping + ); + } + + map.set(key, value); + return this.#decodeMapAsMap(map, remaining - 1, typeMapping, chunk); + } + + #continueDecodeMapValue(map, remaining, key, valueCb, typeMapping, chunk) { + const value = valueCb(chunk); + if (typeof value === 'function') { + return this.#continueDecodeMapValue.bind( + this, + map, + remaining, + key, + value, + typeMapping + ); + } + + map.set(key, value); + + return this.#decodeMapAsMap(map, remaining - 1, typeMapping, chunk); + } + + #decodeMapAsObject(object, remaining, typeMapping, chunk) { + while (remaining > 0) { + if (this.#cursor >= chunk.length) { + return this.#decodeMapAsObject.bind( + this, + object, + remaining, + typeMapping + ); + } + + const key = this.#decodeMapKey(typeMapping, chunk); + if (typeof key === 'function') { + return 
this.#continueDecodeMapAsObjectKey.bind( + this, + object, + remaining, + key, + typeMapping + ); + } + + if (this.#cursor >= chunk.length) { + return this.#continueDecodeMapAsObjectValue.bind( + this, + object, + remaining, + key, + this.#decodeNestedType.bind(this, typeMapping), + typeMapping + ); + } + + const value = this.#decodeNestedType(typeMapping, chunk); + if (typeof value === 'function') { + return this.#continueDecodeMapAsObjectValue.bind( + this, + object, + remaining, + key, + value, + typeMapping + ); + } + + object[key] = value; + --remaining; + } + + return object; + } + + #continueDecodeMapAsObjectKey(object, remaining, keyCb, typeMapping, chunk) { + const key = keyCb(chunk); + if (typeof key === 'function') { + return this.#continueDecodeMapAsObjectKey.bind( + this, + object, + remaining, + key, + typeMapping + ); + } + + if (this.#cursor >= chunk.length) { + return this.#continueDecodeMapAsObjectValue.bind( + this, + object, + remaining, + key, + this.#decodeNestedType.bind(this, typeMapping), + typeMapping + ); + } + + const value = this.#decodeNestedType(typeMapping, chunk); + if (typeof value === 'function') { + return this.#continueDecodeMapAsObjectValue.bind( + this, + object, + remaining, + key, + value, + typeMapping + ); + } + + object[key] = value; + + return this.#decodeMapAsObject(object, remaining - 1, typeMapping, chunk); + } + + #continueDecodeMapAsObjectValue(object, remaining, key, valueCb, typeMapping, chunk) { + const value = valueCb(chunk); + if (typeof value === 'function') { + return this.#continueDecodeMapAsObjectValue.bind( + this, + object, + remaining, + key, + value, + typeMapping + ); + } + + object[key] = value; + + return this.#decodeMapAsObject(object, remaining - 1, typeMapping, chunk); + } +} diff --git a/packages/client/lib/RESP/encoder.spec.ts b/packages/client/lib/RESP/encoder.spec.ts new file mode 100644 index 00000000000..2cbdc7d0b24 --- /dev/null +++ b/packages/client/lib/RESP/encoder.spec.ts @@ -0,0 +1,33 
@@ +import { strict as assert } from 'node:assert'; +import { describe } from 'mocha'; +import encodeCommand from './encoder'; + +describe('RESP Encoder', () => { + it('1 byte', () => { + assert.deepEqual( + encodeCommand(['a', 'z']), + ['*2\r\n$1\r\na\r\n$1\r\nz\r\n'] + ); + }); + + it('2 bytes', () => { + assert.deepEqual( + encodeCommand(['א', 'Χͺ']), + ['*2\r\n$2\r\nא\r\n$2\r\nΧͺ\r\n'] + ); + }); + + it('4 bytes', () => { + assert.deepEqual( + [...encodeCommand(['🐣', '🐀'])], + ['*2\r\n$4\r\n🐣\r\n$4\r\n🐀\r\n'] + ); + }); + + it('buffer', () => { + assert.deepEqual( + encodeCommand([Buffer.from('string')]), + ['*1\r\n$6\r\n', Buffer.from('string'), '\r\n'] + ); + }); +}); diff --git a/packages/client/lib/RESP/encoder.ts b/packages/client/lib/RESP/encoder.ts new file mode 100644 index 00000000000..995650627f1 --- /dev/null +++ b/packages/client/lib/RESP/encoder.ts @@ -0,0 +1,28 @@ +import { RedisArgument } from './types'; + +const CRLF = '\r\n'; + +export default function encodeCommand(args: ReadonlyArray): ReadonlyArray { + const toWrite: Array = []; + + let strings = '*' + args.length + CRLF; + + for (let i = 0; i < args.length; i++) { + const arg = args[i]; + if (typeof arg === 'string') { + strings += '$' + Buffer.byteLength(arg) + CRLF + arg + CRLF; + } else if (arg instanceof Buffer) { + toWrite.push( + strings + '$' + arg.length.toString() + CRLF, + arg + ); + strings = CRLF; + } else { + throw new TypeError(`"arguments[${i}]" must be of type "string | Buffer", got ${typeof arg} instead.`); + } + } + + toWrite.push(strings); + + return toWrite; +} diff --git a/packages/client/lib/RESP/types.ts b/packages/client/lib/RESP/types.ts new file mode 100644 index 00000000000..8749bbdc7b0 --- /dev/null +++ b/packages/client/lib/RESP/types.ts @@ -0,0 +1,407 @@ +import { CommandParser } from '../client/parser'; +import { Tail } from '../commands/generic-transformers'; +import { BlobError, SimpleError } from '../errors'; +import { RedisScriptConfig, SHA1 } from 
'../lua-script'; +import { RESP_TYPES } from './decoder'; +import { VerbatimString } from './verbatim-string'; + +export type RESP_TYPES = typeof RESP_TYPES; + +export type RespTypes = RESP_TYPES[keyof RESP_TYPES]; + +// using interface(s) to allow circular references +// type X = BlobStringReply | ArrayReply; + +export interface RespType< + RESP_TYPE extends RespTypes, + DEFAULT, + TYPES = never, + TYPE_MAPPING = DEFAULT | TYPES +> { + RESP_TYPE: RESP_TYPE; + DEFAULT: DEFAULT; + TYPES: TYPES; + TYPE_MAPPING: MappedType; +} + +export interface NullReply extends RespType< + RESP_TYPES['NULL'], + null +> {} + +export interface BooleanReply< + T extends boolean = boolean +> extends RespType< + RESP_TYPES['BOOLEAN'], + T +> {} + +export interface NumberReply< + T extends number = number +> extends RespType< + RESP_TYPES['NUMBER'], + T, + `${T}`, + number | string +> {} + +export interface BigNumberReply< + T extends bigint = bigint +> extends RespType< + RESP_TYPES['BIG_NUMBER'], + T, + number | `${T}`, + bigint | number | string +> {} + +export interface DoubleReply< + T extends number = number +> extends RespType< + RESP_TYPES['DOUBLE'], + T, + `${T}`, + number | string +> {} + +export interface SimpleStringReply< + T extends string = string +> extends RespType< + RESP_TYPES['SIMPLE_STRING'], + T, + Buffer, + string | Buffer +> {} + +export interface BlobStringReply< + T extends string = string +> extends RespType< + RESP_TYPES['BLOB_STRING'], + T, + Buffer, + string | Buffer +> { + toString(): string +} + +export interface VerbatimStringReply< + T extends string = string +> extends RespType< + RESP_TYPES['VERBATIM_STRING'], + T, + Buffer | VerbatimString, + string | Buffer | VerbatimString +> {} + +export interface SimpleErrorReply extends RespType< + RESP_TYPES['SIMPLE_ERROR'], + SimpleError, + Buffer +> {} + +export interface BlobErrorReply extends RespType< + RESP_TYPES['BLOB_ERROR'], + BlobError, + Buffer +> {} + +export interface ArrayReply extends RespType< + 
RESP_TYPES['ARRAY'], + Array, + never, + Array +> {} + +export interface TuplesReply]> extends RespType< + RESP_TYPES['ARRAY'], + T, + never, + Array +> {} + +export interface SetReply extends RespType< + RESP_TYPES['SET'], + Array, + Set, + Array | Set +> {} + +export interface MapReply extends RespType< + RESP_TYPES['MAP'], + { [key: string]: V }, + Map | Array, + Map | Array +> {} + +type MapKeyValue = [key: BlobStringReply | SimpleStringReply, value: unknown]; + +type MapTuples = Array; + +type ExtractMapKey = ( + T extends BlobStringReply ? S : + T extends SimpleStringReply ? S : + never +); + +export interface TuplesToMapReply extends RespType< + RESP_TYPES['MAP'], + { + [P in T[number] as ExtractMapKey]: P[1]; + }, + Map, T[number][1]> | FlattenTuples +> {} + +type FlattenTuples = ( + T extends [] ? [] : + T extends [MapKeyValue] ? T[0] : + T extends [MapKeyValue, ...infer R] ? [ + ...T[0], + ...FlattenTuples + ] : + never +); + +export type ReplyUnion = ( + NullReply | + BooleanReply | + NumberReply | + BigNumberReply | + DoubleReply | + SimpleStringReply | + BlobStringReply | + VerbatimStringReply | + SimpleErrorReply | + BlobErrorReply | + ArrayReply | + SetReply | + MapReply +); + +export type MappedType = ((...args: any) => T) | (new (...args: any) => T); + +type InferTypeMapping = T extends RespType ? FLAG_TYPES : never; + +export type TypeMapping = { + [P in RespTypes]?: MappedType>>>; +}; + +type MapKey< + T, + TYPE_MAPPING extends TypeMapping +> = ReplyWithTypeMapping; + +export type UnwrapReply> = REPLY['DEFAULT' | 'TYPES']; + +export type ReplyWithTypeMapping< + REPLY, + TYPE_MAPPING extends TypeMapping +> = ( + // if REPLY is a type, extract the coresponding type from TYPE_MAPPING or use the default type + REPLY extends RespType ? + TYPE_MAPPING[RESP_TYPE] extends MappedType ? + ReplyWithTypeMapping, TYPE_MAPPING> : + ReplyWithTypeMapping + : ( + // if REPLY is a known generic type, convert its generic arguments + // TODO: tuples? 
+ REPLY extends Array ? Array> : + REPLY extends Set ? Set> : + REPLY extends Map ? Map, ReplyWithTypeMapping> : + // `Date | Buffer | Error` are supersets of `Record`, so they need to be checked first + REPLY extends Date | Buffer | Error ? REPLY : + REPLY extends Record ? { + [P in keyof REPLY]: ReplyWithTypeMapping; + } : + // otherwise, just return the REPLY as is + REPLY + ) +); + +export type TransformReply = (this: void, reply: any, preserve?: any, typeMapping?: TypeMapping) => any; // TODO; + +export type RedisArgument = string | Buffer; + +export type CommandArguments = Array & { preserve?: unknown }; + +// export const REQUEST_POLICIES = { +// /** +// * TODO +// */ +// ALL_NODES: 'all_nodes', +// /** +// * TODO +// */ +// ALL_SHARDS: 'all_shards', +// /** +// * TODO +// */ +// SPECIAL: 'special' +// } as const; + +// export type REQUEST_POLICIES = typeof REQUEST_POLICIES; + +// export type RequestPolicies = REQUEST_POLICIES[keyof REQUEST_POLICIES]; + +// export const RESPONSE_POLICIES = { +// /** +// * TODO +// */ +// ONE_SUCCEEDED: 'one_succeeded', +// /** +// * TODO +// */ +// ALL_SUCCEEDED: 'all_succeeded', +// /** +// * TODO +// */ +// LOGICAL_AND: 'agg_logical_and', +// /** +// * TODO +// */ +// SPECIAL: 'special' +// } as const; + +// export type RESPONSE_POLICIES = typeof RESPONSE_POLICIES; + +// export type ResponsePolicies = RESPONSE_POLICIES[keyof RESPONSE_POLICIES]; + +// export type CommandPolicies = { +// request?: RequestPolicies | null; +// response?: ResponsePolicies | null; +// }; + +export type Command = { + CACHEABLE?: boolean; + IS_READ_ONLY?: boolean; + /** + * @internal + * TODO: remove once `POLICIES` is implemented + */ + IS_FORWARD_COMMAND?: boolean; + NOT_KEYED_COMMAND?: true; + // POLICIES?: CommandPolicies; + parseCommand(this: void, parser: CommandParser, ...args: Array): void; + TRANSFORM_LEGACY_REPLY?: boolean; + transformReply: TransformReply | Record; + unstableResp3?: boolean; +}; + +export type RedisCommands = Record; + 
+export type RedisModules = Record; + +export interface RedisFunction extends Command { + NUMBER_OF_KEYS?: number; +} + +export type RedisFunctions = Record>; + +export type RedisScript = RedisScriptConfig & SHA1; + +export type RedisScripts = Record; + +// TODO: move to Commander? +export interface CommanderConfig< + M extends RedisModules, + F extends RedisFunctions, + S extends RedisScripts, + RESP extends RespVersions +> { + modules?: M; + functions?: F; + scripts?: S; + /** + * Specifies the Redis Serialization Protocol version to use. + * RESP2 is the default (value 2), while RESP3 (value 3) provides + * additional data types and features introduced in Redis 6.0. + */ + RESP?: RESP; + /** + * When set to true, enables commands that have unstable RESP3 implementations. + * When using RESP3 protocol, commands marked as having unstable RESP3 support + * will throw an error unless this flag is explicitly set to true. + * This primarily affects modules like Redis Search where response formats + * in RESP3 mode may change in future versions. + */ + unstableResp3?: boolean; +} + +type Resp2Array = ( + T extends [] ? [] : + T extends [infer ITEM] ? [Resp2Reply] : + T extends [infer ITEM, ...infer REST] ? [ + Resp2Reply, + ...Resp2Array + ] : + T extends Array ? Array> : + never +); + +export type Resp2Reply = ( + RESP3REPLY extends RespType ? + // TODO: RESP3 only scalar types + RESP_TYPE extends RESP_TYPES['DOUBLE'] ? BlobStringReply : + RESP_TYPE extends RESP_TYPES['ARRAY'] | RESP_TYPES['SET'] ? RespType< + RESP_TYPE, + Resp2Array + > : + RESP_TYPE extends RESP_TYPES['MAP'] ? RespType< + RESP_TYPES['ARRAY'], + Resp2Array>> + > : + RESP3REPLY : + RESP3REPLY +); + +export type RespVersions = 2 | 3; + +export type CommandReply< + COMMAND extends Command, + RESP extends RespVersions +> = ( + // if transformReply is a function, use its return type + COMMAND['transformReply'] extends (...args: any) => infer T ? 
T : + // if transformReply[RESP] is a function, use its return type + COMMAND['transformReply'] extends Record infer T> ? T : + // otherwise use the generic reply type + ReplyUnion +); + +export type CommandSignature< + COMMAND extends Command, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping +> = (...args: Tail>) => Promise, TYPE_MAPPING>>; + +// export type CommandWithPoliciesSignature< +// COMMAND extends Command, +// RESP extends RespVersions, +// TYPE_MAPPING extends TypeMapping, +// POLICIES extends CommandPolicies +// > = (...args: Parameters) => Promise< +// ReplyWithPolicy< +// ReplyWithTypeMapping, TYPE_MAPPING>, +// MergePolicies +// > +// >; + +// export type MergePolicies< +// COMMAND extends Command, +// POLICIES extends CommandPolicies +// > = Omit & POLICIES; + +// type ReplyWithPolicy< +// REPLY, +// POLICIES extends CommandPolicies, +// > = ( +// POLICIES['request'] extends REQUEST_POLICIES['SPECIAL'] ? never : +// POLICIES['request'] extends null | undefined ? REPLY : +// unknown extends POLICIES['request'] ? REPLY : +// POLICIES['response'] extends RESPONSE_POLICIES['SPECIAL'] ? never : +// POLICIES['response'] extends RESPONSE_POLICIES['ALL_SUCCEEDED' | 'ONE_SUCCEEDED' | 'LOGICAL_AND'] ? 
REPLY : +// // otherwise, return array of replies +// Array +// ); diff --git a/packages/client/lib/RESP/verbatim-string.ts b/packages/client/lib/RESP/verbatim-string.ts new file mode 100644 index 00000000000..92ff4fe3fb1 --- /dev/null +++ b/packages/client/lib/RESP/verbatim-string.ts @@ -0,0 +1,8 @@ +export class VerbatimString extends String { + constructor( + public format: string, + value: string + ) { + super(value); + } +} diff --git a/packages/client/lib/authx/credentials-provider.ts b/packages/client/lib/authx/credentials-provider.ts new file mode 100644 index 00000000000..667795be9b3 --- /dev/null +++ b/packages/client/lib/authx/credentials-provider.ts @@ -0,0 +1,102 @@ +import { Disposable } from './disposable'; +/** + * Provides credentials asynchronously. + */ +export interface AsyncCredentialsProvider { + readonly type: 'async-credentials-provider'; + credentials: () => Promise +} + +/** + * Provides credentials asynchronously with support for continuous updates via a subscription model. + * This is useful for environments where credentials are frequently rotated or updated or can be revoked. + */ +export interface StreamingCredentialsProvider { + readonly type: 'streaming-credentials-provider'; + + /** + * Provides initial credentials and subscribes to subsequent updates. This is used internally by the node-redis client + * to handle credential rotation and re-authentication. + * + * Note: The node-redis client manages the subscription lifecycle automatically. Users only need to implement + * onReAuthenticationError if they want to be notified about authentication failures. 
+ * + * Error handling: + * - Errors received via onError indicate a fatal issue with the credentials stream + * - The stream is automatically closed(disposed) when onError occurs + * - onError typically mean the provider failed to fetch new credentials after retrying + * + * @example + * ```ts + * const provider = getStreamingProvider(); + * const [initialCredentials, disposable] = await provider.subscribe({ + * onNext: (newCredentials) => { + * // Handle credential update + * }, + * onError: (error) => { + * // Handle fatal stream error + * } + * }); + * + * @param listener - Callbacks to handle credential updates and errors + * @returns A Promise resolving to [initial credentials, cleanup function] + */ + subscribe: (listener: StreamingCredentialsListener) => Promise<[BasicAuth, Disposable]> + + /** + * Called when authentication fails or credentials cannot be renewed in time. + * Implement this to handle authentication errors in your application. + * + * @param error - Either a CredentialsError (invalid/expired credentials) or + * UnableToObtainNewCredentialsError (failed to fetch new credentials on time) + */ + onReAuthenticationError: (error: ReAuthenticationError) => void; + +} + +/** + * Type representing basic authentication credentials. + */ +export type BasicAuth = { username?: string, password?: string } + +/** + * Callback to handle credential updates and errors. + */ +export type StreamingCredentialsListener = { + onNext: (credentials: T) => void; + onError: (e: Error) => void; +} + + +/** + * Providers that can supply authentication credentials + */ +export type CredentialsProvider = AsyncCredentialsProvider | StreamingCredentialsProvider + +/** + * Errors that can occur during re-authentication. + */ +export type ReAuthenticationError = CredentialsError | UnableToObtainNewCredentialsError + +/** + * Thrown when re-authentication fails with provided credentials . + * e.g. when the credentials are invalid, expired or revoked. 
+ * + */ +export class CredentialsError extends Error { + constructor(message: string) { + super(`Re-authentication with latest credentials failed: ${message}`); + this.name = 'CredentialsError'; + } + +} + +/** + * Thrown when new credentials cannot be obtained before current ones expire + */ +export class UnableToObtainNewCredentialsError extends Error { + constructor(message: string) { + super(`Unable to obtain new credentials : ${message}`); + this.name = 'UnableToObtainNewCredentialsError'; + } +} \ No newline at end of file diff --git a/packages/client/lib/authx/disposable.ts b/packages/client/lib/authx/disposable.ts new file mode 100644 index 00000000000..ee4526a37bd --- /dev/null +++ b/packages/client/lib/authx/disposable.ts @@ -0,0 +1,6 @@ +/** + * Represents a resource that can be disposed. + */ +export interface Disposable { + dispose(): void; +} \ No newline at end of file diff --git a/packages/client/lib/authx/identity-provider.ts b/packages/client/lib/authx/identity-provider.ts new file mode 100644 index 00000000000..a2d25c8f9db --- /dev/null +++ b/packages/client/lib/authx/identity-provider.ts @@ -0,0 +1,22 @@ +/** + * An identity provider is responsible for providing a token that can be used to authenticate with a service. + */ + +/** + * The response from an identity provider when requesting a token. + * + * note: "native" refers to the type of the token that the actual identity provider library is using. + * + * @type T The type of the native idp token. + * @property token The token. + * @property ttlMs The time-to-live of the token in epoch milliseconds extracted from the native token in local time. + */ +export type TokenResponse = { token: T, ttlMs: number }; + +export interface IdentityProvider { + /** + * Request a token from the identity provider. + * @returns A promise that resolves to an object containing the token and the time-to-live in epoch milliseconds. 
+ */ + requestToken(): Promise>; +} \ No newline at end of file diff --git a/packages/client/lib/authx/index.ts b/packages/client/lib/authx/index.ts new file mode 100644 index 00000000000..ce611e1497f --- /dev/null +++ b/packages/client/lib/authx/index.ts @@ -0,0 +1,15 @@ +export { TokenManager, TokenManagerConfig, TokenStreamListener, RetryPolicy, IDPError } from './token-manager'; +export { + CredentialsProvider, + StreamingCredentialsProvider, + UnableToObtainNewCredentialsError, + CredentialsError, + StreamingCredentialsListener, + AsyncCredentialsProvider, + ReAuthenticationError, + BasicAuth +} from './credentials-provider'; +export { Token } from './token'; +export { IdentityProvider, TokenResponse } from './identity-provider'; + +export { Disposable } from './disposable' \ No newline at end of file diff --git a/packages/client/lib/authx/token-manager.spec.ts b/packages/client/lib/authx/token-manager.spec.ts new file mode 100644 index 00000000000..1cc2a207edc --- /dev/null +++ b/packages/client/lib/authx/token-manager.spec.ts @@ -0,0 +1,588 @@ +import { strict as assert } from 'node:assert'; +import { Token } from './token'; +import { IDPError, RetryPolicy, TokenManager, TokenManagerConfig, TokenStreamListener } from './token-manager'; +import { IdentityProvider, TokenResponse } from './identity-provider'; +import { setTimeout } from 'timers/promises'; + +describe('TokenManager', () => { + + /** + * Helper function to delay execution for a given number of milliseconds. + * @param ms + */ + const delay = (ms: number) => { + return setTimeout(ms); + } + + /** + * IdentityProvider that returns a fixed test token for testing and doesn't handle TTL. + */ + class TestIdentityProvider implements IdentityProvider { + requestToken(): Promise> { + return Promise.resolve({ token: 'test-token 1', ttlMs: 1000 }); + } + } + + /** + * Helper function to create a test token with a given TTL . 
+ * @param ttlMs Time-to-live in milliseconds + */ + const createToken = (ttlMs: number): Token => { + return new Token('test-token', ttlMs, 0); + }; + + /** + * Listener that records received tokens and errors for testing. + */ + class TestListener implements TokenStreamListener { + + public readonly receivedTokens: Token[] = []; + public readonly errors: IDPError[] = []; + + onNext(token: Token): void { + this.receivedTokens.push(token); + } + + onError(error: IDPError): void { + this.errors.push(error); + } + } + + /** + * IdentityProvider that returns a sequence of tokens with a fixed delay simulating network latency. + * Used for testing token refresh scenarios. + */ + class ControlledIdentityProvider implements IdentityProvider { + private tokenIndex = 0; + private readonly delayMs: number; + private readonly ttlMs: number; + + constructor( + private readonly tokens: string[], + delayMs: number = 0, + tokenTTlMs: number = 100 + ) { + this.delayMs = delayMs; + this.ttlMs = tokenTTlMs; + } + + async requestToken(): Promise> { + + if (this.tokenIndex >= this.tokens.length) { + throw new Error('No more test tokens available'); + } + + if (this.delayMs > 0) { + await setTimeout(this.delayMs); + } + + return { token: this.tokens[this.tokenIndex++], ttlMs: this.ttlMs }; + } + + } + + /** + * IdentityProvider that simulates various error scenarios with configurable behavior + */ + class ErrorSimulatingProvider implements IdentityProvider { + private requestCount = 0; + + constructor( + private readonly errorSequence: Array, + private readonly delayMs: number = 0, + private readonly ttlMs: number = 100 + ) {} + + async requestToken(): Promise> { + + if (this.delayMs > 0) { + await delay(this.delayMs); + } + + const result = this.errorSequence[this.requestCount]; + this.requestCount++; + + if (result instanceof Error) { + throw result; + } else if (typeof result === 'string') { + return { token: result, ttlMs: this.ttlMs }; + } else { + throw new Error('No more 
responses configured'); + } + } + + getRequestCount(): number { + return this.requestCount; + } + } + + describe('constructor validation', () => { + it('should throw error if ratio is greater than 1', () => { + const config: TokenManagerConfig = { + expirationRefreshRatio: 1.1 + }; + + assert.throws( + () => new TokenManager(new TestIdentityProvider(), config), + /expirationRefreshRatio must be less than or equal to 1/ + ); + }); + + it('should throw error if ratio is negative', () => { + const config: TokenManagerConfig = { + expirationRefreshRatio: -0.1 + }; + + assert.throws( + () => new TokenManager(new TestIdentityProvider(), config), + /expirationRefreshRatio must be greater or equal to 0/ + ); + }); + + it('should accept ratio of 1', () => { + const config: TokenManagerConfig = { + expirationRefreshRatio: 1 + }; + + assert.doesNotThrow( + () => new TokenManager(new TestIdentityProvider(), config) + ); + }); + + it('should accept ratio of 0', () => { + const config: TokenManagerConfig = { + expirationRefreshRatio: 0 + }; + + assert.doesNotThrow( + () => new TokenManager(new TestIdentityProvider(), config) + ); + }); + }); + + describe('calculateRefreshTime', () => { + it('should calculate correct refresh time with 0.8 ratio', () => { + const config: TokenManagerConfig = { + expirationRefreshRatio: 0.8 + }; + + const manager = new TokenManager(new TestIdentityProvider(), config); + const token = createToken(1000); + const refreshTime = manager.calculateRefreshTime(token, 0); + + // With 1000s TTL and 0.8 ratio, should refresh at 800s + assert.equal(refreshTime, 800); + }); + + it('should return 0 for ratio of 0', () => { + const config: TokenManagerConfig = { + expirationRefreshRatio: 0 + }; + + const manager = new TokenManager(new TestIdentityProvider(), config); + const token = createToken(1000); + const refreshTime = manager.calculateRefreshTime(token, 0); + + assert.equal(refreshTime, 0); + }); + + it('should refresh at expiration time with ratio of 1', () 
=> { + const config: TokenManagerConfig = { + expirationRefreshRatio: 1 + }; + + const manager = new TokenManager(new TestIdentityProvider(), config); + const token = createToken(1000); + const refreshTime = manager.calculateRefreshTime(token, 0); + + assert.equal(refreshTime, 1000); + }); + + it('should handle short TTL tokens', () => { + const config: TokenManagerConfig = { + expirationRefreshRatio: 0.8 + }; + + const manager = new TokenManager(new TestIdentityProvider(), config); + const token = createToken(5); + const refreshTime = manager.calculateRefreshTime(token, 0); + + assert.equal(refreshTime, 4); + }); + + it('should handle expired tokens', () => { + const config: TokenManagerConfig = { + expirationRefreshRatio: 0.8 + }; + + const manager = new TokenManager(new TestIdentityProvider(), config); + // Create token that expired 100s ago + const token = createToken(-100); + const refreshTime = manager.calculateRefreshTime(token, 0); + + // Should return refresh time of 0 for expired tokens + assert.equal(refreshTime, 0); + }); + describe('token refresh scenarios', () => { + + describe('token refresh', () => { + it('should handle token refresh', async () => { + const networkDelay = 20; + const tokenTtl = 100; + + const config: TokenManagerConfig = { + expirationRefreshRatio: 0.8 + }; + + const identityProvider = new ControlledIdentityProvider(['token1', 'token2', 'token3'], networkDelay, tokenTtl); + const manager = new TokenManager(identityProvider, config); + const listener = new TestListener(); + const disposable = manager.start(listener); + + assert.equal(manager.getCurrentToken(), null, 'Should not have token yet'); + // Wait for the first token request to complete ( it should be immediate, and we should wait only for the network delay) + await delay(networkDelay) + + assert.equal(listener.receivedTokens.length, 1, 'Should receive initial token'); + assert.equal(listener.receivedTokens[0].value, 'token1', 'Should have correct token value'); + 
assert.equal(listener.receivedTokens[0].expiresAtMs - listener.receivedTokens[0].receivedAtMs, + tokenTtl, 'Should have correct TTL'); + assert.equal(listener.errors.length, 0, 'Should not have any errors: ' + listener.errors); + assert.equal(manager.getCurrentToken().value, 'token1', 'Should have current token'); + + await delay(80); + + assert.equal(listener.receivedTokens.length, 1, 'Should not receive new token yet'); + assert.equal(listener.errors.length, 0, 'Should not have any errors'); + + await delay(networkDelay); + + assert.equal(listener.receivedTokens.length, 2, 'Should receive second token'); + assert.equal(listener.receivedTokens[1].value, 'token2', 'Should have correct token value'); + assert.equal(listener.receivedTokens[1].expiresAtMs - listener.receivedTokens[1].receivedAtMs, + tokenTtl, 'Should have correct TTL'); + assert.equal(listener.errors.length, 0, 'Should not have any errors'); + assert.equal(manager.getCurrentToken().value, 'token2', 'Should have current token'); + + await delay(80); + + assert.equal(listener.receivedTokens.length, 2, 'Should not receive new token yet'); + assert.equal(listener.errors.length, 0, 'Should not have any errors'); + + await delay(networkDelay); + + assert.equal(listener.receivedTokens.length, 3, 'Should receive third token'); + assert.equal(listener.receivedTokens[2].value, 'token3', 'Should have correct token value'); + assert.equal(listener.receivedTokens[2].expiresAtMs - listener.receivedTokens[2].receivedAtMs, + tokenTtl, 'Should have correct TTL'); + assert.equal(listener.errors.length, 0, 'Should not have any errors'); + assert.equal(manager.getCurrentToken().value, 'token3', 'Should have current token'); + + disposable?.dispose(); + }); + }); + }); + }); + + describe('TokenManager error handling', () => { + + describe('error scenarios', () => { + it('should not recover if retries are not enabled', async () => { + + const networkDelay = 20; + const tokenTtl = 100; + + const config: TokenManagerConfig = 
{ + expirationRefreshRatio: 0.8 + }; + + const identityProvider = new ErrorSimulatingProvider( + [ + 'token1', + new Error('Fatal error'), + 'token3' + ], + networkDelay, + tokenTtl + ); + + const manager = new TokenManager(identityProvider, config); + const listener = new TestListener(); + const disposable = manager.start(listener); + + await delay(networkDelay); + + assert.equal(listener.receivedTokens.length, 1, 'Should receive initial token'); + assert.equal(listener.receivedTokens[0].value, 'token1', 'Should have correct initial token'); + assert.equal(listener.receivedTokens[0].expiresAtMs - listener.receivedTokens[0].receivedAtMs, + tokenTtl, 'Should have correct TTL'); + assert.equal(listener.errors.length, 0, 'Should not have errors yet'); + + await delay(80); + + assert.equal(listener.receivedTokens.length, 1, 'Should not receive new token yet'); + assert.equal(listener.errors.length, 0, 'Should not have any errors'); + + await delay(networkDelay); + + assert.equal(listener.receivedTokens.length, 1, 'Should not receive new token after failure'); + assert.equal(listener.errors.length, 1, 'Should receive error'); + assert.equal(listener.errors[0].message, 'Fatal error', 'Should have correct error message'); + assert.equal(listener.errors[0].isRetryable, false, 'Should be a fatal error'); + + // verify that the token manager is stopped and no more requests are made after the error and expected refresh time + await delay(80); + + assert.equal(identityProvider.getRequestCount(), 2, 'Should not make more requests after error'); + assert.equal(listener.receivedTokens.length, 1, 'Should not receive new token after error'); + assert.equal(listener.errors.length, 1, 'Should not receive more errors after error'); + assert.equal(manager.isRunning(), false, 'Should stop token manager after error'); + + disposable?.dispose(); + }); + + it('should handle retries with exponential backoff', async () => { + const networkDelay = 20; + const tokenTtl = 100; + + const config: 
TokenManagerConfig = { + expirationRefreshRatio: 0.8, + retry: { + maxAttempts: 3, + initialDelayMs: 100, + maxDelayMs: 1000, + backoffMultiplier: 2, + isRetryable: (error: unknown) => error instanceof Error && error.message === 'Temporary failure' + } + }; + + const identityProvider = new ErrorSimulatingProvider( + [ + 'initial-token', + new Error('Temporary failure'), // First attempt fails + new Error('Temporary failure'), // First retry fails + 'recovery-token' // Second retry succeeds + ], + networkDelay, + tokenTtl + ); + + const manager = new TokenManager(identityProvider, config); + const listener = new TestListener(); + const disposable = manager.start(listener); + + // Wait for initial token + await delay(networkDelay); + assert.equal(listener.receivedTokens.length, 1, 'Should receive initial token'); + assert.equal(listener.receivedTokens[0].value, 'initial-token', 'Should have correct initial token'); + assert.equal(listener.receivedTokens[0].expiresAtMs - listener.receivedTokens[0].receivedAtMs, + tokenTtl, 'Should have correct TTL'); + assert.equal(listener.errors.length, 0, 'Should not have errors yet'); + + await delay(80); + + assert.equal(listener.receivedTokens.length, 1, 'Should not receive new token yet'); + assert.equal(listener.errors.length, 0, 'Should not have any errors'); + + await delay(networkDelay); + + // Should have first error but not stop due to retry config + assert.equal(listener.errors.length, 1, 'Should have first error'); + assert.ok(listener.errors[0].message.includes('attempt 1'), 'Error should indicate first attempt'); + assert.equal(listener.errors[0].isRetryable, true, 'Should not be a fatal error'); + assert.equal(manager.isRunning(), true, 'Should continue running during retries'); + + // Advance past first retry (delay: 100ms due to backoff) + await delay(100); + + assert.equal(listener.errors.length, 1, 'Should not have the second error yet'); + + await delay(networkDelay); + + assert.equal(listener.errors.length, 2, 
'Should have second error'); + assert.ok(listener.errors[1].message.includes('attempt 2'), 'Error should indicate second attempt'); + assert.equal(listener.errors[0].isRetryable, true, 'Should not be a fatal error'); + assert.equal(manager.isRunning(), true, 'Should continue running during retries'); + + // Advance past second retry (delay: 200ms due to backoff) + await delay(200); + + assert.equal(listener.errors.length, 2, 'Should not have another error'); + assert.equal(listener.receivedTokens.length, 1, 'Should not receive new token yet'); + + await delay(networkDelay); + + // Should have recovered + assert.equal(listener.receivedTokens.length, 2, 'Should receive recovery token'); + assert.equal(listener.receivedTokens[1].value, 'recovery-token', 'Should have correct recovery token'); + assert.equal(listener.receivedTokens[1].expiresAtMs - listener.receivedTokens[1].receivedAtMs, + tokenTtl, 'Should have correct TTL'); + assert.equal(manager.isRunning(), true, 'Should continue running after recovery'); + assert.equal(identityProvider.getRequestCount(), 4, 'Should have made exactly 4 requests'); + + disposable?.dispose(); + }); + + it('should stop after max retries exceeded', async () => { + const networkDelay = 20; + const tokenTtl = 100; + + const config: TokenManagerConfig = { + expirationRefreshRatio: 0.8, + retry: { + maxAttempts: 2, // Only allow 2 retries + initialDelayMs: 100, + maxDelayMs: 1000, + backoffMultiplier: 2, + jitterPercentage: 0, + isRetryable: (error: unknown) => error instanceof Error && error.message === 'Temporary failure' + } + }; + + // All attempts must fail + const identityProvider = new ErrorSimulatingProvider( + [ + 'initial-token', + new Error('Temporary failure'), + new Error('Temporary failure'), + new Error('Temporary failure') + ], + networkDelay, + tokenTtl + ); + + const manager = new TokenManager(identityProvider, config); + const listener = new TestListener(); + const disposable = manager.start(listener); + + // Wait for 
initial token + await delay(networkDelay); + assert.equal(listener.receivedTokens.length, 1, 'Should receive initial token'); + + await delay(80); + + assert.equal(listener.receivedTokens.length, 1, 'Should not receive new token yet'); + assert.equal(listener.errors.length, 0, 'Should not have any errors'); + + //wait for the "network call" to complete + await delay(networkDelay); + + // First error + assert.equal(listener.errors.length, 1, 'Should have first error'); + assert.equal(manager.isRunning(), true, 'Should continue running after first error'); + assert.equal(listener.errors[0].isRetryable, true, 'Should not be a fatal error'); + + // Advance past first retry + await delay(100); + + assert.equal(listener.errors.length, 1, 'Should not have second error yet'); + + //wait for the "network call" to complete + await delay(networkDelay); + + // Second error + assert.equal(listener.errors.length, 2, 'Should have second error'); + assert.equal(manager.isRunning(), true, 'Should continue running after second error'); + assert.equal(listener.errors[1].isRetryable, true, 'Should not be a fatal error'); + + // Advance past second retry + await delay(200); + + assert.equal(listener.errors.length, 2, 'Should not have third error yet'); + + //wait for the "network call" to complete + await delay(networkDelay); + + // Should stop after max retries + assert.equal(listener.errors.length, 3, 'Should have final error'); + assert.equal(listener.errors[2].isRetryable, false, 'Should be a fatal error'); + assert.equal(manager.isRunning(), false, 'Should stop after max retries exceeded'); + assert.equal(identityProvider.getRequestCount(), 4, 'Should have made exactly 4 requests'); + + disposable?.dispose(); + + }); + }); + }); + + describe('TokenManager retry delay calculations', () => { + const createManager = (retryConfig: Partial) => { + const config: TokenManagerConfig = { + expirationRefreshRatio: 0.8, + retry: { + maxAttempts: 3, + initialDelayMs: 100, + maxDelayMs: 1000, 
+ backoffMultiplier: 2, + ...retryConfig + } + }; + return new TokenManager(new TestIdentityProvider(), config); + }; + + describe('calculateRetryDelay', () => { + + it('should apply exponential backoff', () => { + const manager = createManager({ + initialDelayMs: 100, + backoffMultiplier: 2, + jitterPercentage: 0 + }); + + // Test multiple retry attempts + const expectedDelays = [ + [1, 100], // First attempt: initialDelay * (2^0) = 100 + [2, 200], // Second attempt: initialDelay * (2^1) = 200 + [3, 400], // Third attempt: initialDelay * (2^2) = 400 + [4, 800], // Fourth attempt: initialDelay * (2^3) = 800 + [5, 1000] // Fifth attempt: would be 1600, but capped at maxDelay (1000) + ]; + + for (const [attempt, expectedDelay] of expectedDelays) { + manager['retryAttempt'] = attempt; + assert.equal( + manager.calculateRetryDelay(), + expectedDelay, + `Incorrect delay for attempt ${attempt}` + ); + } + }); + + it('should respect maxDelayMs', () => { + const manager = createManager({ + initialDelayMs: 100, + maxDelayMs: 300, + backoffMultiplier: 2, + jitterPercentage: 0 + }); + + // Test that delays are capped at maxDelayMs + const expectedDelays = [ + [1, 100], // First attempt: 100 + [2, 200], // Second attempt: 200 + [3, 300], // Third attempt: would be 400, capped at 300 + [4, 300], // Fourth attempt: would be 800, capped at 300 + [5, 300] // Fifth attempt: would be 1600, capped at 300 + ]; + + for (const [attempt, expectedDelay] of expectedDelays) { + manager['retryAttempt'] = attempt; + assert.equal( + manager.calculateRetryDelay(), + expectedDelay, + `Incorrect delay for attempt ${attempt}` + ); + } + }); + + it('should return 0 when no retry config is present', () => { + const manager = new TokenManager(new TestIdentityProvider(), { + expirationRefreshRatio: 0.8 + }); + manager['retryAttempt'] = 1; + assert.equal(manager.calculateRetryDelay(), 0); + }); + }); + }); +}); + diff --git a/packages/client/lib/authx/token-manager.ts 
b/packages/client/lib/authx/token-manager.ts new file mode 100644 index 00000000000..6532d88317b --- /dev/null +++ b/packages/client/lib/authx/token-manager.ts @@ -0,0 +1,318 @@ +import { IdentityProvider, TokenResponse } from './identity-provider'; +import { Token } from './token'; +import {Disposable} from './disposable'; + +/** + * The configuration for retrying token refreshes. + */ +export interface RetryPolicy { + /** + * The maximum number of attempts to retry token refreshes. + */ + maxAttempts: number; + + /** + * The initial delay in milliseconds before the first retry. + */ + initialDelayMs: number; + + /** + * The maximum delay in milliseconds between retries. + * The calculated delay will be capped at this value. + */ + maxDelayMs: number; + + /** + * The multiplier for exponential backoff between retries. + * @example + * A value of 2 will double the delay each time: + * - 1st retry: initialDelayMs + * - 2nd retry: initialDelayMs * 2 + * - 3rd retry: initialDelayMs * 4 + */ + backoffMultiplier: number; + + /** + * The percentage of jitter to apply to the delay. + * @example + * A value of 0.1 will add or subtract up to 10% of the delay. + */ + jitterPercentage?: number; + + /** + * Function to classify errors from the identity provider as retryable or non-retryable. + * Used to determine if a token refresh failure should be retried based on the type of error. + * + * The default behavior is to retry all types of errors if no function is provided. 
+ * + * Common use cases: + * - Network errors that may be transient (should retry) + * - Invalid credentials (should not retry) + * - Rate limiting responses (should retry) + * + * @param error - The error from the identity provider + * @param attempt - Current retry attempt (0-based) + * @returns `true` if the error is considered transient and the operation should be retried + * + * @example + * ```typescript + * const retryPolicy: RetryPolicy = { + * maxAttempts: 3, + * initialDelayMs: 1000, + * maxDelayMs: 5000, + * backoffMultiplier: 2, + * isRetryable: (error) => { + * // Retry on network errors or rate limiting + * return error instanceof NetworkError || + * error instanceof RateLimitError; + * } + * }; + * ``` + */ + isRetryable?: (error: unknown, attempt: number) => boolean; +} + +/** + * The configuration for the TokenManager. + */ +export interface TokenManagerConfig { + + /** + * Represents the ratio of a token's lifetime at which a refresh should be triggered. + * For example, a value of 0.75 means the token should be refreshed when 75% of its lifetime has elapsed (or when + * 25% of its lifetime remains). + */ + expirationRefreshRatio: number; + + // The retry policy for token refreshes. If not provided, no retries will be attempted. + retry?: RetryPolicy; +} + +/** + * IDPError indicates a failure from the identity provider. + * + * The `isRetryable` flag is determined by the RetryPolicy's error classification function - if an error is + * classified as retryable, it will be marked as transient and the token manager will attempt to recover. + */ +export class IDPError extends Error { + constructor(public readonly message: string, public readonly isRetryable: boolean) { + super(message); + this.name = 'IDPError'; + } +} + +/** + * TokenStreamListener is an interface for objects that listen to token changes. + */ +export type TokenStreamListener = { + /** + * Called each time a new token is received.
+ * @param token + */ + onNext: (token: Token) => void; + + /** + * Called when an error occurs while calling the underlying IdentityProvider. The error can be + * transient and the token manager will attempt to obtain a token again if retry policy is configured. + * + * Only fatal errors will terminate the stream and stop the token manager. + * + * @param error + */ + onError: (error: IDPError) => void; + +} + +/** + * TokenManager is responsible for obtaining/refreshing tokens and notifying listeners about token changes. + * It uses an IdentityProvider to request tokens. The token refresh is scheduled based on the token's TTL and + * the expirationRefreshRatio configuration. + * + * The TokenManager should be disposed when it is no longer needed by calling the dispose method on the Disposable + * returned by start. + */ +export class TokenManager { + private currentToken: Token | null = null; + private refreshTimeout: NodeJS.Timeout | null = null; + private listener: TokenStreamListener | null = null; + private retryAttempt: number = 0; + + constructor( + private readonly identityProvider: IdentityProvider, + private readonly config: TokenManagerConfig + ) { + if (this.config.expirationRefreshRatio > 1) { + throw new Error('expirationRefreshRatio must be less than or equal to 1'); + } + if (this.config.expirationRefreshRatio < 0) { + throw new Error('expirationRefreshRatio must be greater or equal to 0'); + } + } + + /** + * Starts the token manager and returns a Disposable that can be used to stop the token manager. + * + * @param listener The listener that will receive token updates. + * @param initialDelayMs The initial delay in milliseconds before the first token refresh. 
+ */ + public start(listener: TokenStreamListener, initialDelayMs: number = 0): Disposable { + if (this.listener) { + this.stop(); + } + + this.listener = listener; + this.retryAttempt = 0; + + this.scheduleNextRefresh(initialDelayMs); + + return { + dispose: () => this.stop() + }; + } + + public calculateRetryDelay(): number { + if (!this.config.retry) return 0; + + const { initialDelayMs, maxDelayMs, backoffMultiplier, jitterPercentage } = this.config.retry; + + let delay = initialDelayMs * Math.pow(backoffMultiplier, this.retryAttempt - 1); + + delay = Math.min(delay, maxDelayMs); + + if (jitterPercentage) { + const jitterRange = delay * (jitterPercentage / 100); + const jitterAmount = Math.random() * jitterRange - (jitterRange / 2); + delay += jitterAmount; + } + + let result = Math.max(0, Math.floor(delay)); + + return result; + } + + private shouldRetry(error: unknown): boolean { + if (!this.config.retry) return false; + + const { maxAttempts, isRetryable } = this.config.retry; + + if (this.retryAttempt >= maxAttempts) { + return false; + } + + if (isRetryable) { + return isRetryable(error, this.retryAttempt); + } + + return false; + } + + public isRunning(): boolean { + return this.listener !== null; + } + + private async refresh(): Promise { + if (!this.listener) { + throw new Error('TokenManager is not running, but refresh was called'); + } + + try { + await this.identityProvider.requestToken().then(this.handleNewToken); + this.retryAttempt = 0; + } catch (error) { + + if (this.shouldRetry(error)) { + this.retryAttempt++; + const retryDelay = this.calculateRetryDelay(); + this.notifyError(`Token refresh failed (attempt ${this.retryAttempt}), retrying in ${retryDelay}ms: ${error}`, true) + this.scheduleNextRefresh(retryDelay); + } else { + this.notifyError(error, false); + this.stop(); + } + } + } + + private handleNewToken = async ({ token: nativeToken, ttlMs }: TokenResponse): Promise => { + if (!this.listener) { + throw new Error('TokenManager is not 
running, but a new token was received'); + } + const token = this.wrapAndSetCurrentToken(nativeToken, ttlMs); + this.listener.onNext(token); + + this.scheduleNextRefresh(this.calculateRefreshTime(token)); + } + + /** + * Creates a Token object from a native token and sets it as the current token. + * + * @param nativeToken - The raw token received from the identity provider + * @param ttlMs - Time-to-live in milliseconds for the token + * + * @returns A new Token instance containing the wrapped native token and expiration details + * + */ + public wrapAndSetCurrentToken(nativeToken: T, ttlMs: number): Token { + const now = Date.now(); + const token = new Token( + nativeToken, + now + ttlMs, + now + ); + this.currentToken = token; + return token; + } + + private scheduleNextRefresh(delayMs: number): void { + if (this.refreshTimeout) { + clearTimeout(this.refreshTimeout); + this.refreshTimeout = null; + } + if (delayMs === 0) { + this.refresh(); + } else { + this.refreshTimeout = setTimeout(() => this.refresh(), delayMs); + } + + } + + /** + * Calculates the time in milliseconds when the token should be refreshed + * based on the token's TTL and the expirationRefreshRatio configuration. + * + * @param token The token to calculate the refresh time for. + * @param now The current time in milliseconds. Defaults to Date.now(). + */ + public calculateRefreshTime(token: Token, now: number = Date.now()): number { + const ttlMs = token.getTtlMs(now); + return Math.floor(ttlMs * this.config.expirationRefreshRatio); + } + + private stop(): void { + + if (this.refreshTimeout) { + clearTimeout(this.refreshTimeout); + this.refreshTimeout = null; + } + + this.listener = null; + this.currentToken = null; + this.retryAttempt = 0; + } + + /** + * Returns the current token or null if no token is available. 
+ */ + public getCurrentToken(): Token | null { + return this.currentToken; + } + + private notifyError(error: unknown, isRetryable: boolean): void { + const errorMessage = error instanceof Error ? error.message : String(error); + + if (!this.listener) { + throw new Error(`TokenManager is not running but received an error: ${errorMessage}`); + } + + this.listener.onError(new IDPError(errorMessage, isRetryable)); + } +} \ No newline at end of file diff --git a/packages/client/lib/authx/token.ts b/packages/client/lib/authx/token.ts new file mode 100644 index 00000000000..3d6e6867d84 --- /dev/null +++ b/packages/client/lib/authx/token.ts @@ -0,0 +1,23 @@ +/** + * A token that can be used to authenticate with a service. + */ +export class Token { + constructor( + public readonly value: T, + //represents the token deadline - the time in milliseconds since the Unix epoch at which the token expires + public readonly expiresAtMs: number, + //represents the time in milliseconds since the Unix epoch at which the token was received + public readonly receivedAtMs: number + ) {} + + /** + * Returns the time-to-live of the token in milliseconds. + * @param now The current time in milliseconds since the Unix epoch. 
+ */ + getTtlMs(now: number): number { + if (this.expiresAtMs < now) { + return 0; + } + return this.expiresAtMs - now; + } +} \ No newline at end of file diff --git a/packages/client/lib/client/cache.spec.ts b/packages/client/lib/client/cache.spec.ts new file mode 100644 index 00000000000..55f2672c26c --- /dev/null +++ b/packages/client/lib/client/cache.spec.ts @@ -0,0 +1,700 @@ +import assert from "assert"; +import testUtils, { GLOBAL } from "../test-utils" +import { BasicClientSideCache, BasicPooledClientSideCache, CacheStats } from "./cache" +import { REDIS_FLUSH_MODES } from "../commands/FLUSHALL"; +import { once } from 'events'; + +describe("Client Side Cache", () => { + describe('Basic Cache', () => { + const csc = new BasicClientSideCache({ maxEntries: 10 }); + + testUtils.testWithClient('Basic Cache Miss', async client => { + csc.clear(); + + await client.set("x", 1); + await client.get("x"); + + assert.equal(csc.stats().missCount, 1, "Cache Misses"); + assert.equal(csc.stats().hitCount, 0, "Cache Hits"); + }, { + ...GLOBAL.SERVERS.OPEN, + clientOptions: { + RESP: 3, + clientSideCache: csc + } + }); + + testUtils.testWithClient('Basic Cache Hit', async client => { + csc.clear(); + + await client.set("x", 1); + assert.equal(await client.get("x"), '1'); + assert.equal(await client.get("x"), '1'); + + assert.equal(csc.stats().missCount, 1, "Cache Misses"); + assert.equal(csc.stats().hitCount, 1, "Cache Hits"); + }, { + ...GLOBAL.SERVERS.OPEN, + clientOptions: { + RESP: 3, + clientSideCache: csc + } + }); + + testUtils.testWithClient('Max Cache Entries', async client => { + csc.clear(); + + await client.set('1', 1); + assert.equal(await client.get('1'), '1'); + assert.equal(await client.get('2'), null); + assert.equal(await client.get('3'), null); + assert.equal(await client.get('4'), null); + assert.equal(await client.get('5'), null); + assert.equal(await client.get('6'), null); + assert.equal(await client.get('7'), null); + assert.equal(await 
client.get('8'), null); + assert.equal(await client.get('9'), null); + assert.equal(await client.get('10'), null); + assert.equal(await client.get('11'), null); + assert.equal(await client.get('1'), '1'); + + assert.equal(csc.stats().missCount, 12, "Cache Misses"); + assert.equal(csc.stats().hitCount, 0, "Cache Hits"); + }, { + ...GLOBAL.SERVERS.OPEN, + clientOptions: { + RESP: 3, + clientSideCache: csc + } + }); + + testUtils.testWithClient('LRU works correctly', async client => { + csc.clear(); + + await client.set('1', 1); + assert.equal(await client.get('1'), '1'); + assert.equal(await client.get('2'), null); + assert.equal(await client.get('3'), null); + assert.equal(await client.get('4'), null); + assert.equal(await client.get('5'), null); + assert.equal(await client.get('1'), '1'); + assert.equal(await client.get('6'), null); + assert.equal(await client.get('7'), null); + assert.equal(await client.get('8'), null); + assert.equal(await client.get('9'), null); + assert.equal(await client.get('10'), null); + assert.equal(await client.get('11'), null); + assert.equal(await client.get('1'), '1'); + + assert.equal(csc.stats().missCount, 11, "Cache Misses"); + assert.equal(csc.stats().hitCount, 2, "Cache Hits"); + }, { + ...GLOBAL.SERVERS.OPEN, + clientOptions: { + RESP: 3, + clientSideCache: csc + } + }); + + testUtils.testWithClient('Basic Cache Clear', async client => { + csc.clear(); + + await client.set("x", 1); + await client.get("x"); + csc.clear(); + await client.get("x"); + + assert.equal(csc.stats().missCount, 1, "Cache Misses"); + assert.equal(csc.stats().hitCount, 0, "Cache Hits"); + }, { + ...GLOBAL.SERVERS.OPEN, + clientOptions: { + RESP: 3, + clientSideCache: csc + } + }); + + testUtils.testWithClient('Null Invalidate acts as clear', async client => { + csc.clear(); + + await client.set("x", 1); + await client.get("x"); + csc.invalidate(null); + await client.get("x"); + + assert.equal(2, csc.stats().missCount, "Cache Misses"); + assert.equal(0, 
csc.stats().hitCount, "Cache Hits");
  }, {
    ...GLOBAL.SERVERS.OPEN,
    clientOptions: {
      RESP: 3,
      clientSideCache: csc
    }
  });

  // A FLUSHDB sends a "null" invalidation, which must wipe every cached entry.
  testUtils.testWithClient('flushdb causes an invalidate null', async client => {
    csc.clear();

    await client.set("x", 1);
    assert.equal(await client.get("x"), '1');
    await client.flushDb(REDIS_FLUSH_MODES.SYNC);
    assert.equal(await client.get("x"), null);

    assert.equal(csc.stats().missCount, 2, "Cache Misses");
    assert.equal(csc.stats().hitCount, 0, "Cache Hits");
  }, {
    ...GLOBAL.SERVERS.OPEN,
    clientOptions: {
      RESP: 3,
      clientSideCache: csc
    }
  });

  // Every SET invalidates the cached GET, so all three GETs are misses.
  testUtils.testWithClient('Basic Cache Invalidate', async client => {
    csc.clear();

    await client.set("x", 1);
    assert.equal(await client.get("x"), '1', 'first get');
    await client.set("x", 2);
    assert.equal(await client.get("x"), '2', 'second get');
    await client.set("x", 3);
    assert.equal(await client.get("x"), '3', 'third get');

    assert.equal(csc.stats().missCount, 3, "Cache Misses");
    assert.equal(csc.stats().hitCount, 0, "Cache Hits");
  }, {
    ...GLOBAL.SERVERS.OPEN,
    clientOptions: {
      RESP: 3,
      clientSideCache: csc
    }
  });

  // Mutating a returned reply must not corrupt the cached copy (replies are cloned).
  testUtils.testWithClient("Cached Replies Don't Mutate", async client => {
    csc.clear();

    await client.set("x", 1);
    await client.set('y', 2);
    const ret1 = await client.mGet(['x', 'y']);
    assert.deepEqual(ret1, ['1', '2'], 'first mGet');
    ret1[0] = '4';
    const ret2 = await client.mGet(['x', 'y']);
    assert.deepEqual(ret2, ['1', '2'], 'second mGet');
    ret2[0] = '8';
    const ret3 = await client.mGet(['x', 'y']);
    assert.deepEqual(ret3, ['1', '2'], 'third mGet');

    assert.equal(csc.stats().missCount, 1, "Cache Misses");
    assert.equal(csc.stats().hitCount, 2, "Cache Hits");
  }, {
    ...GLOBAL.SERVERS.OPEN,
    clientOptions: {
      RESP: 3,
      clientSideCache: csc
    }
  });

  // Reconnecting drops tracking state, so the cache is cleared on disconnect.
  testUtils.testWithClient("Cached cleared on disconnect", async client => {
    csc.clear();

    await client.set("x", 1);
    await client.set('y', 2);
    const ret1 = await client.mGet(['x', 'y']);
    assert.deepEqual(ret1, ['1', '2'], 'first mGet');

    assert.equal(csc.stats().missCount, 1, "first Cache Misses");
    assert.equal(csc.stats().hitCount, 0, "first Cache Hits");

    await client.close();

    await client.connect();

    const ret2 = await client.mGet(['x', 'y']);
    assert.deepEqual(ret2, ['1', '2'], 'second mGet');

    // Stats were reset by the disconnect, hence still exactly one miss.
    assert.equal(csc.stats().missCount, 1, "second Cache Misses");
    assert.equal(csc.stats().hitCount, 0, "second Cache Hits");
  }, {
    ...GLOBAL.SERVERS.OPEN,
    clientOptions: {
      RESP: 3,
      clientSideCache: csc
    }
  });
});

describe("Pooled Cache", () => {
  const csc = new BasicPooledClientSideCache();

  // Two duplicated clients share one pooled cache; closing one keeps entries alive.
  testUtils.testWithClient('Virtual Pool Disconnect', async client1 => {
    const client2 = client1.duplicate();
    await client2.connect(); // FIX: missing semicolon in original

    assert.equal(await client2.get("x"), null);
    assert.equal(await client1.get("x"), null);

    assert.equal(1, csc.stats().missCount, "Cache Misses");
    assert.equal(1, csc.stats().hitCount, "Cache Hits");

    await client2.close();

    assert.equal(await client1.get("x"), null);
    assert.equal(await client1.get("x"), null);

    assert.equal(2, csc.stats().missCount, "Cache Misses");
    assert.equal(2, csc.stats().hitCount, "Cache Hits");
  }, {
    ...GLOBAL.SERVERS.OPEN,
    clientOptions: {
      RESP: 3,
      clientSideCache: csc
    }
  });

  testUtils.testWithClientPool('Basic Cache Miss and Clear', async client => {
    csc.clear();

    await client.set("x", 1);
    assert.equal(await client.get("x"), '1');

    assert.equal(1, csc.stats().missCount, "Cache Misses");
    assert.equal(0, csc.stats().hitCount, "Cache Hits");
  }, {
    ...GLOBAL.SERVERS.OPEN,
    clientOptions: {
      RESP: 3,
    },
    poolOptions: {
      minimum: 5,
      maximum: 5,
      acquireTimeout: 0,
      cleanupDelay: 1,
      clientSideCache: csc
    }
  })

  testUtils.testWithClientPool('Basic Cache Hit', async client => {
    csc.clear();

    await client.set("x", 1);
    assert.equal(await client.get("x"), '1');
    assert.equal(await client.get("x"), '1');
    assert.equal(await client.get("x"), '1');

    assert.equal(csc.stats().missCount, 1, "Cache Misses");
    assert.equal(csc.stats().hitCount, 2, "Cache Hits");
  }, {
    ...GLOBAL.SERVERS.OPEN,
    clientOptions: {
      RESP: 3,
    },
    poolOptions: {
      minimum: 5,
      maximum: 5,
      acquireTimeout: 0,
      cleanupDelay: 1,
      clientSideCache: csc
    }
  })

  // Waits for the 'invalidate' event between writes so the test is deterministic.
  testUtils.testWithClientPool('Basic Cache Manually Invalidate', async client => {
    csc.clear();

    await client.set("x", 1);

    assert.equal(await client.get("x"), '1', 'first get');

    // FIX: generic parameters were stripped from the original source
    let p: Promise<Array<any>> = once(csc, 'invalidate');
    await client.set("x", 2);
    let [i] = await p;

    assert.equal(await client.get("x"), '2', 'second get');

    p = once(csc, 'invalidate');
    await client.set("x", 3);
    [i] = await p;

    assert.equal(await client.get("x"), '3');

    assert.equal(csc.stats().missCount, 3, "Cache Misses");
    assert.equal(csc.stats().hitCount, 0, "Cache Hits");
  }, {
    ...GLOBAL.SERVERS.OPEN,
    clientOptions: {
      RESP: 3,
    },
    poolOptions: {
      minimum: 5,
      maximum: 5,
      acquireTimeout: 0,
      cleanupDelay: 1,
      clientSideCache: csc
    }
  })

  testUtils.testWithClientPool('Basic Cache Invalidate via message', async client => {
    csc.clear();

    await client.set('x', 1);
    await client.set('y', 2);

    assert.deepEqual(await client.mGet(['x', 'y']), ['1', '2'], 'first mGet');

    assert.equal(csc.stats().missCount, 1, "Cache Misses");
    assert.equal(csc.stats().hitCount, 0, "Cache Hits");

    // FIX: generic parameters were stripped from the original source
    let p: Promise<Array<any>> = once(csc, 'invalidate');
    await client.set("x", 3);
    let [i] = await p;

    assert.equal(i, 'x');

    assert.deepEqual(await client.mGet(['x', 'y']), ['3', '2'], 'second mGet');

    assert.equal(csc.stats().missCount, 2, "Cache Misses");
    assert.equal(csc.stats().hitCount, 0, "Cache Hits");

    p = once(csc, 'invalidate');
    await client.set("y", 4);
    [i] = await p;

    assert.equal(i, 'y');

assert.deepEqual(await client.mGet(['x', 'y']), ['3', '4'], 'second mGet'); + + assert.equal(csc.stats().missCount, 3, "Cache Misses"); + assert.equal(csc.stats().hitCount, 0, "Cache Hits"); + }, { + ...GLOBAL.SERVERS.OPEN, + clientOptions: { + RESP: 3, + }, + poolOptions: { + minimum: 5, + maximum: 5, + acquireTimeout: 0, + cleanupDelay: 1, + clientSideCache: csc + } + }) + }); + + describe('Cluster Caching', () => { + const csc = new BasicPooledClientSideCache(); + + testUtils.testWithCluster('Basic Cache Miss and Clear', async client => { + csc.clear(); + + await client.set("x", 1); + await client.get("x"); + await client.set("y", 1); + await client.get("y"); + + assert.equal(2, csc.stats().missCount, "Cache Misses"); + assert.equal(0, csc.stats().hitCount, "Cache Hits"); + }, { + ...GLOBAL.CLUSTERS.OPEN, + clusterConfiguration: { + RESP: 3, + clientSideCache: csc + } + }) + + testUtils.testWithCluster('Basic Cache Hit', async client => { + csc.clear(); + + await client.set("x", 1); + assert.equal(await client.get("x"), '1'); + assert.equal(await client.get("x"), '1'); + assert.equal(await client.get("x"), '1'); + await client.set("y", 1); + assert.equal(await client.get("y"), '1'); + assert.equal(await client.get("y"), '1'); + assert.equal(await client.get("y"), '1'); + + assert.equal(2, csc.stats().missCount, "Cache Misses"); + assert.equal(4, csc.stats().hitCount, "Cache Hits"); + }, { + ...GLOBAL.CLUSTERS.OPEN, + clusterConfiguration: { + RESP: 3, + clientSideCache: csc + } + }) + + testUtils.testWithCluster('Basic Cache Invalidate', async client => { + csc.clear(); + + await client.set("x", 1); + assert.equal(await client.get("x"), '1'); + await client.set("x", 2); + assert.equal(await client.get("x"), '2'); + await client.set("x", 3); + assert.equal(await client.get("x"), '3'); + + await client.set("y", 1); + assert.equal(await client.get("y"), '1'); + await client.set("y", 2); + assert.equal(await client.get("y"), '2'); + await client.set("y", 3); + 
assert.equal(await client.get("y"), '3'); + + assert.equal(6, csc.stats().missCount, "Cache Misses"); + assert.equal(0, csc.stats().hitCount, "Cache Hits"); + }, { + ...GLOBAL.CLUSTERS.OPEN, + clusterConfiguration: { + RESP: 3, + clientSideCache: csc + } + }) + }); + describe("CacheStats", () => { + describe("CacheStats.of()", () => { + it("should correctly initialize stats and calculate derived values", () => { + const stats = CacheStats.of(10, 5, 8, 2, 100, 3); + assert.strictEqual(stats.hitCount, 10, "hitCount should be 10"); + assert.strictEqual(stats.missCount, 5, "missCount should be 5"); + assert.strictEqual(stats.loadSuccessCount, 8, "loadSuccessCount should be 8"); + assert.strictEqual(stats.loadFailureCount, 2, "loadFailureCount should be 2"); + assert.strictEqual(stats.totalLoadTime, 100, "totalLoadTime should be 100"); + assert.strictEqual(stats.evictionCount, 3, "evictionCount should be 3"); + + assert.strictEqual(stats.requestCount(), 15, "requestCount should be 15 (10 hits + 5 misses)"); + assert.strictEqual(stats.hitRate(), 10 / 15, "hitRate should be 10/15"); + assert.strictEqual(stats.missRate(), 5 / 15, "missRate should be 5/15"); + assert.strictEqual(stats.loadCount(), 10, "loadCount should be 10 (8 success + 2 failure)"); + assert.strictEqual(stats.loadFailureRate(), 2 / 10, "loadFailureRate should be 2/10"); + assert.strictEqual(stats.averageLoadPenalty(), 100 / 10, "averageLoadPenalty should be 10 (100 time / 10 loads)"); + }); + + it("should handle zero values and division by zero for derived values", () => { + const stats = CacheStats.of(0, 0, 0, 0, 0, 0); + assert.strictEqual(stats.hitCount, 0, "hitCount"); + assert.strictEqual(stats.missCount, 0, "missCount"); + assert.strictEqual(stats.loadSuccessCount, 0, "loadSuccessCount"); + assert.strictEqual(stats.loadFailureCount, 0, "loadFailureCount"); + assert.strictEqual(stats.totalLoadTime, 0, "totalLoadTime"); + assert.strictEqual(stats.evictionCount, 0, "evictionCount"); + + 
assert.strictEqual(stats.requestCount(), 0, "requestCount should be 0"); + assert.strictEqual(stats.hitRate(), 1, "hitRate should be 1 for 0 requests"); + assert.strictEqual(stats.missRate(), 0, "missRate should be 0 for 0 requests"); + assert.strictEqual(stats.loadCount(), 0, "loadCount should be 0"); + assert.strictEqual(stats.loadFailureRate(), 0, "loadFailureRate should be 0 for 0 loads"); + assert.strictEqual(stats.averageLoadPenalty(), 0, "averageLoadPenalty should be 0 for 0 loads"); + }); + }); + + describe("CacheStats.empty()", () => { + it("should return stats with all zero counts and 0 for rates/penalties", () => { + const stats = CacheStats.empty(); + assert.strictEqual(stats.hitCount, 0, "empty.hitCount"); + assert.strictEqual(stats.missCount, 0, "empty.missCount"); + assert.strictEqual(stats.loadSuccessCount, 0, "empty.loadSuccessCount"); + assert.strictEqual(stats.loadFailureCount, 0, "empty.loadFailureCount"); + assert.strictEqual(stats.totalLoadTime, 0, "empty.totalLoadTime"); + assert.strictEqual(stats.evictionCount, 0, "empty.evictionCount"); + + assert.strictEqual(stats.requestCount(), 0, "empty.requestCount"); + assert.strictEqual(stats.hitRate(), 1, "empty.hitRate should be 1"); + assert.strictEqual(stats.missRate(), 0, "empty.missRate should be 0"); + assert.strictEqual(stats.loadCount(), 0, "empty.loadCount"); + assert.strictEqual(stats.loadFailureRate(), 0, "empty.loadFailureRate should be 0"); + assert.strictEqual(stats.averageLoadPenalty(), 0, "empty.averageLoadPenalty should be 0"); + }); + }); + + describe("instance methods", () => { + const stats1 = CacheStats.of(10, 5, 8, 2, 100, 3); + const stats2 = CacheStats.of(20, 10, 12, 3, 200, 5); + + describe("plus()", () => { + it("should correctly add two CacheStats instances", () => { + const sum = stats1.plus(stats2); + assert.strictEqual(sum.hitCount, 30); + assert.strictEqual(sum.missCount, 15); + assert.strictEqual(sum.loadSuccessCount, 20); + assert.strictEqual(sum.loadFailureCount, 
5); + assert.strictEqual(sum.totalLoadTime, 300); + assert.strictEqual(sum.evictionCount, 8); + }); + + it("should correctly sum large numbers", () => { + const statsC = CacheStats.of(Number.MAX_VALUE, 1, 1, 1, 1, 1); + const statsD = CacheStats.of(Number.MAX_VALUE, 1, 1, 1, 1, 1); + const sum = statsC.plus(statsD); + assert.strictEqual(sum.hitCount, Infinity, "Summing MAX_VALUE should result in Infinity"); + }); + }); + + describe("minus()", () => { + it("should correctly subtract one CacheStats instance from another, flooring at 0", () => { + const diff = stats2.minus(stats1); + assert.strictEqual(diff.hitCount, 10); + assert.strictEqual(diff.missCount, 5); + assert.strictEqual(diff.loadSuccessCount, 4); + assert.strictEqual(diff.loadFailureCount, 1); + assert.strictEqual(diff.totalLoadTime, 100); + assert.strictEqual(diff.evictionCount, 2); + }); + + it("should floor results at 0 if minuend is smaller than subtrahend", () => { + const sSmall = CacheStats.of(5, 2, 1, 0, 10, 1); + const sLarge = CacheStats.of(10, 5, 2, 1, 20, 2); + const diff = sSmall.minus(sLarge); + assert.strictEqual(diff.hitCount, 0, "hitCount should be floored at 0 (5 - 10)"); + assert.strictEqual(diff.missCount, 0, "missCount should be floored at 0 (2 - 5)"); + assert.strictEqual(diff.loadSuccessCount, 0, "loadSuccessCount should be floored at 0 (1 - 2)"); + assert.strictEqual(diff.loadFailureCount, 0, "loadFailureCount should be floored at 0 (0 - 1)"); + assert.strictEqual(diff.totalLoadTime, 0, "totalLoadTime should be floored at 0 (10 - 20)"); + assert.strictEqual(diff.evictionCount, 0, "evictionCount should be floored at 0 (1 - 2)"); + }); + }); + + describe("hitRate()", () => { + it("should return 0 if requestCount is 0", () => { + const stats = CacheStats.of(0, 0, 0, 0, 0, 0); + assert.strictEqual(stats.hitRate(), 1); + }); + it("should return 0 if hitCount is 0 but missCount > 0", () => { + const stats = CacheStats.of(0, 1, 0, 0, 0, 0); + assert.strictEqual(stats.hitRate(), 0); + }); 
+ it("should return 1 if missCount is 0 but hitCount > 0", () => { + const stats = CacheStats.of(1, 0, 0, 0, 0, 0); + assert.strictEqual(stats.hitRate(), 1); + }); + }); + + describe("missRate()", () => { + it("should return 0 if requestCount is 0", () => { + const stats = CacheStats.of(0, 0, 0, 0, 0, 0); + assert.strictEqual(stats.missRate(), 0); + }); + it("should return 1 if hitCount is 0 but missCount > 0", () => { + const stats = CacheStats.of(0, 1, 0, 0, 0, 0); + assert.strictEqual(stats.missRate(), 1); + }); + it("should return 0 if missCount is 0 but hitCount > 0", () => { + const stats = CacheStats.of(1, 0, 0, 0, 0, 0); + assert.strictEqual(stats.missRate(), 0); + }); + }); + + describe("loadFailureRate()", () => { + it("should return 0 if loadCount is 0", () => { + const stats = CacheStats.of(0, 0, 0, 0, 0, 0); + assert.strictEqual(stats.loadFailureRate(), 0); + }); + it("should return 0 if loadFailureCount is 0 but loadSuccessCount > 0", () => { + const stats = CacheStats.of(0, 0, 1, 0, 10, 0); + assert.strictEqual(stats.loadFailureRate(), 0); + }); + it("should return 1 if loadSuccessCount is 0 but loadFailureCount > 0", () => { + const stats = CacheStats.of(0, 0, 0, 1, 10, 0); + assert.strictEqual(stats.loadFailureRate(), 1); + }); + }); + + describe("averageLoadPenalty()", () => { + it("should return 0 if loadCount is 0, even if totalLoadTime > 0", () => { + const stats = CacheStats.of(0, 0, 0, 0, 100, 0); + assert.strictEqual(stats.averageLoadPenalty(), 0); + }); + it("should return 0 if totalLoadTime is 0 and loadCount > 0", () => { + const stats = CacheStats.of(0, 0, 1, 1, 0, 0); + assert.strictEqual(stats.averageLoadPenalty(), 0); + }); + }); + }); + }); + it('should reflect comprehensive cache operations in stats via BasicClientSideCache', async function () { + + const csc = new BasicClientSideCache({ + maxEntries: 2, // Small size to easily trigger evictions + }); + + testUtils.testWithClient('comprehensive_stats_run', async client => { + + // 
--- Phase 1: Initial misses and loads ---
    await client.set('keyA', 'valueA_1');
    assert.strictEqual(await client.get('keyA'), 'valueA_1', "Get keyA first time");
    assert.strictEqual(csc.stats().missCount, 1);
    assert.strictEqual(csc.stats().loadSuccessCount, 1);

    await client.set('keyB', 'valueB_1');
    assert.strictEqual(await client.get('keyB'), 'valueB_1', "Get keyB first time");
    assert.strictEqual(csc.stats().missCount, 2);
    assert.strictEqual(csc.stats().loadSuccessCount, 2);

    // --- Phase 2: Cache hits ---
    assert.strictEqual(await client.get('keyA'), 'valueA_1', "Get keyA second time (hit)");
    assert.strictEqual(csc.stats().hitCount, 1);

    assert.strictEqual(await client.get('keyB'), 'valueB_1', "Get keyB second time (hit)");
    assert.strictEqual(csc.stats().hitCount, 2);


    // --- Phase 3: Trigger evictions and more misses/loads ---
    await client.set('keyC', 'valueC_1');
    assert.strictEqual(await client.get('keyC'), 'valueC_1', "Get keyC first time (evicts keyA)");
    assert.strictEqual(csc.stats().missCount, 3);
    assert.strictEqual(csc.stats().loadSuccessCount, 3);
    assert.strictEqual(csc.stats().evictionCount, 1);


    assert.strictEqual(await client.get('keyA'), 'valueA_1', "Get keyA again (miss after eviction)");
    assert.strictEqual(csc.stats().missCount, 4);
    assert.strictEqual(csc.stats().loadSuccessCount, 4);
    assert.strictEqual(csc.stats().evictionCount, 2);


    // --- Phase 4: More hits ---
    assert.strictEqual(await client.get('keyC'), 'valueC_1', "Get keyC again (hit)");
    assert.strictEqual(csc.stats().hitCount, 3);

    // --- Phase 5: Update a key (results in invalidation, then miss/load on next GET) ---
    // Note: A SET operation on an existing cached key should invalidate it.
    // The invalidation itself isn't directly a "hit" or "miss" for stats,
    // but the *next* GET will be a miss.
    await client.set('keyA', 'valueA_2');
    assert.strictEqual(await client.get('keyA'), 'valueA_2', "Get keyA after SET (miss due to invalidation)");

    assert.strictEqual(csc.stats().hitCount, 3);
    assert.strictEqual(csc.stats().loadSuccessCount, 5);



    const stats = csc.stats(); // FIX: missing semicolon

    assert.strictEqual(stats.hitCount, 3, "Final hitCount");
    assert.strictEqual(stats.missCount, 5, "Final missCount");
    assert.strictEqual(stats.loadSuccessCount, 5, "Final loadSuccessCount");
    assert.strictEqual(stats.loadFailureCount, 0, "Final loadFailureCount (expected 0 for this test)");
    assert.strictEqual(stats.evictionCount, 2, "Final evictionCount");
    assert.ok(stats.totalLoadTime >= 0, "Final totalLoadTime should be non-negative");

    assert.strictEqual(stats.requestCount(), 8, "Final requestCount (5 misses + 3 hits)");
    assert.strictEqual(stats.hitRate(), 3 / 8, "Final hitRate");
    assert.strictEqual(stats.missRate(), 5 / 8, "Final missRate");

    assert.strictEqual(stats.loadCount(), 5, "Final loadCount (5 success + 0 failure)");
    assert.strictEqual(stats.loadFailureRate(), 0, "Final loadFailureRate (0 failures / 5 loads)");

    if (stats.loadCount() > 0) {
      assert.ok(stats.averageLoadPenalty() >= 0, "Final averageLoadPenalty should be non-negative");
      assert.strictEqual(stats.averageLoadPenalty(), stats.totalLoadTime / stats.loadCount(), "Average load penalty calculation");
    } else {
      assert.strictEqual(stats.averageLoadPenalty(), 0, "Final averageLoadPenalty should be 0 if no loads");
    }

  }, {
    ...GLOBAL.SERVERS.OPEN,
    clientOptions: {
      RESP: 3,
      clientSideCache: csc
    }
  });
});
});
diff --git a/packages/client/lib/client/cache.ts b/packages/client/lib/client/cache.ts
new file mode 100644
index 00000000000..7254352ee8f
--- /dev/null
+++ b/packages/client/lib/client/cache.ts
@@ -0,0 +1,870 @@
import { EventEmitter } from 'stream';
import RedisClient from '.';
import { RedisArgument, ReplyUnion, TransformReply, TypeMapping } from
'../RESP/types';
import { BasicCommandParser } from './parser';

/**
 * A snapshot of cache statistics.
 *
 * This class provides an immutable view of the cache's operational statistics at a particular
 * point in time. It is heavily inspired by the statistics reporting capabilities found in
 * Ben Manes's Caffeine cache (https://github.com/ben-manes/caffeine).
 *
 * Instances of `CacheStats` are typically obtained from a {@link StatsCounter} and can be used
 * for performance monitoring, debugging, or logging. It includes metrics such as hit rate,
 * miss rate, load success/failure rates, average load penalty, and eviction counts.
 *
 * All statistics are non-negative. Rates and averages are typically in the range `[0.0, 1.0]`,
 * or `0` if the an operation has not occurred (e.g. hit rate is 0 if there are no requests).
 *
 * Cache statistics are incremented according to specific rules:
 * - When a cache lookup encounters an existing entry, hitCount is incremented.
 * - When a cache lookup encounters a missing entry, missCount is incremented.
 * - When a new entry is successfully loaded, loadSuccessCount is incremented and the
 *   loading time is added to totalLoadTime.
 * - When an entry fails to load, loadFailureCount is incremented and the
 *   loading time is added to totalLoadTime.
 * - When an entry is evicted due to size constraints or expiration,
 *   evictionCount is incremented.
 */
export class CacheStats {
  /**
   * Creates a new CacheStats instance with the specified statistics.
   * Private: use {@link CacheStats.of} or {@link CacheStats.empty} instead.
   */
  private constructor(
    public readonly hitCount: number,
    public readonly missCount: number,
    public readonly loadSuccessCount: number,
    public readonly loadFailureCount: number,
    public readonly totalLoadTime: number,
    public readonly evictionCount: number
  ) {
    // Reject any negative counter up front so every instance is internally consistent.
    const counters = [hitCount, missCount, loadSuccessCount, loadFailureCount, totalLoadTime, evictionCount];
    if (counters.some(value => value < 0)) {
      throw new Error('All statistics values must be non-negative');
    }
  }

  /**
   * Creates a new CacheStats instance with the specified statistics.
   *
   * @param hitCount - Number of cache hits
   * @param missCount - Number of cache misses
   * @param loadSuccessCount - Number of successful cache loads
   * @param loadFailureCount - Number of failed cache loads
   * @param totalLoadTime - Total load time in milliseconds
   * @param evictionCount - Number of cache evictions
   */
  static of(
    hitCount = 0,
    missCount = 0,
    loadSuccessCount = 0,
    loadFailureCount = 0,
    totalLoadTime = 0,
    evictionCount = 0
  ): CacheStats {
    return new CacheStats(hitCount, missCount, loadSuccessCount, loadFailureCount, totalLoadTime, evictionCount);
  }

  /**
   * Returns a statistics instance where no cache events have been recorded.
   *
   * @returns An empty statistics instance
   */
  static empty(): CacheStats {
    return CacheStats.EMPTY_STATS;
  }

  /**
   * A shared, all-zero stats instance returned by {@link CacheStats.empty}.
   */
  private static readonly EMPTY_STATS = new CacheStats(0, 0, 0, 0, 0, 0);

  /**
   * Returns the total number of times cache lookup methods have returned
   * either a cached or uncached value.
   *
   * @returns Total number of requests (hits + misses)
   */
  requestCount(): number {
    return this.hitCount + this.missCount;
  }

  /**
   * Returns the hit rate of the cache.
   * This is defined as hitCount / requestCount, or 1.0 when requestCount is 0.
   *
   * @returns The ratio of cache requests that were hits (between 0.0 and 1.0)
   */
  hitRate(): number {
    const requests = this.requestCount();
    if (requests === 0) {
      return 1.0;
    }
    return this.hitCount / requests;
  }

  /**
   * Returns the miss rate of the cache.
   * This is defined as missCount / requestCount, or 0.0 when requestCount is 0.
   *
   * @returns The ratio of cache requests that were misses (between 0.0 and 1.0)
   */
  missRate(): number {
    const requests = this.requestCount();
    if (requests === 0) {
      return 0.0;
    }
    return this.missCount / requests;
  }

  /**
   * Returns the total number of load operations (successful + failed).
   *
   * @returns Total number of load operations
   */
  loadCount(): number {
    return this.loadSuccessCount + this.loadFailureCount;
  }

  /**
   * Returns the ratio of cache loading attempts that failed.
   * This is defined as loadFailureCount / loadCount, or 0.0 when loadCount is 0.
   *
   * @returns Ratio of load operations that failed (between 0.0 and 1.0)
   */
  loadFailureRate(): number {
    const loads = this.loadCount();
    if (loads === 0) {
      return 0.0;
    }
    return this.loadFailureCount / loads;
  }

  /**
   * Returns the average time spent loading new values, in milliseconds.
   * This is defined as totalLoadTime / loadCount, or 0.0 when loadCount is 0.
   *
   * @returns Average load time in milliseconds
   */
  averageLoadPenalty(): number {
    const loads = this.loadCount();
    if (loads === 0) {
      return 0.0;
    }
    return this.totalLoadTime / loads;
  }

  /**
   * Returns a new CacheStats representing the difference between this CacheStats
   * and another. Negative values are rounded up to zero.
   *
   * @param other - The statistics to subtract from this instance
   * @returns The difference between this instance and other
   */
  minus(other: CacheStats): CacheStats {
    return CacheStats.of(
      Math.max(0, this.hitCount - other.hitCount),
      Math.max(0, this.missCount - other.missCount),
      Math.max(0, this.loadSuccessCount - other.loadSuccessCount),
      Math.max(0, this.loadFailureCount - other.loadFailureCount),
      Math.max(0, this.totalLoadTime - other.totalLoadTime),
      Math.max(0, this.evictionCount - other.evictionCount)
    );
  }

  /**
   * Returns a new CacheStats representing the sum of this CacheStats and another.
   *
   * @param other - The statistics to add to this instance
   * @returns The sum of this instance and other
   */
  plus(other: CacheStats): CacheStats {
    return CacheStats.of(
      this.hitCount + other.hitCount,
      this.missCount + other.missCount,
      this.loadSuccessCount + other.loadSuccessCount,
      this.loadFailureCount + other.loadFailureCount,
      this.totalLoadTime + other.totalLoadTime,
      this.evictionCount + other.evictionCount
    );
  }
}

/**
 * An accumulator for cache statistics.
 *
 * This interface defines the contract for objects that record cache-related events
 * such as hits, misses, loads (successes and failures), and evictions. The design
 * is inspired by the statistics collection mechanisms in Ben Manes's Caffeine cache
 * (https://github.com/ben-manes/caffeine).
 *
 * Implementations of this interface are responsible for aggregating these events.
 * A snapshot of the current statistics can be obtained by calling the `snapshot()`
 * method, which returns an immutable {@link CacheStats} object.
 *
 * Common implementations include `DefaultStatsCounter` for active statistics collection
 * and `DisabledStatsCounter` for a no-op version when stats are not needed.
 */
export interface StatsCounter {
  /**
   * Records cache hits. This should be called when a cache request returns a cached value.
+ * + * @param count - The number of hits to record + */ + recordHits(count: number): void; + + /** + * Records cache misses. This should be called when a cache request returns a value that was not + * found in the cache. + * + * @param count - The number of misses to record + */ + recordMisses(count: number): void; + + /** + * Records the successful load of a new entry. This method should be called when a cache request + * causes an entry to be loaded and the loading completes successfully. + * + * @param loadTime - The number of milliseconds the cache spent computing or retrieving the new value + */ + recordLoadSuccess(loadTime: number): void; + + /** + * Records the failed load of a new entry. This method should be called when a cache request + * causes an entry to be loaded, but an exception is thrown while loading the entry. + * + * @param loadTime - The number of milliseconds the cache spent computing or retrieving the new value + * prior to the failure + */ + recordLoadFailure(loadTime: number): void; + + /** + * Records the eviction of an entry from the cache. This should only be called when an entry is + * evicted due to the cache's eviction strategy, and not as a result of manual invalidations. + * + * @param count - The number of evictions to record + */ + recordEvictions(count: number): void; + + /** + * Returns a snapshot of this counter's values. Note that this may be an inconsistent view, as it + * may be interleaved with update operations. + * + * @return A snapshot of this counter's values + */ + snapshot(): CacheStats; +} + +/** + * A StatsCounter implementation that does nothing and always returns empty stats. 
+ */ +class DisabledStatsCounter implements StatsCounter { + static readonly INSTANCE = new DisabledStatsCounter(); + + private constructor() { } + + recordHits(count: number): void { } + recordMisses(count: number): void { } + recordLoadSuccess(loadTime: number): void { } + recordLoadFailure(loadTime: number): void { } + recordEvictions(count: number): void { } + snapshot(): CacheStats { return CacheStats.empty(); } +} + +/** + * Returns a StatsCounter that does not record any cache events. + * + * @return A StatsCounter that does not record metrics + */ +function disabledStatsCounter(): StatsCounter { + return DisabledStatsCounter.INSTANCE; +} + +/** + * A StatsCounter implementation that maintains cache statistics. + */ +class DefaultStatsCounter implements StatsCounter { + #hitCount = 0; + #missCount = 0; + #loadSuccessCount = 0; + #loadFailureCount = 0; + #totalLoadTime = 0; + #evictionCount = 0; + + /** + * Records cache hits. + * + * @param count - The number of hits to record + */ + recordHits(count: number): void { + this.#hitCount += count; + } + + /** + * Records cache misses. + * + * @param count - The number of misses to record + */ + recordMisses(count: number): void { + this.#missCount += count; + } + + /** + * Records the successful load of a new entry. + * + * @param loadTime - The number of milliseconds spent loading the entry + */ + recordLoadSuccess(loadTime: number): void { + this.#loadSuccessCount++; + this.#totalLoadTime += loadTime; + } + + /** + * Records the failed load of a new entry. + * + * @param loadTime - The number of milliseconds spent attempting to load the entry + */ + recordLoadFailure(loadTime: number): void { + this.#loadFailureCount++; + this.#totalLoadTime += loadTime; + } + + /** + * Records cache evictions. + * + * @param count - The number of evictions to record + */ + recordEvictions(count: number): void { + this.#evictionCount += count; + } + + /** + * Returns a snapshot of the current statistics. 
+ * + * @returns A snapshot of the current statistics + */ + snapshot(): CacheStats { + return CacheStats.of( + this.#hitCount, + this.#missCount, + this.#loadSuccessCount, + this.#loadFailureCount, + this.#totalLoadTime, + this.#evictionCount + ); + } + + /** + * Creates a new DefaultStatsCounter. + * + * @returns A new DefaultStatsCounter instance + */ + static create(): DefaultStatsCounter { + return new DefaultStatsCounter(); + } +} + +type CachingClient = RedisClient; +type CmdFunc = () => Promise; + +type EvictionPolicy = "LRU" | "FIFO" + +/** + * Configuration options for Client Side Cache + */ +export interface ClientSideCacheConfig { + /** + * Time-to-live in milliseconds for cached entries. + * Use 0 for no expiration. + * @default 0 + */ + ttl?: number; + + /** + * Maximum number of entries to store in the cache. + * Use 0 for unlimited entries. + * @default 0 + */ + maxEntries?: number; + + /** + * Eviction policy to use when the cache reaches its capacity. + * - "LRU" (Least Recently Used): Evicts least recently accessed entries first + * - "FIFO" (First In First Out): Evicts oldest entries first + * @default "LRU" + */ + evictPolicy?: EvictionPolicy; + + /** + * Whether to collect statistics about cache operations. 
+ * @default true + */ + recordStats?: boolean; +} + +interface CacheCreator { + epoch: number; + client: CachingClient; +} + +interface ClientSideCacheEntry { + invalidate(): void; + validate(): boolean; +} + +/** + * Generates a unique cache key from Redis command arguments + * + * @param redisArgs - Array of Redis command arguments + * @returns A unique string key for caching + */ +function generateCacheKey(redisArgs: ReadonlyArray): string { + const tmp = new Array(redisArgs.length * 2); + + for (let i = 0; i < redisArgs.length; i++) { + tmp[i] = redisArgs[i].length; + tmp[i + redisArgs.length] = redisArgs[i]; + } + + return tmp.join('_'); +} + +abstract class ClientSideCacheEntryBase implements ClientSideCacheEntry { + #invalidated = false; + readonly #expireTime: number; + + constructor(ttl: number) { + if (ttl == 0) { + this.#expireTime = 0; + } else { + this.#expireTime = Date.now() + ttl; + } + } + + invalidate(): void { + this.#invalidated = true; + } + + validate(): boolean { + return !this.#invalidated && (this.#expireTime == 0 || (Date.now() < this.#expireTime)) + } +} + +class ClientSideCacheEntryValue extends ClientSideCacheEntryBase { + readonly #value: any; + + get value() { + return this.#value; + } + + constructor(ttl: number, value: any) { + super(ttl); + this.#value = value; + } +} + +class ClientSideCacheEntryPromise extends ClientSideCacheEntryBase { + readonly #sendCommandPromise: Promise; + + get promise() { + return this.#sendCommandPromise; + } + + constructor(ttl: number, sendCommandPromise: Promise) { + super(ttl); + this.#sendCommandPromise = sendCommandPromise; + } +} + +export abstract class ClientSideCacheProvider extends EventEmitter { + abstract handleCache(client: CachingClient, parser: BasicCommandParser, fn: CmdFunc, transformReply: TransformReply | undefined, typeMapping: TypeMapping | undefined): Promise; + abstract trackingOn(): Array; + abstract invalidate(key: RedisArgument | null): void; + abstract clear(): void; + 
abstract stats(): CacheStats; + abstract onError(): void; + abstract onClose(): void; +} + +export class BasicClientSideCache extends ClientSideCacheProvider { + #cacheKeyToEntryMap: Map; + #keyToCacheKeySetMap: Map>; + readonly ttl: number; + readonly maxEntries: number; + readonly lru: boolean; + #statsCounter: StatsCounter; + + + recordEvictions(count: number): void { + this.#statsCounter.recordEvictions(count); + } + + recordHits(count: number): void { + this.#statsCounter.recordHits(count); + } + + recordMisses(count: number): void { + this.#statsCounter.recordMisses(count); + } + + constructor(config?: ClientSideCacheConfig) { + super(); + + this.#cacheKeyToEntryMap = new Map(); + this.#keyToCacheKeySetMap = new Map>(); + this.ttl = config?.ttl ?? 0; + this.maxEntries = config?.maxEntries ?? 0; + this.lru = config?.evictPolicy !== "FIFO"; + + const recordStats = config?.recordStats !== false; + this.#statsCounter = recordStats ? DefaultStatsCounter.create() : disabledStatsCounter(); + } + + /* logic of how caching works: + + 1. commands use a CommandParser + it enables us to define/retrieve + cacheKey - a unique key that corresponds to this command and its arguments + redisKeys - an array of redis keys as strings that if the key is modified, will cause redis to invalidate this result when cached + 2. check if cacheKey is in our cache + 2b1. if its a value cacheEntry - return it + 2b2. if it's a promise cache entry - wait on promise and then go to 3c. + 3. if cacheEntry is not in cache + 3a. send the command save the promise into a a cacheEntry and then wait on result + 3b. transform reply (if required) based on transformReply + 3b. check the cacheEntry is still valid - in cache and hasn't been deleted) + 3c. if valid - overwrite with value entry + 4. 
return previously non cached result + */ + override async handleCache( + client: CachingClient, + parser: BasicCommandParser, + fn: CmdFunc, + transformReply?: TransformReply, + typeMapping?: TypeMapping + ) { + let reply: ReplyUnion; + + const cacheKey = generateCacheKey(parser.redisArgs); + + // "2" + let cacheEntry = this.get(cacheKey); + if (cacheEntry) { + // If instanceof is "too slow", can add a "type" and then use an "as" cast to call proper getters. + if (cacheEntry instanceof ClientSideCacheEntryValue) { // "2b1" + this.#statsCounter.recordHits(1); + + return structuredClone(cacheEntry.value); + } else if (cacheEntry instanceof ClientSideCacheEntryPromise) { // 2b2 + // This counts as a miss since the value hasn't been fully loaded yet. + this.#statsCounter.recordMisses(1); + reply = await cacheEntry.promise; + } else { + throw new Error("unknown cache entry type"); + } + } else { // 3/3a + this.#statsCounter.recordMisses(1); + + const startTime = performance.now(); + const promise = fn(); + + cacheEntry = this.createPromiseEntry(client, promise); + this.set(cacheKey, cacheEntry, parser.keys); + + try { + reply = await promise; + const loadTime = performance.now() - startTime; + this.#statsCounter.recordLoadSuccess(loadTime); + } catch (err) { + const loadTime = performance.now() - startTime; + this.#statsCounter.recordLoadFailure(loadTime); + + if (cacheEntry.validate()) { + this.delete(cacheKey!); + } + + throw err; + } + } + + // 3b + let val; + if (transformReply) { + val = transformReply(reply, parser.preserve, typeMapping); + } else { + val = reply; + } + + // 3c + if (cacheEntry.validate()) { // revalidating promise entry (dont save value, if promise entry has been invalidated) + // 3d + cacheEntry = this.createValueEntry(client, val); + this.set(cacheKey, cacheEntry, parser.keys); + this.emit("cached-key", cacheKey); + } else { + // cache entry for key got invalidated between execution and saving, so not saving + } + + return structuredClone(val); 
+ } + + override trackingOn() { + return ['CLIENT', 'TRACKING', 'ON']; + } + + override invalidate(key: RedisArgument | null) { + if (key === null) { + this.clear(false); + this.emit("invalidate", key); + + return; + } + + const keySet = this.#keyToCacheKeySetMap.get(key.toString()); + if (keySet) { + for (const cacheKey of keySet) { + const entry = this.#cacheKeyToEntryMap.get(cacheKey); + if (entry) { + entry.invalidate(); + } + this.#cacheKeyToEntryMap.delete(cacheKey); + } + this.#keyToCacheKeySetMap.delete(key.toString()); + } + + this.emit('invalidate', key); + } + + override clear(resetStats = true) { + const oldSize = this.#cacheKeyToEntryMap.size; + this.#cacheKeyToEntryMap.clear(); + this.#keyToCacheKeySetMap.clear(); + + if (resetStats) { + if (!(this.#statsCounter instanceof DisabledStatsCounter)) { + this.#statsCounter = DefaultStatsCounter.create(); + } + } else { + // If old entries were evicted due to clear, record them as evictions + if (oldSize > 0) { + this.#statsCounter.recordEvictions(oldSize); + } + } + } + + get(cacheKey: string) { + const val = this.#cacheKeyToEntryMap.get(cacheKey); + + if (val && !val.validate()) { + this.delete(cacheKey); + this.#statsCounter.recordEvictions(1); + this.emit("cache-evict", cacheKey); + + return undefined; + } + + if (val !== undefined && this.lru) { + this.#cacheKeyToEntryMap.delete(cacheKey); + this.#cacheKeyToEntryMap.set(cacheKey, val); + } + + return val; + } + + delete(cacheKey: string) { + const entry = this.#cacheKeyToEntryMap.get(cacheKey); + if (entry) { + entry.invalidate(); + this.#cacheKeyToEntryMap.delete(cacheKey); + } + } + + has(cacheKey: string) { + return this.#cacheKeyToEntryMap.has(cacheKey); + } + + set(cacheKey: string, cacheEntry: ClientSideCacheEntry, keys: Array) { + let count = this.#cacheKeyToEntryMap.size; + const oldEntry = this.#cacheKeyToEntryMap.get(cacheKey); + + if (oldEntry) { + count--; // overwriting, so not incrementig + oldEntry.invalidate(); + } + + if 
(this.maxEntries > 0 && count >= this.maxEntries) { + this.deleteOldest(); + this.#statsCounter.recordEvictions(1); + } + + this.#cacheKeyToEntryMap.set(cacheKey, cacheEntry); + + for (const key of keys) { + if (!this.#keyToCacheKeySetMap.has(key.toString())) { + this.#keyToCacheKeySetMap.set(key.toString(), new Set()); + } + + const cacheKeySet = this.#keyToCacheKeySetMap.get(key.toString()); + cacheKeySet!.add(cacheKey); + } + } + + size() { + return this.#cacheKeyToEntryMap.size; + } + + createValueEntry(client: CachingClient, value: any): ClientSideCacheEntryValue { + return new ClientSideCacheEntryValue(this.ttl, value); + } + + createPromiseEntry(client: CachingClient, sendCommandPromise: Promise): ClientSideCacheEntryPromise { + return new ClientSideCacheEntryPromise(this.ttl, sendCommandPromise); + } + + override stats(): CacheStats { + return this.#statsCounter.snapshot(); + } + + override onError(): void { + this.clear(); + } + + override onClose() { + this.clear(); + } + + /** + * @internal + */ + deleteOldest() { + const it = this.#cacheKeyToEntryMap[Symbol.iterator](); + const n = it.next(); + if (!n.done) { + const key = n.value[0]; + const entry = this.#cacheKeyToEntryMap.get(key); + if (entry) { + entry.invalidate(); + } + this.#cacheKeyToEntryMap.delete(key); + } + } + + /** + * Get cache entries for debugging + * @internal + */ + entryEntries(): IterableIterator<[string, ClientSideCacheEntry]> { + return this.#cacheKeyToEntryMap.entries(); + } + + /** + * Get key set entries for debugging + * @internal + */ + keySetEntries(): IterableIterator<[string, Set]> { + return this.#keyToCacheKeySetMap.entries(); + } +} + +export abstract class PooledClientSideCacheProvider extends BasicClientSideCache { + #disabled = false; + + disable(): void { + this.#disabled = true; + } + + enable(): void { + this.#disabled = false; + } + + override get(cacheKey: string): ClientSideCacheEntry | undefined { + if (this.#disabled) { + return undefined; + } + + return 
super.get(cacheKey); + } + + override has(cacheKey: string): boolean { + if (this.#disabled) { + return false; + } + + return super.has(cacheKey); + } + + onPoolClose(): void { + this.clear(); + } +} + +export class BasicPooledClientSideCache extends PooledClientSideCacheProvider { + override onError() { + this.clear(false); + } + + override onClose() { + this.clear(false); + } +} + +class PooledClientSideCacheEntryValue extends ClientSideCacheEntryValue { + #creator: CacheCreator; + + constructor(ttl: number, creator: CacheCreator, value: any) { + super(ttl, value); + + this.#creator = creator; + } + + override validate(): boolean { + let ret = super.validate(); + if (this.#creator) { + ret = ret && this.#creator.client.isReady && this.#creator.client.socketEpoch == this.#creator.epoch + } + + return ret; + } +} + +class PooledClientSideCacheEntryPromise extends ClientSideCacheEntryPromise { + #creator: CacheCreator; + + constructor(ttl: number, creator: CacheCreator, sendCommandPromise: Promise) { + super(ttl, sendCommandPromise); + + this.#creator = creator; + } + + override validate(): boolean { + let ret = super.validate(); + + return ret && this.#creator.client.isReady && this.#creator.client.socketEpoch == this.#creator.epoch + } +} + +export class PooledNoRedirectClientSideCache extends BasicPooledClientSideCache { + override createValueEntry(client: CachingClient, value: any): ClientSideCacheEntryValue { + const creator = { + epoch: client.socketEpoch, + client: client + }; + + return new PooledClientSideCacheEntryValue(this.ttl, creator, value); + } + + override createPromiseEntry(client: CachingClient, sendCommandPromise: Promise): ClientSideCacheEntryPromise { + const creator = { + epoch: client.socketEpoch, + client: client + }; + + return new PooledClientSideCacheEntryPromise(this.ttl, creator, sendCommandPromise); + } + + override onError() { } + + override onClose() { } +} diff --git a/packages/client/lib/client/commands-queue.ts 
b/packages/client/lib/client/commands-queue.ts new file mode 100644 index 00000000000..9b7f737113b --- /dev/null +++ b/packages/client/lib/client/commands-queue.ts @@ -0,0 +1,544 @@ +import { DoublyLinkedNode, DoublyLinkedList, EmptyAwareSinglyLinkedList } from './linked-list'; +import encodeCommand from '../RESP/encoder'; +import { Decoder, PUSH_TYPE_MAPPING, RESP_TYPES } from '../RESP/decoder'; +import { TypeMapping, ReplyUnion, RespVersions, RedisArgument } from '../RESP/types'; +import { ChannelListeners, PubSub, PubSubCommand, PubSubListener, PubSubType, PubSubTypeListeners } from './pub-sub'; +import { AbortError, ErrorReply, CommandTimeoutDuringMaintenanceError, TimeoutError } from '../errors'; +import { MonitorCallback } from '.'; +import { dbgMaintenance } from './enterprise-maintenance-manager'; + +export interface CommandOptions { + chainId?: symbol; + asap?: boolean; + abortSignal?: AbortSignal; + /** + * Maps between RESP and JavaScript types + */ + typeMapping?: T; + /** + * Timeout for the command in milliseconds + */ + timeout?: number; +} + +export interface CommandToWrite extends CommandWaitingForReply { + args: ReadonlyArray; + chainId: symbol | undefined; + abort: { + signal: AbortSignal; + listener: () => unknown; + } | undefined; + timeout: { + signal: AbortSignal; + listener: () => unknown; + originalTimeout: number | undefined; + } | undefined; +} + +interface CommandWaitingForReply { + resolve(reply?: unknown): void; + reject(err: unknown): void; + channelsCounter: number | undefined; + typeMapping: TypeMapping | undefined; +} + +export type OnShardedChannelMoved = (channel: string, listeners: ChannelListeners) => void; + +const PONG = Buffer.from('pong'), + RESET = Buffer.from('RESET'); + +const RESP2_PUSH_TYPE_MAPPING = { + ...PUSH_TYPE_MAPPING, + [RESP_TYPES.SIMPLE_STRING]: Buffer +}; + +// Try to handle a push notification. Return whether you +// successfully consumed the notification or not. 
This is +// important in order for the queue to be able to pass the +// notification to another handler if the current one did not +// succeed. +type PushHandler = (pushItems: Array) => boolean; + +export default class RedisCommandsQueue { + readonly #respVersion; + readonly #maxLength; + readonly #toWrite = new DoublyLinkedList(); + readonly #waitingForReply = new EmptyAwareSinglyLinkedList(); + readonly #onShardedChannelMoved; + #chainInExecution: symbol | undefined; + readonly decoder; + readonly #pubSub = new PubSub(); + + #pushHandlers: PushHandler[] = [this.#onPush.bind(this)]; + + #maintenanceCommandTimeout: number | undefined + + setMaintenanceCommandTimeout(ms: number | undefined) { + // Prevent possible api misuse + if (this.#maintenanceCommandTimeout === ms) { + dbgMaintenance(`Queue already set maintenanceCommandTimeout to ${ms}, skipping`); + return; + }; + + dbgMaintenance(`Setting maintenance command timeout to ${ms}`); + this.#maintenanceCommandTimeout = ms; + + if(this.#maintenanceCommandTimeout === undefined) { + dbgMaintenance(`Queue will keep maintenanceCommandTimeout for exisitng commands, just to be on the safe side. 
New commands will receive normal timeouts`); + return; + } + + let counter = 0; + const total = this.#toWrite.length; + + // Overwrite timeouts of all eligible toWrite commands + for(const node of this.#toWrite.nodes()) { + const command = node.value; + + // Remove timeout listener if it exists + RedisCommandsQueue.#removeTimeoutListener(command) + + counter++; + const newTimeout = this.#maintenanceCommandTimeout; + + // Overwrite the command's timeout + const signal = AbortSignal.timeout(newTimeout); + command.timeout = { + signal, + listener: () => { + this.#toWrite.remove(node); + command.reject(new CommandTimeoutDuringMaintenanceError(newTimeout)); + }, + originalTimeout: command.timeout?.originalTimeout + }; + signal.addEventListener('abort', command.timeout.listener, { once: true }); + }; + dbgMaintenance(`Total of ${counter} of ${total} timeouts reset to ${ms}`); + } + + get isPubSubActive() { + return this.#pubSub.isActive; + } + + constructor( + respVersion: RespVersions, + maxLength: number | null | undefined, + onShardedChannelMoved: OnShardedChannelMoved + ) { + this.#respVersion = respVersion; + this.#maxLength = maxLength; + this.#onShardedChannelMoved = onShardedChannelMoved; + this.decoder = this.#initiateDecoder(); + } + + #onReply(reply: ReplyUnion) { + this.#waitingForReply.shift()!.resolve(reply); + } + + #onErrorReply(err: ErrorReply) { + this.#waitingForReply.shift()!.reject(err); + } + + #onPush(push: Array) { + // TODO: type + if (this.#pubSub.handleMessageReply(push)) return true; + + const isShardedUnsubscribe = PubSub.isShardedUnsubscribe(push); + if (isShardedUnsubscribe && !this.#waitingForReply.length) { + const channel = push[1].toString(); + this.#onShardedChannelMoved( + channel, + this.#pubSub.removeShardedListeners(channel) + ); + return true; + } else if (isShardedUnsubscribe || PubSub.isStatusReply(push)) { + const head = this.#waitingForReply.head!.value; + if ( + (Number.isNaN(head.channelsCounter!) 
&& push[2] === 0) || + --head.channelsCounter! === 0 + ) { + this.#waitingForReply.shift()!.resolve(); + } + return true; + } + return false + } + + #getTypeMapping() { + return this.#waitingForReply.head!.value.typeMapping ?? {}; + } + + #initiateDecoder() { + return new Decoder({ + onReply: reply => this.#onReply(reply), + onErrorReply: err => this.#onErrorReply(err), + //TODO: we can shave off a few cycles by not adding onPush handler at all if CSC is not used + onPush: push => { + for(const pushHandler of this.#pushHandlers) { + if(pushHandler(push)) return + } + }, + getTypeMapping: () => this.#getTypeMapping() + }); + } + + addPushHandler(handler: PushHandler): void { + this.#pushHandlers.push(handler); + } + + async waitForInflightCommandsToComplete(): Promise { + // In-flight commands already completed + if(this.#waitingForReply.length === 0) { + return + }; + // Otherwise wait for in-flight commands to fire `empty` event + return new Promise(resolve => { + this.#waitingForReply.events.on('empty', resolve) + }); + } + + addCommand( + args: ReadonlyArray, + options?: CommandOptions + ): Promise { + if (this.#maxLength && this.#toWrite.length + this.#waitingForReply.length >= this.#maxLength) { + return Promise.reject(new Error('The queue is full')); + } else if (options?.abortSignal?.aborted) { + return Promise.reject(new AbortError()); + } + + return new Promise((resolve, reject) => { + let node: DoublyLinkedNode; + const value: CommandToWrite = { + args, + chainId: options?.chainId, + abort: undefined, + timeout: undefined, + resolve, + reject, + channelsCounter: undefined, + typeMapping: options?.typeMapping + }; + + // If #maintenanceCommandTimeout was explicitly set, we should + // use it instead of the timeout provided by the command + const timeout = this.#maintenanceCommandTimeout ?? 
options?.timeout; + const wasInMaintenance = this.#maintenanceCommandTimeout !== undefined; + if (timeout) { + + const signal = AbortSignal.timeout(timeout); + value.timeout = { + signal, + listener: () => { + this.#toWrite.remove(node); + value.reject(wasInMaintenance ? new CommandTimeoutDuringMaintenanceError(timeout) : new TimeoutError()); + }, + originalTimeout: options?.timeout + }; + signal.addEventListener('abort', value.timeout.listener, { once: true }); + } + + const signal = options?.abortSignal; + if (signal) { + value.abort = { + signal, + listener: () => { + this.#toWrite.remove(node); + value.reject(new AbortError()); + } + }; + signal.addEventListener('abort', value.abort.listener, { once: true }); + } + + node = this.#toWrite.add(value, options?.asap); + }); + } + + #addPubSubCommand(command: PubSubCommand, asap = false, chainId?: symbol) { + return new Promise((resolve, reject) => { + this.#toWrite.add({ + args: command.args, + chainId, + abort: undefined, + timeout: undefined, + resolve() { + command.resolve(); + resolve(); + }, + reject(err) { + command.reject?.(); + reject(err); + }, + channelsCounter: command.channelsCounter, + typeMapping: PUSH_TYPE_MAPPING + }, asap); + }); + } + + #setupPubSubHandler() { + // RESP3 uses `onPush` to handle PubSub, so no need to modify `onReply` + if (this.#respVersion !== 2) return; + + this.decoder.onReply = (reply => { + if (Array.isArray(reply)) { + if (this.#onPush(reply)) return; + + if (PONG.equals(reply[0] as Buffer)) { + const { resolve, typeMapping } = this.#waitingForReply.shift()!, + buffer = ((reply[1] as Buffer).length === 0 ? reply[0] : reply[1]) as Buffer; + resolve(typeMapping?.[RESP_TYPES.SIMPLE_STRING] === Buffer ? 
buffer : buffer.toString()); + return; + } + } + + return this.#onReply(reply); + }) as Decoder['onReply']; + this.decoder.getTypeMapping = () => RESP2_PUSH_TYPE_MAPPING; + } + + subscribe( + type: PubSubType, + channels: string | Array, + listener: PubSubListener, + returnBuffers?: T + ) { + const command = this.#pubSub.subscribe(type, channels, listener, returnBuffers); + if (!command) return; + + this.#setupPubSubHandler(); + return this.#addPubSubCommand(command); + } + + #resetDecoderCallbacks() { + this.decoder.onReply = (reply => this.#onReply(reply)) as Decoder['onReply']; + this.decoder.getTypeMapping = () => this.#getTypeMapping(); + } + + unsubscribe( + type: PubSubType, + channels?: string | Array, + listener?: PubSubListener, + returnBuffers?: T + ) { + const command = this.#pubSub.unsubscribe(type, channels, listener, returnBuffers); + if (!command) return; + + if (command && this.#respVersion === 2) { + // RESP2 modifies `onReply` to handle PubSub (see #setupPubSubHandler) + const { resolve } = command; + command.resolve = () => { + if (!this.#pubSub.isActive) { + this.#resetDecoderCallbacks(); + } + + resolve(); + }; + } + + return this.#addPubSubCommand(command); + } + + removeAllPubSubListeners() { + return this.#pubSub.removeAllListeners(); + } + + resubscribe(chainId?: symbol) { + const commands = this.#pubSub.resubscribe(); + if (!commands.length) return; + + this.#setupPubSubHandler(); + return Promise.all( + commands.map(command => this.#addPubSubCommand(command, true, chainId)) + ); + } + + extendPubSubChannelListeners( + type: PubSubType, + channel: string, + listeners: ChannelListeners + ) { + const command = this.#pubSub.extendChannelListeners(type, channel, listeners); + if (!command) return; + + this.#setupPubSubHandler(); + return this.#addPubSubCommand(command); + } + + extendPubSubListeners(type: PubSubType, listeners: PubSubTypeListeners) { + const command = this.#pubSub.extendTypeListeners(type, listeners); + if (!command) return; 
+ + this.#setupPubSubHandler(); + return this.#addPubSubCommand(command); + } + + getPubSubListeners(type: PubSubType) { + return this.#pubSub.listeners[type]; + } + + monitor(callback: MonitorCallback, options?: CommandOptions) { + return new Promise((resolve, reject) => { + const typeMapping = options?.typeMapping ?? {}; + this.#toWrite.add({ + args: ['MONITOR'], + chainId: options?.chainId, + abort: undefined, + timeout: undefined, + // using `resolve` instead of using `.then`/`await` to make sure it'll be called before processing the next reply + resolve: () => { + // after running `MONITOR` only `MONITOR` and `RESET` replies are expected + // any other command should cause an error + + // if `RESET` already overrides `onReply`, set monitor as it's fallback + if (this.#resetFallbackOnReply) { + this.#resetFallbackOnReply = callback; + } else { + this.decoder.onReply = callback; + } + + this.decoder.getTypeMapping = () => typeMapping; + resolve(); + }, + reject, + channelsCounter: undefined, + typeMapping + }, options?.asap); + }); + } + + resetDecoder() { + this.#resetDecoderCallbacks(); + this.decoder.reset(); + } + + #resetFallbackOnReply?: Decoder['onReply']; + + async reset(chainId: symbol, typeMapping?: T) { + return new Promise((resolve, reject) => { + // overriding onReply to handle `RESET` while in `MONITOR` or PubSub mode + this.#resetFallbackOnReply = this.decoder.onReply; + this.decoder.onReply = (reply => { + if ( + (typeof reply === 'string' && reply === 'RESET') || + (reply instanceof Buffer && RESET.equals(reply)) + ) { + this.#resetDecoderCallbacks(); + this.#resetFallbackOnReply = undefined; + this.#pubSub.reset(); + + this.#waitingForReply.shift()!.resolve(reply); + return; + } + + this.#resetFallbackOnReply!(reply); + }) as Decoder['onReply']; + + this.#toWrite.push({ + args: ['RESET'], + chainId, + abort: undefined, + timeout: undefined, + resolve, + reject, + channelsCounter: undefined, + typeMapping + }); + }); + } + + isWaitingToWrite() { 
+ return this.#toWrite.length > 0; + } + + *commandsToWrite() { + let toSend = this.#toWrite.shift(); + while (toSend) { + let encoded: ReadonlyArray + try { + encoded = encodeCommand(toSend.args); + } catch (err) { + toSend.reject(err); + toSend = this.#toWrite.shift(); + continue; + } + + // TODO reuse `toSend` or create new object? + (toSend as any).args = undefined; + if (toSend.abort) { + RedisCommandsQueue.#removeAbortListener(toSend); + toSend.abort = undefined; + } + if (toSend.timeout) { + RedisCommandsQueue.#removeTimeoutListener(toSend); + toSend.timeout = undefined; + } + this.#chainInExecution = toSend.chainId; + toSend.chainId = undefined; + this.#waitingForReply.push(toSend); + + yield encoded; + toSend = this.#toWrite.shift(); + } + } + + #flushWaitingForReply(err: Error): void { + for (const node of this.#waitingForReply) { + node.reject(err); + } + this.#waitingForReply.reset(); + } + + static #removeAbortListener(command: CommandToWrite) { + command.abort!.signal.removeEventListener('abort', command.abort!.listener); + } + + static #removeTimeoutListener(command: CommandToWrite) { + command.timeout?.signal.removeEventListener('abort', command.timeout!.listener); + } + + static #flushToWrite(toBeSent: CommandToWrite, err: Error) { + if (toBeSent.abort) { + RedisCommandsQueue.#removeAbortListener(toBeSent); + } + if (toBeSent.timeout) { + RedisCommandsQueue.#removeTimeoutListener(toBeSent); + } + + toBeSent.reject(err); + } + + flushWaitingForReply(err: Error): void { + this.resetDecoder(); + this.#pubSub.reset(); + + this.#flushWaitingForReply(err); + + if (!this.#chainInExecution) return; + + while (this.#toWrite.head?.value.chainId === this.#chainInExecution) { + RedisCommandsQueue.#flushToWrite( + this.#toWrite.shift()!, + err + ); + } + + this.#chainInExecution = undefined; + } + + flushAll(err: Error): void { + this.resetDecoder(); + this.#pubSub.reset(); + this.#flushWaitingForReply(err); + for (const node of this.#toWrite) { + 
RedisCommandsQueue.#flushToWrite(node, err); + } + this.#toWrite.reset(); + } + + isEmpty() { + return ( + this.#toWrite.length === 0 && + this.#waitingForReply.length === 0 + ); + } +} diff --git a/packages/client/lib/client/enterprise-maintenance-manager.spec.ts b/packages/client/lib/client/enterprise-maintenance-manager.spec.ts new file mode 100644 index 00000000000..59e2bfe8c0a --- /dev/null +++ b/packages/client/lib/client/enterprise-maintenance-manager.spec.ts @@ -0,0 +1,49 @@ +import assert from "node:assert"; +import { createClient } from "../../"; + +describe("EnterpriseMaintenanceManager does not prevent proper options parsing", () => { + it("should not throw when initializing without options", async () => { + const client = createClient(); + assert.doesNotThrow(async () => { + //Expected to reject because there is no url or socket provided and there is no running server on localhost + await assert.rejects(client.connect); + }); + }); + + it("should not throw when initializing without url/socket and with maint", async () => { + const client = createClient({ + maintNotifications: "enabled", + RESP: 3, + }); + assert.doesNotThrow(async () => { + //Expected to reject because there is no url or socket provided and there is no running server on localhost + await assert.rejects(client.connect); + }); + }); + it("should not throw when initializing with url and with maint", async () => { + const client = createClient({ + maintNotifications: "enabled", + RESP: 3, + url: "redis://localhost:6379", + }); + assert.doesNotThrow(async () => { + //Expected to reject because there is no url or socket provided and there is no running server on localhost + await assert.rejects(client.connect); + }); + }); + + it("should not throw when initializing with socket and with maint", async () => { + const client = createClient({ + maintNotifications: "enabled", + RESP: 3, + socket: { + host: "localhost", + port: 6379, + }, + }); + assert.doesNotThrow(async () => { + //Expected to 
reject because there is no url or socket provided and there is no running server on localhost + await assert.rejects(client.connect); + }); + }); +}); diff --git a/packages/client/lib/client/enterprise-maintenance-manager.ts b/packages/client/lib/client/enterprise-maintenance-manager.ts new file mode 100644 index 00000000000..9892a5be8a4 --- /dev/null +++ b/packages/client/lib/client/enterprise-maintenance-manager.ts @@ -0,0 +1,359 @@ +import { RedisClientOptions } from "."; +import RedisCommandsQueue from "./commands-queue"; +import { RedisArgument } from "../.."; +import { isIP } from "net"; +import { lookup } from "dns/promises"; +import assert from "node:assert"; +import { setTimeout } from "node:timers/promises"; +import RedisSocket, { RedisTcpSocketOptions } from "./socket"; +import diagnostics_channel from "node:diagnostics_channel"; + +export const MAINTENANCE_EVENTS = { + PAUSE_WRITING: "pause-writing", + RESUME_WRITING: "resume-writing", + TIMEOUTS_UPDATE: "timeouts-update", +} as const; + +const PN = { + MOVING: "MOVING", + MIGRATING: "MIGRATING", + MIGRATED: "MIGRATED", + FAILING_OVER: "FAILING_OVER", + FAILED_OVER: "FAILED_OVER", +}; + +export type DiagnosticsEvent = { + type: string; + timestamp: number; + data?: Object; +}; + +export const dbgMaintenance = (...args: any[]) => { + if (!process.env.REDIS_DEBUG_MAINTENANCE) return; + return console.log("[MNT]", ...args); +}; + +export const emitDiagnostics = (event: DiagnosticsEvent) => { + if (!process.env.REDIS_EMIT_DIAGNOSTICS) return; + + const channel = diagnostics_channel.channel("redis.maintenance"); + channel.publish(event); +}; + +export interface MaintenanceUpdate { + relaxedCommandTimeout?: number; + relaxedSocketTimeout?: number; +} + +interface Client { + _ejectSocket: () => RedisSocket; + _insertSocket: (socket: RedisSocket) => void; + _pause: () => void; + _unpause: () => void; + _maintenanceUpdate: (update: MaintenanceUpdate) => void; + duplicate: () => Client; + connect: () => Promise; 
+ destroy: () => void; + on: (event: string, callback: (value: unknown) => void) => void; +} + +export default class EnterpriseMaintenanceManager { + #commandsQueue: RedisCommandsQueue; + #options: RedisClientOptions; + #isMaintenance = 0; + #client: Client; + + static setupDefaultMaintOptions(options: RedisClientOptions) { + if (options.maintNotifications === undefined) { + options.maintNotifications = + options?.RESP === 3 ? "auto" : "disabled"; + } + if (options.maintEndpointType === undefined) { + options.maintEndpointType = "auto"; + } + if (options.maintRelaxedSocketTimeout === undefined) { + options.maintRelaxedSocketTimeout = 10000; + } + if (options.maintRelaxedCommandTimeout === undefined) { + options.maintRelaxedCommandTimeout = 10000; + } + } + + static async getHandshakeCommand( + options: RedisClientOptions, + ): Promise< + | { cmd: Array; errorHandler: (error: Error) => void } + | undefined + > { + if (options.maintNotifications === "disabled") return; + + const host = options.url + ? new URL(options.url).hostname + : (options.socket as RedisTcpSocketOptions | undefined)?.host; + + if (!host) return; + + const tls = options.socket?.tls ?? 
false + + const movingEndpointType = await determineEndpoint(tls, host, options); + return { + cmd: [ + "CLIENT", + "MAINT_NOTIFICATIONS", + "ON", + "moving-endpoint-type", + movingEndpointType, + ], + errorHandler: (error: Error) => { + dbgMaintenance("handshake failed:", error); + if (options.maintNotifications === "enabled") { + throw error; + } + }, + }; + } + + constructor( + commandsQueue: RedisCommandsQueue, + client: Client, + options: RedisClientOptions, + ) { + this.#commandsQueue = commandsQueue; + this.#options = options; + this.#client = client; + + this.#commandsQueue.addPushHandler(this.#onPush); + } + + #onPush = (push: Array): boolean => { + dbgMaintenance("ONPUSH:", push.map(String)); + + if (!Array.isArray(push) || !["MOVING", "MIGRATING", "MIGRATED", "FAILING_OVER", "FAILED_OVER"].includes(String(push[0]))) { + return false; + } + + const type = String(push[0]); + + emitDiagnostics({ + type, + timestamp: Date.now(), + data: { + push: push.map(String), + }, + }); + switch (type) { + case PN.MOVING: { + // [ 'MOVING', '17', '15', '54.78.247.156:12075' ] + // ^seq ^after ^new ip + const afterSeconds = push[2]; + const url: string | null = push[3] ? String(push[3]) : null; + dbgMaintenance("Received MOVING:", afterSeconds, url); + this.#onMoving(afterSeconds, url); + return true; + } + case PN.MIGRATING: + case PN.FAILING_OVER: { + dbgMaintenance("Received MIGRATING|FAILING_OVER"); + this.#onMigrating(); + return true; + } + case PN.MIGRATED: + case PN.FAILED_OVER: { + dbgMaintenance("Received MIGRATED|FAILED_OVER"); + this.#onMigrated(); + return true; + } + } + return false; + }; + + // Queue: + // toWrite [ C D E ] + // waitingForReply [ A B ] - aka In-flight commands + // + // time: ---1-2---3-4-5-6--------------------------- + // + // 1. [EVENT] MOVING PN received + // 2. [ACTION] Pause writing ( we need to wait for new socket to connect and for all in-flight commands to complete ) + // 3. [EVENT] New socket connected + // 4. 
[EVENT] In-flight commands completed + // 5. [ACTION] Destroy old socket + // 6. [ACTION] Resume writing -> we are going to write to the new socket from now on + #onMoving = async ( + afterSeconds: number, + url: string | null, + ): Promise => { + // 1 [EVENT] MOVING PN received + this.#onMigrating(); + + let host: string; + let port: number; + + // The special value `none` indicates that the `MOVING` message doesn’t need + // to contain an endpoint. Instead it contains the value `null` then. In + // such a corner case, the client is expected to schedule a graceful + // reconnect to its currently configured endpoint after half of the grace + // period that was communicated by the server is over. + if (url === null) { + assert(this.#options.maintEndpointType === "none"); + assert(this.#options.socket !== undefined); + assert("host" in this.#options.socket); + assert(typeof this.#options.socket.host === "string"); + host = this.#options.socket.host; + assert(typeof this.#options.socket.port === "number"); + port = this.#options.socket.port; + const waitTime = (afterSeconds * 1000) / 2; + dbgMaintenance(`Wait for ${waitTime}ms`); + await setTimeout(waitTime); + } else { + const split = url.split(":"); + host = split[0]; + port = Number(split[1]); + } + + // 2 [ACTION] Pause writing + dbgMaintenance("Pausing writing of new commands to old socket"); + this.#client._pause(); + + dbgMaintenance("Creating new tmp client"); + let start = performance.now(); + + // If the URL is provided, it takes precedense + // the options object could just be mutated + if(this.#options.url) { + const u = new URL(this.#options.url); + u.hostname = host; + u.port = String(port); + this.#options.url = u.toString(); + } else { + this.#options.socket = { + ...this.#options.socket, + host, + port + } + } + const tmpClient = this.#client.duplicate(); + tmpClient.on('error', (error: unknown) => { + //We dont know how to handle tmp client errors + dbgMaintenance(`[ERR]`, error) + }); + 
dbgMaintenance(`Tmp client created in ${( performance.now() - start ).toFixed(2)}ms`); + dbgMaintenance( + `Set timeout for tmp client to ${this.#options.maintRelaxedSocketTimeout}`, + ); + tmpClient._maintenanceUpdate({ + relaxedCommandTimeout: this.#options.maintRelaxedCommandTimeout, + relaxedSocketTimeout: this.#options.maintRelaxedSocketTimeout, + }); + dbgMaintenance(`Connecting tmp client: ${host}:${port}`); + start = performance.now(); + await tmpClient.connect(); + dbgMaintenance(`Connected to tmp client in ${(performance.now() - start).toFixed(2)}ms`); + // 3 [EVENT] New socket connected + + dbgMaintenance(`Wait for all in-flight commands to complete`); + await this.#commandsQueue.waitForInflightCommandsToComplete(); + dbgMaintenance(`In-flight commands completed`); + // 4 [EVENT] In-flight commands completed + + dbgMaintenance("Swap client sockets..."); + const oldSocket = this.#client._ejectSocket(); + const newSocket = tmpClient._ejectSocket(); + this.#client._insertSocket(newSocket); + tmpClient._insertSocket(oldSocket); + tmpClient.destroy(); + dbgMaintenance("Swap client sockets done."); + // 5 + 6 + dbgMaintenance("Resume writing"); + this.#client._unpause(); + this.#onMigrated(); + }; + + #onMigrating = () => { + this.#isMaintenance++; + if (this.#isMaintenance > 1) { + dbgMaintenance(`Timeout relaxation already done`); + return; + } + + const update: MaintenanceUpdate = { + relaxedCommandTimeout: this.#options.maintRelaxedCommandTimeout, + relaxedSocketTimeout: this.#options.maintRelaxedSocketTimeout, + }; + + this.#client._maintenanceUpdate(update); + }; + + #onMigrated = () => { + //ensure that #isMaintenance doesnt go under 0 + this.#isMaintenance = Math.max(this.#isMaintenance - 1, 0); + if (this.#isMaintenance > 0) { + dbgMaintenance(`Not ready to unrelax timeouts yet`); + return; + } + + const update: MaintenanceUpdate = { + relaxedCommandTimeout: undefined, + relaxedSocketTimeout: undefined + }; + + this.#client._maintenanceUpdate(update); 
+ }; +} + +export type MovingEndpointType = + | "auto" + | "internal-ip" + | "internal-fqdn" + | "external-ip" + | "external-fqdn" + | "none"; + +function isPrivateIP(ip: string): boolean { + const version = isIP(ip); + if (version === 4) { + const octets = ip.split(".").map(Number); + return ( + octets[0] === 10 || + (octets[0] === 172 && octets[1] >= 16 && octets[1] <= 31) || + (octets[0] === 192 && octets[1] === 168) + ); + } + if (version === 6) { + return ( + ip.startsWith("fc") || // Unique local + ip.startsWith("fd") || // Unique local + ip === "::1" || // Loopback + ip.startsWith("fe80") // Link-local unicast + ); + } + return false; +} + +async function determineEndpoint( + tlsEnabled: boolean, + host: string, + options: RedisClientOptions, +): Promise { + assert(options.maintEndpointType !== undefined); + if (options.maintEndpointType !== "auto") { + dbgMaintenance( + `Determine endpoint type: ${options.maintEndpointType}`, + ); + return options.maintEndpointType; + } + + const ip = isIP(host) ? host : (await lookup(host, { family: 0 })).address; + + const isPrivate = isPrivateIP(ip); + + let result: MovingEndpointType; + if (tlsEnabled) { + result = isPrivate ? "internal-fqdn" : "external-fqdn"; + } else { + result = isPrivate ? 
"internal-ip" : "external-ip"; + } + + dbgMaintenance(`Determine endpoint type: ${result}`); + return result; +} diff --git a/packages/client/lib/client/index.spec.ts b/packages/client/lib/client/index.spec.ts new file mode 100644 index 00000000000..d7ce00f38ae --- /dev/null +++ b/packages/client/lib/client/index.spec.ts @@ -0,0 +1,1031 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL, waitTillBeenCalled } from '../test-utils'; +import RedisClient, { RedisClientOptions, RedisClientType } from '.'; +import { AbortError, ClientClosedError, ClientOfflineError, ConnectionTimeoutError, DisconnectsClientError, ErrorReply, MultiErrorReply, TimeoutError, WatchError } from '../errors'; +import { defineScript } from '../lua-script'; +import { spy, stub } from 'sinon'; +import { once } from 'node:events'; +import { MATH_FUNCTION, loadMathFunction } from '../commands/FUNCTION_LOAD.spec'; +import { RESP_TYPES } from '../RESP/decoder'; +import { BlobStringReply, NumberReply } from '../RESP/types'; +import { SortedSetMember } from '../commands/generic-transformers'; +import { CommandParser } from './parser'; + +export const SQUARE_SCRIPT = defineScript({ + SCRIPT: + `local number = redis.call('GET', KEYS[1]) + return number * number`, + NUMBER_OF_KEYS: 1, + FIRST_KEY_INDEX: 0, + parseCommand(parser: CommandParser, key: string) { + parser.pushKey(key); + }, + transformReply: undefined as unknown as () => NumberReply +}); + +describe('Client', () => { + describe('initialization', () => { + describe('clientSideCache validation', () => { + const clientSideCacheConfig = { ttl: 0, maxEntries: 0 }; + + it('should throw error when clientSideCache is enabled with RESP 2', () => { + assert.throws( + () => new RedisClient({ + clientSideCache: clientSideCacheConfig, + RESP: 2, + }), + new Error('Client Side Caching is only supported with RESP3') + ); + }); + + it('should throw error when clientSideCache is enabled with RESP undefined', () => { + assert.throws( 
+ () => new RedisClient({ + clientSideCache: clientSideCacheConfig, + }), + new Error('Client Side Caching is only supported with RESP3') + ); + }); + + it('should not throw when clientSideCache is enabled with RESP 3', () => { + assert.doesNotThrow(() => + new RedisClient({ + clientSideCache: clientSideCacheConfig, + RESP: 3, + }) + ); + }); + }); + }); + + describe('parseURL', () => { + it('redis://user:secret@localhost:6379/0', async () => { + const result = RedisClient.parseURL('redis://user:secret@localhost:6379/0'); + const expected: RedisClientOptions = { + socket: { + host: 'localhost', + port: 6379, + tls: false + }, + username: 'user', + password: 'secret', + database: 0, + credentialsProvider: { + type: 'async-credentials-provider', + credentials: async () => ({ + password: 'secret', + username: 'user' + }) + } + }; + + // Compare everything except the credentials function + const { credentialsProvider: resultCredProvider, ...resultRest } = result; + const { credentialsProvider: expectedCredProvider, ...expectedRest } = expected; + + // Compare non-function properties + assert.deepEqual(resultRest, expectedRest); + + if (result?.credentialsProvider?.type === 'async-credentials-provider' + && expected?.credentialsProvider?.type === 'async-credentials-provider') { + + // Compare the actual output of the credentials functions + const resultCreds = await result.credentialsProvider?.credentials(); + const expectedCreds = await expected.credentialsProvider?.credentials(); + assert.deepEqual(resultCreds, expectedCreds); + } else { + assert.fail('Credentials provider type mismatch'); + } + + + }); + + it('rediss://user:secret@localhost:6379/0', async () => { + const result = RedisClient.parseURL('rediss://user:secret@localhost:6379/0'); + const expected: RedisClientOptions = { + socket: { + host: 'localhost', + port: 6379, + tls: true + }, + username: 'user', + password: 'secret', + database: 0, + credentialsProvider: { + credentials: async () => ({ + password: 
'secret', + username: 'user' + }), + type: 'async-credentials-provider' + } + }; + + // Compare everything except the credentials function + const { credentialsProvider: resultCredProvider, ...resultRest } = result; + const { credentialsProvider: expectedCredProvider, ...expectedRest } = expected; + + // Compare non-function properties + assert.deepEqual(resultRest, expectedRest); + assert.equal(resultCredProvider?.type, expectedCredProvider?.type); + + if (result?.credentialsProvider?.type === 'async-credentials-provider' && + expected?.credentialsProvider?.type === 'async-credentials-provider') { + + // Compare the actual output of the credentials functions + const resultCreds = await result.credentialsProvider.credentials(); + const expectedCreds = await expected.credentialsProvider.credentials(); + assert.deepEqual(resultCreds, expectedCreds); + + } else { + assert.fail('Credentials provider type mismatch'); + } + + }) + + it('Invalid protocol', () => { + assert.throws( + () => RedisClient.parseURL('redi://user:secret@localhost:6379/0'), + TypeError + ); + }); + + it('Invalid pathname', () => { + assert.throws( + () => RedisClient.parseURL('redis://user:secret@localhost:6379/NaN'), + TypeError + ); + }); + + it('redis://localhost', () => { + assert.deepEqual( + RedisClient.parseURL('redis://localhost'), + { + socket: { + host: 'localhost', + tls: false + } + } + ); + }); + + it('DB in URL should be parsed', async () => { + const client = RedisClient.create({ + url: 'redis://user:secret@localhost:6379/5' + }); + + assert.equal(client?.options?.database, 5); + }) + }); + + describe('parseOptions', () => { + it('should throw error if tls socket option is set to true and the url protocol is "redis:"', () => { + assert.throws( + () => RedisClient.parseOptions({ + url: 'redis://localhost', + socket: { + tls: true + } + }), + TypeError + ); + }); + it('should throw error if tls socket option is set to false and the url protocol is "rediss:"', () => { + assert.throws( 
+ () => RedisClient.parseOptions({ + url: 'rediss://localhost', + socket: { + tls: false + } + }), + TypeError + ); + }); + it('should not throw when tls socket option and url protocol matches"', () => { + assert.equal( + RedisClient.parseOptions({ + url: 'rediss://localhost', + socket: { + tls: true + } + }).socket.tls, + true + ); + assert.equal( + RedisClient.parseOptions({ + url: 'redis://localhost', + socket: { + tls: false + } + }).socket.tls, + false + ); + }); + }); + + describe('authentication', () => { + testUtils.testWithClient('Client should be authenticated', async client => { + assert.equal( + await client.ping(), + 'PONG' + ); + }, GLOBAL.SERVERS.PASSWORD); + + testUtils.testWithClient('Client can authenticate asynchronously ', async client => { + assert.equal( + await client.ping(), + 'PONG' + ); + }, GLOBAL.SERVERS.ASYNC_BASIC_AUTH); + + testUtils.testWithClient('Client can authenticate using the streaming credentials provider for initial token acquisition', + async client => { + assert.equal( + await client.ping(), + 'PONG' + ); + }, GLOBAL.SERVERS.STREAMING_AUTH); + + testUtils.testWithClient('should execute AUTH before SELECT', async client => { + assert.equal( + (await client.clientInfo()).db, + 2 + ); + }, { + ...GLOBAL.SERVERS.PASSWORD, + clientOptions: { + ...GLOBAL.SERVERS.PASSWORD.clientOptions, + database: 2 + }, + minimumDockerVersion: [6, 2] + }); + }); + + testUtils.testWithClient('should set connection name', async client => { + assert.equal( + await client.clientGetName(), + 'name' + ); + }, { + ...GLOBAL.SERVERS.OPEN, + clientOptions: { + name: 'name' + } + }); + + // TODO: fix & uncomment + // testUtils.testWithClient('connect, ready and end events', async client => { + // await Promise.all([ + // once(client, 'connect'), + // once(client, 'ready'), + // client.connect() + // ]); + + // await Promise.all([ + // once(client, 'end'), + // client.close() + // ]); + // }, { + // ...GLOBAL.SERVERS.OPEN, + // disableClientSetup: true + 
// }); + + describe('sendCommand', () => { + testUtils.testWithClient('PING', async client => { + assert.equal(await client.sendCommand(['PING']), 'PONG'); + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithClient('Unactivated AbortController should not abort', async client => { + await client.sendCommand(['PING'], { + abortSignal: new AbortController().signal + }); + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithClient('AbortError', async client => { + await blockSetImmediate(async () => { + await assert.rejects(client.sendCommand(['PING'], { + abortSignal: AbortSignal.timeout(5) + }), AbortError); + }) + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithClient('Timeout with custom timeout config', async client => { + await blockSetImmediate(async () => { + await assert.rejects(client.sendCommand(['PING'], { + timeout: 5 + }), TimeoutError); + }) + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithCluster('Timeout with custom timeout config (cluster)', async cluster => { + await blockSetImmediate(async () => { + await assert.rejects(cluster.sendCommand(undefined, true, ['PING'], { + timeout: 5 + }), TimeoutError); + }) + }, GLOBAL.CLUSTERS.OPEN); + + testUtils.testWithClientSentinel('Timeout with custom timeout config (sentinel)', async sentinel => { + await blockSetImmediate(async () => { + await assert.rejects(sentinel.sendCommand(true, ['PING'], { + timeout: 5 + }), TimeoutError); + }) + }, GLOBAL.CLUSTERS.OPEN); + + testUtils.testWithClient('Timeout with global timeout config', async client => { + await blockSetImmediate(async () => { + await assert.rejects(client.ping(), TimeoutError); + await assert.rejects(client.sendCommand(['PING']), TimeoutError); + }); + }, { + ...GLOBAL.SERVERS.OPEN, + clientOptions: { + commandOptions: { + timeout: 5 + } + } + }); + + testUtils.testWithCluster('Timeout with global timeout config (cluster)', async cluster => { + await blockSetImmediate(async () => { + await assert.rejects(cluster.HSET('key', 'foo', 'value'), TimeoutError); + 
await assert.rejects(cluster.sendCommand(undefined, true, ['PING']), TimeoutError); + }); + }, { + ...GLOBAL.CLUSTERS.OPEN, + clusterConfiguration: { + commandOptions: { + timeout: 5 + } + } + }); + + testUtils.testWithClientSentinel('Timeout with global timeout config (sentinel)', async sentinel => { + await blockSetImmediate(async () => { + await assert.rejects(sentinel.HSET('key', 'foo', 'value'), TimeoutError); + await assert.rejects(sentinel.sendCommand(true, ['PING']), TimeoutError); + }); + }, { + ...GLOBAL.SENTINEL.OPEN, + clientOptions: { + commandOptions: { + timeout: 5 + } + } + }); + + testUtils.testWithClient('undefined and null should not break the client', async client => { + await assert.rejects( + client.sendCommand([null as any, undefined as any]), + TypeError + ); + + assert.equal( + await client.ping(), + 'PONG' + ); + }, GLOBAL.SERVERS.OPEN); + }); + + describe('multi', () => { + testUtils.testWithClient('simple', async client => { + assert.deepEqual( + await client.multi() + .ping() + .set('key', 'value') + .get('key') + .exec(), + ['PONG', 'OK', 'value'] + ); + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithClient('should reject the whole chain on error', client => { + return assert.rejects( + client.multi() + .ping() + .addCommand(['INVALID COMMAND']) + .ping() + .exec() + ); + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithClient('should reject the whole chain upon client disconnect', async client => { + await client.close(); + + return assert.rejects( + client.multi() + .ping() + .set('key', 'value') + .get('key') + .exec(), + ClientClosedError + ); + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithClient('with script', async client => { + assert.deepEqual( + await client.multi() + .set('key', '2') + .square('key') + .exec(), + ['OK', 4] + ); + }, { + ...GLOBAL.SERVERS.OPEN, + clientOptions: { + scripts: { + square: SQUARE_SCRIPT + } + } + }); + + testUtils.testWithClient('WatchError', async client => { + await client.watch('key'); + + 
const duplicate = await client.duplicate().connect(); + try { + await client.set( + 'key', + '1' + ); + } finally { + duplicate.destroy(); + } + + await assert.rejects( + client.multi() + .decr('key') + .exec(), + WatchError + ); + }, GLOBAL.SERVERS.OPEN); + + describe('execAsPipeline', () => { + testUtils.testWithClient('exec(true)', async client => { + assert.deepEqual( + await client.multi() + .ping() + .exec(true), + ['PONG'] + ); + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithClient('empty execAsPipeline', async client => { + assert.deepEqual( + await client.multi().execAsPipeline(), + [] + ); + }, GLOBAL.SERVERS.OPEN); + }); + + testUtils.testWithClient('should remember selected db', async client => { + await client.multi() + .select(1) + .exec(); + await killClient(client); + assert.equal( + (await client.clientInfo()).db, + 1 + ); + }, { + ...GLOBAL.SERVERS.OPEN, + minimumDockerVersion: [6, 2] // CLIENT INFO + }); + + testUtils.testWithClient('should handle error replies (#2665)', async client => { + await assert.rejects( + client.multi() + .set('key', 'value') + .hGetAll('key') + .exec(), + err => { + assert.ok(err instanceof MultiErrorReply); + assert.equal(err.replies.length, 2); + assert.deepEqual(err.errorIndexes, [1]); + assert.ok(err.replies[1] instanceof ErrorReply); + // @ts-ignore TS2802 + assert.deepEqual([...err.errors()], [err.replies[1]]); + return true; + } + ); + }, GLOBAL.SERVERS.OPEN); + }); + + testUtils.testWithClient('scripts', async client => { + const [, reply] = await Promise.all([ + client.set('key', '2'), + client.square('key') + ]); + + assert.equal(reply, 4); + }, { + ...GLOBAL.SERVERS.OPEN, + clientOptions: { + scripts: { + square: SQUARE_SCRIPT + } + } + }); + + const module = { + echo: { + parseCommand(parser: CommandParser, message: string) { + parser.push('ECHO', message); + }, + transformReply: undefined as unknown as () => BlobStringReply + } + }; + + testUtils.testWithClient('modules', async client => { + assert.equal( 
+ await client.module.echo('message'), + 'message' + ); + }, { + ...GLOBAL.SERVERS.OPEN, + clientOptions: { + modules: { + module + } + } + }); + + testUtils.testWithClient('functions', async client => { + const [, , reply] = await Promise.all([ + loadMathFunction(client), + client.set('key', '2'), + client.math.square('key') + ]); + + assert.equal(reply, 4); + }, { + ...GLOBAL.SERVERS.OPEN, + minimumDockerVersion: [7, 0], + clientOptions: { + functions: { + math: MATH_FUNCTION.library + } + } + }); + + testUtils.testWithClient('duplicate should reuse command options', async client => { + const duplicate = client.duplicate(); + + await duplicate.connect(); + + try { + assert.deepEqual( + await duplicate.ping(), + Buffer.from('PONG') + ); + } finally { + duplicate.close(); + } + }, { + ...GLOBAL.SERVERS.OPEN, + clientOptions: { + commandOptions: { + typeMapping: { + [RESP_TYPES.SIMPLE_STRING]: Buffer + } + } + }, + disableClientSetup: true, + }); + + async function killClient( + client: RedisClientType, + errorClient: RedisClientType = client + ): Promise { + const onceErrorPromise = once(errorClient, 'error'); + await client.sendCommand(['QUIT']); + await Promise.all([ + onceErrorPromise, + assert.rejects(client.ping()) + ]); + } + + testUtils.testWithClient('should reconnect when socket disconnects', async client => { + await killClient(client); + await assert.doesNotReject(client.ping()); + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithClient('should remember selected db', async client => { + await client.select(1); + await killClient(client); + assert.equal( + (await client.clientInfo()).db, + 1 + ); + }, { + ...GLOBAL.SERVERS.OPEN, + minimumDockerVersion: [6, 2] // CLIENT INFO + }); + + testUtils.testWithClient('scanIterator', async client => { + const entries: Array = [], + keys = new Set(); + for (let i = 0; i < 100; i++) { + const key = i.toString(); + keys.add(key); + entries.push(key, ''); + } + + await client.mSet(entries); + + const results = new Set(); 
+ for await (const keys of client.scanIterator()) { + for (const key of keys) { + results.add(key); + } + } + + assert.deepEqual(keys, results); + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithClient('hScanIterator', async client => { + const hash: Record = {}; + for (let i = 0; i < 100; i++) { + hash[i.toString()] = i.toString(); + } + + await client.hSet('key', hash); + + const results: Record = {}; + for await (const entries of client.hScanIterator('key')) { + for (const { field, value } of entries) { + results[field] = value; + } + } + + assert.deepEqual(hash, results); + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithClient('hScanNoValuesIterator', async client => { + const hash: Record = {}; + const expectedFields: Array = []; + for (let i = 0; i < 100; i++) { + hash[i.toString()] = i.toString(); + expectedFields.push(i.toString()); + } + + await client.hSet('key', hash); + + const actualFields: Array = []; + for await (const fields of client.hScanNoValuesIterator('key')) { + for (const field of fields) { + actualFields.push(field); + } + } + + function sort(a: string, b: string) { + return Number(a) - Number(b); + } + + assert.deepEqual(actualFields.sort(sort), expectedFields); + }, { + ...GLOBAL.SERVERS.OPEN, + minimumDockerVersion: [7, 4] + }); + + testUtils.testWithClient('sScanIterator', async client => { + const members = new Set(); + for (let i = 0; i < 100; i++) { + members.add(i.toString()); + } + + await client.sAdd('key', Array.from(members)); + + const results = new Set(); + for await (const members of client.sScanIterator('key')) { + for (const member of members) { + results.add(member); + } + } + + assert.deepEqual(members, results); + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithClient('zScanIterator', async client => { + const members: Array = [], + map = new Map(); + for (let i = 0; i < 100; i++) { + const member = { + value: i.toString(), + score: 1 + }; + map.set(member.value, member.score); + members.push(member); + } + + await 
client.zAdd('key', members); + + const results = new Map(); + for await (const members of client.zScanIterator('key')) { + for (const { value, score } of members) { + results.set(value, score); + } + } + + assert.deepEqual(map, results); + }, GLOBAL.SERVERS.OPEN); + + describe('PubSub', () => { + testUtils.testWithClient('should be able to publish and subscribe to messages', async publisher => { + function assertStringListener(message: string, channel: string) { + assert.equal(typeof message, 'string'); + assert.equal(typeof channel, 'string'); + } + + function assertBufferListener(message: Buffer, channel: Buffer) { + assert.ok(message instanceof Buffer); + assert.ok(channel instanceof Buffer); + } + + const subscriber = await publisher.duplicate().connect(); + + try { + const channelListener1 = spy(assertBufferListener), + channelListener2 = spy(assertStringListener), + patternListener = spy(assertStringListener); + + await Promise.all([ + subscriber.subscribe('channel', channelListener1, true), + subscriber.subscribe('channel', channelListener2), + subscriber.pSubscribe('channel*', patternListener) + ]); + await Promise.all([ + waitTillBeenCalled(channelListener1), + waitTillBeenCalled(channelListener2), + waitTillBeenCalled(patternListener), + publisher.publish(Buffer.from('channel'), Buffer.from('message')) + ]); + assert.ok(channelListener1.calledOnceWithExactly(Buffer.from('message'), Buffer.from('channel'))); + assert.ok(channelListener2.calledOnceWithExactly('message', 'channel')); + assert.ok(patternListener.calledOnceWithExactly('message', 'channel')); + + await subscriber.unsubscribe('channel', channelListener1, true); + await Promise.all([ + waitTillBeenCalled(channelListener2), + waitTillBeenCalled(patternListener), + publisher.publish('channel', 'message') + ]); + assert.ok(channelListener1.calledOnce); + assert.ok(channelListener2.calledTwice); + assert.ok(channelListener2.secondCall.calledWithExactly('message', 'channel')); + 
assert.ok(patternListener.calledTwice); + assert.ok(patternListener.secondCall.calledWithExactly('message', 'channel')); + await subscriber.unsubscribe('channel'); + await Promise.all([ + waitTillBeenCalled(patternListener), + publisher.publish('channel', 'message') + ]); + assert.ok(channelListener1.calledOnce); + assert.ok(channelListener2.calledTwice); + assert.ok(patternListener.calledThrice); + assert.ok(patternListener.thirdCall.calledWithExactly('message', 'channel')); + + await subscriber.pUnsubscribe(); + await publisher.publish('channel', 'message'); + assert.ok(channelListener1.calledOnce); + assert.ok(channelListener2.calledTwice); + assert.ok(patternListener.calledThrice); + + // should be able to send commands when unsubsribed from all channels (see #1652) + await assert.doesNotReject(subscriber.ping()); + } finally { + subscriber.destroy(); + } + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithClient('should resubscribe', async publisher => { + const subscriber = await publisher.duplicate().connect(); + + try { + const channelListener = spy(); + await subscriber.subscribe('channel', channelListener); + + const patternListener = spy(); + await subscriber.pSubscribe('channe*', patternListener); + + await Promise.all([ + once(subscriber, 'error'), + publisher.clientKill({ + filter: 'SKIPME', + skipMe: true + }) + ]); + + await once(subscriber, 'ready'); + + await Promise.all([ + waitTillBeenCalled(channelListener), + waitTillBeenCalled(patternListener), + publisher.publish('channel', 'message') + ]); + } finally { + subscriber.destroy(); + } + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithClient('should not fail when message arrives right after subscribe', async publisher => { + const subscriber = await publisher.duplicate().connect(); + + try { + await assert.doesNotReject(Promise.all([ + subscriber.subscribe('channel', () => { + // noop + }), + publisher.publish('channel', 'message') + ])); + } finally { + subscriber.destroy(); + } + }, 
GLOBAL.SERVERS.OPEN); + + testUtils.testWithClient('should be able to quit in PubSub mode', async client => { + await client.subscribe('channel', () => { + // noop + }); + + await assert.doesNotReject(client.quit()); + + assert.equal(client.isOpen, false); + }, GLOBAL.SERVERS.OPEN); + }); + + testUtils.testWithClient('ConnectionTimeoutError', async client => { + const promise = assert.rejects(client.connect(), ConnectionTimeoutError), + start = process.hrtime.bigint(); + + while (process.hrtime.bigint() - start < 1_000_000) { + // block the event loop for 1ms, to make sure the connection will timeout + } + + await promise; + }, { + ...GLOBAL.SERVERS.OPEN, + clientOptions: { + socket: { + connectTimeout: 1 + } + }, + disableClientSetup: true + }); + + testUtils.testWithClient('client.quit', async client => { + await client.connect(); + + const pingPromise = client.ping(), + quitPromise = client.quit(); + assert.equal(client.isOpen, false); + + const [ping, quit] = await Promise.all([ + pingPromise, + quitPromise, + assert.rejects(client.ping(), ClientClosedError) + ]); + + assert.equal(ping, 'PONG'); + assert.equal(quit, 'OK'); + }, { + ...GLOBAL.SERVERS.OPEN, + disableClientSetup: true + }); + + testUtils.testWithClient('client.disconnect', async client => { + const pingPromise = client.ping(), + disconnectPromise = client.disconnect(); + assert.equal(client.isOpen, false); + await Promise.all([ + assert.rejects(pingPromise, DisconnectsClientError), + assert.doesNotReject(disconnectPromise), + assert.rejects(client.ping(), ClientClosedError) + ]); + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithClient('should be able to connect after disconnect (see #1801)', async client => { + await client.disconnect(); + await client.connect(); + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithClient('should be able to use ref and unref', client => { + client.unref(); + client.ref(); + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithClient('pingInterval', async client => { + 
assert.deepEqual( + await once(client, 'ping-interval'), + ['PONG'] + ); + }, { + ...GLOBAL.SERVERS.OPEN, + clientOptions: { + pingInterval: 1 + } + }); + + testUtils.testWithClient('should reject commands in connect phase when `disableOfflineQueue`', async client => { + const connectPromise = client.connect(); + await assert.rejects( + client.ping(), + ClientOfflineError + ); + await connectPromise; + await client.disconnect(); + }, { + ...GLOBAL.SERVERS.OPEN, + clientOptions: { + disableOfflineQueue: true + }, + disableClientSetup: true + }); + + describe('MONITOR', () => { + testUtils.testWithClient('should be able to monitor commands', async client => { + const duplicate = await client.duplicate().connect(), + listener = spy(message => assert.equal(typeof message, 'string')); + await duplicate.monitor(listener); + + try { + await Promise.all([ + waitTillBeenCalled(listener), + client.ping() + ]); + } finally { + duplicate.destroy(); + } + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithClient('should keep monitoring after reconnection', async client => { + const duplicate = await client.duplicate().connect(), + listener = spy(message => assert.equal(typeof message, 'string')); + await duplicate.monitor(listener); + + try { + await Promise.all([ + once(duplicate, 'error'), + client.clientKill({ + filter: 'SKIPME', + skipMe: true + }) + ]); + + await once(duplicate, 'ready'); + + await Promise.all([ + waitTillBeenCalled(listener), + client.ping() + ]); + } finally { + duplicate.destroy(); + } + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithClient('should be able to go back to "normal mode"', async client => { + await Promise.all([ + client.monitor(() => { }), + client.reset() + ]); + await assert.doesNotReject(client.ping()); + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithClient('should respect type mapping', async client => { + const duplicate = await client.duplicate().connect(), + listener = spy(message => assert.ok(message instanceof Buffer)); + await 
duplicate.withTypeMapping({ + [RESP_TYPES.SIMPLE_STRING]: Buffer + }).monitor(listener); + + try { + await Promise.all([ + waitTillBeenCalled(listener), + client.ping() + ]); + } finally { + duplicate.destroy(); + } + }, GLOBAL.SERVERS.OPEN); + }); +}); + +/** + * Executes the provided function in a context where setImmediate is stubbed to not do anything. + * This blocks setImmediate callbacks from executing + */ +async function blockSetImmediate(fn: () => Promise) { + let setImmediateStub: any; + + try { + setImmediateStub = stub(global, 'setImmediate'); + setImmediateStub.callsFake(() => { + //Dont call the callback, effectively blocking execution + }); + await fn(); + } finally { + if (setImmediateStub) { + setImmediateStub.restore(); + } + } +} diff --git a/packages/client/lib/client/index.ts b/packages/client/lib/client/index.ts new file mode 100644 index 00000000000..ea2102c37fd --- /dev/null +++ b/packages/client/lib/client/index.ts @@ -0,0 +1,1567 @@ +import COMMANDS from '../commands'; +import RedisSocket, { RedisSocketOptions } from './socket'; +import { BasicAuth, CredentialsError, CredentialsProvider, StreamingCredentialsProvider, UnableToObtainNewCredentialsError, Disposable } from '../authx'; +import RedisCommandsQueue, { CommandOptions } from './commands-queue'; +import { EventEmitter } from 'node:events'; +import { attachConfig, functionArgumentsPrefix, getTransformReply, scriptArgumentsPrefix } from '../commander'; +import { ClientClosedError, ClientOfflineError, DisconnectsClientError, WatchError } from '../errors'; +import { URL } from 'node:url'; +import { TcpSocketConnectOpts } from 'node:net'; +import { PUBSUB_TYPE, PubSubType, PubSubListener, PubSubTypeListeners, ChannelListeners } from './pub-sub'; +import { Command, CommandSignature, TypeMapping, CommanderConfig, RedisFunction, RedisFunctions, RedisModules, RedisScript, RedisScripts, ReplyUnion, RespVersions, RedisArgument, ReplyWithTypeMapping, SimpleStringReply, TransformReply, 
CommandArguments } from '../RESP/types'; +import RedisClientMultiCommand, { RedisClientMultiCommandType } from './multi-command'; +import { MULTI_MODE, MultiMode, RedisMultiQueuedCommand } from '../multi-command'; +import HELLO, { HelloOptions } from '../commands/HELLO'; +import { ScanOptions, ScanCommonOptions } from '../commands/SCAN'; +import { RedisLegacyClient, RedisLegacyClientType } from './legacy-mode'; +import { RedisPoolOptions, RedisClientPool } from './pool'; +import { RedisVariadicArgument, parseArgs, pushVariadicArguments } from '../commands/generic-transformers'; +import { BasicClientSideCache, ClientSideCacheConfig, ClientSideCacheProvider } from './cache'; +import { BasicCommandParser, CommandParser } from './parser'; +import SingleEntryCache from '../single-entry-cache'; +import { version } from '../../package.json' +import EnterpriseMaintenanceManager, { MaintenanceUpdate, MovingEndpointType } from './enterprise-maintenance-manager'; + +export interface RedisClientOptions< + M extends RedisModules = RedisModules, + F extends RedisFunctions = RedisFunctions, + S extends RedisScripts = RedisScripts, + RESP extends RespVersions = RespVersions, + TYPE_MAPPING extends TypeMapping = TypeMapping, + SocketOptions extends RedisSocketOptions = RedisSocketOptions +> extends CommanderConfig { + /** + * `redis[s]://[[username][:password]@][host][:port][/db-number]` + * See [`redis`](https://www.iana.org/assignments/uri-schemes/prov/redis) and [`rediss`](https://www.iana.org/assignments/uri-schemes/prov/rediss) IANA registration for more details + */ + url?: string; + /** + * Socket connection properties + */ + socket?: SocketOptions; + /** + * ACL username ([see ACL guide](https://redis.io/topics/acl)) + */ + username?: string; + /** + * ACL password or the old "--requirepass" password + */ + password?: string; + + /** + * Provides credentials for authentication. Can be set directly or will be created internally + * if username/password are provided instead. 
If both are supplied, this credentialsProvider + * takes precedence over username/password. + */ + credentialsProvider?: CredentialsProvider; + /** + * Client name ([see `CLIENT SETNAME`](https://redis.io/commands/client-setname)) + */ + name?: string; + /** + * Redis database number (see [`SELECT`](https://redis.io/commands/select) command) + */ + database?: number; + /** + * Maximum length of the client's internal command queue + */ + commandsQueueMaxLength?: number; + /** + * When `true`, commands are rejected when the client is reconnecting. + * When `false`, commands are queued for execution after reconnection. + */ + disableOfflineQueue?: boolean; + /** + * Connect in [`READONLY`](https://redis.io/commands/readonly) mode + */ + readonly?: boolean; + /** + * Send `PING` command at interval (in ms). + * Useful with Redis deployments that do not honor TCP Keep-Alive. + */ + pingInterval?: number; + /** + * Default command options to be applied to all commands executed through this client. + * + * These options can be overridden on a per-command basis when calling specific commands. + * + * @property {symbol} [chainId] - Identifier for chaining commands together + * @property {boolean} [asap] - When true, the command is executed as soon as possible + * @property {AbortSignal} [abortSignal] - AbortSignal to cancel the command + * @property {TypeMapping} [typeMapping] - Custom type mappings between RESP and JavaScript types + * + * @example Setting default command options + * ``` + * const client = createClient({ + * commandOptions: { + * asap: true, + * typeMapping: { + * // Custom type mapping configuration + * } + * } + * }); + * ``` + */ + commandOptions?: CommandOptions; + /** + * Client Side Caching configuration. + * + * Enables Redis Servers and Clients to work together to cache results from commands + * sent to a server. The server will notify the client when cached results are no longer valid. 
+ * + * Note: Client Side Caching is only supported with RESP3. + * + * @example Anonymous cache configuration + * ``` + * const client = createClient({ + * RESP: 3, + * clientSideCache: { + * ttl: 0, + * maxEntries: 0, + * evictPolicy: "LRU" + * } + * }); + * ``` + * + * @example Using a controllable cache + * ``` + * const cache = new BasicClientSideCache({ + * ttl: 0, + * maxEntries: 0, + * evictPolicy: "LRU" + * }); + * const client = createClient({ + * RESP: 3, + * clientSideCache: cache + * }); + * ``` + */ + clientSideCache?: ClientSideCacheProvider | ClientSideCacheConfig; + /** + * If set to true, disables sending client identifier (user-agent like message) to the redis server + */ + disableClientInfo?: boolean; + /** + * Tag to append to library name that is sent to the Redis server + */ + clientInfoTag?: string; + /** + * When set to true, client tracking is turned on and the client emits `invalidate` events when it receives invalidation messages from the redis server. + * Mutually exclusive with `clientSideCache` option. + */ + emitInvalidate?: boolean; + /** + * Controls how the client handles Redis Enterprise maintenance push notifications. + * + * - `disabled`: The feature is not used by the client. + * - `enabled`: The client attempts to enable the feature on the server. If the server responds with an error, the connection is interrupted. + * - `auto`: The client attempts to enable the feature on the server. If the server returns an error, the client disables the feature and continues. + * + * The default is `auto`. + */ + maintNotifications?: 'disabled' | 'enabled' | 'auto'; + /** + * Controls how the client requests the endpoint to reconnect to during a MOVING notification in Redis Enterprise maintenance. + * + * - `auto`: If the connection is opened to a name or IP address that is from/resolves to a reserved private IP range, request an internal endpoint (e.g., internal-ip), otherwise an external one. If TLS is enabled, then request a FQDN. 
+ * - `internal-ip`: Enforce requesting the internal IP. + * - `internal-fqdn`: Enforce requesting the internal FQDN. + * - `external-ip`: Enforce requesting the external IP address. + * - `external-fqdn`: Enforce requesting the external FQDN. + * - `none`: Used to request a null endpoint, which tells the client to reconnect based on its current config + + * The default is `auto`. + */ + maintEndpointType?: MovingEndpointType; + /** + * Specifies a more relaxed timeout (in milliseconds) for commands during a maintenance window. + * This helps minimize command timeouts during maintenance. Timeouts during maintenance period result + * in a `CommandTimeoutDuringMaintenance` error. + * + * The default is 10000 + */ + maintRelaxedCommandTimeout?: number; + /** + * Specifies a more relaxed timeout (in milliseconds) for the socket during a maintenance window. + * This helps minimize socket timeouts during maintenance. Timeouts during maintenance period result + * in a `SocketTimeoutDuringMaintenance` error. 
+ * + * The default is 10000 + */ + maintRelaxedSocketTimeout?: number; +}; + +export type WithCommands< + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping +> = { + [P in keyof typeof COMMANDS]: CommandSignature<(typeof COMMANDS)[P], RESP, TYPE_MAPPING>; + }; + +export type WithModules< + M extends RedisModules, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping +> = { + [P in keyof M]: { + [C in keyof M[P]]: CommandSignature; + }; + }; + +export type WithFunctions< + F extends RedisFunctions, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping +> = { + [L in keyof F]: { + [C in keyof F[L]]: CommandSignature; + }; + }; + +export type WithScripts< + S extends RedisScripts, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping +> = { + [P in keyof S]: CommandSignature; + }; + +export type RedisClientExtensions< + M extends RedisModules = {}, + F extends RedisFunctions = {}, + S extends RedisScripts = {}, + RESP extends RespVersions = 2, + TYPE_MAPPING extends TypeMapping = {} +> = ( + WithCommands & + WithModules & + WithFunctions & + WithScripts + ); + +export type RedisClientType< + M extends RedisModules = {}, + F extends RedisFunctions = {}, + S extends RedisScripts = {}, + RESP extends RespVersions = 2, + TYPE_MAPPING extends TypeMapping = {} +> = ( + RedisClient & + RedisClientExtensions + ); + +type ProxyClient = RedisClient; + +type NamespaceProxyClient = { _self: ProxyClient }; + +interface ScanIteratorOptions { + cursor?: RedisArgument; +} + +export type MonitorCallback = (reply: ReplyWithTypeMapping) => unknown; + +export default class RedisClient< + M extends RedisModules, + F extends RedisFunctions, + S extends RedisScripts, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping +> extends EventEmitter { + static #createCommand(command: Command, resp: RespVersions) { + const transformReply = getTransformReply(command, resp); + + return async function (this: ProxyClient, ...args: Array) { + const 
parser = new BasicCommandParser(); + command.parseCommand(parser, ...args); + + return this._self._executeCommand(command, parser, this._commandOptions, transformReply); + } + } + + static #createModuleCommand(command: Command, resp: RespVersions) { + const transformReply = getTransformReply(command, resp); + + return async function (this: NamespaceProxyClient, ...args: Array) { + const parser = new BasicCommandParser(); + command.parseCommand(parser, ...args); + + return this._self._executeCommand(command, parser, this._self._commandOptions, transformReply); + }; + } + + static #createFunctionCommand(name: string, fn: RedisFunction, resp: RespVersions) { + const prefix = functionArgumentsPrefix(name, fn); + const transformReply = getTransformReply(fn, resp); + + return async function (this: NamespaceProxyClient, ...args: Array) { + const parser = new BasicCommandParser(); + parser.push(...prefix); + fn.parseCommand(parser, ...args); + + return this._self._executeCommand(fn, parser, this._self._commandOptions, transformReply); + }; + } + + static #createScriptCommand(script: RedisScript, resp: RespVersions) { + const prefix = scriptArgumentsPrefix(script); + const transformReply = getTransformReply(script, resp); + + return async function (this: ProxyClient, ...args: Array) { + const parser = new BasicCommandParser(); + parser.push(...prefix); + script.parseCommand(parser, ...args) + + return this._executeScript(script, parser, this._commandOptions, transformReply); + } + } + + static #SingleEntryCache = new SingleEntryCache() + + static factory< + M extends RedisModules = {}, + F extends RedisFunctions = {}, + S extends RedisScripts = {}, + RESP extends RespVersions = 2 + >(config?: CommanderConfig) { + + + let Client = RedisClient.#SingleEntryCache.get(config); + if (!Client) { + Client = attachConfig({ + BaseClass: RedisClient, + commands: COMMANDS, + createCommand: RedisClient.#createCommand, + createModuleCommand: RedisClient.#createModuleCommand, + 
createFunctionCommand: RedisClient.#createFunctionCommand, + createScriptCommand: RedisClient.#createScriptCommand, + config + }); + + Client.prototype.Multi = RedisClientMultiCommand.extend(config); + + RedisClient.#SingleEntryCache.set(config, Client); + } + + return ( + options?: Omit, keyof Exclude> + ) => { + // returning a "proxy" to prevent the namespaces._self to leak between "proxies" + return Object.create(new Client(options)) as RedisClientType; + }; + } + + static create< + M extends RedisModules = {}, + F extends RedisFunctions = {}, + S extends RedisScripts = {}, + RESP extends RespVersions = 2, + TYPE_MAPPING extends TypeMapping = {} + >(this: void, options?: RedisClientOptions) { + return RedisClient.factory(options)(options); + } + + static parseOptions(options: O): O { + if (options?.url) { + const parsed = RedisClient.parseURL(options.url); + if (options.socket) { + if (options.socket.tls !== undefined && options.socket.tls !== parsed.socket.tls) { + throw new TypeError(`tls socket option is set to ${options.socket.tls} which is mismatch with protocol or the URL ${options.url} passed`) + } + parsed.socket = Object.assign(options.socket, parsed.socket); + } + + Object.assign(options, parsed); + } + return options; + } + + static parseURL(url: string): RedisClientOptions & { + socket: Exclude & { + tls: boolean + } + } { + // https://www.iana.org/assignments/uri-schemes/prov/redis + const { hostname, port, protocol, username, password, pathname } = new URL(url), + parsed: RedisClientOptions & { + socket: Exclude & { + tls: boolean + } + } = { + socket: { + host: hostname, + tls: false + } + }; + + if (protocol !== 'redis:' && protocol !== 'rediss:') { + throw new TypeError('Invalid protocol'); + } + + parsed.socket.tls = protocol === 'rediss:'; + + if (port) { + (parsed.socket as TcpSocketConnectOpts).port = Number(port); + } + + if (username) { + parsed.username = decodeURIComponent(username); + } + + if (password) { + parsed.password = 
decodeURIComponent(password); + } + + if (username || password) { + parsed.credentialsProvider = { + type: 'async-credentials-provider', + credentials: async () => ( + { + username: username ? decodeURIComponent(username) : undefined, + password: password ? decodeURIComponent(password) : undefined + }) + }; + } + + if (pathname.length > 1) { + const database = Number(pathname.substring(1)); + if (isNaN(database)) { + throw new TypeError('Invalid pathname'); + } + + parsed.database = database; + } + + return parsed; + } + + readonly #options: RedisClientOptions; + #socket: RedisSocket; + readonly #queue: RedisCommandsQueue; + #selectedDB = 0; + #monitorCallback?: MonitorCallback; + private _self = this; + private _commandOptions?: CommandOptions; + // flag used to annotate that the client + // was in a watch transaction when + // a topology change occured + #dirtyWatch?: string; + #watchEpoch?: number; + #clientSideCache?: ClientSideCacheProvider; + #credentialsSubscription: Disposable | null = null; + // Flag used to pause writing to the socket during maintenance windows. + // When true, prevents new commands from being written while waiting for: + // 1. New socket to be ready after maintenance redirect + // 2. In-flight commands on the old socket to complete + #paused = false; + + get clientSideCache() { + return this._self.#clientSideCache; + } + + get options(): RedisClientOptions { + return this._self.#options; + } + + get isOpen(): boolean { + return this._self.#socket.isOpen; + } + + get isReady(): boolean { + return this._self.#socket.isReady; + } + + get isPubSubActive() { + return this._self.#queue.isPubSubActive; + } + + get socketEpoch() { + return this._self.#socket.socketEpoch; + } + + get isWatching() { + return this._self.#watchEpoch !== undefined; + } + + /** + * Indicates whether the client's WATCH command has been invalidated by a topology change. + * When this returns true, any transaction using WATCH will fail with a WatchError. 
+ * @returns true if the watched keys have been modified, false otherwise + */ + get isDirtyWatch(): boolean { + return this._self.#dirtyWatch !== undefined + } + + /** + * Marks the client's WATCH command as invalidated due to a topology change. + * This will cause any subsequent EXEC in a transaction to fail with a WatchError. + * @param msg - The error message explaining why the WATCH is dirty + */ + setDirtyWatch(msg: string) { + this._self.#dirtyWatch = msg; + } + + constructor(options?: RedisClientOptions) { + super(); + this.#validateOptions(options) + this.#options = this.#initiateOptions(options); + this.#queue = this.#initiateQueue(); + this.#socket = this.#initiateSocket(); + + + if(this.#options.maintNotifications !== 'disabled') { + new EnterpriseMaintenanceManager(this.#queue, this, this.#options); + }; + + if (this.#options.clientSideCache) { + if (this.#options.clientSideCache instanceof ClientSideCacheProvider) { + this.#clientSideCache = this.#options.clientSideCache; + } else { + const cscConfig = this.#options.clientSideCache; + this.#clientSideCache = new BasicClientSideCache(cscConfig); + } + this.#queue.addPushHandler((push: Array): boolean => { + if (push[0].toString() !== 'invalidate') return false; + + if (push[1] !== null) { + for (const key of push[1]) { + this.#clientSideCache?.invalidate(key) + } + } else { + this.#clientSideCache?.invalidate(null) + } + + return true + }); + } else if (options?.emitInvalidate) { + this.#queue.addPushHandler((push: Array): boolean => { + if (push[0].toString() !== 'invalidate') return false; + + if (push[1] !== null) { + for (const key of push[1]) { + this.emit('invalidate', key); + } + } else { + this.emit('invalidate', null); + } + return true + }); + } + } + + #validateOptions(options?: RedisClientOptions) { + if (options?.clientSideCache && options?.RESP !== 3) { + throw new Error('Client Side Caching is only supported with RESP3'); + } + if (options?.emitInvalidate && options?.RESP !== 3) { + 
throw new Error('emitInvalidate is only supported with RESP3'); + } + if (options?.clientSideCache && options?.emitInvalidate) { + throw new Error('emitInvalidate is not supported (or necessary) when clientSideCache is enabled'); + } + if (options?.maintNotifications && options?.maintNotifications !== 'disabled' && options?.RESP !== 3) { + throw new Error('Graceful Maintenance is only supported with RESP3'); + } + } + + #initiateOptions(options: RedisClientOptions = {}): RedisClientOptions { + + // Convert username/password to credentialsProvider if no credentialsProvider is already in place + if (!options.credentialsProvider && (options.username || options.password)) { + + options.credentialsProvider = { + type: 'async-credentials-provider', + credentials: async () => ({ + username: options.username, + password: options.password + }) + }; + } + + if (options.database) { + this._self.#selectedDB = options.database; + } + + if (options.commandOptions) { + this._commandOptions = options.commandOptions; + } + + if(options.maintNotifications !== 'disabled') { + EnterpriseMaintenanceManager.setupDefaultMaintOptions(options); + } + + if (options.url) { + const parsedOptions = RedisClient.parseOptions(options); + if (parsedOptions?.database) { + this._self.#selectedDB = parsedOptions.database; + } + return parsedOptions; + } + + return options; + } + + #initiateQueue(): RedisCommandsQueue { + return new RedisCommandsQueue( + this.#options.RESP ?? 2, + this.#options.commandsQueueMaxLength, + (channel, listeners) => this.emit('sharded-channel-moved', channel, listeners) + ); + } + + /** + * @param credentials + */ + private reAuthenticate = async (credentials: BasicAuth) => { + // Re-authentication is not supported on RESP2 with PubSub active + if (!(this.isPubSubActive && !this.#options.RESP)) { + await this.sendCommand( + parseArgs(COMMANDS.AUTH, { + username: credentials.username, + password: credentials.password ?? 
'' + }) + ); + } + } + + #subscribeForStreamingCredentials(cp: StreamingCredentialsProvider): Promise<[BasicAuth, Disposable]> { + return cp.subscribe({ + onNext: credentials => { + this.reAuthenticate(credentials).catch(error => { + const errorMessage = error instanceof Error ? error.message : String(error); + cp.onReAuthenticationError(new CredentialsError(errorMessage)); + }); + + }, + onError: (e: Error) => { + const errorMessage = `Error from streaming credentials provider: ${e.message}`; + cp.onReAuthenticationError(new UnableToObtainNewCredentialsError(errorMessage)); + } + }); + } + + async #handshake(chainId: symbol, asap: boolean) { + const promises = []; + const commandsWithErrorHandlers = await this.#getHandshakeCommands(); + + if (asap) commandsWithErrorHandlers.reverse() + + for (const { cmd, errorHandler } of commandsWithErrorHandlers) { + promises.push( + this.#queue + .addCommand(cmd, { + chainId, + asap + }) + .catch(errorHandler) + ); + } + return promises; + } + + async #getHandshakeCommands(): Promise< + Array<{ cmd: CommandArguments } & { errorHandler?: (err: Error) => void }> + > { + const commands = []; + const cp = this.#options.credentialsProvider; + + if (this.#options.RESP) { + const hello: HelloOptions = {}; + + if (cp && cp.type === 'async-credentials-provider') { + const credentials = await cp.credentials(); + if (credentials.password) { + hello.AUTH = { + username: credentials.username ?? 'default', + password: credentials.password + }; + } + } + + if (cp && cp.type === 'streaming-credentials-provider') { + const [credentials, disposable] = + await this.#subscribeForStreamingCredentials(cp); + this.#credentialsSubscription = disposable; + + if (credentials.password) { + hello.AUTH = { + username: credentials.username ?? 
'default', + password: credentials.password + }; + } + } + + if (this.#options.name) { + hello.SETNAME = this.#options.name; + } + + commands.push({ cmd: parseArgs(HELLO, this.#options.RESP, hello) }); + } else { + if (cp && cp.type === 'async-credentials-provider') { + const credentials = await cp.credentials(); + + if (credentials.username || credentials.password) { + commands.push({ + cmd: parseArgs(COMMANDS.AUTH, { + username: credentials.username, + password: credentials.password ?? '' + }) + }); + } + } + + if (cp && cp.type === 'streaming-credentials-provider') { + const [credentials, disposable] = + await this.#subscribeForStreamingCredentials(cp); + this.#credentialsSubscription = disposable; + + if (credentials.username || credentials.password) { + commands.push({ + cmd: parseArgs(COMMANDS.AUTH, { + username: credentials.username, + password: credentials.password ?? '' + }) + }); + } + } + + if (this.#options.name) { + commands.push({ + cmd: parseArgs(COMMANDS.CLIENT_SETNAME, this.#options.name) + }); + } + } + + if (this.#selectedDB !== 0) { + commands.push({ cmd: ['SELECT', this.#selectedDB.toString()] }); + } + + if (this.#options.readonly) { + commands.push({ cmd: parseArgs(COMMANDS.READONLY) }); + } + + if (!this.#options.disableClientInfo) { + commands.push({ + cmd: ['CLIENT', 'SETINFO', 'LIB-VER', version], + errorHandler: () => { + // Client libraries are expected to pipeline this command + // after authentication on all connections and ignore failures + // since they could be connected to an older version that doesn't support them. + } + }); + + commands.push({ + cmd: [ + 'CLIENT', + 'SETINFO', + 'LIB-NAME', + this.#options.clientInfoTag + ? `node-redis(${this.#options.clientInfoTag})` + : 'node-redis' + ], + errorHandler: () => { + // Client libraries are expected to pipeline this command + // after authentication on all connections and ignore failures + // since they could be connected to an older version that doesn't support them. 
+ } + }); + } + + if (this.#clientSideCache) { + commands.push({cmd: this.#clientSideCache.trackingOn()}); + } + + if (this.#options?.emitInvalidate) { + commands.push({cmd: ['CLIENT', 'TRACKING', 'ON']}); + } + + const maintenanceHandshakeCmd = await EnterpriseMaintenanceManager.getHandshakeCommand(this.#options); + + if(maintenanceHandshakeCmd) { + commands.push(maintenanceHandshakeCmd); + }; + + return commands; + } + + #attachListeners(socket: RedisSocket) { + socket.on('data', chunk => { + try { + this.#queue.decoder.write(chunk); + } catch (err) { + this.#queue.resetDecoder(); + this.emit('error', err); + } + }) + .on('error', err => { + this.emit('error', err); + this.#clientSideCache?.onError(); + if (this.#socket.isOpen && !this.#options.disableOfflineQueue) { + this.#queue.flushWaitingForReply(err); + } else { + this.#queue.flushAll(err); + } + }) + .on('connect', () => this.emit('connect')) + .on('ready', () => { + this.emit('ready'); + this.#setPingTimer(); + this.#maybeScheduleWrite(); + }) + .on('reconnecting', () => this.emit('reconnecting')) + .on('drain', () => this.#maybeScheduleWrite()) + .on('end', () => this.emit('end')); + } + + #initiateSocket(): RedisSocket { + const socketInitiator = async () => { + const promises = [], + chainId = Symbol('Socket Initiator'); + + const resubscribePromise = this.#queue.resubscribe(chainId); + resubscribePromise?.catch(error => { + if (error.message && error.message.startsWith('MOVED')) { + this.emit('__MOVED', this._self.#queue.removeAllPubSubListeners()); + } + }); + if (resubscribePromise) { + promises.push(resubscribePromise); + } + + if (this.#monitorCallback) { + promises.push( + this.#queue.monitor( + this.#monitorCallback, + { + typeMapping: this._commandOptions?.typeMapping, + chainId, + asap: true + } + ) + ); + } + + promises.push(...(await this.#handshake(chainId, true))); + + if (promises.length) { + this.#write(); + return Promise.all(promises); + } + }; + + const socket = new 
RedisSocket(socketInitiator, this.#options.socket); + this.#attachListeners(socket); + return socket; + } + + #pingTimer?: NodeJS.Timeout; + + #setPingTimer(): void { + if (!this.#options.pingInterval || !this.#socket.isReady) return; + clearTimeout(this.#pingTimer); + + this.#pingTimer = setTimeout(() => { + if (!this.#socket.isReady) return; + + this.sendCommand(['PING']) + .then(reply => this.emit('ping-interval', reply)) + .catch(err => this.emit('error', err)) + .finally(() => this.#setPingTimer()); + }, this.#options.pingInterval); + } + + withCommandOptions< + OPTIONS extends CommandOptions, + TYPE_MAPPING extends TypeMapping + >(options: OPTIONS) { + const proxy = Object.create(this._self); + proxy._commandOptions = options; + return proxy as RedisClientType< + M, + F, + S, + RESP, + TYPE_MAPPING extends TypeMapping ? TYPE_MAPPING : {} + >; + } + + private _commandOptionsProxy< + K extends keyof CommandOptions, + V extends CommandOptions[K] + >( + key: K, + value: V + ) { + const proxy = Object.create(this._self); + proxy._commandOptions = Object.create(this._commandOptions ?? null); + proxy._commandOptions[key] = value; + return proxy as RedisClientType< + M, + F, + S, + RESP, + K extends 'typeMapping' ? V extends TypeMapping ? 
V : {} : TYPE_MAPPING + >; + } + + /** + * Override the `typeMapping` command option + */ + withTypeMapping(typeMapping: TYPE_MAPPING) { + return this._commandOptionsProxy('typeMapping', typeMapping); + } + + /** + * Override the `abortSignal` command option + */ + withAbortSignal(abortSignal: AbortSignal) { + return this._commandOptionsProxy('abortSignal', abortSignal); + } + + /** + * Override the `asap` command option to `true` + */ + asap() { + return this._commandOptionsProxy('asap', true); + } + + /** + * Create the "legacy" (v3/callback) interface + */ + legacy(): RedisLegacyClientType { + return new RedisLegacyClient( + this as unknown as RedisClientType + ) as RedisLegacyClientType; + } + + /** + * Create {@link RedisClientPool `RedisClientPool`} using this client as a prototype + */ + createPool(options?: Partial) { + return RedisClientPool.create( + this._self.#options, + options + ); + } + + duplicate< + _M extends RedisModules = M, + _F extends RedisFunctions = F, + _S extends RedisScripts = S, + _RESP extends RespVersions = RESP, + _TYPE_MAPPING extends TypeMapping = TYPE_MAPPING + >(overrides?: Partial>) { + return new (Object.getPrototypeOf(this).constructor)({ + ...this._self.#options, + commandOptions: this._commandOptions, + ...overrides + }) as RedisClientType<_M, _F, _S, _RESP, _TYPE_MAPPING>; + } + + async connect() { + await this._self.#socket.connect(); + return this as unknown as RedisClientType; + } + + /** + * @internal + */ + _ejectSocket(): RedisSocket { + const socket = this._self.#socket; + // @ts-ignore + this._self.#socket = null; + socket.removeAllListeners(); + return socket; + } + + /** + * @internal + */ + _insertSocket(socket: RedisSocket) { + if(this._self.#socket) { + this._self._ejectSocket().destroy(); + } + this._self.#socket = socket; + this._self.#attachListeners(this._self.#socket); + } + + /** + * @internal + */ + _maintenanceUpdate(update: MaintenanceUpdate) { + 
this._self.#socket.setMaintenanceTimeout(update.relaxedSocketTimeout); + this._self.#queue.setMaintenanceCommandTimeout(update.relaxedCommandTimeout); + } + + /** + * @internal + */ + _pause() { + this._self.#paused = true; + } + + /** + * @internal + */ + _unpause() { + this._self.#paused = false; + this._self.#maybeScheduleWrite(); + } + + /** + * @internal + */ + async _executeCommand( + command: Command, + parser: CommandParser, + commandOptions: CommandOptions | undefined, + transformReply: TransformReply | undefined, + ) { + const csc = this._self.#clientSideCache; + const defaultTypeMapping = this._self.#options.commandOptions === commandOptions; + + const fn = () => { return this.sendCommand(parser.redisArgs, commandOptions) }; + + if (csc && command.CACHEABLE && defaultTypeMapping) { + return await csc.handleCache(this._self, parser as BasicCommandParser, fn, transformReply, commandOptions?.typeMapping); + } else { + const reply = await fn(); + + if (transformReply) { + return transformReply(reply, parser.preserve, commandOptions?.typeMapping); + } + return reply; + } + } + + /** + * @internal + */ + async _executeScript( + script: RedisScript, + parser: CommandParser, + options: CommandOptions | undefined, + transformReply: TransformReply | undefined, + ) { + const args = parser.redisArgs as Array; + + let reply: ReplyUnion; + try { + reply = await this.sendCommand(args, options); + } catch (err) { + if (!(err as Error)?.message?.startsWith?.('NOSCRIPT')) throw err; + + args[0] = 'EVAL'; + args[1] = script.SCRIPT; + reply = await this.sendCommand(args, options); + } + + return transformReply ? 
+ transformReply(reply, parser.preserve, options?.typeMapping) : + reply; + } + + sendCommand( + args: ReadonlyArray, + options?: CommandOptions + ): Promise { + if (!this._self.#socket.isOpen) { + return Promise.reject(new ClientClosedError()); + } else if (!this._self.#socket.isReady && this._self.#options.disableOfflineQueue) { + return Promise.reject(new ClientOfflineError()); + } + + // Merge global options with provided options + const opts = { + ...this._self._commandOptions, + ...options + } + + const promise = this._self.#queue.addCommand(args, opts); + this._self.#scheduleWrite(); + return promise; + } + + async SELECT(db: number): Promise { + await this.sendCommand(['SELECT', db.toString()]); + this._self.#selectedDB = db; + } + + select = this.SELECT; + + #pubSubCommand(promise: Promise | undefined) { + if (promise === undefined) return Promise.resolve(); + + this.#scheduleWrite(); + return promise; + } + + SUBSCRIBE( + channels: string | Array, + listener: PubSubListener, + bufferMode?: T + ): Promise { + return this._self.#pubSubCommand( + this._self.#queue.subscribe( + PUBSUB_TYPE.CHANNELS, + channels, + listener, + bufferMode + ) + ); + } + + subscribe = this.SUBSCRIBE; + + UNSUBSCRIBE( + channels?: string | Array, + listener?: PubSubListener, + bufferMode?: T + ): Promise { + return this._self.#pubSubCommand( + this._self.#queue.unsubscribe( + PUBSUB_TYPE.CHANNELS, + channels, + listener, + bufferMode + ) + ); + } + + unsubscribe = this.UNSUBSCRIBE; + + PSUBSCRIBE( + patterns: string | Array, + listener: PubSubListener, + bufferMode?: T + ): Promise { + return this._self.#pubSubCommand( + this._self.#queue.subscribe( + PUBSUB_TYPE.PATTERNS, + patterns, + listener, + bufferMode + ) + ); + } + + pSubscribe = this.PSUBSCRIBE; + + PUNSUBSCRIBE( + patterns?: string | Array, + listener?: PubSubListener, + bufferMode?: T + ): Promise { + return this._self.#pubSubCommand( + this._self.#queue.unsubscribe( + PUBSUB_TYPE.PATTERNS, + patterns, + listener, + 
bufferMode + ) + ); + } + + pUnsubscribe = this.PUNSUBSCRIBE; + + SSUBSCRIBE( + channels: string | Array, + listener: PubSubListener, + bufferMode?: T + ): Promise { + return this._self.#pubSubCommand( + this._self.#queue.subscribe( + PUBSUB_TYPE.SHARDED, + channels, + listener, + bufferMode + ) + ); + } + + sSubscribe = this.SSUBSCRIBE; + + SUNSUBSCRIBE( + channels?: string | Array, + listener?: PubSubListener, + bufferMode?: T + ): Promise { + return this._self.#pubSubCommand( + this._self.#queue.unsubscribe( + PUBSUB_TYPE.SHARDED, + channels, + listener, + bufferMode + ) + ); + } + + sUnsubscribe = this.SUNSUBSCRIBE; + + async WATCH(key: RedisVariadicArgument) { + const reply = await this._self.sendCommand( + pushVariadicArguments(['WATCH'], key) + ); + this._self.#watchEpoch ??= this._self.socketEpoch; + return reply as unknown as ReplyWithTypeMapping, TYPE_MAPPING>; + } + + watch = this.WATCH; + + async UNWATCH() { + const reply = await this._self.sendCommand(['UNWATCH']); + this._self.#watchEpoch = undefined; + return reply as unknown as ReplyWithTypeMapping, TYPE_MAPPING>; + } + + unwatch = this.UNWATCH; + + getPubSubListeners(type: PubSubType) { + return this._self.#queue.getPubSubListeners(type); + } + + extendPubSubChannelListeners( + type: PubSubType, + channel: string, + listeners: ChannelListeners + ) { + return this._self.#pubSubCommand( + this._self.#queue.extendPubSubChannelListeners(type, channel, listeners) + ); + } + + extendPubSubListeners(type: PubSubType, listeners: PubSubTypeListeners) { + return this._self.#pubSubCommand( + this._self.#queue.extendPubSubListeners(type, listeners) + ); + } + + #write() { + if(this.#paused) { + return + } + this.#socket.write(this.#queue.commandsToWrite()); + } + + #scheduledWrite?: NodeJS.Immediate; + + #scheduleWrite() { + if (!this.#socket.isReady || this.#scheduledWrite) return; + + this.#scheduledWrite = setImmediate(() => { + this.#write(); + this.#scheduledWrite = undefined; + }); + } + + 
#maybeScheduleWrite() { + if (!this.#queue.isWaitingToWrite()) return; + + this.#scheduleWrite(); + } + + /** + * @internal + */ + async _executePipeline( + commands: Array, + selectedDB?: number + ) { + if (!this._self.#socket.isOpen) { + return Promise.reject(new ClientClosedError()); + } + + const chainId = Symbol('Pipeline Chain'), + promise = Promise.all( + commands.map(({ args }) => this._self.#queue.addCommand(args, { + chainId, + typeMapping: this._commandOptions?.typeMapping + })) + ); + this._self.#scheduleWrite(); + const result = await promise; + + if (selectedDB !== undefined) { + this._self.#selectedDB = selectedDB; + } + + return result; + } + + /** + * @internal + */ + async _executeMulti( + commands: Array, + selectedDB?: number + ) { + const dirtyWatch = this._self.#dirtyWatch; + this._self.#dirtyWatch = undefined; + const watchEpoch = this._self.#watchEpoch; + this._self.#watchEpoch = undefined; + + if (!this._self.#socket.isOpen) { + throw new ClientClosedError(); + } + + if (dirtyWatch) { + throw new WatchError(dirtyWatch); + } + + if (watchEpoch && watchEpoch !== this._self.socketEpoch) { + throw new WatchError('Client reconnected after WATCH'); + } + + const typeMapping = this._commandOptions?.typeMapping; + const chainId = Symbol('MULTI Chain'); + const promises = [ + this._self.#queue.addCommand(['MULTI'], { chainId }), + ]; + + for (const { args } of commands) { + promises.push( + this._self.#queue.addCommand(args, { + chainId, + typeMapping + }) + ); + } + + promises.push( + this._self.#queue.addCommand(['EXEC'], { chainId }) + ); + + this._self.#scheduleWrite(); + + const results = await Promise.all(promises), + execResult = results[results.length - 1]; + + if (execResult === null) { + throw new WatchError(); + } + + if (selectedDB !== undefined) { + this._self.#selectedDB = selectedDB; + } + + return execResult as Array; + } + + MULTI() { + type Multi = new (...args: ConstructorParameters) => RedisClientMultiCommandType; + return new 
((this as any).Multi as Multi)( + this._executeMulti.bind(this), + this._executePipeline.bind(this), + this._commandOptions?.typeMapping + ); + } + + multi = this.MULTI; + + async* scanIterator( + this: RedisClientType, + options?: ScanOptions & ScanIteratorOptions + ) { + let cursor = options?.cursor ?? '0'; + do { + const reply = await this.scan(cursor, options); + cursor = reply.cursor; + yield reply.keys; + } while (cursor !== '0'); + } + + async* hScanIterator( + this: RedisClientType, + key: RedisArgument, + options?: ScanCommonOptions & ScanIteratorOptions + ) { + let cursor = options?.cursor ?? '0'; + do { + const reply = await this.hScan(key, cursor, options); + cursor = reply.cursor; + yield reply.entries; + } while (cursor !== '0'); + } + + async* hScanValuesIterator( + this: RedisClientType, + key: RedisArgument, + options?: ScanCommonOptions & ScanIteratorOptions + ) { + let cursor = options?.cursor ?? '0'; + do { + const reply = await this.hScanNoValues(key, cursor, options); + cursor = reply.cursor; + yield reply.fields; + } while (cursor !== '0'); + } + + async* hScanNoValuesIterator( + this: RedisClientType, + key: RedisArgument, + options?: ScanCommonOptions & ScanIteratorOptions + ) { + let cursor = options?.cursor ?? '0'; + do { + const reply = await this.hScanNoValues(key, cursor, options); + cursor = reply.cursor; + yield reply.fields; + } while (cursor !== '0'); + } + + async* sScanIterator( + this: RedisClientType, + key: RedisArgument, + options?: ScanCommonOptions & ScanIteratorOptions + ) { + let cursor = options?.cursor ?? '0'; + do { + const reply = await this.sScan(key, cursor, options); + cursor = reply.cursor; + yield reply.members; + } while (cursor !== '0'); + } + + async* zScanIterator( + this: RedisClientType, + key: RedisArgument, + options?: ScanCommonOptions & ScanIteratorOptions + ) { + let cursor = options?.cursor ?? 
'0'; + do { + const reply = await this.zScan(key, cursor, options); + cursor = reply.cursor; + yield reply.members; + } while (cursor !== '0'); + } + + async MONITOR(callback: MonitorCallback) { + const promise = this._self.#queue.monitor(callback, { + typeMapping: this._commandOptions?.typeMapping + }); + this._self.#scheduleWrite(); + await promise; + this._self.#monitorCallback = callback; + } + + monitor = this.MONITOR; + + /** + * Reset the client to its default state (i.e. stop PubSub, stop monitoring, select default DB, etc.) + */ + async reset() { + const chainId = Symbol('Reset Chain'), + promises = [this._self.#queue.reset(chainId)], + selectedDB = this._self.#options?.database ?? 0; + this._self.#credentialsSubscription?.dispose(); + this._self.#credentialsSubscription = null; + promises.push(...(await this._self.#handshake(chainId, false))); + this._self.#scheduleWrite(); + await Promise.all(promises); + this._self.#selectedDB = selectedDB; + this._self.#monitorCallback = undefined; + this._self.#dirtyWatch = undefined; + this._self.#watchEpoch = undefined; + } + + /** + * If the client has state, reset it. + * An internal function to be used by wrapper class such as `RedisClientPool`. + * @internal + */ + resetIfDirty() { + let shouldReset = false; + if (this._self.#selectedDB !== (this._self.#options?.database ?? 
0)) { + console.warn('Returning a client with a different selected DB'); + shouldReset = true; + } + + if (this._self.#monitorCallback) { + console.warn('Returning a client with active MONITOR'); + shouldReset = true; + } + + if (this._self.#queue.isPubSubActive) { + console.warn('Returning a client with active PubSub'); + shouldReset = true; + } + + if (this._self.#dirtyWatch || this._self.#watchEpoch) { + console.warn('Returning a client with active WATCH'); + shouldReset = true; + } + + if (shouldReset) { + return this.reset(); + } + } + + /** + * @deprecated use .close instead + */ + QUIT(): Promise { + this._self.#credentialsSubscription?.dispose(); + this._self.#credentialsSubscription = null; + return this._self.#socket.quit(async () => { + clearTimeout(this._self.#pingTimer); + const quitPromise = this._self.#queue.addCommand(['QUIT']); + this._self.#scheduleWrite(); + return quitPromise; + }); + } + + quit = this.QUIT; + + /** + * @deprecated use .destroy instead + */ + disconnect() { + return Promise.resolve(this.destroy()); + } + + /** + * Close the client. Wait for pending commands. + */ + close() { + return new Promise(resolve => { + clearTimeout(this._self.#pingTimer); + this._self.#socket.close(); + this._self.#clientSideCache?.onClose(); + + if (this._self.#queue.isEmpty()) { + this._self.#socket.destroySocket(); + return resolve(); + } + + const maybeClose = () => { + if (!this._self.#queue.isEmpty()) return; + + this._self.#socket.off('data', maybeClose); + this._self.#socket.destroySocket(); + resolve(); + }; + this._self.#socket.on('data', maybeClose); + this._self.#credentialsSubscription?.dispose(); + this._self.#credentialsSubscription = null; + }); + } + + /** + * Destroy the client. Rejects all commands immediately. 
+ */ + destroy() { + clearTimeout(this._self.#pingTimer); + this._self.#queue.flushAll(new DisconnectsClientError()); + this._self.#socket.destroy(); + this._self.#clientSideCache?.onClose(); + this._self.#credentialsSubscription?.dispose(); + this._self.#credentialsSubscription = null; + } + + ref() { + this._self.#socket.ref(); + } + + unref() { + this._self.#socket.unref(); + } +} diff --git a/packages/client/lib/client/legacy-mode.spec.ts b/packages/client/lib/client/legacy-mode.spec.ts new file mode 100644 index 00000000000..306ea7f3353 --- /dev/null +++ b/packages/client/lib/client/legacy-mode.spec.ts @@ -0,0 +1,111 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import { promisify } from 'node:util'; +import { RedisLegacyClientType } from './legacy-mode'; +import { ErrorReply } from '../errors'; +import { RedisClientType } from '.'; +import { once } from 'node:events'; + +function testWithLegacyClient(title: string, fn: (legacy: RedisLegacyClientType, client: RedisClientType) => Promise) { + testUtils.testWithClient(title, client => fn(client.legacy(), client), GLOBAL.SERVERS.OPEN); +} + +describe('Legacy Mode', () => { + describe('client.sendCommand', () => { + testWithLegacyClient('resolve', async client => { + assert.equal( + await promisify(client.sendCommand).call(client, 'PING'), + 'PONG' + ); + }); + + testWithLegacyClient('reject', async client => { + await assert.rejects( + promisify(client.sendCommand).call(client, 'ERROR'), + ErrorReply + ); + }); + + testWithLegacyClient('reject without a callback', async (legacy, client) => { + legacy.sendCommand('ERROR'); + const [err] = await once(client, 'error'); + assert.ok(err instanceof ErrorReply); + }); + }); + + describe('hGetAll (TRANSFORM_LEGACY_REPLY)', () => { + testWithLegacyClient('resolve', async client => { + await promisify(client.hSet).call(client, 'key', 'field', 'value'); + assert.deepEqual( + await 
promisify(client.hGetAll).call(client, 'key'), + Object.create(null, { + field: { + value: 'value', + configurable: true, + enumerable: true + } + }) + ); + }); + + testWithLegacyClient('reject', async client => { + await assert.rejects( + promisify(client.hGetAll).call(client), + ErrorReply + ); + }); + }); + + describe('client.set', () => { + testWithLegacyClient('vardict', async client => { + assert.equal( + await promisify(client.set).call(client, 'a', 'b'), + 'OK' + ); + }); + + testWithLegacyClient('array', async client => { + assert.equal( + await promisify(client.set).call(client, ['a', 'b']), + 'OK' + ); + }); + + testWithLegacyClient('vardict & arrays', async client => { + assert.equal( + await promisify(client.set).call(client, ['a'], 'b', ['EX', 1]), + 'OK' + ); + }); + + testWithLegacyClient('reject without a callback', async (legacy, client) => { + legacy.set('ERROR'); + const [err] = await once(client, 'error'); + assert.ok(err instanceof ErrorReply); + }); + }); + + describe('client.multi', () => { + testWithLegacyClient('resolve', async client => { + const multi = client.multi().ping().sendCommand('PING'); + assert.deepEqual( + await promisify(multi.exec).call(multi), + ['PONG', 'PONG'] + ); + }); + + testWithLegacyClient('reject', async client => { + const multi = client.multi().sendCommand('ERROR'); + await assert.rejects( + promisify(multi.exec).call(multi), + ErrorReply + ); + }); + + testWithLegacyClient('reject without a callback', async (legacy, client) => { + legacy.multi().sendCommand('ERROR').exec(); + const [err] = await once(client, 'error'); + assert.ok(err instanceof ErrorReply); + }); + }); +}); diff --git a/packages/client/lib/client/legacy-mode.ts b/packages/client/lib/client/legacy-mode.ts new file mode 100644 index 00000000000..03e7cf4efe1 --- /dev/null +++ b/packages/client/lib/client/legacy-mode.ts @@ -0,0 +1,177 @@ +import { RedisModules, RedisFunctions, RedisScripts, RespVersions, Command, CommandArguments, ReplyUnion } from 
'../RESP/types'; +import { RedisClientType } from '.'; +import { getTransformReply } from '../commander'; +import { ErrorReply } from '../errors'; +import COMMANDS from '../commands'; +import RedisMultiCommand from '../multi-command'; + +type LegacyArgument = string | Buffer | number | Date; + +type LegacyArguments = Array; + +type LegacyCallback = (err: ErrorReply | null, reply?: ReplyUnion) => unknown + +type LegacyCommandArguments = LegacyArguments | [ + ...args: LegacyArguments, + callback: LegacyCallback +]; + +type WithCommands = { + [P in keyof typeof COMMANDS]: (...args: LegacyCommandArguments) => void; +}; + +export type RedisLegacyClientType = RedisLegacyClient & WithCommands; + +export class RedisLegacyClient { + static #transformArguments(redisArgs: CommandArguments, args: LegacyCommandArguments) { + let callback: LegacyCallback | undefined; + if (typeof args[args.length - 1] === 'function') { + callback = args.pop() as LegacyCallback; + } + + RedisLegacyClient.pushArguments(redisArgs, args as LegacyArguments); + + return callback; + } + + static pushArguments(redisArgs: CommandArguments, args: LegacyArguments) { + for (let i = 0; i < args.length; ++i) { + const arg = args[i]; + if (Array.isArray(arg)) { + RedisLegacyClient.pushArguments(redisArgs, arg); + } else { + redisArgs.push( + typeof arg === 'number' || arg instanceof Date ? + arg.toString() : + arg + ); + } + } + } + + static getTransformReply(command: Command, resp: RespVersions) { + return command.TRANSFORM_LEGACY_REPLY ? 
+ getTransformReply(command, resp) : + undefined; + } + + static #createCommand(name: string, command: Command, resp: RespVersions) { + const transformReply = RedisLegacyClient.getTransformReply(command, resp); + return function (this: RedisLegacyClient, ...args: LegacyCommandArguments) { + const redisArgs = [name], + callback = RedisLegacyClient.#transformArguments(redisArgs, args), + promise = this.#client.sendCommand(redisArgs); + + if (!callback) { + promise.catch(err => this.#client.emit('error', err)); + return; + } + + promise + .then(reply => callback(null, transformReply ? transformReply(reply) : reply)) + .catch(err => callback(err)); + }; + } + + #client: RedisClientType; + #Multi: ReturnType; + + constructor( + client: RedisClientType + ) { + this.#client = client; + + const RESP = client.options?.RESP ?? 2; + for (const [name, command] of Object.entries(COMMANDS)) { + // TODO: as any? + (this as any)[name] = RedisLegacyClient.#createCommand( + name, + command, + RESP + ); + } + + this.#Multi = LegacyMultiCommand.factory(RESP); + } + + sendCommand(...args: LegacyCommandArguments) { + const redisArgs: CommandArguments = [], + callback = RedisLegacyClient.#transformArguments(redisArgs, args), + promise = this.#client.sendCommand(redisArgs); + + if (!callback) { + promise.catch(err => this.#client.emit('error', err)); + return; + } + + promise + .then(reply => callback(null, reply)) + .catch(err => callback(err)); + } + + multi() { + return this.#Multi(this.#client); + } +} + +type MultiWithCommands = { + [P in keyof typeof COMMANDS]: (...args: LegacyCommandArguments) => RedisLegacyMultiType; +}; + +export type RedisLegacyMultiType = LegacyMultiCommand & MultiWithCommands; + +class LegacyMultiCommand { + static #createCommand(name: string, command: Command, resp: RespVersions) { + const transformReply = RedisLegacyClient.getTransformReply(command, resp); + return function (this: LegacyMultiCommand, ...args: LegacyArguments) { + const redisArgs = [name]; + 
RedisLegacyClient.pushArguments(redisArgs, args); + this.#multi.addCommand(redisArgs, transformReply); + return this; + }; + } + + static factory(resp: RespVersions) { + const Multi = class extends LegacyMultiCommand {}; + + for (const [name, command] of Object.entries(COMMANDS)) { + // TODO: as any? + (Multi as any).prototype[name] = LegacyMultiCommand.#createCommand( + name, + command, + resp + ); + } + + return (client: RedisClientType) => { + return new Multi(client) as unknown as RedisLegacyMultiType; + }; + } + + readonly #multi = new RedisMultiCommand(); + readonly #client: RedisClientType; + + constructor(client: RedisClientType) { + this.#client = client; + } + + sendCommand(...args: LegacyArguments) { + const redisArgs: CommandArguments = []; + RedisLegacyClient.pushArguments(redisArgs, args); + this.#multi.addCommand(redisArgs); + return this; + } + + exec(cb?: (err: ErrorReply | null, replies?: Array) => unknown) { + const promise = this.#client._executeMulti(this.#multi.queue); + + if (!cb) { + promise.catch(err => this.#client.emit('error', err)); + return; + } + + promise + .then(results => cb(null, this.#multi.transformReplies(results))) + .catch(err => cb?.(err)); + } +} diff --git a/packages/client/lib/client/linked-list.spec.ts b/packages/client/lib/client/linked-list.spec.ts new file mode 100644 index 00000000000..88379364054 --- /dev/null +++ b/packages/client/lib/client/linked-list.spec.ts @@ -0,0 +1,229 @@ +import { + SinglyLinkedList, + DoublyLinkedList, + EmptyAwareSinglyLinkedList, +} from "./linked-list"; +import { equal, deepEqual } from "assert/strict"; + +describe("DoublyLinkedList", () => { + const list = new DoublyLinkedList(); + + it("should start empty", () => { + equal(list.length, 0); + equal(list.head, undefined); + equal(list.tail, undefined); + deepEqual(Array.from(list), []); + }); + + it("shift empty", () => { + equal(list.shift(), undefined); + equal(list.length, 0); + deepEqual(Array.from(list), []); + }); + + it("push 1", 
() => { + list.push(1); + equal(list.length, 1); + deepEqual(Array.from(list), [1]); + }); + + it("push 2", () => { + list.push(2); + equal(list.length, 2); + deepEqual(Array.from(list), [1, 2]); + }); + + it("unshift 0", () => { + list.unshift(0); + equal(list.length, 3); + deepEqual(Array.from(list), [0, 1, 2]); + }); + + it("remove middle node", () => { + list.remove(list.head!.next!); + equal(list.length, 2); + deepEqual(Array.from(list), [0, 2]); + }); + + it("remove head", () => { + list.remove(list.head!); + equal(list.length, 1); + deepEqual(Array.from(list), [2]); + }); + + it("remove tail", () => { + list.remove(list.tail!); + equal(list.length, 0); + deepEqual(Array.from(list), []); + }); + + it("unshift empty queue", () => { + list.unshift(0); + equal(list.length, 1); + deepEqual(Array.from(list), [0]); + }); + + it("push 1", () => { + list.push(1); + equal(list.length, 2); + deepEqual(Array.from(list), [0, 1]); + }); + + it("shift", () => { + equal(list.shift(), 0); + equal(list.length, 1); + deepEqual(Array.from(list), [1]); + }); + + it("shift last element", () => { + equal(list.shift(), 1); + equal(list.length, 0); + deepEqual(Array.from(list), []); + }); + + it("provide forEach for nodes", () => { + list.reset(); + list.push(1); + list.push(2); + list.push(3); + let count = 0; + for(const _ of list.nodes()) { + count++; + } + equal(count, 3); + for(const _ of list.nodes()) { + count++; + } + equal(count, 6); + }); + + it("should handle remove on empty list", () => { + list.reset(); + const node = list.push(1); + list.remove(node); + equal(list.length, 0); + deepEqual(Array.from(list), []); + list.remove(node); + equal(list.length, 0); + deepEqual(Array.from(list), []); + }); + + + it("should safely remove nodes while iterating", () => { + list.reset(); + list.push(1); + list.push(2); + list.push(3); + list.push(4); + list.push(5); + + const visited: number[] = []; + for (const node of list.nodes()) { + visited.push(node.value); + if (node.value % 2 
=== 0) { + list.remove(node); + } + } + deepEqual(visited, [1, 2, 3, 4, 5]); + equal(list.length, 3); + deepEqual(Array.from(list), [1, 3, 5]); + }); +}); + +describe("SinglyLinkedList", () => { + const list = new SinglyLinkedList(); + + it("should start empty", () => { + equal(list.length, 0); + equal(list.head, undefined); + equal(list.tail, undefined); + deepEqual(Array.from(list), []); + }); + + it("shift empty", () => { + equal(list.shift(), undefined); + equal(list.length, 0); + deepEqual(Array.from(list), []); + }); + + it("push 1", () => { + list.push(1); + equal(list.length, 1); + deepEqual(Array.from(list), [1]); + }); + + it("push 2", () => { + list.push(2); + equal(list.length, 2); + deepEqual(Array.from(list), [1, 2]); + }); + + it("push 3", () => { + list.push(3); + equal(list.length, 3); + deepEqual(Array.from(list), [1, 2, 3]); + }); + + it("shift 1", () => { + equal(list.shift(), 1); + equal(list.length, 2); + deepEqual(Array.from(list), [2, 3]); + }); + + it("shift 2", () => { + equal(list.shift(), 2); + equal(list.length, 1); + deepEqual(Array.from(list), [3]); + }); + + it("shift 3", () => { + equal(list.shift(), 3); + equal(list.length, 0); + deepEqual(Array.from(list), []); + }); + + it("should be empty", () => { + equal(list.length, 0); + equal(list.head, undefined); + equal(list.tail, undefined); + }); +}); + +describe("EmptyAwareSinglyLinkedList", () => { + it("should emit 'empty' event when reset", () => { + const list = new EmptyAwareSinglyLinkedList(); + let count = 0; + list.events.on("empty", () => count++); + list.push(1); + list.reset(); + equal(count, 1); + list.reset(); + equal(count, 1); + }); + + it("should emit 'empty' event when shift makes the list empty", () => { + const list = new EmptyAwareSinglyLinkedList(); + let count = 0; + list.events.on("empty", () => count++); + list.push(1); + list.push(2); + list.shift(); + equal(count, 0); + list.shift(); + equal(count, 1); + list.shift(); + equal(count, 1); + }); + + it("should 
emit 'empty' event when remove makes the list empty", () => { + const list = new EmptyAwareSinglyLinkedList(); + let count = 0; + list.events.on("empty", () => count++); + const node1 = list.push(1); + const node2 = list.push(2); + list.remove(node1, undefined); + equal(count, 0); + list.remove(node2, undefined); + equal(count, 1); + }); +}); diff --git a/packages/client/lib/client/linked-list.ts b/packages/client/lib/client/linked-list.ts new file mode 100644 index 00000000000..910319268a4 --- /dev/null +++ b/packages/client/lib/client/linked-list.ts @@ -0,0 +1,245 @@ +import EventEmitter from "events"; + +export interface DoublyLinkedNode { + value: T; + previous: DoublyLinkedNode | undefined; + next: DoublyLinkedNode | undefined; +} + +export class DoublyLinkedList { + #length = 0; + + get length() { + return this.#length; + } + + #head?: DoublyLinkedNode; + + get head() { + return this.#head; + } + + #tail?: DoublyLinkedNode; + + get tail() { + return this.#tail; + } + + push(value: T) { + ++this.#length; + + if (this.#tail === undefined) { + return this.#head = this.#tail = { + previous: this.#head, + next: undefined, + value + }; + } + + return this.#tail = this.#tail.next = { + previous: this.#tail, + next: undefined, + value + }; + } + + unshift(value: T) { + ++this.#length; + + if (this.#head === undefined) { + return this.#head = this.#tail = { + previous: undefined, + next: undefined, + value + }; + } + + return this.#head = this.#head.previous = { + previous: undefined, + next: this.#head, + value + }; + } + + add(value: T, prepend = false) { + return prepend ? 
+ this.unshift(value) : + this.push(value); + } + + shift() { + if (this.#head === undefined) return undefined; + + --this.#length; + const node = this.#head; + if (node.next) { + node.next.previous = undefined; + this.#head = node.next; + node.next = undefined; + } else { + this.#head = this.#tail = undefined; + } + return node.value; + } + + remove(node: DoublyLinkedNode) { + if (this.#length === 0) return; + --this.#length; + + if (this.#tail === node) { + this.#tail = node.previous; + } + if (this.#head === node) { + this.#head = node.next; + } else { + if (node.previous) { + node.previous.next = node.next; + } + if (node.next) { + node.next.previous = node.previous; + } + } + node.previous = undefined; + node.next = undefined; + } + + reset() { + this.#length = 0; + this.#head = this.#tail = undefined; + } + + *[Symbol.iterator]() { + let node = this.#head; + while (node !== undefined) { + yield node.value; + node = node.next; + } + } + + *nodes() { + let node = this.#head; + while(node) { + const next = node.next + yield node; + node = next; + } + } +} + +export interface SinglyLinkedNode { + value: T; + next: SinglyLinkedNode | undefined; + removed: boolean; +} + +export class SinglyLinkedList { + #length = 0; + + get length() { + return this.#length; + } + + #head?: SinglyLinkedNode; + + get head() { + return this.#head; + } + + #tail?: SinglyLinkedNode; + + get tail() { + return this.#tail; + } + + push(value: T) { + ++this.#length; + + const node = { + value, + next: undefined, + removed: false + }; + + if (this.#head === undefined) { + return this.#head = this.#tail = node; + } + + return this.#tail!.next = this.#tail = node; + } + + remove(node: SinglyLinkedNode, parent: SinglyLinkedNode | undefined) { + if (node.removed) { + throw new Error("node already removed"); + } + --this.#length; + + if (this.#head === node) { + if (this.#tail === node) { + this.#head = this.#tail = undefined; + } else { + this.#head = node.next; + } + } else if (this.#tail === 
node) { + this.#tail = parent; + parent!.next = undefined; + } else { + parent!.next = node.next; + } + + node.removed = true; + } + + shift() { + if (this.#head === undefined) return undefined; + + const node = this.#head; + if (--this.#length === 0) { + this.#head = this.#tail = undefined; + } else { + this.#head = node.next; + } + + node.removed = true; + return node.value; + } + + reset() { + this.#length = 0; + this.#head = this.#tail = undefined; + } + + *[Symbol.iterator]() { + let node = this.#head; + while (node !== undefined) { + yield node.value; + node = node.next; + } + } +} + +export class EmptyAwareSinglyLinkedList extends SinglyLinkedList { + readonly events = new EventEmitter(); + reset() { + const old = this.length; + super.reset(); + if(old !== this.length && this.length === 0) { + this.events.emit('empty'); + } + } + shift(): T | undefined { + const old = this.length; + const ret = super.shift(); + if(old !== this.length && this.length === 0) { + this.events.emit('empty'); + } + return ret; + } + remove(node: SinglyLinkedNode, parent: SinglyLinkedNode | undefined) { + const old = this.length; + super.remove(node, parent); + if(old !== this.length && this.length === 0) { + this.events.emit('empty'); + } + } + +} diff --git a/packages/client/lib/client/multi-command.ts b/packages/client/lib/client/multi-command.ts new file mode 100644 index 00000000000..fdb958b8033 --- /dev/null +++ b/packages/client/lib/client/multi-command.ts @@ -0,0 +1,252 @@ +import COMMANDS from '../commands'; +import RedisMultiCommand, { MULTI_MODE, MULTI_REPLY, MultiMode, MultiReply, MultiReplyType, RedisMultiQueuedCommand } from '../multi-command'; +import { ReplyWithTypeMapping, CommandReply, Command, CommandArguments, CommanderConfig, RedisFunctions, RedisModules, RedisScripts, RespVersions, TransformReply, RedisScript, RedisFunction, TypeMapping } from '../RESP/types'; +import { attachConfig, functionArgumentsPrefix, getTransformReply } from '../commander'; +import { 
BasicCommandParser } from './parser'; +import { Tail } from '../commands/generic-transformers'; + +type CommandSignature< + REPLIES extends Array, + C extends Command, + M extends RedisModules, + F extends RedisFunctions, + S extends RedisScripts, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping +> = (...args: Tail>) => InternalRedisClientMultiCommandType< + [...REPLIES, ReplyWithTypeMapping, TYPE_MAPPING>], + M, + F, + S, + RESP, + TYPE_MAPPING +>; + +type WithCommands< + REPLIES extends Array, + M extends RedisModules, + F extends RedisFunctions, + S extends RedisScripts, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping +> = { + [P in keyof typeof COMMANDS]: CommandSignature; +}; + +type WithModules< + REPLIES extends Array, + M extends RedisModules, + F extends RedisFunctions, + S extends RedisScripts, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping +> = { + [P in keyof M]: { + [C in keyof M[P]]: CommandSignature; + }; +}; + +type WithFunctions< + REPLIES extends Array, + M extends RedisModules, + F extends RedisFunctions, + S extends RedisScripts, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping +> = { + [L in keyof F]: { + [C in keyof F[L]]: CommandSignature; + }; +}; + +type WithScripts< + REPLIES extends Array, + M extends RedisModules, + F extends RedisFunctions, + S extends RedisScripts, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping +> = { + [P in keyof S]: CommandSignature; +}; + +type InternalRedisClientMultiCommandType< + REPLIES extends Array, + M extends RedisModules, + F extends RedisFunctions, + S extends RedisScripts, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping +> = ( + RedisClientMultiCommand & + WithCommands & + WithModules & + WithFunctions & + WithScripts +); + +type TypedOrAny = + [Flag] extends [MULTI_MODE['TYPED']] ? 
T : any; + +export type RedisClientMultiCommandType< + isTyped extends MultiMode, + REPLIES extends Array, + M extends RedisModules, + F extends RedisFunctions, + S extends RedisScripts, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping +> = TypedOrAny>; + +type ExecuteMulti = (commands: Array, selectedDB?: number) => Promise>; + +export default class RedisClientMultiCommand { + static #createCommand(command: Command, resp: RespVersions) { + const transformReply = getTransformReply(command, resp); + + return function (this: RedisClientMultiCommand, ...args: Array) { + const parser = new BasicCommandParser(); + command.parseCommand(parser, ...args); + + const redisArgs: CommandArguments = parser.redisArgs; + redisArgs.preserve = parser.preserve; + + return this.addCommand( + redisArgs, + transformReply + ); + }; + } + + static #createModuleCommand(command: Command, resp: RespVersions) { + const transformReply = getTransformReply(command, resp); + + return function (this: { _self: RedisClientMultiCommand }, ...args: Array) { + const parser = new BasicCommandParser(); + command.parseCommand(parser, ...args); + + const redisArgs: CommandArguments = parser.redisArgs; + redisArgs.preserve = parser.preserve; + + return this._self.addCommand( + redisArgs, + transformReply + ); + }; + } + + static #createFunctionCommand(name: string, fn: RedisFunction, resp: RespVersions) { + const prefix = functionArgumentsPrefix(name, fn); + const transformReply = getTransformReply(fn, resp); + + return function (this: { _self: RedisClientMultiCommand }, ...args: Array) { + const parser = new BasicCommandParser(); + parser.push(...prefix); + fn.parseCommand(parser, ...args); + + const redisArgs: CommandArguments = parser.redisArgs; + redisArgs.preserve = parser.preserve; + + return this._self.addCommand( + redisArgs, + transformReply + ); + }; + } + + static #createScriptCommand(script: RedisScript, resp: RespVersions) { + const transformReply = getTransformReply(script, 
resp); + + return function (this: RedisClientMultiCommand, ...args: Array) { + const parser = new BasicCommandParser(); + script.parseCommand(parser, ...args); + + const redisArgs: CommandArguments = parser.redisArgs; + redisArgs.preserve = parser.preserve; + + return this.#addScript( + script, + redisArgs, + transformReply + ); + }; + } + + static extend< + M extends RedisModules = Record, + F extends RedisFunctions = Record, + S extends RedisScripts = Record, + RESP extends RespVersions = 2 + >(config?: CommanderConfig) { + return attachConfig({ + BaseClass: RedisClientMultiCommand, + commands: COMMANDS, + createCommand: RedisClientMultiCommand.#createCommand, + createModuleCommand: RedisClientMultiCommand.#createModuleCommand, + createFunctionCommand: RedisClientMultiCommand.#createFunctionCommand, + createScriptCommand: RedisClientMultiCommand.#createScriptCommand, + config + }); + } + + readonly #multi: RedisMultiCommand + readonly #executeMulti: ExecuteMulti; + readonly #executePipeline: ExecuteMulti; + + #selectedDB?: number; + + constructor(executeMulti: ExecuteMulti, executePipeline: ExecuteMulti, typeMapping?: TypeMapping) { + this.#multi = new RedisMultiCommand(typeMapping); + this.#executeMulti = executeMulti; + this.#executePipeline = executePipeline; + } + + SELECT(db: number, transformReply?: TransformReply): this { + this.#selectedDB = db; + this.#multi.addCommand(['SELECT', db.toString()], transformReply); + return this; + } + + select = this.SELECT; + + addCommand(args: CommandArguments, transformReply?: TransformReply) { + this.#multi.addCommand(args, transformReply); + return this; + } + + #addScript( + script: RedisScript, + args: CommandArguments, + transformReply?: TransformReply + ) { + this.#multi.addScript(script, args, transformReply); + + return this; + } + + async exec(execAsPipeline = false): Promise> { + if (execAsPipeline) return this.execAsPipeline(); + + return this.#multi.transformReplies( + await 
this.#executeMulti(this.#multi.queue, this.#selectedDB) + ) as MultiReplyType; + } + + EXEC = this.exec; + + execTyped(execAsPipeline = false) { + return this.exec(execAsPipeline); + } + + async execAsPipeline(): Promise> { + if (this.#multi.queue.length === 0) return [] as MultiReplyType; + + return this.#multi.transformReplies( + await this.#executePipeline(this.#multi.queue, this.#selectedDB) + ) as MultiReplyType; + } + + execAsPipelineTyped() { + return this.execAsPipeline(); + } +} diff --git a/packages/client/lib/client/parser.ts b/packages/client/lib/client/parser.ts new file mode 100644 index 00000000000..3e820230429 --- /dev/null +++ b/packages/client/lib/client/parser.ts @@ -0,0 +1,103 @@ +import { RedisArgument } from '../RESP/types'; +import { RedisVariadicArgument } from '../commands/generic-transformers'; + +export interface CommandParser { + redisArgs: ReadonlyArray; + keys: ReadonlyArray; + firstKey: RedisArgument | undefined; + preserve: unknown; + + push: (...arg: Array) => unknown; + pushVariadic: (vals: RedisVariadicArgument) => unknown; + pushVariadicWithLength: (vals: RedisVariadicArgument) => unknown; + pushVariadicNumber: (vals: number | Array) => unknown; + pushKey: (key: RedisArgument) => unknown; // normal push of keys + pushKeys: (keys: RedisVariadicArgument) => unknown; // push multiple keys at a time + pushKeysLength: (keys: RedisVariadicArgument) => unknown; // push multiple keys at a time +} + +export class BasicCommandParser implements CommandParser { + #redisArgs: Array = []; + #keys: Array = []; + preserve: unknown; + + get redisArgs() { + return this.#redisArgs; + } + + get keys() { + return this.#keys; + } + + get firstKey() { + return this.#keys[0]; + } + + get cacheKey() { + const tmp = new Array(this.#redisArgs.length*2); + + for (let i = 0; i < this.#redisArgs.length; i++) { + tmp[i] = this.#redisArgs[i].length; + tmp[i+this.#redisArgs.length] = this.#redisArgs[i]; + } + + return tmp.join('_'); + } + + push(...arg: Array) { 
+ this.#redisArgs.push(...arg); + }; + + pushVariadic(vals: RedisVariadicArgument) { + if (Array.isArray(vals)) { + for (const val of vals) { + this.push(val); + } + } else { + this.push(vals); + } + } + + pushVariadicWithLength(vals: RedisVariadicArgument) { + if (Array.isArray(vals)) { + this.#redisArgs.push(vals.length.toString()); + } else { + this.#redisArgs.push('1'); + } + this.pushVariadic(vals); + } + + pushVariadicNumber(vals: number | number[]) { + if (Array.isArray(vals)) { + for (const val of vals) { + this.push(val.toString()); + } + } else { + this.push(vals.toString()); + } + } + + pushKey(key: RedisArgument) { + this.#keys.push(key); + this.#redisArgs.push(key); + } + + pushKeysLength(keys: RedisVariadicArgument) { + if (Array.isArray(keys)) { + this.#redisArgs.push(keys.length.toString()); + } else { + this.#redisArgs.push('1'); + } + this.pushKeys(keys); + } + + pushKeys(keys: RedisVariadicArgument) { + if (Array.isArray(keys)) { + this.#keys.push(...keys); + this.#redisArgs.push(...keys); + } else { + this.#keys.push(keys); + this.#redisArgs.push(keys); + } + } +} diff --git a/packages/client/lib/client/pool.spec.ts b/packages/client/lib/client/pool.spec.ts new file mode 100644 index 00000000000..f292dc171c7 --- /dev/null +++ b/packages/client/lib/client/pool.spec.ts @@ -0,0 +1,42 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; + +describe('RedisClientPool', () => { + testUtils.testWithClientPool('sendCommand', async pool => { + assert.equal( + await pool.sendCommand(['PING']), + 'PONG' + ); + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithClientPool( + 'proper error propagation in sequential operations', + async (pool) => { + let hasUnhandledRejection = false; + + process.once('unhandledRejection', () => { + hasUnhandledRejection = true; + }); + + const groupName = 'test-group'; + const streamName = 'test-stream'; + + // First attempt - should succeed + await pool.xGroupCreate(streamName, 
groupName, '0', { + MKSTREAM: true, + }); + + // Subsequent attempts - should all throw BUSYGROUP errors and be handled properly + for (let i = 0; i < 3; i++) { + await assert.rejects( + pool.xGroupCreate(streamName, groupName, '0', { + MKSTREAM: true, + }) + ); + } + + assert.equal(hasUnhandledRejection, false); + }, + GLOBAL.SERVERS.OPEN + ); +}); diff --git a/packages/client/lib/client/pool.ts b/packages/client/lib/client/pool.ts new file mode 100644 index 00000000000..75aca57a9f5 --- /dev/null +++ b/packages/client/lib/client/pool.ts @@ -0,0 +1,548 @@ +import COMMANDS from '../commands'; +import { Command, RedisArgument, RedisFunction, RedisFunctions, RedisModules, RedisScript, RedisScripts, RespVersions, TypeMapping } from '../RESP/types'; +import RedisClient, { RedisClientType, RedisClientOptions, RedisClientExtensions } from '.'; +import { EventEmitter } from 'node:events'; +import { DoublyLinkedNode, DoublyLinkedList, SinglyLinkedList } from './linked-list'; +import { TimeoutError } from '../errors'; +import { attachConfig, functionArgumentsPrefix, getTransformReply, scriptArgumentsPrefix } from '../commander'; +import { CommandOptions } from './commands-queue'; +import RedisClientMultiCommand, { RedisClientMultiCommandType } from './multi-command'; +import { BasicPooledClientSideCache, ClientSideCacheConfig, PooledClientSideCacheProvider } from './cache'; +import { BasicCommandParser } from './parser'; +import SingleEntryCache from '../single-entry-cache'; +import { MULTI_MODE, MultiMode } from '../multi-command'; + +export interface RedisPoolOptions { + /** + * The minimum number of clients to keep in the pool (>= 1). + */ + minimum: number; + /** + * The maximum number of clients to keep in the pool (>= {@link RedisPoolOptions.minimum} >= 1). + */ + maximum: number; + /** + * The maximum time a task can wait for a client to become available (>= 0). 
+ */ + acquireTimeout: number; + /** + * The delay in milliseconds before a cleanup operation is performed on idle clients. + * + * After this delay, the pool will check if there are too many idle clients and destroy + * excess ones to maintain optimal pool size. + */ + cleanupDelay: number; + /** + * Client Side Caching configuration for the pool. + * + * Enables Redis Servers and Clients to work together to cache results from commands + * sent to a server. The server will notify the client when cached results are no longer valid. + * In pooled mode, the cache is shared across all clients in the pool. + * + * Note: Client Side Caching is only supported with RESP3. + * + * @example Anonymous cache configuration + * ``` + * const client = createClientPool({RESP: 3}, { + * clientSideCache: { + * ttl: 0, + * maxEntries: 0, + * evictPolicy: "LRU" + * }, + * minimum: 5 + * }); + * ``` + * + * @example Using a controllable cache + * ``` + * const cache = new BasicPooledClientSideCache({ + * ttl: 0, + * maxEntries: 0, + * evictPolicy: "LRU" + * }); + * const client = createClientPool({RESP: 3}, { + * clientSideCache: cache, + * minimum: 5 + * }); + * ``` + */ + clientSideCache?: PooledClientSideCacheProvider | ClientSideCacheConfig; + /** + * Enable experimental support for RESP3 module commands. + * + * When enabled, allows the use of module commands that have been adapted + * for the RESP3 protocol. This is an unstable feature and may change in + * future versions. 
+ * + * @default false + */ + unstableResp3Modules?: boolean; +} + +export type PoolTask< + M extends RedisModules, + F extends RedisFunctions, + S extends RedisScripts, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping, + T = unknown +> = (client: RedisClientType) => T; + +export type RedisClientPoolType< + M extends RedisModules = {}, + F extends RedisFunctions = {}, + S extends RedisScripts = {}, + RESP extends RespVersions = 2, + TYPE_MAPPING extends TypeMapping = {} +> = ( + RedisClientPool & + RedisClientExtensions +); + +type ProxyPool = RedisClientPoolType; + +type NamespaceProxyPool = { _self: ProxyPool }; + +export class RedisClientPool< + M extends RedisModules = {}, + F extends RedisFunctions = {}, + S extends RedisScripts = {}, + RESP extends RespVersions = 2, + TYPE_MAPPING extends TypeMapping = {} +> extends EventEmitter { + static #createCommand(command: Command, resp: RespVersions) { + const transformReply = getTransformReply(command, resp); + + return async function (this: ProxyPool, ...args: Array) { + const parser = new BasicCommandParser(); + command.parseCommand(parser, ...args); + + return this.execute(client => client._executeCommand(command, parser, this._commandOptions, transformReply)) + }; + } + + static #createModuleCommand(command: Command, resp: RespVersions) { + const transformReply = getTransformReply(command, resp); + + return async function (this: NamespaceProxyPool, ...args: Array) { + const parser = new BasicCommandParser(); + command.parseCommand(parser, ...args); + + return this._self.execute(client => client._executeCommand(command, parser, this._self._commandOptions, transformReply)) + }; + } + + static #createFunctionCommand(name: string, fn: RedisFunction, resp: RespVersions) { + const prefix = functionArgumentsPrefix(name, fn); + const transformReply = getTransformReply(fn, resp); + + return async function (this: NamespaceProxyPool, ...args: Array) { + const parser = new BasicCommandParser(); + 
parser.push(...prefix); + fn.parseCommand(parser, ...args); + + return this._self.execute(client => client._executeCommand(fn, parser, this._self._commandOptions, transformReply)) }; + } + + static #createScriptCommand(script: RedisScript, resp: RespVersions) { + const prefix = scriptArgumentsPrefix(script); + const transformReply = getTransformReply(script, resp); + + return async function (this: ProxyPool, ...args: Array) { + const parser = new BasicCommandParser(); + parser.pushVariadic(prefix); + script.parseCommand(parser, ...args); + + return this.execute(client => client._executeScript(script, parser, this._commandOptions, transformReply)) + }; + } + + static #SingleEntryCache = new SingleEntryCache(); + + static create< + M extends RedisModules, + F extends RedisFunctions, + S extends RedisScripts, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping = {} + >( + clientOptions?: Omit, "clientSideCache">, + options?: Partial + ) { + + let Pool = RedisClientPool.#SingleEntryCache.get(clientOptions); + if(!Pool) { + Pool = attachConfig({ + BaseClass: RedisClientPool, + commands: COMMANDS, + createCommand: RedisClientPool.#createCommand, + createModuleCommand: RedisClientPool.#createModuleCommand, + createFunctionCommand: RedisClientPool.#createFunctionCommand, + createScriptCommand: RedisClientPool.#createScriptCommand, + config: clientOptions + }); + Pool.prototype.Multi = RedisClientMultiCommand.extend(clientOptions); + RedisClientPool.#SingleEntryCache.set(clientOptions, Pool); + } + + // returning a "proxy" to prevent the namespaces._self to leak between "proxies" + return Object.create( + new Pool( + clientOptions, + options + ) + ) as RedisClientPoolType; + } + + // TODO: defaults + static #DEFAULTS = { + minimum: 1, + maximum: 100, + acquireTimeout: 3000, + cleanupDelay: 3000 + } satisfies RedisPoolOptions; + + readonly #clientFactory: () => RedisClientType; + readonly #options: RedisPoolOptions; + + readonly #idleClients = new 
SinglyLinkedList>(); + + /** + * The number of idle clients. + */ + get idleClients() { + return this._self.#idleClients.length; + } + + readonly #clientsInUse = new DoublyLinkedList>(); + + /** + * The number of clients in use. + */ + get clientsInUse() { + return this._self.#clientsInUse.length; + } + + /** + * The total number of clients in the pool (including connecting, idle, and in use). + */ + get totalClients() { + return this._self.#idleClients.length + this._self.#clientsInUse.length; + } + + readonly #tasksQueue = new SinglyLinkedList<{ + timeout: NodeJS.Timeout | undefined; + resolve: (value: unknown) => unknown; + reject: (reason?: unknown) => unknown; + fn: PoolTask; + }>(); + + /** + * The number of tasks waiting for a client to become available. + */ + get tasksQueueLength() { + return this._self.#tasksQueue.length; + } + + #isOpen = false; + + /** + * Whether the pool is open (either connecting or connected). + */ + get isOpen() { + return this._self.#isOpen; + } + + #isClosing = false; + + /** + * Whether the pool is closing (*not* closed). + */ + get isClosing() { + return this._self.#isClosing; + } + + #clientSideCache?: PooledClientSideCacheProvider; + get clientSideCache() { + return this._self.#clientSideCache; + } + + /** + * You are probably looking for {@link RedisClient.createPool `RedisClient.createPool`}, + * {@link RedisClientPool.fromClient `RedisClientPool.fromClient`}, + * or {@link RedisClientPool.fromOptions `RedisClientPool.fromOptions`}... 
+ */ + constructor( + clientOptions?: RedisClientOptions, + options?: Partial + ) { + super(); + + this.#options = { + ...RedisClientPool.#DEFAULTS, + ...options + }; + if (options?.clientSideCache) { + if (clientOptions === undefined) { + clientOptions = {}; + } + + if (options.clientSideCache instanceof PooledClientSideCacheProvider) { + this.#clientSideCache = clientOptions.clientSideCache = options.clientSideCache; + } else { + const cscConfig = options.clientSideCache; + this.#clientSideCache = clientOptions.clientSideCache = new BasicPooledClientSideCache(cscConfig); +// this.#clientSideCache = clientOptions.clientSideCache = new PooledNoRedirectClientSideCache(cscConfig); + } + } + + this.#clientFactory = RedisClient.factory(clientOptions).bind(undefined, clientOptions) as () => RedisClientType; + } + + private _self = this; + private _commandOptions?: CommandOptions; + + withCommandOptions< + OPTIONS extends CommandOptions, + TYPE_MAPPING extends TypeMapping + >(options: OPTIONS) { + const proxy = Object.create(this._self); + proxy._commandOptions = options; + return proxy as RedisClientPoolType< + M, + F, + S, + RESP, + TYPE_MAPPING extends TypeMapping ? TYPE_MAPPING : {} + >; + } + + #commandOptionsProxy< + K extends keyof CommandOptions, + V extends CommandOptions[K] + >( + key: K, + value: V + ) { + const proxy = Object.create(this._self); + proxy._commandOptions = Object.create(this._commandOptions ?? null); + proxy._commandOptions[key] = value; + return proxy as RedisClientPoolType< + M, + F, + S, + RESP, + K extends 'typeMapping' ? V extends TypeMapping ? 
V : {} : TYPE_MAPPING + >; + } + + /** + * Override the `typeMapping` command option + */ + withTypeMapping(typeMapping: TYPE_MAPPING) { + return this._self.#commandOptionsProxy('typeMapping', typeMapping); + } + + /** + * Override the `abortSignal` command option + */ + withAbortSignal(abortSignal: AbortSignal) { + return this._self.#commandOptionsProxy('abortSignal', abortSignal); + } + + /** + * Override the `asap` command option to `true` + * TODO: remove? + */ + asap() { + return this._self.#commandOptionsProxy('asap', true); + } + + async connect() { + if (this._self.#isOpen) return; // TODO: throw error? + this._self.#isOpen = true; + + const promises = []; + while (promises.length < this._self.#options.minimum) { + promises.push(this._self.#create()); + } + + try { + await Promise.all(promises); + } catch (err) { + this.destroy(); + throw err; + } + + return this as unknown as RedisClientPoolType; + } + + async #create() { + const node = this._self.#clientsInUse.push( + this._self.#clientFactory() + .on('error', (err: Error) => this.emit('error', err)) + ); + + try { + const client = node.value; + await client.connect(); + } catch (err) { + this._self.#clientsInUse.remove(node); + throw err; + } + + this._self.#returnClient(node); + } + + execute(fn: PoolTask) { + return new Promise>((resolve, reject) => { + const client = this._self.#idleClients.shift(), + { tail } = this._self.#tasksQueue; + if (!client) { + let timeout; + if (this._self.#options.acquireTimeout > 0) { + timeout = setTimeout( + () => { + this._self.#tasksQueue.remove(task, tail); + reject(new TimeoutError('Timeout waiting for a client')); // TODO: message + }, + this._self.#options.acquireTimeout + ); + } + + const task = this._self.#tasksQueue.push({ + timeout, + // @ts-ignore + resolve, + reject, + fn + }); + + if (this.totalClients < this._self.#options.maximum) { + this._self.#create(); + } + + return; + } + + const node = this._self.#clientsInUse.push(client); + // @ts-ignore + 
this._self.#executeTask(node, resolve, reject, fn); + }); + } + + #executeTask( + node: DoublyLinkedNode>, + resolve: (value: T | PromiseLike) => void, + reject: (reason?: unknown) => void, + fn: PoolTask + ) { + const result = fn(node.value); + if (result instanceof Promise) { + result + .then(resolve, reject) + .finally(() => this.#returnClient(node)) + } else { + resolve(result); + this.#returnClient(node); + } + } + + #returnClient(node: DoublyLinkedNode>) { + const task = this.#tasksQueue.shift(); + if (task) { + clearTimeout(task.timeout); + this.#executeTask(node, task.resolve, task.reject, task.fn); + return; + } + + this.#clientsInUse.remove(node); + this.#idleClients.push(node.value); + + this.#scheduleCleanup(); + } + + cleanupTimeout?: NodeJS.Timeout; + + #scheduleCleanup() { + if (this.totalClients <= this.#options.minimum) return; + + clearTimeout(this.cleanupTimeout); + this.cleanupTimeout = setTimeout(() => this.#cleanup(), this.#options.cleanupDelay); + } + + #cleanup() { + const toDestroy = Math.min(this.#idleClients.length, this.totalClients - this.#options.minimum); + for (let i = 0; i < toDestroy; i++) { + // TODO: shift vs pop + const client = this.#idleClients.shift()! + client.destroy(); + } + } + + sendCommand( + args: Array, + options?: CommandOptions + ) { + return this.execute(client => client.sendCommand(args, options)); + } + + + MULTI() { + type Multi = new (...args: ConstructorParameters) => RedisClientMultiCommandType; + return new ((this as any).Multi as Multi)( + (commands, selectedDB) => this.execute(client => client._executeMulti(commands, selectedDB)), + commands => this.execute(client => client._executePipeline(commands)), + this._commandOptions?.typeMapping + ); + } + + multi = this.MULTI; + + async close() { + if (this._self.#isClosing) return; // TODO: throw err? + if (!this._self.#isOpen) return; // TODO: throw err? 
+ + this._self.#isClosing = true; + + try { + const promises = []; + + for (const client of this._self.#idleClients) { + promises.push(client.close()); + } + + for (const client of this._self.#clientsInUse) { + promises.push(client.close()); + } + + await Promise.all(promises); + + this.#clientSideCache?.onPoolClose(); + + this._self.#idleClients.reset(); + this._self.#clientsInUse.reset(); + } catch (err) { + + } finally { + this._self.#isClosing = false; + } + } + + destroy() { + for (const client of this._self.#idleClients) { + client.destroy(); + } + this._self.#idleClients.reset(); + + for (const client of this._self.#clientsInUse) { + client.destroy(); + } + + this._self.#clientSideCache?.onPoolClose(); + + this._self.#clientsInUse.reset(); + + this._self.#isOpen = false; + } +} diff --git a/packages/client/lib/client/pub-sub.spec.ts b/packages/client/lib/client/pub-sub.spec.ts new file mode 100644 index 00000000000..74bd85c1831 --- /dev/null +++ b/packages/client/lib/client/pub-sub.spec.ts @@ -0,0 +1,151 @@ +import { strict as assert } from 'node:assert'; +import { PubSub, PUBSUB_TYPE } from './pub-sub'; + +describe('PubSub', () => { + const TYPE = PUBSUB_TYPE.CHANNELS, + CHANNEL = 'channel', + LISTENER = () => {}; + + describe('subscribe to new channel', () => { + function createAndSubscribe() { + const pubSub = new PubSub(), + command = pubSub.subscribe(TYPE, CHANNEL, LISTENER); + + assert.equal(pubSub.isActive, true); + assert.ok(command); + assert.equal(command.channelsCounter, 1); + + return { + pubSub, + command + }; + } + + it('resolve', () => { + const { pubSub, command } = createAndSubscribe(); + + command.resolve(); + + assert.equal(pubSub.isActive, true); + }); + + it('reject', () => { + const { pubSub, command } = createAndSubscribe(); + + assert.ok(command.reject); + command.reject(); + + assert.equal(pubSub.isActive, false); + }); + }); + + it('subscribe to already subscribed channel', () => { + const pubSub = new PubSub(), + firstSubscribe = 
pubSub.subscribe(TYPE, CHANNEL, LISTENER); + assert.ok(firstSubscribe); + + const secondSubscribe = pubSub.subscribe(TYPE, CHANNEL, LISTENER); + assert.ok(secondSubscribe); + + firstSubscribe.resolve(); + + assert.equal( + pubSub.subscribe(TYPE, CHANNEL, LISTENER), + undefined + ); + }); + + it('unsubscribe all', () => { + const pubSub = new PubSub(); + + const subscribe = pubSub.subscribe(TYPE, CHANNEL, LISTENER); + assert.ok(subscribe); + subscribe.resolve(); + assert.equal(pubSub.isActive, true); + + const unsubscribe = pubSub.unsubscribe(TYPE); + assert.equal(pubSub.isActive, true); + assert.ok(unsubscribe); + unsubscribe.resolve(); + assert.equal(pubSub.isActive, false); + }); + + describe('unsubscribe from channel', () => { + it('when not subscribed', () => { + const pubSub = new PubSub(), + unsubscribe = pubSub.unsubscribe(TYPE, CHANNEL); + assert.ok(unsubscribe); + unsubscribe.resolve(); + assert.equal(pubSub.isActive, false); + }); + + it('when already subscribed', () => { + const pubSub = new PubSub(), + subscribe = pubSub.subscribe(TYPE, CHANNEL, LISTENER); + assert.ok(subscribe); + subscribe.resolve(); + assert.equal(pubSub.isActive, true); + + const unsubscribe = pubSub.unsubscribe(TYPE, CHANNEL); + assert.equal(pubSub.isActive, true); + assert.ok(unsubscribe); + unsubscribe.resolve(); + assert.equal(pubSub.isActive, false); + }); + }); + + describe('unsubscribe from listener', () => { + it('when it\'s the only listener', () => { + const pubSub = new PubSub(), + subscribe = pubSub.subscribe(TYPE, CHANNEL, LISTENER); + assert.ok(subscribe); + subscribe.resolve(); + assert.equal(pubSub.isActive, true); + + const unsubscribe = pubSub.unsubscribe(TYPE, CHANNEL, LISTENER); + assert.ok(unsubscribe); + unsubscribe.resolve(); + assert.equal(pubSub.isActive, false); + }); + + it('when there are more listeners', () => { + const pubSub = new PubSub(), + subscribe = pubSub.subscribe(TYPE, CHANNEL, LISTENER); + assert.ok(subscribe); + subscribe.resolve(); + 
assert.equal(pubSub.isActive, true); + + assert.equal( + pubSub.subscribe(TYPE, CHANNEL, () => { }), + undefined + ); + + assert.equal( + pubSub.unsubscribe(TYPE, CHANNEL, LISTENER), + undefined + ); + }); + + describe('non-existing listener', () => { + it('on subscribed channel', () => { + const pubSub = new PubSub(), + subscribe = pubSub.subscribe(TYPE, CHANNEL, LISTENER); + assert.ok(subscribe); + subscribe.resolve(); + assert.equal(pubSub.isActive, true); + + assert.equal( + pubSub.unsubscribe(TYPE, CHANNEL, () => { }), + undefined + ); + assert.equal(pubSub.isActive, true); + }); + + it('on unsubscribed channel', () => { + const pubSub = new PubSub(); + assert.ok(pubSub.unsubscribe(TYPE, CHANNEL, () => { })); + assert.equal(pubSub.isActive, false); + }); + }); + }); +}); diff --git a/packages/client/lib/client/pub-sub.ts b/packages/client/lib/client/pub-sub.ts new file mode 100644 index 00000000000..1895f96a883 --- /dev/null +++ b/packages/client/lib/client/pub-sub.ts @@ -0,0 +1,450 @@ +import { RedisArgument } from '../RESP/types'; +import { CommandToWrite } from './commands-queue'; + +export const PUBSUB_TYPE = { + CHANNELS: 'CHANNELS', + PATTERNS: 'PATTERNS', + SHARDED: 'SHARDED' +} as const; + +export type PUBSUB_TYPE = typeof PUBSUB_TYPE; + +export type PubSubType = PUBSUB_TYPE[keyof PUBSUB_TYPE]; + +const COMMANDS = { + [PUBSUB_TYPE.CHANNELS]: { + subscribe: Buffer.from('subscribe'), + unsubscribe: Buffer.from('unsubscribe'), + message: Buffer.from('message') + }, + [PUBSUB_TYPE.PATTERNS]: { + subscribe: Buffer.from('psubscribe'), + unsubscribe: Buffer.from('punsubscribe'), + message: Buffer.from('pmessage') + }, + [PUBSUB_TYPE.SHARDED]: { + subscribe: Buffer.from('ssubscribe'), + unsubscribe: Buffer.from('sunsubscribe'), + message: Buffer.from('smessage') + } +}; + +export type PubSubListener< + RETURN_BUFFERS extends boolean = false +> = (message: T, channel: T) => unknown; + +export interface ChannelListeners { + unsubscribing: boolean; + buffers: 
Set>; + strings: Set>; +} + +export type PubSubTypeListeners = Map; + +export type PubSubListeners = Record; + +export type PubSubCommand = ( + Required> & { + reject: undefined | (() => unknown); + } +); + +export class PubSub { + static isStatusReply(reply: Array): boolean { + return ( + COMMANDS[PUBSUB_TYPE.CHANNELS].subscribe.equals(reply[0]) || + COMMANDS[PUBSUB_TYPE.CHANNELS].unsubscribe.equals(reply[0]) || + COMMANDS[PUBSUB_TYPE.PATTERNS].subscribe.equals(reply[0]) || + COMMANDS[PUBSUB_TYPE.PATTERNS].unsubscribe.equals(reply[0]) || + COMMANDS[PUBSUB_TYPE.SHARDED].subscribe.equals(reply[0]) + ); + } + + static isShardedUnsubscribe(reply: Array): boolean { + return COMMANDS[PUBSUB_TYPE.SHARDED].unsubscribe.equals(reply[0]); + } + + static #channelsArray(channels: string | Array) { + return (Array.isArray(channels) ? channels : [channels]); + } + + static #listenersSet( + listeners: ChannelListeners, + returnBuffers?: T + ) { + return (returnBuffers ? listeners.buffers : listeners.strings); + } + + #subscribing = 0; + + #isActive = false; + + get isActive() { + return this.#isActive; + } + + readonly listeners: PubSubListeners = { + [PUBSUB_TYPE.CHANNELS]: new Map(), + [PUBSUB_TYPE.PATTERNS]: new Map(), + [PUBSUB_TYPE.SHARDED]: new Map() + }; + + subscribe( + type: PubSubType, + channels: string | Array, + listener: PubSubListener, + returnBuffers?: T + ) { + const args: Array = [COMMANDS[type].subscribe], + channelsArray = PubSub.#channelsArray(channels); + for (const channel of channelsArray) { + let channelListeners = this.listeners[type].get(channel); + if (!channelListeners || channelListeners.unsubscribing) { + args.push(channel); + } + } + + if (args.length === 1) { + // all channels are already subscribed, add listeners without issuing a command + for (const channel of channelsArray) { + PubSub.#listenersSet( + this.listeners[type].get(channel)!, + returnBuffers + ).add(listener); + } + return; + } + + this.#isActive = true; + this.#subscribing++; + 
return { + args, + channelsCounter: args.length - 1, + resolve: () => { + this.#subscribing--; + for (const channel of channelsArray) { + let listeners = this.listeners[type].get(channel); + if (!listeners) { + listeners = { + unsubscribing: false, + buffers: new Set(), + strings: new Set() + }; + this.listeners[type].set(channel, listeners); + } + + PubSub.#listenersSet(listeners, returnBuffers).add(listener); + } + }, + reject: () => { + this.#subscribing--; + this.#updateIsActive(); + } + } satisfies PubSubCommand; + } + + extendChannelListeners( + type: PubSubType, + channel: string, + listeners: ChannelListeners + ) { + if (!this.#extendChannelListeners(type, channel, listeners)) return; + + this.#isActive = true; + this.#subscribing++; + return { + args: [ + COMMANDS[type].subscribe, + channel + ], + channelsCounter: 1, + resolve: () => this.#subscribing--, + reject: () => { + this.#subscribing--; + this.#updateIsActive(); + } + } satisfies PubSubCommand; + } + + #extendChannelListeners( + type: PubSubType, + channel: string, + listeners: ChannelListeners + ) { + const existingListeners = this.listeners[type].get(channel); + if (!existingListeners) { + this.listeners[type].set(channel, listeners); + return true; + } + + for (const listener of listeners.buffers) { + existingListeners.buffers.add(listener); + } + + for (const listener of listeners.strings) { + existingListeners.strings.add(listener); + } + + return false; + } + + extendTypeListeners(type: PubSubType, listeners: PubSubTypeListeners) { + const args: Array = [COMMANDS[type].subscribe]; + for (const [channel, channelListeners] of listeners) { + if (this.#extendChannelListeners(type, channel, channelListeners)) { + args.push(channel); + } + } + + if (args.length === 1) return; + + this.#isActive = true; + this.#subscribing++; + return { + args, + channelsCounter: args.length - 1, + resolve: () => this.#subscribing--, + reject: () => { + this.#subscribing--; + this.#updateIsActive(); + } + } 
satisfies PubSubCommand; + } + + unsubscribe( + type: PubSubType, + channels?: string | Array, + listener?: PubSubListener, + returnBuffers?: T + ) { + const listeners = this.listeners[type]; + if (!channels) { + return this.#unsubscribeCommand( + [COMMANDS[type].unsubscribe], + // cannot use `this.#subscribed` because there might be some `SUBSCRIBE` commands in the queue + // cannot use `this.#subscribed + this.#subscribing` because some `SUBSCRIBE` commands might fail + NaN, + () => listeners.clear() + ); + } + + const channelsArray = PubSub.#channelsArray(channels); + if (!listener) { + return this.#unsubscribeCommand( + [COMMANDS[type].unsubscribe, ...channelsArray], + channelsArray.length, + () => { + for (const channel of channelsArray) { + listeners.delete(channel); + } + } + ); + } + + const args: Array = [COMMANDS[type].unsubscribe]; + for (const channel of channelsArray) { + const sets = listeners.get(channel); + if (sets) { + let current, + other; + if (returnBuffers) { + current = sets.buffers; + other = sets.strings; + } else { + current = sets.strings; + other = sets.buffers; + } + + const currentSize = current.has(listener) ? current.size - 1 : current.size; + if (currentSize !== 0 || other.size !== 0) continue; + sets.unsubscribing = true; + } + + args.push(channel); + } + + if (args.length === 1) { + // all channels has other listeners, + // delete the listeners without issuing a command + for (const channel of channelsArray) { + PubSub.#listenersSet( + listeners.get(channel)!, + returnBuffers + ).delete(listener); + } + return; + } + + return this.#unsubscribeCommand( + args, + args.length - 1, + () => { + for (const channel of channelsArray) { + const sets = listeners.get(channel); + if (!sets) continue; + + (returnBuffers ? 
sets.buffers : sets.strings).delete(listener); + if (sets.buffers.size === 0 && sets.strings.size === 0) { + listeners.delete(channel); + } + } + } + ); + } + + #unsubscribeCommand( + args: Array, + channelsCounter: number, + removeListeners: () => void + ) { + return { + args, + channelsCounter, + resolve: () => { + removeListeners(); + this.#updateIsActive(); + }, + reject: undefined + } satisfies PubSubCommand; + } + + #updateIsActive() { + this.#isActive = ( + this.listeners[PUBSUB_TYPE.CHANNELS].size !== 0 || + this.listeners[PUBSUB_TYPE.PATTERNS].size !== 0 || + this.listeners[PUBSUB_TYPE.SHARDED].size !== 0 || + this.#subscribing !== 0 + ); + } + + reset() { + this.#isActive = false; + this.#subscribing = 0; + } + + resubscribe() { + const commands: PubSubCommand[] = []; + for (const [type, listeners] of Object.entries(this.listeners)) { + if (!listeners.size) continue; + + this.#isActive = true; + + if(type === PUBSUB_TYPE.SHARDED) { + this.#shardedResubscribe(commands, listeners); + } else { + this.#normalResubscribe(commands, type, listeners); + } + } + + return commands; + } + + #normalResubscribe(commands: PubSubCommand[], type: string, listeners: PubSubTypeListeners) { + this.#subscribing++; + const callback = () => this.#subscribing--; + commands.push({ + args: [ + COMMANDS[type as PubSubType].subscribe, + ...listeners.keys() + ], + channelsCounter: listeners.size, + resolve: callback, + reject: callback + }); + } + + #shardedResubscribe(commands: PubSubCommand[], listeners: PubSubTypeListeners) { + const callback = () => this.#subscribing--; + for(const channel of listeners.keys()) { + this.#subscribing++; + commands.push({ + args: [ + COMMANDS[PUBSUB_TYPE.SHARDED].subscribe, + channel + ], + channelsCounter: 1, + resolve: callback, + reject: callback + }) + } + } + + handleMessageReply(reply: Array): boolean { + if (COMMANDS[PUBSUB_TYPE.CHANNELS].message.equals(reply[0])) { + this.#emitPubSubMessage( + PUBSUB_TYPE.CHANNELS, + reply[2], + reply[1] + 
); + return true; + } else if (COMMANDS[PUBSUB_TYPE.PATTERNS].message.equals(reply[0])) { + this.#emitPubSubMessage( + PUBSUB_TYPE.PATTERNS, + reply[3], + reply[2], + reply[1] + ); + return true; + } else if (COMMANDS[PUBSUB_TYPE.SHARDED].message.equals(reply[0])) { + this.#emitPubSubMessage( + PUBSUB_TYPE.SHARDED, + reply[2], + reply[1] + ); + return true; + } + + return false; + } + + removeShardedListeners(channel: string): ChannelListeners { + const listeners = this.listeners[PUBSUB_TYPE.SHARDED].get(channel)!; + this.listeners[PUBSUB_TYPE.SHARDED].delete(channel); + this.#updateIsActive(); + return listeners; + } + + removeAllListeners() { + const result = { + [PUBSUB_TYPE.CHANNELS]: this.listeners[PUBSUB_TYPE.CHANNELS], + [PUBSUB_TYPE.PATTERNS]: this.listeners[PUBSUB_TYPE.PATTERNS], + [PUBSUB_TYPE.SHARDED]: this.listeners[PUBSUB_TYPE.SHARDED] + } + + this.#updateIsActive(); + + this.listeners[PUBSUB_TYPE.CHANNELS] = new Map(); + this.listeners[PUBSUB_TYPE.PATTERNS] = new Map(); + this.listeners[PUBSUB_TYPE.SHARDED] = new Map(); + + return result; + } + + #emitPubSubMessage( + type: PubSubType, + message: Buffer, + channel: Buffer, + pattern?: Buffer + ): void { + const keyString = (pattern ?? channel).toString(), + listeners = this.listeners[type].get(keyString); + + if (!listeners) return; + + for (const listener of listeners.buffers) { + listener(message, channel); + } + + if (!listeners.strings.size) return; + + const channelString = pattern ? channel.toString() : keyString, + messageString = channelString === '__redis__:invalidate' ? + // https://github.com/redis/redis/pull/7469 + // https://github.com/redis/redis/issues/7463 + (message === null ? 
null : (message as any as Array).map(x => x.toString())) as any : + message.toString(); + for (const listener of listeners.strings) { + listener(messageString, channelString); + } + } +} diff --git a/packages/client/lib/client/socket.spec.ts b/packages/client/lib/client/socket.spec.ts new file mode 100644 index 00000000000..5117cc4f49d --- /dev/null +++ b/packages/client/lib/client/socket.spec.ts @@ -0,0 +1,148 @@ +import { strict as assert } from 'node:assert'; +import { spy } from 'sinon'; +import { once } from 'node:events'; +import RedisSocket, { RedisSocketOptions } from './socket'; +import testUtils, { GLOBAL } from '../test-utils'; +import { setTimeout } from 'timers/promises'; + +describe('Socket', () => { + function createSocket(options: RedisSocketOptions): RedisSocket { + const socket = new RedisSocket(() => Promise.resolve(), options); + + socket.on('error', () => { + // ignore errors + }); + + return socket; + } + + describe('reconnectStrategy', () => { + it('false', async () => { + const socket = createSocket({ + host: 'error', + connectTimeout: 1, + reconnectStrategy: false + }); + + await assert.rejects(socket.connect()); + + assert.equal(socket.isOpen, false); + }); + + it('0', async () => { + const socket = createSocket({ + host: 'error', + connectTimeout: 1, + reconnectStrategy: 0 + }); + + socket.connect(); + await once(socket, 'error'); + assert.equal(socket.isOpen, true); + assert.equal(socket.isReady, false); + socket.destroy(); + assert.equal(socket.isOpen, false); + }); + + it('custom strategy', async () => { + const numberOfRetries = 3; + + const reconnectStrategy = spy((retries: number) => { + assert.equal(retries + 1, reconnectStrategy.callCount); + + if (retries === numberOfRetries) return new Error(`${numberOfRetries}`); + + return 0; + }); + + const socket = createSocket({ + host: 'error', + connectTimeout: 1, + reconnectStrategy + }); + + await assert.rejects(socket.connect(), { + message: `${numberOfRetries}` + }); + + 
assert.equal(socket.isOpen, false); + }); + + it('should handle errors', async () => { + const socket = createSocket({ + host: 'error', + connectTimeout: 1, + reconnectStrategy(retries: number) { + if (retries === 1) return new Error('done'); + throw new Error(); + } + }); + + await assert.rejects(socket.connect()); + + assert.equal(socket.isOpen, false); + }); + }); + + describe('socketTimeout', () => { + const timeout = 50; + testUtils.testWithClient( + 'should timeout with positive socketTimeout values', + async client => { + let timedOut = false; + + assert.equal(client.isReady, true, 'client.isReady'); + assert.equal(client.isOpen, true, 'client.isOpen'); + + client.on('error', err => { + assert.equal( + err.message, + `Socket timeout timeout. Expecting data, but didn't receive any in ${timeout}ms.` + ); + + assert.equal(client.isReady, false, 'client.isReady'); + + // This is actually a bug with the onSocketError implementation, + // the client should be closed before the error is emitted + process.nextTick(() => { + assert.equal(client.isOpen, false, 'client.isOpen'); + }); + + timedOut = true; + }); + await setTimeout(timeout * 2); + if (!timedOut) assert.fail('Should have timed out by now'); + }, + { + ...GLOBAL.SERVERS.OPEN, + clientOptions: { + socket: { + socketTimeout: timeout + } + } + } + ); + + testUtils.testWithClient( + 'should not timeout with undefined socketTimeout', + async client => { + + assert.equal(client.isReady, true, 'client.isReady'); + assert.equal(client.isOpen, true, 'client.isOpen'); + + client.on('error', err => { + assert.fail('Should not have timed out or errored in any way'); + }); + await setTimeout(100); + }, + { + ...GLOBAL.SERVERS.OPEN, + clientOptions: { + socket: { + socketTimeout: undefined + } + } + } + ); + }); +}); diff --git a/packages/client/lib/client/socket.ts b/packages/client/lib/client/socket.ts new file mode 100644 index 00000000000..c5569e86547 --- /dev/null +++ b/packages/client/lib/client/socket.ts @@ -0,0 
+1,391 @@ +import { EventEmitter, once } from 'node:events'; +import net from 'node:net'; +import tls from 'node:tls'; +import { ConnectionTimeoutError, ClientClosedError, SocketClosedUnexpectedlyError, ReconnectStrategyError, SocketTimeoutError, SocketTimeoutDuringMaintenanceError } from '../errors'; +import { setTimeout } from 'node:timers/promises'; +import { RedisArgument } from '../RESP/types'; +import { dbgMaintenance } from './enterprise-maintenance-manager'; + +type NetOptions = { + tls?: false; +}; + +type ReconnectStrategyFunction = (retries: number, cause: Error) => false | Error | number; + +type RedisSocketOptionsCommon = { + /** + * Connection timeout (in milliseconds) + */ + connectTimeout?: number; + /** + * When the socket closes unexpectedly (without calling `.close()`/`.destroy()`), the client uses `reconnectStrategy` to decide what to do. The following values are supported: + * 1. `false` -> do not reconnect, close the client and flush the command queue. + * 2. `number` -> wait for `X` milliseconds before reconnecting. + * 3. `(retries: number, cause: Error) => false | number | Error` -> `number` is the same as configuring a `number` directly, `Error` is the same as `false`, but with a custom error. + */ + reconnectStrategy?: false | number | ReconnectStrategyFunction; + /** + * The timeout (in milliseconds) after which the socket will be closed. `undefined` means no timeout. 
+ */ + socketTimeout?: number; +} + +type RedisTcpOptions = RedisSocketOptionsCommon & NetOptions & Omit< + net.TcpNetConnectOpts, + 'timeout' | 'onread' | 'readable' | 'writable' | 'port' +> & { + port?: number; +}; + +type RedisTlsOptions = RedisSocketOptionsCommon & tls.ConnectionOptions & { + tls: true; +} + +type RedisIpcOptions = RedisSocketOptionsCommon & Omit< + net.IpcNetConnectOpts, + 'timeout' | 'onread' | 'readable' | 'writable' +> & { + tls: false; +} + +export type RedisTcpSocketOptions = RedisTcpOptions | RedisTlsOptions; + +export type RedisSocketOptions = RedisTcpSocketOptions | RedisIpcOptions; + +export type RedisSocketInitiator = () => void | Promise; + +export default class RedisSocket extends EventEmitter { + readonly #initiator; + readonly #connectTimeout; + readonly #reconnectStrategy; + readonly #socketFactory; + readonly #socketTimeout; + + #maintenanceTimeout: number | undefined; + + #socket?: net.Socket | tls.TLSSocket; + + #isOpen = false; + + get isOpen() { + return this.#isOpen; + } + + #isReady = false; + + get isReady() { + return this.#isReady; + } + + #isSocketUnrefed = false; + + #socketEpoch = 0; + + get socketEpoch() { + return this.#socketEpoch; + } + + constructor(initiator: RedisSocketInitiator, options?: RedisSocketOptions) { + super(); + + this.#initiator = initiator; + this.#connectTimeout = options?.connectTimeout ?? 
5000; + this.#reconnectStrategy = this.#createReconnectStrategy(options); + this.#socketFactory = this.#createSocketFactory(options); + this.#socketTimeout = options?.socketTimeout; + } + + #createReconnectStrategy(options?: RedisSocketOptions): ReconnectStrategyFunction { + const strategy = options?.reconnectStrategy; + if (strategy === false || typeof strategy === 'number') { + return () => strategy; + } + + if (strategy) { + return (retries, cause) => { + try { + const retryIn = strategy(retries, cause); + if (retryIn !== false && !(retryIn instanceof Error) && typeof retryIn !== 'number') { + throw new TypeError(`Reconnect strategy should return \`false | Error | number\`, got ${retryIn} instead`); + } + return retryIn; + } catch (err) { + this.emit('error', err); + return this.defaultReconnectStrategy(retries, err); + } + }; + } + + return this.defaultReconnectStrategy; + } + + #createSocketFactory(options?: RedisSocketOptions) { + // TLS + if (options?.tls === true) { + const withDefaults: tls.ConnectionOptions = { + ...options, + port: options?.port ?? 6379, + // https://nodejs.org/api/tls.html#tlsconnectoptions-callback "Any socket.connect() option not already listed" + // @types/node is... incorrect... + // @ts-expect-error + noDelay: options?.noDelay ?? true, + // https://nodejs.org/api/tls.html#tlsconnectoptions-callback "Any socket.connect() option not already listed" + // @types/node is... incorrect... + // @ts-expect-error + keepAlive: options?.keepAlive ?? true, + // https://nodejs.org/api/tls.html#tlsconnectoptions-callback "Any socket.connect() option not already listed" + // @types/node is... incorrect... + // @ts-expect-error + keepAliveInitialDelay: options?.keepAliveInitialDelay ?? 
5000, + timeout: undefined, + onread: undefined, + readable: true, + writable: true + }; + return { + create() { + return tls.connect(withDefaults); + }, + event: 'secureConnect' + }; + } + + // IPC + if (options && 'path' in options) { + const withDefaults: net.IpcNetConnectOpts = { + ...options, + timeout: undefined, + onread: undefined, + readable: true, + writable: true + }; + return { + create() { + return net.createConnection(withDefaults); + }, + event: 'connect' + }; + } + + // TCP + const withDefaults: net.TcpNetConnectOpts = { + ...options, + port: options?.port ?? 6379, + noDelay: options?.noDelay ?? true, + keepAlive: options?.keepAlive ?? true, + keepAliveInitialDelay: options?.keepAliveInitialDelay ?? 5000, + timeout: undefined, + onread: undefined, + readable: true, + writable: true + }; + return { + create() { + return net.createConnection(withDefaults); + }, + event: 'connect' + }; + } + + #shouldReconnect(retries: number, cause: Error) { + const retryIn = this.#reconnectStrategy(retries, cause); + if (retryIn === false) { + this.#isOpen = false; + this.emit('error', cause); + return cause; + } else if (retryIn instanceof Error) { + this.#isOpen = false; + this.emit('error', cause); + return new ReconnectStrategyError(retryIn, cause); + } + + return retryIn; + } + + async connect(): Promise { + if (this.#isOpen) { + throw new Error('Socket already opened'); + } + + this.#isOpen = true; + return this.#connect(); + } + + async #connect(): Promise { + let retries = 0; + do { + try { + this.#socket = await this.#createSocket(); + this.emit('connect'); + + try { + await this.#initiator(); + } catch (err) { + this.#socket.destroy(); + this.#socket = undefined; + throw err; + } + this.#isReady = true; + this.#socketEpoch++; + this.emit('ready'); + } catch (err) { + const retryIn = this.#shouldReconnect(retries++, err as Error); + if (typeof retryIn !== 'number') { + throw retryIn; + } + + this.emit('error', err); + await setTimeout(retryIn); + 
this.emit('reconnecting'); + } + } while (this.#isOpen && !this.#isReady); + } + + setMaintenanceTimeout(ms?: number) { + dbgMaintenance(`Set socket timeout to ${ms}`); + if (this.#maintenanceTimeout === ms) { + dbgMaintenance(`Socket already set maintenanceCommandTimeout to ${ms}, skipping`); + return; + }; + + this.#maintenanceTimeout = ms; + + if(ms !== undefined) { + this.#socket?.setTimeout(ms); + } else { + this.#socket?.setTimeout(this.#socketTimeout ?? 0); + } + } + + async #createSocket(): Promise { + const socket = this.#socketFactory.create(); + + let onTimeout; + if (this.#connectTimeout !== undefined) { + onTimeout = () => socket.destroy(new ConnectionTimeoutError()); + socket.once('timeout', onTimeout); + socket.setTimeout(this.#connectTimeout); + } + + if (this.#isSocketUnrefed) { + socket.unref(); + } + + await once(socket, this.#socketFactory.event); + + if (onTimeout) { + socket.removeListener('timeout', onTimeout); + } + + if (this.#socketTimeout) { + socket.once('timeout', () => { + const error = this.#maintenanceTimeout + ? new SocketTimeoutDuringMaintenanceError(this.#maintenanceTimeout) + : new SocketTimeoutError(this.#socketTimeout!) 
+ socket.destroy(error); + }); + socket.setTimeout(this.#socketTimeout); + } + + socket + .once('error', err => this.#onSocketError(err)) + .once('close', hadError => { + if (hadError || !this.#isOpen || this.#socket !== socket) return; + this.#onSocketError(new SocketClosedUnexpectedlyError()); + }) + .on('drain', () => this.emit('drain')) + .on('data', data => this.emit('data', data)); + + return socket; + } + + #onSocketError(err: Error): void { + const wasReady = this.#isReady; + this.#isReady = false; + this.emit('error', err); + + if (!wasReady || !this.#isOpen || typeof this.#shouldReconnect(0, err) !== 'number') return; + + this.emit('reconnecting'); + this.#connect().catch(() => { + // the error was already emitted, silently ignore it + }); + } + + write(iterable: Iterable>) { + if (!this.#socket) return; + + this.#socket.cork(); + for (const args of iterable) { + for (const toWrite of args) { + this.#socket.write(toWrite); + } + + if (this.#socket.writableNeedDrain) break; + } + this.#socket.uncork(); + } + + async quit(fn: () => Promise): Promise { + if (!this.#isOpen) { + throw new ClientClosedError(); + } + + this.#isOpen = false; + const reply = await fn(); + this.destroySocket(); + return reply; + } + + close() { + if (!this.#isOpen) { + throw new ClientClosedError(); + } + + this.#isOpen = false; + } + + destroy() { + if (!this.#isOpen) { + throw new ClientClosedError(); + } + + this.#isOpen = false; + this.destroySocket(); + } + + destroySocket() { + this.#isReady = false; + + if (this.#socket) { + this.#socket.destroy(); + this.#socket = undefined; + } + + this.emit('end'); + } + + ref() { + this.#isSocketUnrefed = false; + this.#socket?.ref(); + } + + unref() { + this.#isSocketUnrefed = true; + this.#socket?.unref(); + } + + defaultReconnectStrategy(retries: number, cause: unknown) { + // By default, do not reconnect on socket timeout. 
+ if (cause instanceof SocketTimeoutError) { + return false; + } + + // Generate a random jitter between 0 – 200 ms: + const jitter = Math.floor(Math.random() * 200); + // Delay is an exponential back off, (times^2) * 50 ms, with a maximum value of 2000 ms: + const delay = Math.min(Math.pow(2, retries) * 50, 2000); + + return delay + jitter; + } +} diff --git a/packages/client/lib/cluster/cluster-slots.spec.ts b/packages/client/lib/cluster/cluster-slots.spec.ts new file mode 100644 index 00000000000..76c4cb53fdf --- /dev/null +++ b/packages/client/lib/cluster/cluster-slots.spec.ts @@ -0,0 +1,57 @@ +import { strict as assert } from 'node:assert'; +import { EventEmitter } from 'node:events'; +import { RedisClusterOptions, RedisClusterClientOptions } from './index'; +import RedisClusterSlots from './cluster-slots'; + +describe('RedisClusterSlots', () => { + describe('initialization', () => { + describe('clientSideCache validation', () => { + const mockEmit = ((_event: string | symbol, ..._args: any[]): boolean => true) as EventEmitter['emit']; + const clientSideCacheConfig = { ttl: 0, maxEntries: 0 }; + const rootNodes: Array = [ + { socket: { host: 'localhost', port: 30001 } } + ]; + + it('should throw error when clientSideCache is enabled with RESP 2', () => { + assert.throws( + () => new RedisClusterSlots({ + rootNodes, + clientSideCache: clientSideCacheConfig, + RESP: 2 as const, + }, mockEmit), + new Error('Client Side Caching is only supported with RESP3') + ); + }); + + it('should throw error when clientSideCache is enabled with RESP undefined', () => { + assert.throws( + () => new RedisClusterSlots({ + rootNodes, + clientSideCache: clientSideCacheConfig, + }, mockEmit), + new Error('Client Side Caching is only supported with RESP3') + ); + }); + + it('should not throw when clientSideCache is enabled with RESP 3', () => { + assert.doesNotThrow(() => + new RedisClusterSlots({ + rootNodes, + clientSideCache: clientSideCacheConfig, + RESP: 3 as const, + }, 
mockEmit) + ); + }); + }); + }); + + describe('getRandomNode', ()=> { + it('should not enter infinite loop when no nodes', () => { + const slots = new RedisClusterSlots({ + rootNodes: [] + }, () => true) + slots.getRandomNode() + slots.getRandomNode() + }); + }); +}); diff --git a/packages/client/lib/cluster/cluster-slots.ts b/packages/client/lib/cluster/cluster-slots.ts new file mode 100644 index 00000000000..ae814958437 --- /dev/null +++ b/packages/client/lib/cluster/cluster-slots.ts @@ -0,0 +1,661 @@ +import { RedisClusterClientOptions, RedisClusterOptions } from '.'; +import { RootNodesUnavailableError } from '../errors'; +import RedisClient, { RedisClientOptions, RedisClientType } from '../client'; +import { EventEmitter } from 'node:stream'; +import { ChannelListeners, PUBSUB_TYPE, PubSubListeners, PubSubTypeListeners } from '../client/pub-sub'; +import { RedisArgument, RedisFunctions, RedisModules, RedisScripts, RespVersions, TypeMapping } from '../RESP/types'; +import calculateSlot from 'cluster-key-slot'; +import { RedisSocketOptions } from '../client/socket'; +import { BasicPooledClientSideCache, PooledClientSideCacheProvider } from '../client/cache'; + +interface NodeAddress { + host: string; + port: number; +} + +export type NodeAddressMap = { + [address: string]: NodeAddress; +} | ((address: string) => NodeAddress | undefined); + +export interface Node< + M extends RedisModules, + F extends RedisFunctions, + S extends RedisScripts, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping +> { + address: string; + client?: RedisClientType; + connectPromise?: Promise>; +} + +export interface ShardNode< + M extends RedisModules, + F extends RedisFunctions, + S extends RedisScripts, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping +> extends Node, NodeAddress { + id: string; + readonly: boolean; +} + +export interface MasterNode< + M extends RedisModules, + F extends RedisFunctions, + S extends RedisScripts, + RESP extends 
RespVersions, + TYPE_MAPPING extends TypeMapping +> extends ShardNode { + pubSub?: { + connectPromise?: Promise>; + client: RedisClientType; + }; +} + +export interface Shard< + M extends RedisModules, + F extends RedisFunctions, + S extends RedisScripts, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping +> { + master: MasterNode; + replicas?: Array>; + nodesIterator?: IterableIterator>; +} + +type ShardWithReplicas< + M extends RedisModules, + F extends RedisFunctions, + S extends RedisScripts, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping +> = Shard & Required, 'replicas'>>; + +type PubSubNode< + M extends RedisModules, + F extends RedisFunctions, + S extends RedisScripts, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping +> = ( + Omit, 'client'> & + Required, 'client'>> + ); + +type PubSubToResubscribe = Record< + PUBSUB_TYPE['CHANNELS'] | PUBSUB_TYPE['PATTERNS'], + PubSubTypeListeners +>; + +export type OnShardedChannelMovedError = ( + err: unknown, + channel: string, + listeners?: ChannelListeners +) => void; + +export default class RedisClusterSlots< + M extends RedisModules, + F extends RedisFunctions, + S extends RedisScripts, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping +> { + static #SLOTS = 16384; + + readonly #options; + readonly #clientFactory; + readonly #emit: EventEmitter['emit']; + slots = new Array>(RedisClusterSlots.#SLOTS); + masters = new Array>(); + replicas = new Array>(); + readonly nodeByAddress = new Map | ShardNode>(); + pubSubNode?: PubSubNode; + clientSideCache?: PooledClientSideCacheProvider; + + #isOpen = false; + + get isOpen() { + return this.#isOpen; + } + + #validateOptions(options?: RedisClusterOptions) { + if (options?.clientSideCache && options?.RESP !== 3) { + throw new Error('Client Side Caching is only supported with RESP3'); + } + } + + constructor( + options: RedisClusterOptions, + emit: EventEmitter['emit'] + ) { + this.#validateOptions(options); + 
this.#options = options; + + if (options?.clientSideCache) { + if (options.clientSideCache instanceof PooledClientSideCacheProvider) { + this.clientSideCache = options.clientSideCache; + } else { + this.clientSideCache = new BasicPooledClientSideCache(options.clientSideCache) + } + } + + this.#clientFactory = RedisClient.factory(this.#options); + this.#emit = emit; + } + + async connect() { + if (this.#isOpen) { + throw new Error('Cluster already open'); + } + + this.#isOpen = true; + try { + await this.#discoverWithRootNodes(); + this.#emit('connect'); + } catch (err) { + this.#isOpen = false; + throw err; + } + } + + async #discoverWithRootNodes() { + let start = Math.floor(Math.random() * this.#options.rootNodes.length); + for (let i = start; i < this.#options.rootNodes.length; i++) { + if (!this.#isOpen) throw new Error('Cluster closed'); + if (await this.#discover(this.#options.rootNodes[i])) return; + } + + for (let i = 0; i < start; i++) { + if (!this.#isOpen) throw new Error('Cluster closed'); + if (await this.#discover(this.#options.rootNodes[i])) return; + } + + throw new RootNodesUnavailableError(); + } + + #resetSlots() { + this.slots = new Array(RedisClusterSlots.#SLOTS); + this.masters = []; + this.replicas = []; + this._randomNodeIterator = undefined; + } + + async #discover(rootNode: RedisClusterClientOptions) { + this.clientSideCache?.clear(); + this.clientSideCache?.disable(); + + try { + const addressesInUse = new Set(), + promises: Array> = [], + eagerConnect = this.#options.minimizeConnections !== true; + + const shards = await this.#getShards(rootNode); + this.#resetSlots(); // Reset slots AFTER shards have been fetched to prevent a race condition + for (const { from, to, master, replicas } of shards) { + const shard: Shard = { + master: this.#initiateSlotNode(master, false, eagerConnect, addressesInUse, promises) + }; + + if (this.#options.useReplicas) { + shard.replicas = replicas.map(replica => + this.#initiateSlotNode(replica, true, 
eagerConnect, addressesInUse, promises) + ); + } + + for (let i = from; i <= to; i++) { + this.slots[i] = shard; + } + } + + if (this.pubSubNode && !addressesInUse.has(this.pubSubNode.address)) { + const channelsListeners = this.pubSubNode.client.getPubSubListeners(PUBSUB_TYPE.CHANNELS), + patternsListeners = this.pubSubNode.client.getPubSubListeners(PUBSUB_TYPE.PATTERNS); + + this.pubSubNode.client.destroy(); + + if (channelsListeners.size || patternsListeners.size) { + promises.push( + this.#initiatePubSubClient({ + [PUBSUB_TYPE.CHANNELS]: channelsListeners, + [PUBSUB_TYPE.PATTERNS]: patternsListeners + }) + ); + } + } + + //Keep only the nodes that are still in use + for (const [address, node] of this.nodeByAddress.entries()) { + if (addressesInUse.has(address)) continue; + + if (node.client) { + node.client.destroy(); + } + + const { pubSub } = node as MasterNode; + if (pubSub) { + pubSub.client.destroy(); + } + + this.nodeByAddress.delete(address); + } + + await Promise.all(promises); + this.clientSideCache?.enable(); + + return true; + } catch (err) { + this.#emit('error', err); + return false; + } + } + + async #getShards(rootNode: RedisClusterClientOptions) { + const options = this.#clientOptionsDefaults(rootNode)!; + options.socket ??= {}; + options.socket.reconnectStrategy = false; + options.RESP = this.#options.RESP; + options.commandOptions = undefined; + + // TODO: find a way to avoid type casting + const client = await this.#clientFactory(options as RedisClientOptions) + .on('error', err => this.#emit('error', err)) + .connect(); + + try { + // switch to `CLUSTER SHARDS` when Redis 7.0 will be the minimum supported version + return await client.clusterSlots(); + } finally { + client.destroy(); + } + } + + #getNodeAddress(address: string): NodeAddress | undefined { + switch (typeof this.#options.nodeAddressMap) { + case 'object': + return this.#options.nodeAddressMap[address]; + + case 'function': + return this.#options.nodeAddressMap(address); + } + } 
+ + #clientOptionsDefaults(options?: RedisClientOptions) { + if (!this.#options.defaults) return options; + + let socket; + if (this.#options.defaults.socket) { + socket = { + ...this.#options.defaults.socket, + ...options?.socket + }; + } else { + socket = options?.socket; + } + + return { + ...this.#options.defaults, + ...options, + socket: socket as RedisSocketOptions + }; + } + + #initiateSlotNode( + shard: NodeAddress & { id: string; }, + readonly: boolean, + eagerConnent: boolean, + addressesInUse: Set, + promises: Array> + ) { + const address = `${shard.host}:${shard.port}`; + + let node = this.nodeByAddress.get(address); + if (!node) { + node = { + ...shard, + address, + readonly, + client: undefined, + connectPromise: undefined + }; + + if (eagerConnent) { + promises.push(this.#createNodeClient(node)); + } + + this.nodeByAddress.set(address, node); + } + + if (!addressesInUse.has(address)) { + addressesInUse.add(address); + (readonly ? this.replicas : this.masters).push(node); + } + + return node; + } + + #createClient(node: ShardNode, readonly = node.readonly) { + const socket = + this.#getNodeAddress(node.address) ?? 
+ { host: node.host, port: node.port, }; + const clientInfo = Object.freeze({ + host: socket.host, + port: socket.port, + }); + const emit = this.#emit; + const client = this.#clientFactory( + this.#clientOptionsDefaults({ + clientSideCache: this.clientSideCache, + RESP: this.#options.RESP, + socket, + readonly, + })) + .on('error', error => emit('node-error', error, clientInfo)) + .on('reconnecting', () => emit('node-reconnecting', clientInfo)) + .once('ready', () => emit('node-ready', clientInfo)) + .once('connect', () => emit('node-connect', clientInfo)) + .once('end', () => emit('node-disconnect', clientInfo)) + .on('__MOVED', async (allPubSubListeners: PubSubListeners) => { + await this.rediscover(client); + this.#emit('__resubscribeAllPubSubListeners', allPubSubListeners); + }); + + return client; + } + + #createNodeClient(node: ShardNode, readonly?: boolean) { + const client = node.client = this.#createClient(node, readonly); + return node.connectPromise = client.connect() + .finally(() => node.connectPromise = undefined); + } + + nodeClient(node: ShardNode) { + return ( + node.connectPromise ?? // if the node is connecting + node.client ?? // if the node is connected + this.#createNodeClient(node) // if the not is disconnected + ); + } + + #runningRediscoverPromise?: Promise; + + async rediscover(startWith: RedisClientType): Promise { + this.#runningRediscoverPromise ??= this.#rediscover(startWith) + .finally(() => { + this.#runningRediscoverPromise = undefined + }); + return this.#runningRediscoverPromise; + } + + async #rediscover(startWith: RedisClientType): Promise { + if (await this.#discover(startWith.options!)) return; + + return this.#discoverWithRootNodes(); + } + + /** + * @deprecated Use `close` instead. + */ + quit(): Promise { + return this.#destroy(client => client.quit()); + } + + /** + * @deprecated Use `destroy` instead. 
+ */ + disconnect(): Promise { + return this.#destroy(client => client.disconnect()); + } + + close() { + return this.#destroy(client => client.close()); + } + + destroy() { + this.#isOpen = false; + + for (const client of this.#clients()) { + client.destroy(); + } + + if (this.pubSubNode) { + this.pubSubNode.client.destroy(); + this.pubSubNode = undefined; + } + + this.#resetSlots(); + this.nodeByAddress.clear(); + this.#emit('disconnect'); + } + + *#clients() { + for (const master of this.masters) { + if (master.client) { + yield master.client; + } + + if (master.pubSub) { + yield master.pubSub.client; + } + } + + for (const replica of this.replicas) { + if (replica.client) { + yield replica.client; + } + } + } + + async #destroy(fn: (client: RedisClientType) => Promise): Promise { + this.#isOpen = false; + + const promises = []; + for (const client of this.#clients()) { + promises.push(fn(client)); + } + + if (this.pubSubNode) { + promises.push(fn(this.pubSubNode.client)); + this.pubSubNode = undefined; + } + + this.#resetSlots(); + this.nodeByAddress.clear(); + + await Promise.allSettled(promises); + this.#emit('disconnect'); + } + + getClient( + firstKey: RedisArgument | undefined, + isReadonly: boolean | undefined + ) { + if (!firstKey) { + return this.nodeClient(this.getRandomNode()); + } + + const slotNumber = calculateSlot(firstKey); + if (!isReadonly) { + return this.nodeClient(this.slots[slotNumber].master); + } + + return this.nodeClient(this.getSlotRandomNode(slotNumber)); + } + + *#iterateAllNodes() { + if(this.masters.length + this.replicas.length === 0) return + let i = Math.floor(Math.random() * (this.masters.length + this.replicas.length)); + if (i < this.masters.length) { + do { + yield this.masters[i]; + } while (++i < this.masters.length); + + for (const replica of this.replicas) { + yield replica; + } + } else { + i -= this.masters.length; + do { + yield this.replicas[i]; + } while (++i < this.replicas.length); + } + + while (true) { + for 
(const master of this.masters) { + yield master; + } + + for (const replica of this.replicas) { + yield replica; + } + } + } + + _randomNodeIterator?: IterableIterator>; + + getRandomNode() { + this._randomNodeIterator ??= this.#iterateAllNodes(); + return this._randomNodeIterator.next().value as ShardNode; + } + + *#slotNodesIterator(slot: ShardWithReplicas) { + let i = Math.floor(Math.random() * (1 + slot.replicas.length)); + if (i < slot.replicas.length) { + do { + yield slot.replicas[i]; + } while (++i < slot.replicas.length); + } + + while (true) { + yield slot.master; + + for (const replica of slot.replicas) { + yield replica; + } + } + } + + getSlotRandomNode(slotNumber: number) { + const slot = this.slots[slotNumber]; + if (!slot.replicas?.length) { + return slot.master; + } + + slot.nodesIterator ??= this.#slotNodesIterator(slot as ShardWithReplicas); + return slot.nodesIterator.next().value as ShardNode; + } + + getMasterByAddress(address: string) { + const master = this.nodeByAddress.get(address); + if (!master) return; + + return this.nodeClient(master); + } + + getPubSubClient() { + if (!this.pubSubNode) return this.#initiatePubSubClient(); + + return this.pubSubNode.connectPromise ?? this.pubSubNode.client; + } + + async #initiatePubSubClient(toResubscribe?: PubSubToResubscribe) { + const index = Math.floor(Math.random() * (this.masters.length + this.replicas.length)), + node = index < this.masters.length ? 
+ this.masters[index] : + this.replicas[index - this.masters.length], + client = this.#createClient(node, false); + + this.pubSubNode = { + address: node.address, + client, + connectPromise: client.connect() + .then(async client => { + if (toResubscribe) { + await Promise.all([ + client.extendPubSubListeners(PUBSUB_TYPE.CHANNELS, toResubscribe[PUBSUB_TYPE.CHANNELS]), + client.extendPubSubListeners(PUBSUB_TYPE.PATTERNS, toResubscribe[PUBSUB_TYPE.PATTERNS]) + ]); + } + + this.pubSubNode!.connectPromise = undefined; + return client; + }) + .catch(err => { + this.pubSubNode = undefined; + throw err; + }) + }; + + return this.pubSubNode.connectPromise!; + } + + async executeUnsubscribeCommand( + unsubscribe: (client: RedisClientType) => Promise + ): Promise { + const client = await this.getPubSubClient(); + await unsubscribe(client); + + if (!client.isPubSubActive) { + client.destroy(); + this.pubSubNode = undefined; + } + } + + getShardedPubSubClient(channel: string) { + const { master } = this.slots[calculateSlot(channel)]; + if (!master.pubSub) return this.#initiateShardedPubSubClient(master); + return master.pubSub.connectPromise ?? 
master.pubSub.client; + } + + async #initiateShardedPubSubClient(master: MasterNode) { + const client = this.#createClient(master, false) + .on('server-sunsubscribe', async (channel, listeners) => { + try { + await this.rediscover(client); + const redirectTo = await this.getShardedPubSubClient(channel); + await redirectTo.extendPubSubChannelListeners( + PUBSUB_TYPE.SHARDED, + channel, + listeners + ); + } catch (err) { + this.#emit('sharded-shannel-moved-error', err, channel, listeners); + } + }); + + master.pubSub = { + client, + connectPromise: client.connect() + .then(client => { + master.pubSub!.connectPromise = undefined; + return client; + }) + .catch(err => { + master.pubSub = undefined; + throw err; + }) + }; + + return master.pubSub.connectPromise!; + } + + async executeShardedUnsubscribeCommand( + channel: string, + unsubscribe: (client: RedisClientType) => Promise + ) { + const { master } = this.slots[calculateSlot(channel)]; + if (!master.pubSub) return; + + const client = master.pubSub.connectPromise ? 
+ await master.pubSub.connectPromise : + master.pubSub.client; + + await unsubscribe(client); + + if (!client.isPubSubActive) { + client.destroy(); + master.pubSub = undefined; + } + } +} diff --git a/packages/client/lib/cluster/index.spec.ts b/packages/client/lib/cluster/index.spec.ts new file mode 100644 index 00000000000..af8e880ec3b --- /dev/null +++ b/packages/client/lib/cluster/index.spec.ts @@ -0,0 +1,381 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL, waitTillBeenCalled } from '../test-utils'; +import RedisCluster from '.'; +import { SQUARE_SCRIPT } from '../client/index.spec'; +import { RootNodesUnavailableError } from '../errors'; +import { spy } from 'sinon'; +import RedisClient from '../client'; + +describe('Cluster', () => { + testUtils.testWithCluster('sendCommand', async cluster => { + assert.equal( + await cluster.sendCommand(undefined, true, ['PING']), + 'PONG' + ); + }, GLOBAL.CLUSTERS.OPEN); + + testUtils.testWithCluster('isOpen', async cluster => { + assert.equal(cluster.isOpen, true); + await cluster.destroy(); + assert.equal(cluster.isOpen, false); + }, GLOBAL.CLUSTERS.OPEN); + + testUtils.testWithCluster('connect should throw if already connected', async cluster => { + await assert.rejects(cluster.connect()); + }, GLOBAL.CLUSTERS.OPEN); + + testUtils.testWithCluster('multi', async cluster => { + const key = 'key'; + assert.deepEqual( + await cluster.multi() + .set(key, 'value') + .get(key) + .exec(), + ['OK', 'value'] + ); + }, GLOBAL.CLUSTERS.OPEN); + + testUtils.testWithCluster('scripts', async cluster => { + const [, reply] = await Promise.all([ + cluster.set('key', '2'), + cluster.square('key') + ]); + + assert.equal(reply, 4); + }, { + ...GLOBAL.CLUSTERS.OPEN, + clusterConfiguration: { + scripts: { + square: SQUARE_SCRIPT + } + } + }); + + it('should throw RootNodesUnavailableError', async () => { + const cluster = RedisCluster.create({ + rootNodes: [] + }); + + try { + await assert.rejects( + 
cluster.connect(), + RootNodesUnavailableError + ); + } catch (err) { + await cluster.disconnect(); + throw err; + } + }); + + testUtils.testWithCluster('should handle live resharding', async cluster => { + const slot = 12539, + key = 'key', + value = 'value'; + await cluster.set(key, value); + + const importing = cluster.slots[0].master, + migrating = cluster.slots[slot].master, + [importingClient, migratingClient] = await Promise.all([ + cluster.nodeClient(importing), + cluster.nodeClient(migrating) + ]); + + await Promise.all([ + importingClient.clusterSetSlot(slot, 'IMPORTING', migrating.id), + migratingClient.clusterSetSlot(slot, 'MIGRATING', importing.id) + ]); + + // should be able to get the key from the migrating node + assert.equal( + await cluster.get(key), + value + ); + + await migratingClient.migrate( + importing.host, + importing.port, + key, + 0, + 10 + ); + + // should be able to get the key from the importing node using `ASKING` + assert.equal( + await cluster.get(key), + value + ); + + await Promise.all([ + importingClient.clusterSetSlot(slot, 'NODE', importing.id), + migratingClient.clusterSetSlot(slot, 'NODE', importing.id), + ]); + + // should handle `MOVED` errors + assert.equal( + await cluster.get(key), + value + ); + }, { + serverArguments: [], + numberOfMasters: 2 + }); + + testUtils.testWithCluster('getRandomNode should spread the load evenly', async cluster => { + const totalNodes = cluster.masters.length + cluster.replicas.length, + ids = new Set(); + for (let i = 0; i < totalNodes; i++) { + ids.add(cluster.getRandomNode().id); + } + + assert.equal(ids.size, totalNodes); + }, GLOBAL.CLUSTERS.WITH_REPLICAS); + + testUtils.testWithCluster('getSlotRandomNode should spread the load evenly', async cluster => { + const totalNodes = 1 + cluster.slots[0].replicas!.length, + ids = new Set(); + for (let i = 0; i < totalNodes; i++) { + ids.add(cluster.getSlotRandomNode(0).id); + } + + assert.equal(ids.size, totalNodes); + }, 
GLOBAL.CLUSTERS.WITH_REPLICAS); + + testUtils.testWithCluster('cluster topology', async cluster => { + assert.equal(cluster.slots.length, 16384); + const { numberOfMasters, numberOfReplicas } = GLOBAL.CLUSTERS.WITH_REPLICAS; + assert.equal(cluster.masters.length, numberOfMasters); + assert.equal(cluster.replicas.length, numberOfReplicas * numberOfMasters); + assert.equal(cluster.nodeByAddress.size, numberOfMasters + numberOfMasters * numberOfReplicas); + }, GLOBAL.CLUSTERS.WITH_REPLICAS); + + testUtils.testWithCluster('getMasters should be backwards compatible (without `minimizeConnections`)', async cluster => { + const masters = cluster.getMasters(); + assert.ok(Array.isArray(masters)); + for (const master of masters) { + assert.equal(typeof master.id, 'string'); + assert.ok(master.client instanceof RedisClient); + } + }, { + ...GLOBAL.CLUSTERS.OPEN, + clusterConfiguration: { + minimizeConnections: undefined // reset to default + } + }); + + testUtils.testWithCluster('getSlotMaster should be backwards compatible (without `minimizeConnections`)', async cluster => { + const master = cluster.getSlotMaster(0); + assert.equal(typeof master.id, 'string'); + assert.ok(master.client instanceof RedisClient); + }, { + ...GLOBAL.CLUSTERS.OPEN, + clusterConfiguration: { + minimizeConnections: undefined // reset to default + } + }); + + testUtils.testWithCluster('should throw CROSSSLOT error', async cluster => { + await assert.rejects(cluster.mGet(['a', 'b'])); + }, GLOBAL.CLUSTERS.OPEN); + + describe('minimizeConnections', () => { + testUtils.testWithCluster('false', async cluster => { + for (const master of cluster.masters) { + assert.ok(master.client instanceof RedisClient); + } + }, { + ...GLOBAL.CLUSTERS.OPEN, + clusterConfiguration: { + minimizeConnections: false + } + }); + + testUtils.testWithCluster('true', async cluster => { + for (const master of cluster.masters) { + assert.equal(master.client, undefined); + } + }, { + ...GLOBAL.CLUSTERS.OPEN, + 
clusterConfiguration: { + minimizeConnections: true + } + }); + }); + + describe('PubSub', () => { + testUtils.testWithCluster('subscribe & unsubscribe', async cluster => { + const listener = spy(); + + await cluster.subscribe('channel', listener); + + await Promise.all([ + waitTillBeenCalled(listener), + cluster.publish('channel', 'message') + ]); + + assert.ok(listener.calledOnceWithExactly('message', 'channel')); + + await cluster.unsubscribe('channel', listener); + + assert.equal(cluster.pubSubNode, undefined); + }, GLOBAL.CLUSTERS.OPEN); + + testUtils.testWithCluster('psubscribe & punsubscribe', async cluster => { + const listener = spy(); + + await cluster.pSubscribe('channe*', listener); + + await Promise.all([ + waitTillBeenCalled(listener), + cluster.publish('channel', 'message') + ]); + + assert.ok(listener.calledOnceWithExactly('message', 'channel')); + + await cluster.pUnsubscribe('channe*', listener); + + assert.equal(cluster.pubSubNode, undefined); + }, GLOBAL.CLUSTERS.OPEN); + + testUtils.testWithCluster('should move listeners when PubSub node disconnects from the cluster', async cluster => { + const listener = spy(); + await cluster.subscribe('channel', listener); + + assert.ok(cluster.pubSubNode); + const [migrating, importing] = cluster.masters[0].address === cluster.pubSubNode.address ? + cluster.masters : + [cluster.masters[1], cluster.masters[0]], + [migratingClient, importingClient] = await Promise.all([ + cluster.nodeClient(migrating), + cluster.nodeClient(importing) + ]); + + const range = cluster.slots[0].master === migrating ? { + key: 'bar', // 5061 + start: 0, + end: 8191 + } : { + key: 'foo', // 12182 + start: 8192, + end: 16383 + }; + + // TODO: is there a better way to migrate slots without causing CLUSTERDOWN? 
+ const promises: Array> = []; + for (let i = range.start; i <= range.end; i++) { + promises.push( + migratingClient.clusterSetSlot(i, 'NODE', importing.id), + importingClient.clusterSetSlot(i, 'NODE', importing.id) + ); + } + await Promise.all(promises); + + // make sure to cause `MOVED` error + await cluster.get(range.key); + + await Promise.all([ + cluster.publish('channel', 'message'), + waitTillBeenCalled(listener) + ]); + + assert.ok(listener.calledOnceWithExactly('message', 'channel')); + }, { + serverArguments: [], + numberOfMasters: 2, + minimumDockerVersion: [7] + }); + + testUtils.testWithCluster('ssubscribe & sunsubscribe', async cluster => { + const listener = spy(); + + await cluster.sSubscribe('channel', listener); + + await Promise.all([ + waitTillBeenCalled(listener), + cluster.sPublish('channel', 'message') + ]); + + assert.ok(listener.calledOnceWithExactly('message', 'channel')); + + await cluster.sUnsubscribe('channel', listener); + + // 10328 is the slot of `channel` + assert.equal(cluster.slots[10328].master.pubSub, undefined); + }, { + ...GLOBAL.CLUSTERS.OPEN, + minimumDockerVersion: [7] + }); + + testUtils.testWithCluster('should handle sharded-channel-moved events', async cluster => { + const SLOT = 10328, + migrating = cluster.slots[SLOT].master, + importing = cluster.masters.find(master => master !== migrating)!, + [migratingClient, importingClient] = await Promise.all([ + cluster.nodeClient(migrating), + cluster.nodeClient(importing) + ]); + + await Promise.all([ + migratingClient.clusterDelSlots(SLOT), + importingClient.clusterDelSlots(SLOT), + importingClient.clusterAddSlots(SLOT), + // cause "topology refresh" on both nodes + migratingClient.clusterSetSlot(SLOT, 'NODE', importing.id), + importingClient.clusterSetSlot(SLOT, 'NODE', importing.id) + ]); + + const listener = spy(); + + // will trigger `MOVED` error + await cluster.sSubscribe('channel', listener); + + await Promise.all([ + waitTillBeenCalled(listener), + 
cluster.sPublish('channel', 'message') + ]); + + assert.ok(listener.calledOnceWithExactly('message', 'channel')); + }, { + serverArguments: [], + minimumDockerVersion: [7] + }); + }); + + describe('clusterEvents', () => { + testUtils.testWithCluster('should fire events', async (cluster) => { + const log: string[] = []; + + cluster + .on('connect', () => log.push('connect')) + .on('disconnect', () => log.push('disconnect')) + .on('error', () => log.push('error')) + .on('node-error', () => log.push('node-error')) + .on('node-reconnecting', () => log.push('node-reconnecting')) + .on('node-ready', () => log.push('node-ready')) + .on('node-connect', () => log.push('node-connect')) + .on('node-disconnect', () => log.push('node-disconnect')) + + await cluster.connect(); + cluster.destroy(); + + assert.deepEqual(log, [ + 'node-connect', + 'node-connect', + 'node-ready', + 'node-ready', + 'connect', + 'node-disconnect', + 'node-disconnect', + 'disconnect', + ]); + }, { + ...GLOBAL.CLUSTERS.OPEN, + disableClusterSetup: true, + numberOfMasters: 2, + numberOfReplicas: 1, + clusterConfiguration: { + minimizeConnections: false + } + }); + }); + +}); diff --git a/packages/client/lib/cluster/index.ts b/packages/client/lib/cluster/index.ts new file mode 100644 index 00000000000..238f3a59198 --- /dev/null +++ b/packages/client/lib/cluster/index.ts @@ -0,0 +1,676 @@ +import { RedisClientOptions, RedisClientType } from '../client'; +import { CommandOptions } from '../client/commands-queue'; +import { Command, CommandArguments, CommanderConfig, TypeMapping, RedisArgument, RedisFunction, RedisFunctions, RedisModules, RedisScript, RedisScripts, ReplyUnion, RespVersions } from '../RESP/types'; +import COMMANDS from '../commands'; +import { EventEmitter } from 'node:events'; +import { attachConfig, functionArgumentsPrefix, getTransformReply, scriptArgumentsPrefix } from '../commander'; +import RedisClusterSlots, { NodeAddressMap, ShardNode } from './cluster-slots'; +import 
RedisClusterMultiCommand, { RedisClusterMultiCommandType } from './multi-command'; +import { PubSubListener, PubSubListeners } from '../client/pub-sub'; +import { ErrorReply } from '../errors'; +import { RedisTcpSocketOptions } from '../client/socket'; +import { ClientSideCacheConfig, PooledClientSideCacheProvider } from '../client/cache'; +import { BasicCommandParser } from '../client/parser'; +import { ASKING_CMD } from '../commands/ASKING'; +import SingleEntryCache from '../single-entry-cache' +import { WithCommands, WithFunctions, WithModules, WithScripts } from '../client'; + +interface ClusterCommander< + M extends RedisModules, + F extends RedisFunctions, + S extends RedisScripts, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping, + // POLICIES extends CommandPolicies +> extends CommanderConfig { + commandOptions?: ClusterCommandOptions; +} + +export type RedisClusterClientOptions = Omit< + RedisClientOptions, + keyof ClusterCommander +>; + +export interface RedisClusterOptions< + M extends RedisModules = RedisModules, + F extends RedisFunctions = RedisFunctions, + S extends RedisScripts = RedisScripts, + RESP extends RespVersions = RespVersions, + TYPE_MAPPING extends TypeMapping = TypeMapping, + // POLICIES extends CommandPolicies = CommandPolicies +> extends ClusterCommander { + /** + * Should contain details for some of the cluster nodes that the client will use to discover + * the "cluster topology". We recommend including details for at least 3 nodes here. + */ + rootNodes: Array; + /** + * Default values used for every client in the cluster. Use this to specify global values, + * for example: ACL credentials, timeouts, TLS configuration etc. + */ + defaults?: Partial; + /** + * When `true`, `.connect()` will only discover the cluster topology, without actually connecting to all the nodes. + * Useful for short-term or PubSub-only connections. 
+ */ + minimizeConnections?: boolean; + /** + * When `true`, distribute load by executing readonly commands (such as `GET`, `GEOSEARCH`, etc.) across all cluster nodes. When `false`, only use master nodes. + */ + // TODO: replicas only mode? + useReplicas?: boolean; + /** + * The maximum number of times a command will be redirected due to `MOVED` or `ASK` errors. + */ + maxCommandRedirections?: number; + /** + * Mapping between the addresses in the cluster (see `CLUSTER SHARDS`) and the addresses the client should connect to + * Useful when the cluster is running on another network + */ + nodeAddressMap?: NodeAddressMap; + /** + * Client Side Caching configuration for the pool. + * + * Enables Redis Servers and Clients to work together to cache results from commands + * sent to a server. The server will notify the client when cached results are no longer valid. + * In pooled mode, the cache is shared across all clients in the pool. + * + * Note: Client Side Caching is only supported with RESP3. 
+ * + * @example Anonymous cache configuration + * ``` + * const client = createCluster({ + * clientSideCache: { + * ttl: 0, + * maxEntries: 0, + * evictPolicy: "LRU" + * }, + * minimum: 5 + * }); + * ``` + * + * @example Using a controllable cache + * ``` + * const cache = new BasicPooledClientSideCache({ + * ttl: 0, + * maxEntries: 0, + * evictPolicy: "LRU" + * }); + * const client = createCluster({ + * clientSideCache: cache, + * minimum: 5 + * }); + * ``` + */ + clientSideCache?: PooledClientSideCacheProvider | ClientSideCacheConfig; +} + +export type RedisClusterType< + M extends RedisModules = {}, + F extends RedisFunctions = {}, + S extends RedisScripts = {}, + RESP extends RespVersions = 2, + TYPE_MAPPING extends TypeMapping = {}, + // POLICIES extends CommandPolicies = {} +> = ( + RedisCluster & + WithCommands & + WithModules & + WithFunctions & + WithScripts +); + +export interface ClusterCommandOptions< + TYPE_MAPPING extends TypeMapping = TypeMapping + // POLICIES extends CommandPolicies = CommandPolicies +> extends CommandOptions { + // policies?: POLICIES; +} + +type ProxyCluster = RedisCluster; + +type NamespaceProxyCluster = { _self: ProxyCluster }; + +export default class RedisCluster< + M extends RedisModules, + F extends RedisFunctions, + S extends RedisScripts, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping, + // POLICIES extends CommandPolicies +> extends EventEmitter { + static #createCommand(command: Command, resp: RespVersions) { + const transformReply = getTransformReply(command, resp); + + return async function (this: ProxyCluster, ...args: Array) { + const parser = new BasicCommandParser(); + command.parseCommand(parser, ...args); + + return this._self._execute( + parser.firstKey, + command.IS_READ_ONLY, + this._commandOptions, + (client, opts) => client._executeCommand(command, parser, opts, transformReply) + ); + }; + } + + static #createModuleCommand(command: Command, resp: RespVersions) { + const transformReply = 
getTransformReply(command, resp); + + return async function (this: NamespaceProxyCluster, ...args: Array) { + const parser = new BasicCommandParser(); + command.parseCommand(parser, ...args); + + return this._self._execute( + parser.firstKey, + command.IS_READ_ONLY, + this._self._commandOptions, + (client, opts) => client._executeCommand(command, parser, opts, transformReply) + ); + }; + } + + static #createFunctionCommand(name: string, fn: RedisFunction, resp: RespVersions) { + const prefix = functionArgumentsPrefix(name, fn); + const transformReply = getTransformReply(fn, resp); + + return async function (this: NamespaceProxyCluster, ...args: Array) { + const parser = new BasicCommandParser(); + parser.push(...prefix); + fn.parseCommand(parser, ...args); + + return this._self._execute( + parser.firstKey, + fn.IS_READ_ONLY, + this._self._commandOptions, + (client, opts) => client._executeCommand(fn, parser, opts, transformReply) + ); + }; + } + + static #createScriptCommand(script: RedisScript, resp: RespVersions) { + const prefix = scriptArgumentsPrefix(script); + const transformReply = getTransformReply(script, resp); + + return async function (this: ProxyCluster, ...args: Array) { + const parser = new BasicCommandParser(); + parser.push(...prefix); + script.parseCommand(parser, ...args); + + return this._self._execute( + parser.firstKey, + script.IS_READ_ONLY, + this._commandOptions, + (client, opts) => client._executeScript(script, parser, opts, transformReply) + ); + }; + } + + static #SingleEntryCache = new SingleEntryCache(); + + static factory< + M extends RedisModules = {}, + F extends RedisFunctions = {}, + S extends RedisScripts = {}, + RESP extends RespVersions = 2, + TYPE_MAPPING extends TypeMapping = {}, + // POLICIES extends CommandPolicies = {} + >(config?: ClusterCommander) { + + let Cluster = RedisCluster.#SingleEntryCache.get(config); + if (!Cluster) { + Cluster = attachConfig({ + BaseClass: RedisCluster, + commands: COMMANDS, + createCommand: 
RedisCluster.#createCommand, + createModuleCommand: RedisCluster.#createModuleCommand, + createFunctionCommand: RedisCluster.#createFunctionCommand, + createScriptCommand: RedisCluster.#createScriptCommand, + config + }); + + Cluster.prototype.Multi = RedisClusterMultiCommand.extend(config); + RedisCluster.#SingleEntryCache.set(config, Cluster); + } + + return (options?: Omit>) => { + // returning a "proxy" to prevent the namespaces._self to leak between "proxies" + return Object.create(new Cluster(options)) as RedisClusterType; + }; + } + + static create< + M extends RedisModules = {}, + F extends RedisFunctions = {}, + S extends RedisScripts = {}, + RESP extends RespVersions = 2, + TYPE_MAPPING extends TypeMapping = {}, + // POLICIES extends CommandPolicies = {} + >(options?: RedisClusterOptions) { + return RedisCluster.factory(options)(options); + } + + readonly _options: RedisClusterOptions; + + readonly _slots: RedisClusterSlots; + + private _self = this; + private _commandOptions?: ClusterCommandOptions; + + /** + * An array of the cluster slots, each slot contains its `master` and `replicas`. + * Use with {@link RedisCluster.prototype.nodeClient} to get the client for a specific node (master or replica). + */ + get slots() { + return this._self._slots.slots; + } + + get clientSideCache() { + return this._self._slots.clientSideCache; + } + + /** + * An array of the cluster masters. + * Use with {@link RedisCluster.prototype.nodeClient} to get the client for a specific master node. + */ + get masters() { + return this._self._slots.masters; + } + + /** + * An array of the cluster replicas. + * Use with {@link RedisCluster.prototype.nodeClient} to get the client for a specific replica node. + */ + get replicas() { + return this._self._slots.replicas; + } + + /** + * A map from a node address (`:`) to its shard, each shard contains its `master` and `replicas`. 
+ * Use with {@link RedisCluster.prototype.nodeClient} to get the client for a specific node (master or replica). + */ + get nodeByAddress() { + return this._self._slots.nodeByAddress; + } + + /** + * The current pub/sub node. + */ + get pubSubNode() { + return this._self._slots.pubSubNode; + } + + get isOpen() { + return this._self._slots.isOpen; + } + + constructor(options: RedisClusterOptions) { + super(); + + this._options = options; + this._slots = new RedisClusterSlots(options, this.emit.bind(this)); + this.on('__resubscribeAllPubSubListeners', this.resubscribeAllPubSubListeners.bind(this)); + + if (options?.commandOptions) { + this._commandOptions = options.commandOptions; + } + } + + duplicate< + _M extends RedisModules = M, + _F extends RedisFunctions = F, + _S extends RedisScripts = S, + _RESP extends RespVersions = RESP, + _TYPE_MAPPING extends TypeMapping = TYPE_MAPPING + >(overrides?: Partial>) { + return new (Object.getPrototypeOf(this).constructor)({ + ...this._self._options, + commandOptions: this._commandOptions, + ...overrides + }) as RedisClusterType<_M, _F, _S, _RESP, _TYPE_MAPPING>; + } + + async connect() { + await this._self._slots.connect(); + return this as unknown as RedisClusterType; + } + + withCommandOptions< + OPTIONS extends ClusterCommandOptions, + TYPE_MAPPING extends TypeMapping, + // POLICIES extends CommandPolicies + >(options: OPTIONS) { + const proxy = Object.create(this); + proxy._commandOptions = options; + return proxy as RedisClusterType< + M, + F, + S, + RESP, + TYPE_MAPPING extends TypeMapping ? TYPE_MAPPING : {} + // POLICIES extends CommandPolicies ? POLICIES : {} + >; + } + + private _commandOptionsProxy< + K extends keyof ClusterCommandOptions, + V extends ClusterCommandOptions[K] + >( + key: K, + value: V + ) { + const proxy = Object.create(this); + proxy._commandOptions = Object.create(this._commandOptions ?? 
null); + proxy._commandOptions[key] = value; + return proxy as RedisClusterType< + M, + F, + S, + RESP, + K extends 'typeMapping' ? V extends TypeMapping ? V : {} : TYPE_MAPPING + // K extends 'policies' ? V extends CommandPolicies ? V : {} : POLICIES + >; + } + + /** + * Override the `typeMapping` command option + */ + withTypeMapping(typeMapping: TYPE_MAPPING) { + return this._commandOptionsProxy('typeMapping', typeMapping); + } + + // /** + // * Override the `policies` command option + // * TODO + // */ + // withPolicies (policies: POLICIES) { + // return this._commandOptionsProxy('policies', policies); + // } + + _handleAsk( + fn: (client: RedisClientType, opts?: ClusterCommandOptions) => Promise + ) { + return async (client: RedisClientType, options?: ClusterCommandOptions) => { + const chainId = Symbol("asking chain"); + const opts = options ? {...options} : {}; + opts.chainId = chainId; + + + + const ret = await Promise.all( + [ + client.sendCommand([ASKING_CMD], {chainId: chainId}), + fn(client, opts) + ] + ); + + return ret[1]; + }; + } + + async _execute( + firstKey: RedisArgument | undefined, + isReadonly: boolean | undefined, + options: ClusterCommandOptions | undefined, + fn: (client: RedisClientType, opts?: ClusterCommandOptions) => Promise + ): Promise { + const maxCommandRedirections = this._options.maxCommandRedirections ?? 
16; + let client = await this._slots.getClient(firstKey, isReadonly); + let i = 0; + + let myFn = fn; + + while (true) { + try { + return await myFn(client, options); + } catch (err) { + myFn = fn; + + // TODO: error class + if (++i > maxCommandRedirections || !(err instanceof Error)) { + throw err; + } + + if (err.message.startsWith('ASK')) { + const address = err.message.substring(err.message.lastIndexOf(' ') + 1); + let redirectTo = await this._slots.getMasterByAddress(address); + if (!redirectTo) { + await this._slots.rediscover(client); + redirectTo = await this._slots.getMasterByAddress(address); + } + + if (!redirectTo) { + throw new Error(`Cannot find node ${address}`); + } + + client = redirectTo; + myFn = this._handleAsk(fn); + continue; + } + + if (err.message.startsWith('MOVED')) { + await this._slots.rediscover(client); + client = await this._slots.getClient(firstKey, isReadonly); + continue; + } + + throw err; + } + } + } + + async sendCommand( + firstKey: RedisArgument | undefined, + isReadonly: boolean | undefined, + args: CommandArguments, + options?: ClusterCommandOptions, + // defaultPolicies?: CommandPolicies + ): Promise { + + // Merge global options with local options + const opts = { + ...this._self._commandOptions, + ...options + } + return this._self._execute( + firstKey, + isReadonly, + opts, + (client, opts) => client.sendCommand(args, opts) + ); + } + + MULTI(routing?: RedisArgument) { + type Multi = new (...args: ConstructorParameters) => RedisClusterMultiCommandType<[], M, F, S, RESP, TYPE_MAPPING>; + return new ((this as any).Multi as Multi)( + async (firstKey, isReadonly, commands) => { + const client = await this._self._slots.getClient(firstKey, isReadonly); + return client._executeMulti(commands); + }, + async (firstKey, isReadonly, commands) => { + const client = await this._self._slots.getClient(firstKey, isReadonly); + return client._executePipeline(commands); + }, + routing, + this._commandOptions?.typeMapping + ); + } + + 
multi = this.MULTI; + + async SUBSCRIBE( + channels: string | Array, + listener: PubSubListener, + bufferMode?: T + ) { + return (await this._self._slots.getPubSubClient()) + .SUBSCRIBE(channels, listener, bufferMode); + } + + subscribe = this.SUBSCRIBE; + + async UNSUBSCRIBE( + channels?: string | Array, + listener?: PubSubListener, + bufferMode?: T + ) { + return this._self._slots.executeUnsubscribeCommand(client => + client.UNSUBSCRIBE(channels, listener, bufferMode) + ); + } + + unsubscribe = this.UNSUBSCRIBE; + + async PSUBSCRIBE( + patterns: string | Array, + listener: PubSubListener, + bufferMode?: T + ) { + return (await this._self._slots.getPubSubClient()) + .PSUBSCRIBE(patterns, listener, bufferMode); + } + + pSubscribe = this.PSUBSCRIBE; + + async PUNSUBSCRIBE( + patterns?: string | Array, + listener?: PubSubListener, + bufferMode?: T + ) { + return this._self._slots.executeUnsubscribeCommand(client => + client.PUNSUBSCRIBE(patterns, listener, bufferMode) + ); + } + + pUnsubscribe = this.PUNSUBSCRIBE; + + async SSUBSCRIBE( + channels: string | Array, + listener: PubSubListener, + bufferMode?: T + ) { + const maxCommandRedirections = this._self._options.maxCommandRedirections ?? 16, + firstChannel = Array.isArray(channels) ? channels[0] : channels; + let client = await this._self._slots.getShardedPubSubClient(firstChannel); + for (let i = 0; ; i++) { + try { + return await client.SSUBSCRIBE(channels, listener, bufferMode); + } catch (err) { + if (++i > maxCommandRedirections || !(err instanceof ErrorReply)) { + throw err; + } + + if (err.message.startsWith('MOVED')) { + await this._self._slots.rediscover(client); + client = await this._self._slots.getShardedPubSubClient(firstChannel); + continue; + } + + throw err; + } + } + } + + sSubscribe = this.SSUBSCRIBE; + + SUNSUBSCRIBE( + channels: string | Array, + listener?: PubSubListener, + bufferMode?: T + ) { + return this._self._slots.executeShardedUnsubscribeCommand( + Array.isArray(channels) ? 
channels[0] : channels, + client => client.SUNSUBSCRIBE(channels, listener, bufferMode) + ); + } + + resubscribeAllPubSubListeners(allListeners: PubSubListeners) { + for(const [channel, listeners] of allListeners.CHANNELS) { + listeners.buffers.forEach(bufListener => { + this.subscribe(channel, bufListener, true); + }); + listeners.strings.forEach(strListener => { + this.subscribe(channel, strListener); + }); + }; + for (const [channel, listeners] of allListeners.PATTERNS) { + listeners.buffers.forEach(bufListener => { + this.pSubscribe(channel, bufListener, true); + }); + listeners.strings.forEach(strListener => { + this.pSubscribe(channel, strListener); + }); + }; + for (const [channel, listeners] of allListeners.SHARDED) { + listeners.buffers.forEach(bufListener => { + this.sSubscribe(channel, bufListener, true); + }); + listeners.strings.forEach(strListener => { + this.sSubscribe(channel, strListener); + }); + }; + } + + sUnsubscribe = this.SUNSUBSCRIBE; + + /** + * @deprecated Use `close` instead. + */ + quit() { + return this._self._slots.quit(); + } + + /** + * @deprecated Use `destroy` instead. + */ + disconnect() { + return this._self._slots.disconnect(); + } + + close() { + this._self._slots.clientSideCache?.onPoolClose(); + return this._self._slots.close(); + } + + destroy() { + this._self._slots.clientSideCache?.onPoolClose(); + return this._self._slots.destroy(); + } + + nodeClient(node: ShardNode) { + return this._self._slots.nodeClient(node); + } + + /** + * Returns a random node from the cluster. + * Useful for running "forward" commands (like PUBLISH) on a random node. + */ + getRandomNode() { + return this._self._slots.getRandomNode(); + } + + /** + * Get a random node from a slot. + * Useful for running readonly commands on a slot. 
+ */ + getSlotRandomNode(slot: number) { + return this._self._slots.getSlotRandomNode(slot); + } + + /** + * @deprecated use `.masters` instead + * TODO + */ + getMasters() { + return this.masters; + } + + /** + * @deprecated use `.slots[]` instead + * TODO + */ + getSlotMaster(slot: number) { + return this.slots[slot].master; + } +} diff --git a/packages/client/lib/cluster/multi-command.ts b/packages/client/lib/cluster/multi-command.ts new file mode 100644 index 00000000000..f370618ff30 --- /dev/null +++ b/packages/client/lib/cluster/multi-command.ts @@ -0,0 +1,279 @@ +import COMMANDS from '../commands'; +import RedisMultiCommand, { MULTI_REPLY, MultiReply, MultiReplyType, RedisMultiQueuedCommand } from '../multi-command'; +import { ReplyWithTypeMapping, CommandReply, Command, CommandArguments, CommanderConfig, RedisFunctions, RedisModules, RedisScripts, RespVersions, TransformReply, RedisScript, RedisFunction, TypeMapping, RedisArgument } from '../RESP/types'; +import { attachConfig, functionArgumentsPrefix, getTransformReply } from '../commander'; +import { BasicCommandParser } from '../client/parser'; +import { Tail } from '../commands/generic-transformers'; + +type CommandSignature< + REPLIES extends Array, + C extends Command, + M extends RedisModules, + F extends RedisFunctions, + S extends RedisScripts, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping +> = (...args: Tail>) => RedisClusterMultiCommandType< + [...REPLIES, ReplyWithTypeMapping, TYPE_MAPPING>], + M, + F, + S, + RESP, + TYPE_MAPPING +>; + +type WithCommands< + REPLIES extends Array, + M extends RedisModules, + F extends RedisFunctions, + S extends RedisScripts, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping +> = { + [P in keyof typeof COMMANDS]: CommandSignature; +}; + +type WithModules< + REPLIES extends Array, + M extends RedisModules, + F extends RedisFunctions, + S extends RedisScripts, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping +> = { + 
[P in keyof M]: { + [C in keyof M[P]]: CommandSignature; + }; +}; + +type WithFunctions< + REPLIES extends Array, + M extends RedisModules, + F extends RedisFunctions, + S extends RedisScripts, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping +> = { + [L in keyof F]: { + [C in keyof F[L]]: CommandSignature; + }; +}; + +type WithScripts< + REPLIES extends Array, + M extends RedisModules, + F extends RedisFunctions, + S extends RedisScripts, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping +> = { + [P in keyof S]: CommandSignature; +}; + +export type RedisClusterMultiCommandType< + REPLIES extends Array, + M extends RedisModules, + F extends RedisFunctions, + S extends RedisScripts, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping +> = ( + RedisClusterMultiCommand & + WithCommands & + WithModules & + WithFunctions & + WithScripts +); + +export type ClusterMultiExecute = ( + firstKey: RedisArgument | undefined, + isReadonly: boolean | undefined, + commands: Array +) => Promise>; + +export default class RedisClusterMultiCommand { + static #createCommand(command: Command, resp: RespVersions) { + const transformReply = getTransformReply(command, resp); + + return function (this: RedisClusterMultiCommand, ...args: Array) { + const parser = new BasicCommandParser(); + command.parseCommand(parser, ...args); + + const redisArgs: CommandArguments = parser.redisArgs; + redisArgs.preserve = parser.preserve; + const firstKey = parser.firstKey; + + return this.addCommand( + firstKey, + command.IS_READ_ONLY, + redisArgs, + transformReply + ); + }; + } + + static #createModuleCommand(command: Command, resp: RespVersions) { + const transformReply = getTransformReply(command, resp); + + return function (this: { _self: RedisClusterMultiCommand }, ...args: Array) { + const parser = new BasicCommandParser(); + command.parseCommand(parser, ...args); + + const redisArgs: CommandArguments = parser.redisArgs; + redisArgs.preserve = 
parser.preserve; + const firstKey = parser.firstKey; + + return this._self.addCommand( + firstKey, + command.IS_READ_ONLY, + redisArgs, + transformReply + ); + }; + } + + static #createFunctionCommand(name: string, fn: RedisFunction, resp: RespVersions) { + const prefix = functionArgumentsPrefix(name, fn); + const transformReply = getTransformReply(fn, resp); + + return function (this: { _self: RedisClusterMultiCommand }, ...args: Array) { + const parser = new BasicCommandParser(); + parser.push(...prefix); + fn.parseCommand(parser, ...args); + + const redisArgs: CommandArguments = parser.redisArgs; + redisArgs.preserve = parser.preserve; + const firstKey = parser.firstKey; + + return this._self.addCommand( + firstKey, + fn.IS_READ_ONLY, + redisArgs, + transformReply + ); + }; + } + + static #createScriptCommand(script: RedisScript, resp: RespVersions) { + const transformReply = getTransformReply(script, resp); + + return function (this: RedisClusterMultiCommand, ...args: Array) { + const parser = new BasicCommandParser(); + script.parseCommand(parser, ...args); + + const scriptArgs: CommandArguments = parser.redisArgs; + scriptArgs.preserve = parser.preserve; + const firstKey = parser.firstKey; + + return this.#addScript( + firstKey, + script.IS_READ_ONLY, + script, + scriptArgs, + transformReply + ); + }; + } + + static extend< + M extends RedisModules = Record, + F extends RedisFunctions = Record, + S extends RedisScripts = Record, + RESP extends RespVersions = 2 + >(config?: CommanderConfig) { + return attachConfig({ + BaseClass: RedisClusterMultiCommand, + commands: COMMANDS, + createCommand: RedisClusterMultiCommand.#createCommand, + createModuleCommand: RedisClusterMultiCommand.#createModuleCommand, + createFunctionCommand: RedisClusterMultiCommand.#createFunctionCommand, + createScriptCommand: RedisClusterMultiCommand.#createScriptCommand, + config + }); + } + + readonly #multi: RedisMultiCommand + + readonly #executeMulti: ClusterMultiExecute; + readonly 
#executePipeline: ClusterMultiExecute; + #firstKey: RedisArgument | undefined; + #isReadonly: boolean | undefined = true; + + constructor( + executeMulti: ClusterMultiExecute, + executePipeline: ClusterMultiExecute, + routing: RedisArgument | undefined, + typeMapping?: TypeMapping + ) { + this.#multi = new RedisMultiCommand(typeMapping); + this.#executeMulti = executeMulti; + this.#executePipeline = executePipeline; + this.#firstKey = routing; + } + + #setState( + firstKey: RedisArgument | undefined, + isReadonly: boolean | undefined, + ) { + this.#firstKey ??= firstKey; + this.#isReadonly &&= isReadonly; + } + + addCommand( + firstKey: RedisArgument | undefined, + isReadonly: boolean | undefined, + args: CommandArguments, + transformReply?: TransformReply + ) { + this.#setState(firstKey, isReadonly); + this.#multi.addCommand(args, transformReply); + return this; + } + + #addScript( + firstKey: RedisArgument | undefined, + isReadonly: boolean | undefined, + script: RedisScript, + args: CommandArguments, + transformReply?: TransformReply + ) { + this.#setState(firstKey, isReadonly); + this.#multi.addScript(script, args, transformReply); + + return this; + } + + async exec(execAsPipeline = false) { + if (execAsPipeline) return this.execAsPipeline(); + + return this.#multi.transformReplies( + await this.#executeMulti( + this.#firstKey, + this.#isReadonly, + this.#multi.queue + ) + ) as MultiReplyType; + } + + EXEC = this.exec; + + execTyped(execAsPipeline = false) { + return this.exec(execAsPipeline); + } + + async execAsPipeline() { + if (this.#multi.queue.length === 0) return [] as MultiReplyType; + + return this.#multi.transformReplies( + await this.#executePipeline( + this.#firstKey, + this.#isReadonly, + this.#multi.queue + ) + ) as MultiReplyType; + } + + execAsPipelineTyped() { + return this.execAsPipeline(); + } +} diff --git a/packages/client/lib/commander.ts b/packages/client/lib/commander.ts new file mode 100644 index 00000000000..cfdf39526cc --- /dev/null 
+++ b/packages/client/lib/commander.ts @@ -0,0 +1,128 @@ +import { Command, CommanderConfig, RedisArgument, RedisCommands, RedisFunction, RedisFunctions, RedisModules, RedisScript, RedisScripts, RespVersions, TransformReply } from './RESP/types'; + +interface AttachConfigOptions< + M extends RedisModules, + F extends RedisFunctions, + S extends RedisScripts, + RESP extends RespVersions +> { + BaseClass: new (...args: any) => any; + commands: RedisCommands; + createCommand(command: Command, resp: RespVersions): (...args: any) => any; + createModuleCommand(command: Command, resp: RespVersions): (...args: any) => any; + createFunctionCommand(name: string, fn: RedisFunction, resp: RespVersions): (...args: any) => any; + createScriptCommand(script: RedisScript, resp: RespVersions): (...args: any) => any; + config?: CommanderConfig; +} + +/* FIXME: better error message / link */ +function throwResp3SearchModuleUnstableError() { + throw new Error('Some RESP3 results for Redis Query Engine responses may change. Refer to the readme for guidance'); +} + +export function attachConfig< + M extends RedisModules, + F extends RedisFunctions, + S extends RedisScripts, + RESP extends RespVersions +>({ + BaseClass, + commands, + createCommand, + createModuleCommand, + createFunctionCommand, + createScriptCommand, + config +}: AttachConfigOptions) { + const RESP = config?.RESP ?? 
2, + Class: any = class extends BaseClass {}; + + for (const [name, command] of Object.entries(commands)) { + if (config?.RESP == 3 && command.unstableResp3 && !config.unstableResp3) { + Class.prototype[name] = throwResp3SearchModuleUnstableError; + } else { + Class.prototype[name] = createCommand(command, RESP); + } + } + + if (config?.modules) { + for (const [moduleName, module] of Object.entries(config.modules)) { + const fns = Object.create(null); + for (const [name, command] of Object.entries(module)) { + if (config.RESP == 3 && command.unstableResp3 && !config.unstableResp3) { + fns[name] = throwResp3SearchModuleUnstableError; + } else { + fns[name] = createModuleCommand(command, RESP); + } + } + + attachNamespace(Class.prototype, moduleName, fns); + } + } + + if (config?.functions) { + for (const [library, commands] of Object.entries(config.functions)) { + const fns = Object.create(null); + for (const [name, command] of Object.entries(commands)) { + fns[name] = createFunctionCommand(name, command, RESP); + } + + attachNamespace(Class.prototype, library, fns); + } + } + + if (config?.scripts) { + for (const [name, script] of Object.entries(config.scripts)) { + Class.prototype[name] = createScriptCommand(script, RESP); + } + } + + return Class; +} + +function attachNamespace(prototype: any, name: PropertyKey, fns: any) { + Object.defineProperty(prototype, name, { + get() { + const value = Object.create(fns); + value._self = this; + Object.defineProperty(this, name, { value }); + return value; + } + }); +} + +export function getTransformReply(command: Command, resp: RespVersions): TransformReply | undefined { + switch (typeof command.transformReply) { + case 'function': + return command.transformReply; + + case 'object': + return command.transformReply[resp]; + } +} + +export function functionArgumentsPrefix(name: string, fn: RedisFunction) { + const prefix: Array = [ + fn.IS_READ_ONLY ? 
'FCALL_RO' : 'FCALL', + name + ]; + + if (fn.NUMBER_OF_KEYS !== undefined) { + prefix.push(fn.NUMBER_OF_KEYS.toString()); + } + + return prefix; +} + +export function scriptArgumentsPrefix(script: RedisScript) { + const prefix: Array = [ + script.IS_READ_ONLY ? 'EVALSHA_RO' : 'EVALSHA', + script.SHA1 + ]; + + if (script.NUMBER_OF_KEYS !== undefined) { + prefix.push(script.NUMBER_OF_KEYS.toString()); + } + + return prefix; +} diff --git a/packages/client/lib/commands/ACL_CAT.spec.ts b/packages/client/lib/commands/ACL_CAT.spec.ts new file mode 100644 index 00000000000..09d5ecade5a --- /dev/null +++ b/packages/client/lib/commands/ACL_CAT.spec.ts @@ -0,0 +1,32 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import { parseArgs } from './generic-transformers'; +import ACL_CAT from './ACL_CAT'; + +describe('ACL CAT', () => { + testUtils.isVersionGreaterThanHook([6]); + + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(ACL_CAT), + ['ACL', 'CAT'] + ); + }); + + it('with categoryName', () => { + assert.deepEqual( + parseArgs(ACL_CAT, 'dangerous'), + ['ACL', 'CAT', 'dangerous'] + ); + }); + }); + + testUtils.testWithClient('client.aclCat', async client => { + const categories = await client.aclCat(); + assert.ok(Array.isArray(categories)); + for (const category of categories) { + assert.equal(typeof category, 'string'); + } + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/ACL_CAT.ts b/packages/client/lib/commands/ACL_CAT.ts new file mode 100644 index 00000000000..f4ddfacc68d --- /dev/null +++ b/packages/client/lib/commands/ACL_CAT.ts @@ -0,0 +1,19 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, ArrayReply, BlobStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Lists ACL categories or commands in a category + * @param parser - The Redis command 
parser + * @param categoryName - Optional category name to filter commands + */ + parseCommand(parser: CommandParser, categoryName?: RedisArgument) { + parser.push('ACL', 'CAT'); + if (categoryName) { + parser.push(categoryName); + } + }, + transformReply: undefined as unknown as () => ArrayReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/ACL_DELUSER.spec.ts b/packages/client/lib/commands/ACL_DELUSER.spec.ts new file mode 100644 index 00000000000..45fa3af9fc7 --- /dev/null +++ b/packages/client/lib/commands/ACL_DELUSER.spec.ts @@ -0,0 +1,31 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ACL_DELUSER from './ACL_DELUSER'; +import { parseArgs } from './generic-transformers'; + +describe('ACL DELUSER', () => { + testUtils.isVersionGreaterThanHook([6]); + + describe('transformArguments', () => { + it('string', () => { + assert.deepEqual( + parseArgs(ACL_DELUSER, 'username'), + ['ACL', 'DELUSER', 'username'] + ); + }); + + it('array', () => { + assert.deepEqual( + parseArgs(ACL_DELUSER, ['1', '2']), + ['ACL', 'DELUSER', '1', '2'] + ); + }); + }); + + testUtils.testWithClient('client.aclDelUser', async client => { + assert.equal( + typeof await client.aclDelUser('user'), + 'number' + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/ACL_DELUSER.ts b/packages/client/lib/commands/ACL_DELUSER.ts new file mode 100644 index 00000000000..404641e0abb --- /dev/null +++ b/packages/client/lib/commands/ACL_DELUSER.ts @@ -0,0 +1,18 @@ +import { CommandParser } from '../client/parser'; +import { NumberReply, Command } from '../RESP/types'; +import { RedisVariadicArgument } from './generic-transformers'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Deletes one or more users from the ACL + * @param parser - The Redis command parser + * @param username - Username(s) to delete + */ + parseCommand(parser: CommandParser, username: 
RedisVariadicArgument) { + parser.push('ACL', 'DELUSER'); + parser.pushVariadic(username); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/ACL_DRYRUN.spec.ts b/packages/client/lib/commands/ACL_DRYRUN.spec.ts new file mode 100644 index 00000000000..38a4def8361 --- /dev/null +++ b/packages/client/lib/commands/ACL_DRYRUN.spec.ts @@ -0,0 +1,22 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ACL_DRYRUN from './ACL_DRYRUN'; +import { parseArgs } from './generic-transformers'; + +describe('ACL DRYRUN', () => { + testUtils.isVersionGreaterThanHook([7]); + + it('transformArguments', () => { + assert.deepEqual( + parseArgs(ACL_DRYRUN, 'default', ['GET', 'key']), + ['ACL', 'DRYRUN', 'default', 'GET', 'key'] + ); + }); + + testUtils.testWithClient('client.aclDryRun', async client => { + assert.equal( + await client.aclDryRun('default', ['GET', 'key']), + 'OK' + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/ACL_DRYRUN.ts b/packages/client/lib/commands/ACL_DRYRUN.ts new file mode 100644 index 00000000000..49ac41a859a --- /dev/null +++ b/packages/client/lib/commands/ACL_DRYRUN.ts @@ -0,0 +1,18 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, SimpleStringReply, BlobStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Simulates ACL operations without executing them + * @param parser - The Redis command parser + * @param username - Username to simulate ACL operations for + * @param command - Command arguments to simulate + */ + parseCommand(parser: CommandParser, username: RedisArgument, command: Array) { + parser.push('ACL', 'DRYRUN', username, ...command); + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> | BlobStringReply +} as const satisfies Command; + diff --git 
a/packages/client/lib/commands/ACL_GENPASS.spec.ts b/packages/client/lib/commands/ACL_GENPASS.spec.ts new file mode 100644 index 00000000000..35e161f424f --- /dev/null +++ b/packages/client/lib/commands/ACL_GENPASS.spec.ts @@ -0,0 +1,31 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ACL_GENPASS from './ACL_GENPASS'; +import { parseArgs } from './generic-transformers'; + +describe('ACL GENPASS', () => { + testUtils.isVersionGreaterThanHook([6]); + + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(ACL_GENPASS), + ['ACL', 'GENPASS'] + ); + }); + + it('with bits', () => { + assert.deepEqual( + parseArgs(ACL_GENPASS, 128), + ['ACL', 'GENPASS', '128'] + ); + }); + }); + + testUtils.testWithClient('client.aclGenPass', async client => { + assert.equal( + typeof await client.aclGenPass(), + 'string' + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/ACL_GENPASS.ts b/packages/client/lib/commands/ACL_GENPASS.ts new file mode 100644 index 00000000000..d1785839a5c --- /dev/null +++ b/packages/client/lib/commands/ACL_GENPASS.ts @@ -0,0 +1,20 @@ +import { CommandParser } from '../client/parser'; +import { BlobStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Generates a secure password for ACL users + * @param parser - The Redis command parser + * @param bits - Optional number of bits for password entropy + */ + parseCommand(parser: CommandParser, bits?: number) { + parser.push('ACL', 'GENPASS'); + if (bits) { + parser.push(bits.toString()); + } + }, + transformReply: undefined as unknown as () => BlobStringReply +} as const satisfies Command; + diff --git a/packages/client/lib/commands/ACL_GETUSER.spec.ts b/packages/client/lib/commands/ACL_GETUSER.spec.ts new file mode 100644 index 00000000000..83776a3473a --- /dev/null +++ 
b/packages/client/lib/commands/ACL_GETUSER.spec.ts @@ -0,0 +1,35 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ACL_GETUSER from './ACL_GETUSER'; +import { parseArgs } from './generic-transformers'; + +describe('ACL GETUSER', () => { + testUtils.isVersionGreaterThanHook([6]); + + it('transformArguments', () => { + assert.deepEqual( + parseArgs(ACL_GETUSER, 'username'), + ['ACL', 'GETUSER', 'username'] + ); + }); + + testUtils.testWithClient('client.aclGetUser', async client => { + const reply = await client.aclGetUser('default'); + + assert.ok(Array.isArray(reply.passwords)); + assert.equal(typeof reply.commands, 'string'); + assert.ok(Array.isArray(reply.flags)); + + if (testUtils.isVersionGreaterThan([7])) { + assert.equal(typeof reply.keys, 'string'); + assert.equal(typeof reply.channels, 'string'); + assert.ok(Array.isArray(reply.selectors)); + } else { + assert.ok(Array.isArray(reply.keys)); + + if (testUtils.isVersionGreaterThan([6, 2])) { + assert.ok(Array.isArray(reply.channels)); + } + } + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/ACL_GETUSER.ts b/packages/client/lib/commands/ACL_GETUSER.ts new file mode 100644 index 00000000000..a1505251c6e --- /dev/null +++ b/packages/client/lib/commands/ACL_GETUSER.ts @@ -0,0 +1,49 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, TuplesToMapReply, BlobStringReply, ArrayReply, UnwrapReply, Resp2Reply, Command } from '../RESP/types'; + +type AclUser = TuplesToMapReply<[ + [BlobStringReply<'flags'>, ArrayReply], + [BlobStringReply<'passwords'>, ArrayReply], + [BlobStringReply<'commands'>, BlobStringReply], + /** changed to BlobStringReply in 7.0 */ + [BlobStringReply<'keys'>, ArrayReply | BlobStringReply], + /** added in 6.2, changed to BlobStringReply in 7.0 */ + [BlobStringReply<'channels'>, ArrayReply | BlobStringReply], + /** added in 7.0 */ + [BlobStringReply<'selectors'>, ArrayReply, 
BlobStringReply], + [BlobStringReply<'keys'>, BlobStringReply], + [BlobStringReply<'channels'>, BlobStringReply] + ]>>], +]>; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Returns ACL information about a specific user + * @param parser - The Redis command parser + * @param username - Username to get information for + */ + parseCommand(parser: CommandParser, username: RedisArgument) { + parser.push('ACL', 'GETUSER', username); + }, + transformReply: { + 2: (reply: UnwrapReply>) => ({ + flags: reply[1], + passwords: reply[3], + commands: reply[5], + keys: reply[7], + channels: reply[9], + selectors: (reply[11] as unknown as UnwrapReply)?.map(selector => { + const inferred = selector as unknown as UnwrapReply; + return { + commands: inferred[1], + keys: inferred[3], + channels: inferred[5] + }; + }) + }), + 3: undefined as unknown as () => AclUser + } +} as const satisfies Command; diff --git a/packages/client/lib/commands/ACL_LIST.spec.ts b/packages/client/lib/commands/ACL_LIST.spec.ts new file mode 100644 index 00000000000..0f67aaa53e9 --- /dev/null +++ b/packages/client/lib/commands/ACL_LIST.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ACL_LIST from './ACL_LIST'; +import { parseArgs } from './generic-transformers'; + +describe('ACL LIST', () => { + testUtils.isVersionGreaterThanHook([6]); + + it('transformArguments', () => { + assert.deepEqual( + parseArgs(ACL_LIST), + ['ACL', 'LIST'] + ); + }); + + testUtils.testWithClient('client.aclList', async client => { + const users = await client.aclList(); + assert.ok(Array.isArray(users)); + for (const user of users) { + assert.equal(typeof user, 'string'); + } + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/ACL_LIST.ts b/packages/client/lib/commands/ACL_LIST.ts new file mode 100644 index 00000000000..4d2ec995cd5 --- /dev/null +++ b/packages/client/lib/commands/ACL_LIST.ts @@ 
-0,0 +1,15 @@ +import { CommandParser } from '../client/parser'; +import { ArrayReply, BlobStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Returns all configured ACL users and their permissions + * @param parser - The Redis command parser + */ + parseCommand(parser: CommandParser) { + parser.push('ACL', 'LIST'); + }, + transformReply: undefined as unknown as () => ArrayReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/ACL_LOAD.spec.ts b/packages/client/lib/commands/ACL_LOAD.spec.ts new file mode 100644 index 00000000000..a41ce45e8a6 --- /dev/null +++ b/packages/client/lib/commands/ACL_LOAD.spec.ts @@ -0,0 +1,15 @@ +import { strict as assert } from 'node:assert'; +import testUtils from '../test-utils'; +import ACL_LOAD from './ACL_LOAD'; +import { parseArgs } from './generic-transformers'; + +describe('ACL LOAD', () => { + testUtils.isVersionGreaterThanHook([6]); + + it('transformArguments', () => { + assert.deepEqual( + parseArgs(ACL_LOAD), + ['ACL', 'LOAD'] + ); + }); +}); diff --git a/packages/client/lib/commands/ACL_LOAD.ts b/packages/client/lib/commands/ACL_LOAD.ts new file mode 100644 index 00000000000..0367904a507 --- /dev/null +++ b/packages/client/lib/commands/ACL_LOAD.ts @@ -0,0 +1,15 @@ +import { CommandParser } from '../client/parser'; +import { SimpleStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Reloads ACL configuration from the ACL file + * @param parser - The Redis command parser + */ + parseCommand(parser: CommandParser) { + parser.push('ACL', 'LOAD'); + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/client/lib/commands/ACL_LOG.spec.ts b/packages/client/lib/commands/ACL_LOG.spec.ts new file mode 100644 index 00000000000..7da61faca37 --- /dev/null +++ 
b/packages/client/lib/commands/ACL_LOG.spec.ts @@ -0,0 +1,51 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ACL_LOG from './ACL_LOG'; +import { parseArgs } from './generic-transformers'; + +describe('ACL LOG', () => { + testUtils.isVersionGreaterThanHook([6]); + + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(ACL_LOG), + ['ACL', 'LOG'] + ); + }); + + it('with count', () => { + assert.deepEqual( + parseArgs(ACL_LOG, 10), + ['ACL', 'LOG', '10'] + ); + }); + }); + + testUtils.testWithClient('client.aclLog', async client => { + // make sure to create one log + await assert.rejects( + client.auth({ + username: 'incorrect', + password: 'incorrect' + }) + ); + + const logs = await client.aclLog(); + assert.ok(Array.isArray(logs)); + for (const log of logs) { + assert.equal(typeof log.count, 'number'); + assert.equal(typeof log.reason, 'string'); + assert.equal(typeof log.context, 'string'); + assert.equal(typeof log.object, 'string'); + assert.equal(typeof log.username, 'string'); + assert.equal(typeof log['age-seconds'], 'number'); + assert.equal(typeof log['client-info'], 'string'); + if (testUtils.isVersionGreaterThan([7, 2])) { + assert.equal(typeof log['entry-id'], 'number'); + assert.equal(typeof log['timestamp-created'], 'number'); + assert.equal(typeof log['timestamp-last-updated'], 'number'); + } + } + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/ACL_LOG.ts b/packages/client/lib/commands/ACL_LOG.ts new file mode 100644 index 00000000000..a65f85039b1 --- /dev/null +++ b/packages/client/lib/commands/ACL_LOG.ts @@ -0,0 +1,55 @@ +import { CommandParser } from '../client/parser'; +import { ArrayReply, TuplesToMapReply, BlobStringReply, NumberReply, DoubleReply, UnwrapReply, Resp2Reply, Command, TypeMapping } from '../RESP/types'; +import { transformDoubleReply } from './generic-transformers'; + +export type AclLogReply = 
ArrayReply, NumberReply], + [BlobStringReply<'reason'>, BlobStringReply], + [BlobStringReply<'context'>, BlobStringReply], + [BlobStringReply<'object'>, BlobStringReply], + [BlobStringReply<'username'>, BlobStringReply], + [BlobStringReply<'age-seconds'>, DoubleReply], + [BlobStringReply<'client-info'>, BlobStringReply], + /** added in 7.0 */ + [BlobStringReply<'entry-id'>, NumberReply], + /** added in 7.0 */ + [BlobStringReply<'timestamp-created'>, NumberReply], + /** added in 7.0 */ + [BlobStringReply<'timestamp-last-updated'>, NumberReply] +]>>; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Returns ACL security events log entries + * @param parser - The Redis command parser + * @param count - Optional maximum number of entries to return + */ + parseCommand(parser: CommandParser, count?: number) { + parser.push('ACL', 'LOG'); + if (count != undefined) { + parser.push(count.toString()); + } + }, + transformReply: { + 2: (reply: UnwrapReply>, preserve?: any, typeMapping?: TypeMapping) => { + return reply.map(item => { + const inferred = item as unknown as UnwrapReply; + return { + count: inferred[1], + reason: inferred[3], + context: inferred[5], + object: inferred[7], + username: inferred[9], + 'age-seconds': transformDoubleReply[2](inferred[11], preserve, typeMapping), + 'client-info': inferred[13], + 'entry-id': inferred[15], + 'timestamp-created': inferred[17], + 'timestamp-last-updated': inferred[19] + }; + }) + }, + 3: undefined as unknown as () => AclLogReply + } +} as const satisfies Command; diff --git a/packages/client/lib/commands/ACL_LOG_RESET.spec.ts b/packages/client/lib/commands/ACL_LOG_RESET.spec.ts new file mode 100644 index 00000000000..62d193a132d --- /dev/null +++ b/packages/client/lib/commands/ACL_LOG_RESET.spec.ts @@ -0,0 +1,22 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ACL_LOG_RESET from './ACL_LOG_RESET'; +import { parseArgs } from 
'./generic-transformers'; + +describe('ACL LOG RESET', () => { + testUtils.isVersionGreaterThanHook([6]); + + it('transformArguments', () => { + assert.deepEqual( + parseArgs(ACL_LOG_RESET), + ['ACL', 'LOG', 'RESET'] + ); + }); + + testUtils.testWithClient('client.aclLogReset', async client => { + assert.equal( + await client.aclLogReset(), + 'OK' + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/ACL_LOG_RESET.ts b/packages/client/lib/commands/ACL_LOG_RESET.ts new file mode 100644 index 00000000000..31897c1458c --- /dev/null +++ b/packages/client/lib/commands/ACL_LOG_RESET.ts @@ -0,0 +1,16 @@ +import { CommandParser } from '../client/parser'; +import { SimpleStringReply, Command } from '../RESP/types'; +import ACL_LOG from './ACL_LOG'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: ACL_LOG.IS_READ_ONLY, + /** + * Clears the ACL security events log + * @param parser - The Redis command parser + */ + parseCommand(parser: CommandParser) { + parser.push('ACL', 'LOG', 'RESET'); + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/client/lib/commands/ACL_SAVE.spec.ts b/packages/client/lib/commands/ACL_SAVE.spec.ts new file mode 100644 index 00000000000..98f7c9f183d --- /dev/null +++ b/packages/client/lib/commands/ACL_SAVE.spec.ts @@ -0,0 +1,15 @@ +import { strict as assert } from 'node:assert'; +import testUtils from '../test-utils'; +import ACL_SAVE from './ACL_SAVE'; +import { parseArgs } from './generic-transformers'; + +describe('ACL SAVE', () => { + testUtils.isVersionGreaterThanHook([6]); + + it('transformArguments', () => { + assert.deepEqual( + parseArgs(ACL_SAVE), + ['ACL', 'SAVE'] + ); + }); +}); diff --git a/packages/client/lib/commands/ACL_SAVE.ts b/packages/client/lib/commands/ACL_SAVE.ts new file mode 100644 index 00000000000..c53c611ab6f --- /dev/null +++ b/packages/client/lib/commands/ACL_SAVE.ts @@ -0,0 +1,15 @@ +import { 
CommandParser } from '../client/parser'; +import { SimpleStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Saves the current ACL configuration to the ACL file + * @param parser - The Redis command parser + */ + parseCommand(parser: CommandParser) { + parser.push('ACL', 'SAVE'); + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/client/lib/commands/ACL_SETUSER.spec.ts b/packages/client/lib/commands/ACL_SETUSER.spec.ts new file mode 100644 index 00000000000..9f39868e809 --- /dev/null +++ b/packages/client/lib/commands/ACL_SETUSER.spec.ts @@ -0,0 +1,24 @@ +import { strict as assert } from 'node:assert'; +import testUtils from '../test-utils'; +import ACL_SETUSER from './ACL_SETUSER'; +import { parseArgs } from './generic-transformers'; + +describe('ACL SETUSER', () => { + testUtils.isVersionGreaterThanHook([6]); + + describe('transformArguments', () => { + it('string', () => { + assert.deepEqual( + parseArgs(ACL_SETUSER, 'username', 'allkeys'), + ['ACL', 'SETUSER', 'username', 'allkeys'] + ); + }); + + it('array', () => { + assert.deepEqual( + parseArgs(ACL_SETUSER, 'username', ['allkeys', 'allchannels']), + ['ACL', 'SETUSER', 'username', 'allkeys', 'allchannels'] + ); + }); + }); +}); diff --git a/packages/client/lib/commands/ACL_SETUSER.ts b/packages/client/lib/commands/ACL_SETUSER.ts new file mode 100644 index 00000000000..dd3d9b770f5 --- /dev/null +++ b/packages/client/lib/commands/ACL_SETUSER.ts @@ -0,0 +1,19 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, SimpleStringReply, Command } from '../RESP/types'; +import { RedisVariadicArgument } from './generic-transformers'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Creates or modifies ACL user with specified rules + * @param parser - The Redis command parser + * @param username - Username to create 
or modify + * @param rule - ACL rule(s) to apply to the user + */ + parseCommand(parser: CommandParser, username: RedisArgument, rule: RedisVariadicArgument) { + parser.push('ACL', 'SETUSER', username); + parser.pushVariadic(rule); + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/client/lib/commands/ACL_USERS.spec.ts b/packages/client/lib/commands/ACL_USERS.spec.ts new file mode 100644 index 00000000000..d897b61e4f3 --- /dev/null +++ b/packages/client/lib/commands/ACL_USERS.spec.ts @@ -0,0 +1,15 @@ +import { strict as assert } from 'node:assert'; +import testUtils from '../test-utils'; +import ACL_USERS from './ACL_USERS'; +import { parseArgs } from './generic-transformers'; + +describe('ACL USERS', () => { + testUtils.isVersionGreaterThanHook([6]); + + it('transformArguments', () => { + assert.deepEqual( + parseArgs(ACL_USERS), + ['ACL', 'USERS'] + ); + }); +}); diff --git a/packages/client/lib/commands/ACL_USERS.ts b/packages/client/lib/commands/ACL_USERS.ts new file mode 100644 index 00000000000..92c965b9f93 --- /dev/null +++ b/packages/client/lib/commands/ACL_USERS.ts @@ -0,0 +1,15 @@ +import { CommandParser } from '../client/parser'; +import { ArrayReply, BlobStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Returns a list of all configured ACL usernames + * @param parser - The Redis command parser + */ + parseCommand(parser: CommandParser) { + parser.push('ACL', 'USERS'); + }, + transformReply: undefined as unknown as () => ArrayReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/ACL_WHOAMI.spec.ts b/packages/client/lib/commands/ACL_WHOAMI.spec.ts new file mode 100644 index 00000000000..f939c657a7a --- /dev/null +++ b/packages/client/lib/commands/ACL_WHOAMI.spec.ts @@ -0,0 +1,15 @@ +import { strict as assert } from 'node:assert'; +import testUtils from '../test-utils'; +import 
ACL_WHOAMI from './ACL_WHOAMI'; +import { parseArgs } from './generic-transformers'; + +describe('ACL WHOAMI', () => { + testUtils.isVersionGreaterThanHook([6]); + + it('transformArguments', () => { + assert.deepEqual( + parseArgs(ACL_WHOAMI), + ['ACL', 'WHOAMI'] + ); + }); +}); diff --git a/packages/client/lib/commands/ACL_WHOAMI.ts b/packages/client/lib/commands/ACL_WHOAMI.ts new file mode 100644 index 00000000000..1d58eb2f3d3 --- /dev/null +++ b/packages/client/lib/commands/ACL_WHOAMI.ts @@ -0,0 +1,15 @@ +import { CommandParser } from '../client/parser'; +import { BlobStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Returns the username of the current connection + * @param parser - The Redis command parser + */ + parseCommand(parser: CommandParser) { + parser.push('ACL', 'WHOAMI'); + }, + transformReply: undefined as unknown as () => BlobStringReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/APPEND.spec.ts b/packages/client/lib/commands/APPEND.spec.ts new file mode 100644 index 00000000000..925c16917b9 --- /dev/null +++ b/packages/client/lib/commands/APPEND.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import APPEND from './APPEND'; +import { parseArgs } from './generic-transformers'; + +describe('APPEND', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(APPEND, 'key', 'value'), + ['APPEND', 'key', 'value'] + ); + }); + + testUtils.testAll('append', async client => { + assert.equal( + await client.append('key', 'value'), + 5 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/APPEND.ts b/packages/client/lib/commands/APPEND.ts new file mode 100644 index 00000000000..f7ca6c83578 --- /dev/null +++ b/packages/client/lib/commands/APPEND.ts @@ -0,0 +1,17 @@ +import { CommandParser } 
from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; + +export default { + IS_READ_ONLY: false, + /** + * Appends a value to a string key + * @param parser - The Redis command parser + * @param key - The key to append to + * @param value - The value to append + */ + parseCommand(parser: CommandParser, key: RedisArgument, value: RedisArgument) { + parser.push('APPEND', key, value); + }, + + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/ASKING.spec.ts b/packages/client/lib/commands/ASKING.spec.ts new file mode 100644 index 00000000000..7be4d25d449 --- /dev/null +++ b/packages/client/lib/commands/ASKING.spec.ts @@ -0,0 +1,12 @@ +import { strict as assert } from 'node:assert'; +import ASKING from './ASKING'; +import { parseArgs } from './generic-transformers'; + +describe('ASKING', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(ASKING), + ['ASKING'] + ); + }); +}); diff --git a/packages/client/lib/commands/ASKING.ts b/packages/client/lib/commands/ASKING.ts new file mode 100644 index 00000000000..5484b2ccc62 --- /dev/null +++ b/packages/client/lib/commands/ASKING.ts @@ -0,0 +1,17 @@ +import { CommandParser } from '../client/parser'; +import { SimpleStringReply, Command } from '../RESP/types'; + +export const ASKING_CMD = 'ASKING'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Tells a Redis cluster node that the client is ok receiving such redirects + * @param parser - The Redis command parser + */ + parseCommand(parser: CommandParser) { + parser.push(ASKING_CMD); + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/client/lib/commands/AUTH.spec.ts b/packages/client/lib/commands/AUTH.spec.ts new file mode 100644 index 00000000000..762dd24f16a --- /dev/null +++ b/packages/client/lib/commands/AUTH.spec.ts @@ -0,0 
+1,26 @@ +import { strict as assert } from 'node:assert'; +import AUTH from './AUTH'; +import { parseArgs } from './generic-transformers'; + +describe('AUTH', () => { + describe('transformArguments', () => { + it('password only', () => { + assert.deepEqual( + parseArgs(AUTH, { + password: 'password' + }), + ['AUTH', 'password'] + ); + }); + + it('username & password', () => { + assert.deepEqual( + parseArgs(AUTH, { + username: 'username', + password: 'password' + }), + ['AUTH', 'username', 'password'] + ); + }); + }); +}); diff --git a/packages/client/lib/commands/AUTH.ts b/packages/client/lib/commands/AUTH.ts new file mode 100644 index 00000000000..40ed45bc936 --- /dev/null +++ b/packages/client/lib/commands/AUTH.ts @@ -0,0 +1,27 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, SimpleStringReply, Command } from '../RESP/types'; + +export interface AuthOptions { + username?: RedisArgument; + password: RedisArgument; +} + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Authenticates the connection using a password or username and password + * @param parser - The Redis command parser + * @param options - Authentication options containing username and/or password + * @param options.username - Optional username for authentication + * @param options.password - Password for authentication + */ + parseCommand(parser: CommandParser, { username, password }: AuthOptions) { + parser.push('AUTH'); + if (username !== undefined) { + parser.push(username); + } + parser.push(password); + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/client/lib/commands/BGREWRITEAOF.spec.ts b/packages/client/lib/commands/BGREWRITEAOF.spec.ts new file mode 100644 index 00000000000..f58ec9a5762 --- /dev/null +++ b/packages/client/lib/commands/BGREWRITEAOF.spec.ts @@ -0,0 +1,20 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from 
'../test-utils'; +import BGREWRITEAOF from './BGREWRITEAOF'; +import { parseArgs } from './generic-transformers'; + +describe('BGREWRITEAOF', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(BGREWRITEAOF), + ['BGREWRITEAOF'] + ); + }); + + testUtils.testWithClient('client.bgRewriteAof', async client => { + assert.equal( + typeof await client.bgRewriteAof(), + 'string' + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/BGREWRITEAOF.ts b/packages/client/lib/commands/BGREWRITEAOF.ts new file mode 100644 index 00000000000..2aa4c6b7b89 --- /dev/null +++ b/packages/client/lib/commands/BGREWRITEAOF.ts @@ -0,0 +1,15 @@ +import { CommandParser } from '../client/parser'; +import { SimpleStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Asynchronously rewrites the append-only file + * @param parser - The Redis command parser + */ + parseCommand(parser: CommandParser) { + parser.push('BGREWRITEAOF'); + }, + transformReply: undefined as unknown as () => SimpleStringReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/BGSAVE.spec.ts b/packages/client/lib/commands/BGSAVE.spec.ts new file mode 100644 index 00000000000..dcf7b815119 --- /dev/null +++ b/packages/client/lib/commands/BGSAVE.spec.ts @@ -0,0 +1,33 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import BGSAVE from './BGSAVE'; +import { parseArgs } from './generic-transformers'; + +describe('BGSAVE', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(BGSAVE), + ['BGSAVE'] + ); + }); + + it('with SCHEDULE', () => { + assert.deepEqual( + parseArgs(BGSAVE, { + SCHEDULE: true + }), + ['BGSAVE', 'SCHEDULE'] + ); + }); + }); + + testUtils.testWithClient('client.bgSave', async client => { + assert.equal( + typeof await client.bgSave({ + SCHEDULE: true // using `SCHEDULE` 
to make sure it won't throw an error + }), + 'string' + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/BGSAVE.ts b/packages/client/lib/commands/BGSAVE.ts new file mode 100644 index 00000000000..cad8ad5393e --- /dev/null +++ b/packages/client/lib/commands/BGSAVE.ts @@ -0,0 +1,24 @@ +import { CommandParser } from '../client/parser'; +import { SimpleStringReply, Command } from '../RESP/types'; + +export interface BgSaveOptions { + SCHEDULE?: boolean; +} + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Asynchronously saves the dataset to disk + * @param parser - The Redis command parser + * @param options - Optional configuration + * @param options.SCHEDULE - Schedule a BGSAVE operation when no BGSAVE is already in progress + */ + parseCommand(parser: CommandParser, options?: BgSaveOptions) { + parser.push('BGSAVE'); + if (options?.SCHEDULE) { + parser.push('SCHEDULE'); + } + }, + transformReply: undefined as unknown as () => SimpleStringReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/BITCOUNT.spec.ts b/packages/client/lib/commands/BITCOUNT.spec.ts new file mode 100644 index 00000000000..e2990472948 --- /dev/null +++ b/packages/client/lib/commands/BITCOUNT.spec.ts @@ -0,0 +1,48 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import BITCOUNT from './BITCOUNT'; +import { parseArgs } from './generic-transformers'; + +describe('BITCOUNT', () => { + describe('parseCommand', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(BITCOUNT, 'key'), + ['BITCOUNT', 'key'] + ); + }); + + describe('with range', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(BITCOUNT, 'key', { + start: 0, + end: 1 + }), + ['BITCOUNT', 'key', '0', '1'] + ); + }); + + it('with mode', () => { + assert.deepEqual( + parseArgs(BITCOUNT, 'key', { + start: 0, + end: 1, + mode: 'BIT' + }), + ['BITCOUNT', 'key', '0', '1', 'BIT'] + ); + }); 
+ }); + }); + + testUtils.testAll('bitCount', async client => { + assert.equal( + await client.bitCount('key'), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/BITCOUNT.ts b/packages/client/lib/commands/BITCOUNT.ts new file mode 100644 index 00000000000..a8d7295ff3c --- /dev/null +++ b/packages/client/lib/commands/BITCOUNT.ts @@ -0,0 +1,35 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; + +export interface BitCountRange { + start: number; + end: number; + mode?: 'BYTE' | 'BIT'; +} + +export default { + CACHEABLE: true, + IS_READ_ONLY: true, + /** + * Returns the count of set bits in a string key + * @param parser - The Redis command parser + * @param key - The key to count bits in + * @param range - Optional range specification + * @param range.start - Start offset in bytes/bits + * @param range.end - End offset in bytes/bits + * @param range.mode - Optional counting mode: BYTE or BIT + */ + parseCommand(parser: CommandParser, key: RedisArgument, range?: BitCountRange) { + parser.push('BITCOUNT'); + parser.pushKey(key); + if (range) { + parser.push(range.start.toString()); + parser.push(range.end.toString()); + + if (range.mode) { + parser.push(range.mode); + } + } + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/BITFIELD.spec.ts b/packages/client/lib/commands/BITFIELD.spec.ts new file mode 100644 index 00000000000..5fcc112466b --- /dev/null +++ b/packages/client/lib/commands/BITFIELD.spec.ts @@ -0,0 +1,56 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import BITFIELD from './BITFIELD'; +import { parseArgs } from './generic-transformers'; + +describe('BITFIELD', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(BITFIELD, 'key', [{ + 
operation: 'OVERFLOW', + behavior: 'WRAP' + }, { + operation: 'GET', + encoding: 'i8', + offset: 0 + }, { + operation: 'OVERFLOW', + behavior: 'SAT' + }, { + operation: 'SET', + encoding: 'i16', + offset: 1, + value: 0 + }, { + operation: 'OVERFLOW', + behavior: 'FAIL' + }, { + operation: 'INCRBY', + encoding: 'i32', + offset: 2, + increment: 1 + }]), + ['BITFIELD', 'key', 'OVERFLOW', 'WRAP', 'GET', 'i8', '0', 'OVERFLOW', 'SAT', 'SET', 'i16', '1', '0', 'OVERFLOW', 'FAIL', 'INCRBY', 'i32', '2', '1'] + ); + }); + + testUtils.testAll('bitField', async client => { + const a = client.bitField('key', [{ + operation: 'GET', + encoding: 'i8', + offset: 0 + }]); + + assert.deepEqual( + await client.bitField('key', [{ + operation: 'GET', + encoding: 'i8', + offset: 0 + }]), + [0] + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/BITFIELD.ts b/packages/client/lib/commands/BITFIELD.ts new file mode 100644 index 00000000000..984c90e270e --- /dev/null +++ b/packages/client/lib/commands/BITFIELD.ts @@ -0,0 +1,92 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, ArrayReply, NumberReply, NullReply, Command } from '../RESP/types'; + +export type BitFieldEncoding = `${'i' | 'u'}${number}`; + +export interface BitFieldOperation { + operation: S; +} + +export interface BitFieldGetOperation extends BitFieldOperation<'GET'> { + encoding: BitFieldEncoding; + offset: number | string; +} + +export interface BitFieldSetOperation extends BitFieldOperation<'SET'> { + encoding: BitFieldEncoding; + offset: number | string; + value: number; +} + +export interface BitFieldIncrByOperation extends BitFieldOperation<'INCRBY'> { + encoding: BitFieldEncoding; + offset: number | string; + increment: number; +} + +export interface BitFieldOverflowOperation extends BitFieldOperation<'OVERFLOW'> { + behavior: string; +} + +export type BitFieldOperations = Array< + BitFieldGetOperation | + 
BitFieldSetOperation | + BitFieldIncrByOperation | + BitFieldOverflowOperation +>; + +export type BitFieldRoOperations = Array< + Omit +>; + +export default { + IS_READ_ONLY: false, + /** + * Performs arbitrary bitfield integer operations on strings + * @param parser - The Redis command parser + * @param key - The key holding the string + * @param operations - Array of bitfield operations to perform: GET, SET, INCRBY or OVERFLOW + */ + parseCommand(parser: CommandParser, key: RedisArgument, operations: BitFieldOperations) { + parser.push('BITFIELD'); + parser.pushKey(key); + + for (const options of operations) { + switch (options.operation) { + case 'GET': + parser.push( + 'GET', + options.encoding, + options.offset.toString() + ); + break; + + case 'SET': + parser.push( + 'SET', + options.encoding, + options.offset.toString(), + options.value.toString() + ); + break; + + case 'INCRBY': + parser.push( + 'INCRBY', + options.encoding, + options.offset.toString(), + options.increment.toString() + ); + break; + + case 'OVERFLOW': + parser.push( + 'OVERFLOW', + options.behavior + ); + break; + } + } + }, + transformReply: undefined as unknown as () => ArrayReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/BITFIELD_RO.spec.ts b/packages/client/lib/commands/BITFIELD_RO.spec.ts new file mode 100644 index 00000000000..f2c1797412f --- /dev/null +++ b/packages/client/lib/commands/BITFIELD_RO.spec.ts @@ -0,0 +1,31 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import BITFIELD_RO from './BITFIELD_RO'; +import { parseArgs } from './generic-transformers'; + +describe('BITFIELD_RO', () => { + testUtils.isVersionGreaterThanHook([6, 2]); + + it('parseCommand', () => { + assert.deepEqual( + parseArgs(BITFIELD_RO, 'key', [{ + encoding: 'i8', + offset: 0 + }]), + ['BITFIELD_RO', 'key', 'GET', 'i8', '0'] + ); + }); + + testUtils.testAll('bitFieldRo', async client => { + assert.deepEqual( + await 
client.bitFieldRo('key', [{ + encoding: 'i8', + offset: 0 + }]), + [0] + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/BITFIELD_RO.ts b/packages/client/lib/commands/BITFIELD_RO.ts new file mode 100644 index 00000000000..e81a3ce4fbe --- /dev/null +++ b/packages/client/lib/commands/BITFIELD_RO.ts @@ -0,0 +1,29 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, ArrayReply, NumberReply, Command } from '../RESP/types'; +import { BitFieldGetOperation } from './BITFIELD'; + +export type BitFieldRoOperations = Array< + Omit +>; + +export default { + CACHEABLE: true, + IS_READ_ONLY: true, + /** + * Performs read-only bitfield integer operations on strings + * @param parser - The Redis command parser + * @param key - The key holding the string + * @param operations - Array of GET operations to perform on the bitfield + */ + parseCommand(parser: CommandParser, key: RedisArgument, operations: BitFieldRoOperations) { + parser.push('BITFIELD_RO'); + parser.pushKey(key); + + for (const operation of operations) { + parser.push('GET'); + parser.push(operation.encoding); + parser.push(operation.offset.toString()) + } + }, + transformReply: undefined as unknown as () => ArrayReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/BITOP.spec.ts b/packages/client/lib/commands/BITOP.spec.ts new file mode 100644 index 00000000000..65fe6f86338 --- /dev/null +++ b/packages/client/lib/commands/BITOP.spec.ts @@ -0,0 +1,96 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import BITOP, { BitOperations } from './BITOP'; +import { parseArgs } from './generic-transformers'; + +describe('BITOP', () => { + describe('transformArguments', () => { + it('single key', () => { + assert.deepEqual( + parseArgs(BITOP, 'AND', 'destKey', 'key'), + ['BITOP', 'AND', 'destKey', 'key'] + ); + }); + + it('multiple keys', () => { + 
assert.deepEqual( + parseArgs(BITOP, 'AND', 'destKey', ['1', '2']), + ['BITOP', 'AND', 'destKey', '1', '2'] + ); + }); + }); + + for (const op of ['AND', 'OR', 'XOR'] as BitOperations[]) { + testUtils.testAll(`bitOp ${op} with non-existing keys`, async client => { + assert.equal( + await client.bitOp(op, '{tag}destKey', ['{tag}key1', '{tag}key2']), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); + + testUtils.testAll(`bitOp ${op} with existing keys`, async client => { + await client.set('{tag}key1', 'value1'); + await client.set('{tag}key2', 'value2'); + + assert.equal( + await client.bitOp(op, '{tag}destKey', ['{tag}key1', '{tag}key2']), + 6 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); + } + + // NOT operation requires only one key + testUtils.testAll('bitOp NOT with non-existing keys', async client => { + assert.equal( + await client.bitOp('NOT', '{tag}destKey', '{tag}key'), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); + + testUtils.testAll('bitOp NOT with existing keys', async client => { + await client.set('{tag}key', 'value'); + + assert.equal( + await client.bitOp('NOT', '{tag}destKey', '{tag}key'), + 5 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); + + // newer operations supported since Redis 8.2 + for (const op of ['DIFF', 'DIFF1', 'ANDOR', 'ONE'] as BitOperations[]) { + testUtils.testAll(`bitOp ${op} with non-existing keys`, async client => { + assert.equal( + await client.bitOp(op, '{tag}destKey', ['{tag}key1', '{tag}key2']), + 0 + ); + }, { + client: { ...GLOBAL.SERVERS.OPEN, minimumDockerVersion: [8, 2] }, + cluster: { ...GLOBAL.CLUSTERS.OPEN, minimumDockerVersion: [8, 2] }, + }); + + testUtils.testAll(`bitOp ${op} with existing keys`, async client => { + await client.set('{tag}key1', 'value1'); + await client.set('{tag}key2', 'value2'); + + assert.equal( + await client.bitOp(op, '{tag}destKey', 
['{tag}key1', '{tag}key2']), + 6 + ); + }, { + client: { ...GLOBAL.SERVERS.OPEN, minimumDockerVersion: [8, 2] }, + cluster: { ...GLOBAL.CLUSTERS.OPEN, minimumDockerVersion: [8, 2] }, + }); + } +}); diff --git a/packages/client/lib/commands/BITOP.ts b/packages/client/lib/commands/BITOP.ts new file mode 100644 index 00000000000..da8b97ceda6 --- /dev/null +++ b/packages/client/lib/commands/BITOP.ts @@ -0,0 +1,27 @@ +import { CommandParser } from '../client/parser'; +import { NumberReply, Command, RedisArgument } from '../RESP/types'; +import { RedisVariadicArgument } from './generic-transformers'; + +export type BitOperations = 'AND' | 'OR' | 'XOR' | 'NOT' | 'DIFF' | 'DIFF1' | 'ANDOR' | 'ONE'; + +export default { + IS_READ_ONLY: false, + /** + * Performs bitwise operations between strings + * @param parser - The Redis command parser + * @param operation - Bitwise operation to perform: AND, OR, XOR, NOT, DIFF, DIFF1, ANDOR, ONE + * @param destKey - Destination key to store the result + * @param key - Source key(s) to perform operation on + */ + parseCommand( + parser: CommandParser, + operation: BitOperations, + destKey: RedisArgument, + key: RedisVariadicArgument + ) { + parser.push('BITOP', operation); + parser.pushKey(destKey); + parser.pushKeys(key); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/BITPOS.spec.ts b/packages/client/lib/commands/BITPOS.spec.ts new file mode 100644 index 00000000000..c699deab83c --- /dev/null +++ b/packages/client/lib/commands/BITPOS.spec.ts @@ -0,0 +1,46 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import BITPOS from './BITPOS'; +import { parseArgs } from './generic-transformers'; + +describe('BITPOS', () => { + describe('parseCommand', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(BITPOS, 'key', 1), + ['BITPOS', 'key', '1'] + ); + }); + + it('with start', () => { + 
assert.deepEqual( + parseArgs(BITPOS, 'key', 1, 1), + ['BITPOS', 'key', '1', '1'] + ); + }); + + it('with start and end', () => { + assert.deepEqual( + parseArgs(BITPOS, 'key', 1, 1, -1), + ['BITPOS', 'key', '1', '1', '-1'] + ); + }); + + it('with start, end and mode', () => { + assert.deepEqual( + parseArgs(BITPOS, 'key', 1, 1, -1, 'BIT'), + ['BITPOS', 'key', '1', '1', '-1', 'BIT'] + ); + }); + }); + + testUtils.testAll('bitPos', async client => { + assert.equal( + await client.bitPos('key', 1, 1), + -1 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/BITPOS.ts b/packages/client/lib/commands/BITPOS.ts new file mode 100644 index 00000000000..cbc1aeaf610 --- /dev/null +++ b/packages/client/lib/commands/BITPOS.ts @@ -0,0 +1,41 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; +import { BitValue } from './generic-transformers'; + +export default { + CACHEABLE: true, + IS_READ_ONLY: true, + /** + * Returns the position of first bit set to 0 or 1 in a string + * @param parser - The Redis command parser + * @param key - The key holding the string + * @param bit - The bit value to look for (0 or 1) + * @param start - Optional starting position in bytes/bits + * @param end - Optional ending position in bytes/bits + * @param mode - Optional counting mode: BYTE or BIT + */ + parseCommand(parser: CommandParser, + key: RedisArgument, + bit: BitValue, + start?: number, + end?: number, + mode?: 'BYTE' | 'BIT' + ) { + parser.push('BITPOS'); + parser.pushKey(key); + parser.push(bit.toString()); + + if (start !== undefined) { + parser.push(start.toString()); + } + + if (end !== undefined) { + parser.push(end.toString()); + } + + if (mode) { + parser.push(mode); + } + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/BLMOVE.spec.ts 
b/packages/client/lib/commands/BLMOVE.spec.ts new file mode 100644 index 00000000000..d4e9e024a8c --- /dev/null +++ b/packages/client/lib/commands/BLMOVE.spec.ts @@ -0,0 +1,36 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL, BLOCKING_MIN_VALUE } from '../test-utils'; +import BLMOVE from './BLMOVE'; +import { parseArgs } from './generic-transformers'; + +describe('BLMOVE', () => { + testUtils.isVersionGreaterThanHook([6, 2]); + + it('transformArguments', () => { + assert.deepEqual( + parseArgs(BLMOVE, 'source', 'destination', 'LEFT', 'RIGHT', 0), + ['BLMOVE', 'source', 'destination', 'LEFT', 'RIGHT', '0'] + ); + }); + + testUtils.testAll('blMove - null', async client => { + assert.equal( + await client.blMove('{tag}source', '{tag}destination', 'LEFT', 'RIGHT', BLOCKING_MIN_VALUE), + null + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); + + testUtils.testAll('blMove - with member', async client => { + const [, reply] = await Promise.all([ + client.lPush('{tag}source', 'element'), + client.blMove('{tag}source', '{tag}destination', 'LEFT', 'RIGHT', BLOCKING_MIN_VALUE) + ]); + assert.equal(reply, 'element'); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/BLMOVE.ts b/packages/client/lib/commands/BLMOVE.ts new file mode 100644 index 00000000000..c2b04301f84 --- /dev/null +++ b/packages/client/lib/commands/BLMOVE.ts @@ -0,0 +1,29 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, BlobStringReply, NullReply, Command } from '../RESP/types'; +import { ListSide } from './generic-transformers'; + +export default { + IS_READ_ONLY: false, + /** + * Pop an element from a list, push it to another list and return it; or block until one is available + * @param parser - The Redis command parser + * @param source - Key of the source list + * @param destination - Key of the destination list + * @param sourceSide - Side of 
source list to pop from (LEFT or RIGHT) + * @param destinationSide - Side of destination list to push to (LEFT or RIGHT) + * @param timeout - Timeout in seconds, 0 to block indefinitely + */ + parseCommand( + parser: CommandParser, + source: RedisArgument, + destination: RedisArgument, + sourceSide: ListSide, + destinationSide: ListSide, + timeout: number + ) { + parser.push('BLMOVE'); + parser.pushKeys([source, destination]); + parser.push(sourceSide, destinationSide, timeout.toString()) + }, + transformReply: undefined as unknown as () => BlobStringReply | NullReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/BLMPOP.spec.ts b/packages/client/lib/commands/BLMPOP.spec.ts new file mode 100644 index 00000000000..6cda524b50f --- /dev/null +++ b/packages/client/lib/commands/BLMPOP.spec.ts @@ -0,0 +1,50 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL, BLOCKING_MIN_VALUE } from '../test-utils'; +import BLMPOP from './BLMPOP'; +import { parseArgs } from './generic-transformers'; + +describe('BLMPOP', () => { + testUtils.isVersionGreaterThanHook([7]); + + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(BLMPOP, 0, 'key', 'LEFT'), + ['BLMPOP', '0', '1', 'key', 'LEFT'] + ); + }); + + it('with COUNT', () => { + assert.deepEqual( + parseArgs(BLMPOP, 0, 'key', 'LEFT', { + COUNT: 1 + }), + ['BLMPOP', '0', '1', 'key', 'LEFT', 'COUNT', '1'] + ); + }); + }); + + testUtils.testAll('blmPop - null', async client => { + assert.equal( + await client.blmPop(BLOCKING_MIN_VALUE, 'key', 'RIGHT'), + null + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); + + testUtils.testAll('blmPop - with member', async client => { + const [, reply] = await Promise.all([ + client.lPush('key', 'element'), + client.blmPop(BLOCKING_MIN_VALUE, 'key', 'RIGHT') + ]); + assert.deepEqual(reply, [ + 'key', + ['element'] + ]); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: 
GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/BLMPOP.ts b/packages/client/lib/commands/BLMPOP.ts new file mode 100644 index 00000000000..b858218b580 --- /dev/null +++ b/packages/client/lib/commands/BLMPOP.ts @@ -0,0 +1,18 @@ +import { CommandParser } from '../client/parser'; +import { Command } from '../RESP/types'; +import LMPOP, { LMPopArguments, parseLMPopArguments } from './LMPOP'; + +export default { + IS_READ_ONLY: false, + /** + * Pops elements from multiple lists; blocks until elements are available + * @param parser - The Redis command parser + * @param timeout - Timeout in seconds, 0 to block indefinitely + * @param args - Additional arguments for LMPOP command + */ + parseCommand(parser: CommandParser, timeout: number, ...args: LMPopArguments) { + parser.push('BLMPOP', timeout.toString()); + parseLMPopArguments(parser, ...args); + }, + transformReply: LMPOP.transformReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/BLPOP.spec.ts b/packages/client/lib/commands/BLPOP.spec.ts new file mode 100644 index 00000000000..1bb53a774b7 --- /dev/null +++ b/packages/client/lib/commands/BLPOP.spec.ts @@ -0,0 +1,47 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL, BLOCKING_MIN_VALUE } from '../test-utils'; +import BLPOP from './BLPOP'; +import { parseArgs } from './generic-transformers'; + +describe('BLPOP', () => { + describe('transformArguments', () => { + it('single', () => { + assert.deepEqual( + parseArgs(BLPOP, 'key', 0), + ['BLPOP', 'key', '0'] + ); + }); + + it('multiple', () => { + assert.deepEqual( + parseArgs(BLPOP, ['1', '2'], 0), + ['BLPOP', '1', '2', '0'] + ); + }); + }); + + testUtils.testAll('blPop - null', async client => { + assert.equal( + await client.blPop('key', BLOCKING_MIN_VALUE), + null + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); + + testUtils.testAll('blPop - with member', async client => { + const [, reply] = await 
Promise.all([ + client.lPush('key', 'element'), + client.blPop('key', 1) + ]); + + assert.deepEqual(reply, { + key: 'key', + element: 'element' + }); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/BLPOP.ts b/packages/client/lib/commands/BLPOP.ts new file mode 100644 index 00000000000..6ffccc6417c --- /dev/null +++ b/packages/client/lib/commands/BLPOP.ts @@ -0,0 +1,26 @@ +import { CommandParser } from '../client/parser'; +import { UnwrapReply, NullReply, TuplesReply, BlobStringReply, Command } from '../RESP/types'; +import { RedisVariadicArgument } from './generic-transformers'; + +export default { + IS_READ_ONLY: true, + /** + * Removes and returns the first element in a list, or blocks until one is available + * @param parser - The Redis command parser + * @param key - Key of the list to pop from, or array of keys to try sequentially + * @param timeout - Maximum seconds to block, 0 to block indefinitely + */ + parseCommand(parser: CommandParser, key: RedisVariadicArgument, timeout: number) { + parser.push('BLPOP'); + parser.pushKeys(key); + parser.push(timeout.toString()); + }, + transformReply(reply: UnwrapReply>) { + if (reply === null) return null; + + return { + key: reply[0], + element: reply[1] + }; + } +} as const satisfies Command; diff --git a/packages/client/lib/commands/BRPOP.spec.ts b/packages/client/lib/commands/BRPOP.spec.ts new file mode 100644 index 00000000000..de23bb34a92 --- /dev/null +++ b/packages/client/lib/commands/BRPOP.spec.ts @@ -0,0 +1,47 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL, BLOCKING_MIN_VALUE } from '../test-utils'; +import BRPOP from './BRPOP'; +import { parseArgs } from './generic-transformers'; + +describe('BRPOP', () => { + describe('transformArguments', () => { + it('single', () => { + assert.deepEqual( + parseArgs(BRPOP, 'key', 0), + ['BRPOP', 'key', '0'] + ); + }); + + it('multiple', () => { + assert.deepEqual( 
+ parseArgs(BRPOP, ['1', '2'], 0), + ['BRPOP', '1', '2', '0'] + ); + }); + }); + + testUtils.testAll('brPop - null', async client => { + assert.equal( + await client.brPop('key', BLOCKING_MIN_VALUE), + null + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); + + testUtils.testAll('brPopblPop - with member', async client => { + const [, reply] = await Promise.all([ + client.lPush('key', 'element'), + client.brPop('key', 1) + ]); + + assert.deepEqual(reply, { + key: 'key', + element: 'element' + }); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/BRPOP.ts b/packages/client/lib/commands/BRPOP.ts new file mode 100644 index 00000000000..994a60ec902 --- /dev/null +++ b/packages/client/lib/commands/BRPOP.ts @@ -0,0 +1,20 @@ +import { CommandParser } from '../client/parser'; +import { Command } from '../RESP/types'; +import { RedisVariadicArgument } from './generic-transformers'; +import BLPOP from './BLPOP'; + +export default { + IS_READ_ONLY: true, + /** + * Removes and returns the last element in a list, or blocks until one is available + * @param parser - The Redis command parser + * @param key - Key of the list to pop from, or array of keys to try sequentially + * @param timeout - Maximum seconds to block, 0 to block indefinitely + */ + parseCommand(parser: CommandParser, key: RedisVariadicArgument, timeout: number) { + parser.push('BRPOP'); + parser.pushKeys(key); + parser.push(timeout.toString()); + }, + transformReply: BLPOP.transformReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/BRPOPLPUSH.spec.ts b/packages/client/lib/commands/BRPOPLPUSH.spec.ts new file mode 100644 index 00000000000..6c2a2a2c900 --- /dev/null +++ b/packages/client/lib/commands/BRPOPLPUSH.spec.ts @@ -0,0 +1,43 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL, BLOCKING_MIN_VALUE } from '../test-utils'; +import BRPOPLPUSH from 
'./BRPOPLPUSH'; +import { parseArgs } from './generic-transformers'; + +describe('BRPOPLPUSH', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(BRPOPLPUSH, 'source', 'destination', 0), + ['BRPOPLPUSH', 'source', 'destination', '0'] + ); + }); + + testUtils.testAll('brPopLPush - null', async client => { + assert.equal( + await client.brPopLPush( + '{tag}source', + '{tag}destination', + BLOCKING_MIN_VALUE + ), + null + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); + + testUtils.testAll('brPopLPush - with member', async client => { + const [, reply] = await Promise.all([ + client.lPush('{tag}source', 'element'), + client.brPopLPush( + '{tag}source', + '{tag}destination', + 0 + ) + ]); + + assert.equal(reply, 'element'); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/BRPOPLPUSH.ts b/packages/client/lib/commands/BRPOPLPUSH.ts new file mode 100644 index 00000000000..b62d263cfaf --- /dev/null +++ b/packages/client/lib/commands/BRPOPLPUSH.ts @@ -0,0 +1,19 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, BlobStringReply, NullReply, Command } from '../RESP/types'; + +export default { + IS_READ_ONLY: false, + /** + * Pops an element from a list, pushes it to another list and returns it; blocks until element is available + * @param parser - The Redis command parser + * @param source - Key of the source list to pop from + * @param destination - Key of the destination list to push to + * @param timeout - Maximum seconds to block, 0 to block indefinitely + */ + parseCommand(parser: CommandParser, source: RedisArgument, destination: RedisArgument, timeout: number) { + parser.push('BRPOPLPUSH'); + parser.pushKeys([source, destination]); + parser.push(timeout.toString()); + }, + transformReply: undefined as unknown as () => BlobStringReply | NullReply +} as const satisfies Command; diff --git 
a/packages/client/lib/commands/BZMPOP.spec.ts b/packages/client/lib/commands/BZMPOP.spec.ts new file mode 100644 index 00000000000..8b082a214ee --- /dev/null +++ b/packages/client/lib/commands/BZMPOP.spec.ts @@ -0,0 +1,56 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL, BLOCKING_MIN_VALUE } from '../test-utils'; +import BZMPOP from './BZMPOP'; +import { parseArgs } from './generic-transformers'; + +describe('BZMPOP', () => { + testUtils.isVersionGreaterThanHook([7]); + + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(BZMPOP, 0, 'key', 'MIN'), + ['BZMPOP', '0', '1', 'key', 'MIN'] + ); + }); + + it('with COUNT', () => { + assert.deepEqual( + parseArgs(BZMPOP, 0, 'key', 'MIN', { + COUNT: 2 + }), + ['BZMPOP', '0', '1', 'key', 'MIN', 'COUNT', '2'] + ); + }); + }); + + testUtils.testAll('bzmPop - null', async client => { + assert.equal( + await client.bzmPop(BLOCKING_MIN_VALUE, 'key', 'MAX'), + null + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); + + testUtils.testAll('bzmPop - with member', async client => { + const key = 'key', + member = { + value: 'a', + score: 1 + }, + [, reply] = await Promise.all([ + client.zAdd(key, member), + client.bzmPop(BLOCKING_MIN_VALUE, key, 'MAX') + ]); + + assert.deepEqual(reply, { + key, + members: [member] + }); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/BZMPOP.ts b/packages/client/lib/commands/BZMPOP.ts new file mode 100644 index 00000000000..1d67adbccbc --- /dev/null +++ b/packages/client/lib/commands/BZMPOP.ts @@ -0,0 +1,18 @@ +import { CommandParser } from '../client/parser'; +import { Command } from '../RESP/types'; +import ZMPOP, { parseZMPopArguments, ZMPopArguments } from './ZMPOP'; + +export default { + IS_READ_ONLY: false, + /** + * Removes and returns members from one or more sorted sets in the specified order; blocks until elements are
available + * @param parser - The Redis command parser + * @param timeout - Maximum seconds to block, 0 to block indefinitely + * @param args - Additional arguments specifying the keys, min/max count, and order (MIN/MAX) + */ + parseCommand(parser: CommandParser, timeout: number, ...args: ZMPopArguments) { + parser.push('BZMPOP', timeout.toString()); + parseZMPopArguments(parser, ...args); + }, + transformReply: ZMPOP.transformReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/BZPOPMAX.spec.ts b/packages/client/lib/commands/BZPOPMAX.spec.ts new file mode 100644 index 00000000000..fbf60862327 --- /dev/null +++ b/packages/client/lib/commands/BZPOPMAX.spec.ts @@ -0,0 +1,52 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL, BLOCKING_MIN_VALUE } from '../test-utils'; +import BZPOPMAX from './BZPOPMAX'; +import { parseArgs } from './generic-transformers'; + +describe('BZPOPMAX', () => { + describe('transformArguments', () => { + it('single', () => { + assert.deepEqual( + parseArgs(BZPOPMAX, 'key', 0), + ['BZPOPMAX', 'key', '0'] + ); + }); + + it('multiple', () => { + assert.deepEqual( + parseArgs(BZPOPMAX, ['1', '2'], 0), + ['BZPOPMAX', '1', '2', '0'] + ); + }); + }); + + testUtils.testAll('bzPopMax - null', async client => { + assert.equal( + await client.bzPopMax('key', BLOCKING_MIN_VALUE), + null + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); + + testUtils.testAll('bzPopMax - with member', async client => { + const key = 'key', + member = { + value: 'a', + score: 1 + }, + [, reply] = await Promise.all([ + client.zAdd(key, member), + client.bzPopMax(key, BLOCKING_MIN_VALUE) + ]); + + assert.deepEqual(reply, { + key, + ...member + }); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/BZPOPMAX.ts b/packages/client/lib/commands/BZPOPMAX.ts new file mode 100644 index 00000000000..cffbd5e877b --- /dev/null +++ 
b/packages/client/lib/commands/BZPOPMAX.ts @@ -0,0 +1,39 @@ +import { CommandParser } from '../client/parser'; +import { NullReply, TuplesReply, BlobStringReply, DoubleReply, UnwrapReply, Command, TypeMapping } from '../RESP/types'; +import { RedisVariadicArgument, transformDoubleReply } from './generic-transformers'; + +export default { + IS_READ_ONLY: false, + /** + * Removes and returns the member with the highest score in a sorted set, or blocks until one is available + * @param parser - The Redis command parser + * @param keys - Key of the sorted set, or array of keys to try sequentially + * @param timeout - Maximum seconds to block, 0 to block indefinitely + */ + parseCommand(parser: CommandParser, keys: RedisVariadicArgument, timeout: number) { + parser.push('BZPOPMAX'); + parser.pushKeys(keys); + parser.push(timeout.toString()); + }, + transformReply: { + 2( + reply: UnwrapReply>, + preserve?: any, + typeMapping?: TypeMapping + ) { + return reply === null ? null : { + key: reply[0], + value: reply[1], + score: transformDoubleReply[2](reply[2], preserve, typeMapping) + }; + }, + 3(reply: UnwrapReply>) { + return reply === null ? 
null : { + key: reply[0], + value: reply[1], + score: reply[2] + }; + } + } +} as const satisfies Command; + diff --git a/packages/client/lib/commands/BZPOPMIN.spec.ts b/packages/client/lib/commands/BZPOPMIN.spec.ts new file mode 100644 index 00000000000..2f8cab8dedf --- /dev/null +++ b/packages/client/lib/commands/BZPOPMIN.spec.ts @@ -0,0 +1,52 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL, BLOCKING_MIN_VALUE } from '../test-utils'; +import BZPOPMIN from './BZPOPMIN'; +import { parseArgs } from './generic-transformers'; + +describe('BZPOPMIN', () => { + describe('transformArguments', () => { + it('single', () => { + assert.deepEqual( + parseArgs(BZPOPMIN, 'key', 0), + ['BZPOPMIN', 'key', '0'] + ); + }); + + it('multiple', () => { + assert.deepEqual( + parseArgs(BZPOPMIN, ['1', '2'], 0), + ['BZPOPMIN', '1', '2', '0'] + ); + }); + }); + + testUtils.testAll('bzPopMin - null', async client => { + assert.equal( + await client.bzPopMin('key', BLOCKING_MIN_VALUE), + null + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); + + testUtils.testAll('bzPopMin - with member', async client => { + const key = 'key', + member = { + value: 'a', + score: 1 + }, + [, reply] = await Promise.all([ + client.zAdd(key, member), + client.bzPopMin(key, BLOCKING_MIN_VALUE) + ]); + + assert.deepEqual(reply, { + key, + ...member + }); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/BZPOPMIN.ts b/packages/client/lib/commands/BZPOPMIN.ts new file mode 100644 index 00000000000..911a8654e5a --- /dev/null +++ b/packages/client/lib/commands/BZPOPMIN.ts @@ -0,0 +1,21 @@ +import { CommandParser } from '../client/parser'; +import { Command } from '../RESP/types'; +import { RedisVariadicArgument } from './generic-transformers'; +import BZPOPMAX from './BZPOPMAX'; + +export default { + IS_READ_ONLY: BZPOPMAX.IS_READ_ONLY, + /** + * Removes and returns the member with the
lowest score in a sorted set, or blocks until one is available + * @param parser - The Redis command parser + * @param keys - Key of the sorted set, or array of keys to try sequentially + * @param timeout - Maximum seconds to block, 0 to block indefinitely + */ + parseCommand(parser: CommandParser, keys: RedisVariadicArgument, timeout: number) { + parser.push('BZPOPMIN'); + parser.pushKeys(keys); + parser.push(timeout.toString()); + }, + transformReply: BZPOPMAX.transformReply +} as const satisfies Command; + diff --git a/packages/client/lib/commands/CLIENT_CACHING.spec.ts b/packages/client/lib/commands/CLIENT_CACHING.spec.ts new file mode 100644 index 00000000000..ad3511b3e97 --- /dev/null +++ b/packages/client/lib/commands/CLIENT_CACHING.spec.ts @@ -0,0 +1,21 @@ +import { strict as assert } from 'node:assert'; +import CLIENT_CACHING from './CLIENT_CACHING'; +import { parseArgs } from './generic-transformers'; + +describe('CLIENT CACHING', () => { + describe('transformArguments', () => { + it('true', () => { + assert.deepEqual( + parseArgs(CLIENT_CACHING, true), + ['CLIENT', 'CACHING', 'YES'] + ); + }); + + it('false', () => { + assert.deepEqual( + parseArgs(CLIENT_CACHING, false), + ['CLIENT', 'CACHING', 'NO'] + ); + }); + }); +}); diff --git a/packages/client/lib/commands/CLIENT_CACHING.ts b/packages/client/lib/commands/CLIENT_CACHING.ts new file mode 100644 index 00000000000..f65230b5c24 --- /dev/null +++ b/packages/client/lib/commands/CLIENT_CACHING.ts @@ -0,0 +1,20 @@ +import { CommandParser } from '../client/parser'; +import { SimpleStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Instructs the server about tracking or not keys in the next request + * @param parser - The Redis command parser + * @param value - Whether to enable (true) or disable (false) tracking + */ + parseCommand(parser: CommandParser, value: boolean) { + parser.push( + 'CLIENT', + 'CACHING', + value ? 
'YES' : 'NO' + ); + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/client/lib/commands/CLIENT_GETNAME.spec.ts b/packages/client/lib/commands/CLIENT_GETNAME.spec.ts new file mode 100644 index 00000000000..5b0dfdb8437 --- /dev/null +++ b/packages/client/lib/commands/CLIENT_GETNAME.spec.ts @@ -0,0 +1,20 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import CLIENT_GETNAME from './CLIENT_GETNAME'; +import { parseArgs } from './generic-transformers'; + +describe('CLIENT GETNAME', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(CLIENT_GETNAME), + ['CLIENT', 'GETNAME'] + ); + }); + + testUtils.testWithClient('client.clientGetName', async client => { + assert.equal( + await client.clientGetName(), + null + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/CLIENT_GETNAME.ts b/packages/client/lib/commands/CLIENT_GETNAME.ts new file mode 100644 index 00000000000..f61d82d0548 --- /dev/null +++ b/packages/client/lib/commands/CLIENT_GETNAME.ts @@ -0,0 +1,15 @@ +import { CommandParser } from '../client/parser'; +import { BlobStringReply, NullReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Returns the name of the current connection + * @param parser - The Redis command parser + */ + parseCommand(parser: CommandParser) { + parser.push('CLIENT', 'GETNAME'); + }, + transformReply: undefined as unknown as () => BlobStringReply | NullReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/CLIENT_GETREDIR.spec.ts b/packages/client/lib/commands/CLIENT_GETREDIR.spec.ts new file mode 100644 index 00000000000..a7c375fec26 --- /dev/null +++ b/packages/client/lib/commands/CLIENT_GETREDIR.spec.ts @@ -0,0 +1,12 @@ +import { strict as assert } from 'node:assert'; +import CLIENT_GETREDIR from './CLIENT_GETREDIR'; 
+import { parseArgs } from './generic-transformers'; + +describe('CLIENT GETREDIR', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(CLIENT_GETREDIR), + ['CLIENT', 'GETREDIR'] + ); + }); +}); diff --git a/packages/client/lib/commands/CLIENT_GETREDIR.ts b/packages/client/lib/commands/CLIENT_GETREDIR.ts new file mode 100644 index 00000000000..ce50e86bf11 --- /dev/null +++ b/packages/client/lib/commands/CLIENT_GETREDIR.ts @@ -0,0 +1,15 @@ +import { CommandParser } from '../client/parser'; +import { NumberReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Returns the ID of the client to which the current client is redirecting tracking notifications + * @param parser - The Redis command parser + */ + parseCommand(parser: CommandParser) { + parser.push('CLIENT', 'GETREDIR'); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/CLIENT_ID.spec.ts b/packages/client/lib/commands/CLIENT_ID.spec.ts new file mode 100644 index 00000000000..51b308adf2c --- /dev/null +++ b/packages/client/lib/commands/CLIENT_ID.spec.ts @@ -0,0 +1,20 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import CLIENT_ID from './CLIENT_ID'; +import { parseArgs } from './generic-transformers'; + +describe('CLIENT ID', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(CLIENT_ID), + ['CLIENT', 'ID'] + ); + }); + + testUtils.testWithClient('client.clientId', async client => { + assert.equal( + typeof (await client.clientId()), + 'number' + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/CLIENT_ID.ts b/packages/client/lib/commands/CLIENT_ID.ts new file mode 100644 index 00000000000..9daa7f90135 --- /dev/null +++ b/packages/client/lib/commands/CLIENT_ID.ts @@ -0,0 +1,15 @@ +import { CommandParser } from '../client/parser'; 
+import { NumberReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Returns the client ID for the current connection + * @param parser - The Redis command parser + */ + parseCommand(parser: CommandParser) { + parser.push('CLIENT', 'ID'); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/CLIENT_INFO.spec.ts b/packages/client/lib/commands/CLIENT_INFO.spec.ts new file mode 100644 index 00000000000..96881e6c1aa --- /dev/null +++ b/packages/client/lib/commands/CLIENT_INFO.spec.ts @@ -0,0 +1,137 @@ +import { strict as assert } from 'node:assert'; +import CLIENT_INFO from './CLIENT_INFO'; +import testUtils, { GLOBAL } from '../test-utils'; +import { parseArgs } from './generic-transformers'; +import { version } from '../../package.json'; + +describe('CLIENT INFO', () => { + testUtils.isVersionGreaterThanHook([6, 2]); + + it('transformArguments', () => { + assert.deepEqual( + parseArgs(CLIENT_INFO), + ['CLIENT', 'INFO'] + ); + }); + + testUtils.testWithClient('client.clientInfo', async client => { + const reply = await client.clientInfo(); + assert.equal(typeof reply.id, 'number'); + assert.equal(typeof reply.addr, 'string'); + assert.equal(typeof reply.laddr, 'string'); + assert.equal(typeof reply.fd, 'number'); + assert.equal(typeof reply.name, 'string'); + assert.equal(typeof reply.age, 'number'); + assert.equal(typeof reply.idle, 'number'); + assert.equal(typeof reply.flags, 'string'); + assert.equal(typeof reply.db, 'number'); + assert.equal(typeof reply.sub, 'number'); + assert.equal(typeof reply.psub, 'number'); + assert.equal(typeof reply.multi, 'number'); + assert.equal(typeof reply.qbuf, 'number'); + assert.equal(typeof reply.qbufFree, 'number'); + assert.equal(typeof reply.argvMem, 'number'); + assert.equal(typeof reply.obl, 'number'); + assert.equal(typeof reply.oll, 'number'); + assert.equal(typeof 
reply.omem, 'number'); + assert.equal(typeof reply.totMem, 'number'); + assert.equal(typeof reply.events, 'string'); + assert.equal(typeof reply.cmd, 'string'); + assert.equal(typeof reply.user, 'string'); + assert.equal(typeof reply.redir, 'number'); + + if (testUtils.isVersionGreaterThan([7, 0])) { + assert.equal(typeof reply.multiMem, 'number'); + assert.equal(typeof reply.resp, 'number'); + + if (testUtils.isVersionGreaterThan([7, 0, 3])) { + assert.equal(typeof reply.ssub, 'number'); + } + } + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithClient('client.clientInfo Redis < 7', async client => { + const reply = await client.clientInfo(); + if (!testUtils.isVersionGreaterThan([7])) { + assert.strictEqual(reply.libName, undefined, 'LibName should be undefined for Redis < 7'); + assert.strictEqual(reply.libVer, undefined, 'LibVer should be undefined for Redis < 7'); + } + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithClientIfVersionWithinRange([[7], 'LATEST'], 'client.clientInfo Redis>=7 info disabled', async client => { + const reply = await client.clientInfo(); + assert.equal(reply.libName, ''); + assert.equal(reply.libVer, ''); + }, { + ...GLOBAL.SERVERS.OPEN, + clientOptions: { + disableClientInfo: true + } + }); + + testUtils.testWithClientIfVersionWithinRange([[7], 'LATEST'], 'client.clientInfo Redis>=7 resp unset, info enabled, tag set', async client => { + const reply = await client.clientInfo(); + assert.equal(reply.libName, 'node-redis(client1)'); + assert.equal(reply.libVer, version); + }, { + ...GLOBAL.SERVERS.OPEN, + clientOptions: { + clientInfoTag: 'client1' + } + }); + + testUtils.testWithClientIfVersionWithinRange([[7], 'LATEST'], 'client.clientInfo Redis>=7 resp unset, info enabled, tag unset', async client => { + const reply = await client.clientInfo(); + assert.equal(reply.libName, 'node-redis'); + assert.equal(reply.libVer, version); + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithClientIfVersionWithinRange([[7], 'LATEST'], 
'client.clientInfo Redis>=7 resp2 info enabled', async client => { + const reply = await client.clientInfo(); + assert.equal(reply.libName, 'node-redis(client1)'); + assert.equal(reply.libVer, version); + }, { + ...GLOBAL.SERVERS.OPEN, + clientOptions: { + RESP: 2, + clientInfoTag: 'client1' + } + }); + + testUtils.testWithClientIfVersionWithinRange([[7], 'LATEST'], 'client.clientInfo Redis>=7 resp2 info disabled', async client => { + const reply = await client.clientInfo(); + assert.equal(reply.libName, ''); + assert.equal(reply.libVer, ''); + }, { + ...GLOBAL.SERVERS.OPEN, + clientOptions: { + disableClientInfo: true, + RESP: 2 + } + }); + + testUtils.testWithClientIfVersionWithinRange([[7], 'LATEST'], 'client.clientInfo Redis>=7 resp3 info enabled', async client => { + const reply = await client.clientInfo(); + assert.equal(reply.libName, 'node-redis(client1)'); + assert.equal(reply.libVer, version); + }, { + ...GLOBAL.SERVERS.OPEN, + clientOptions: { + RESP: 3, + clientInfoTag: 'client1' + } + }); + + testUtils.testWithClientIfVersionWithinRange([[7], 'LATEST'], 'client.clientInfo Redis>=7 resp3 info disabled', async client => { + const reply = await client.clientInfo(); + assert.equal(reply.libName, ''); + assert.equal(reply.libVer, ''); + }, { + ...GLOBAL.SERVERS.OPEN, + clientOptions: { + disableClientInfo: true, + RESP: 3 + } + }); + +}); diff --git a/packages/client/lib/commands/CLIENT_INFO.ts b/packages/client/lib/commands/CLIENT_INFO.ts new file mode 100644 index 00000000000..5fed01c52e1 --- /dev/null +++ b/packages/client/lib/commands/CLIENT_INFO.ts @@ -0,0 +1,130 @@ +import { CommandParser } from '../client/parser'; +import { Command, VerbatimStringReply } from '../RESP/types'; + +export interface ClientInfoReply { + id: number; + addr: string; + /** + * available since 6.2 + */ + laddr?: string; + fd: number; + name: string; + age: number; + idle: number; + flags: string; + db: number; + sub: number; + psub: number; + /** + * available since 7.0.3 + 
*/ + ssub?: number; + multi: number; + qbuf: number; + qbufFree: number; + /** + * available since 6.0 + */ + argvMem?: number; + /** + * available since 7.0 + */ + multiMem?: number; + obl: number; + oll: number; + omem: number; + /** + * available since 6.0 + */ + totMem?: number; + events: string; + cmd: string; + /** + * available since 6.0 + */ + user?: string; + /** + * available since 6.2 + */ + redir?: number; + /** + * available since 7.0 + */ + resp?: number; + /** + * available since 7.0 + */ + libName?: string; + /** + * available since 7.0 + */ + libVer?: string; +} + +const CLIENT_INFO_REGEX = /([^\s=]+)=([^\s]*)/g; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Returns information and statistics about the current client connection + * @param parser - The Redis command parser + */ + parseCommand(parser: CommandParser) { + parser.push('CLIENT', 'INFO'); + }, + transformReply(rawReply: VerbatimStringReply) { + const map: Record = {}; + for (const item of rawReply.toString().matchAll(CLIENT_INFO_REGEX)) { + map[item[1]] = item[2]; + } + const reply: ClientInfoReply = { + id: Number(map.id), + addr: map.addr, + fd: Number(map.fd), + name: map.name, + age: Number(map.age), + idle: Number(map.idle), + flags: map.flags, + db: Number(map.db), + sub: Number(map.sub), + psub: Number(map.psub), + multi: Number(map.multi), + qbuf: Number(map.qbuf), + qbufFree: Number(map['qbuf-free']), + argvMem: Number(map['argv-mem']), + obl: Number(map.obl), + oll: Number(map.oll), + omem: Number(map.omem), + totMem: Number(map['tot-mem']), + events: map.events, + cmd: map.cmd, + user: map.user, + libName: map['lib-name'], + libVer: map['lib-ver'] + }; + + if (map.laddr !== undefined) { + reply.laddr = map.laddr; + } + + if (map.redir !== undefined) { + reply.redir = Number(map.redir); + } + + if (map.ssub !== undefined) { + reply.ssub = Number(map.ssub); + } + + if (map['multi-mem'] !== undefined) { + reply.multiMem = Number(map['multi-mem']); 
+ } + + if (map.resp !== undefined) { + reply.resp = Number(map.resp); + } + + return reply; + } +} as const satisfies Command; diff --git a/packages/client/lib/commands/CLIENT_KILL.spec.ts b/packages/client/lib/commands/CLIENT_KILL.spec.ts new file mode 100644 index 00000000000..5078a267516 --- /dev/null +++ b/packages/client/lib/commands/CLIENT_KILL.spec.ts @@ -0,0 +1,121 @@ +import { strict as assert } from 'node:assert'; +import CLIENT_KILL, { CLIENT_KILL_FILTERS } from './CLIENT_KILL'; +import { parseArgs } from './generic-transformers'; + +describe('CLIENT KILL', () => { + describe('transformArguments', () => { + it('ADDRESS', () => { + assert.deepEqual( + parseArgs(CLIENT_KILL, { + filter: CLIENT_KILL_FILTERS.ADDRESS, + address: 'ip:6379' + }), + ['CLIENT', 'KILL', 'ADDR', 'ip:6379'] + ); + }); + + it('LOCAL_ADDRESS', () => { + assert.deepEqual( + parseArgs(CLIENT_KILL, { + filter: CLIENT_KILL_FILTERS.LOCAL_ADDRESS, + localAddress: 'ip:6379' + }), + ['CLIENT', 'KILL', 'LADDR', 'ip:6379'] + ); + }); + + describe('ID', () => { + it('string', () => { + assert.deepEqual( + parseArgs(CLIENT_KILL, { + filter: CLIENT_KILL_FILTERS.ID, + id: '1' + }), + ['CLIENT', 'KILL', 'ID', '1'] + ); + }); + + it('number', () => { + assert.deepEqual( + parseArgs(CLIENT_KILL, { + filter: CLIENT_KILL_FILTERS.ID, + id: 1 + }), + ['CLIENT', 'KILL', 'ID', '1'] + ); + }); + }); + + it('TYPE', () => { + assert.deepEqual( + parseArgs(CLIENT_KILL, { + filter: CLIENT_KILL_FILTERS.TYPE, + type: 'master' + }), + ['CLIENT', 'KILL', 'TYPE', 'master'] + ); + }); + + it('USER', () => { + assert.deepEqual( + parseArgs(CLIENT_KILL, { + filter: CLIENT_KILL_FILTERS.USER, + username: 'username' + }), + ['CLIENT', 'KILL', 'USER', 'username'] + ); + }); + + it('MAXAGE', () => { + assert.deepEqual( + parseArgs(CLIENT_KILL, { + filter: CLIENT_KILL_FILTERS.MAXAGE, + maxAge: 10 + }), + ['CLIENT', 'KILL', 'MAXAGE', '10'] + ); + }); + + describe('SKIP_ME', () => { + it('undefined', () => { + 
assert.deepEqual( + parseArgs(CLIENT_KILL, CLIENT_KILL_FILTERS.SKIP_ME), + ['CLIENT', 'KILL', 'SKIPME'] + ); + }); + + it('true', () => { + assert.deepEqual( + parseArgs(CLIENT_KILL, { + filter: CLIENT_KILL_FILTERS.SKIP_ME, + skipMe: true + }), + ['CLIENT', 'KILL', 'SKIPME', 'yes'] + ); + }); + + it('false', () => { + assert.deepEqual( + parseArgs(CLIENT_KILL, { + filter: CLIENT_KILL_FILTERS.SKIP_ME, + skipMe: false + }), + ['CLIENT', 'KILL', 'SKIPME', 'no'] + ); + }); + }); + + it('TYPE & SKIP_ME', () => { + assert.deepEqual( + parseArgs(CLIENT_KILL, [ + { + filter: CLIENT_KILL_FILTERS.TYPE, + type: 'master' + }, + CLIENT_KILL_FILTERS.SKIP_ME + ]), + ['CLIENT', 'KILL', 'TYPE', 'master', 'SKIPME'] + ); + }); + }); +}); diff --git a/packages/client/lib/commands/CLIENT_KILL.ts b/packages/client/lib/commands/CLIENT_KILL.ts new file mode 100644 index 00000000000..d7f0d0e0829 --- /dev/null +++ b/packages/client/lib/commands/CLIENT_KILL.ts @@ -0,0 +1,114 @@ +import { CommandParser } from '../client/parser'; +import { NumberReply, Command } from '../RESP/types'; + +export const CLIENT_KILL_FILTERS = { + ADDRESS: 'ADDR', + LOCAL_ADDRESS: 'LADDR', + ID: 'ID', + TYPE: 'TYPE', + USER: 'USER', + SKIP_ME: 'SKIPME', + MAXAGE: 'MAXAGE' +} as const; + +type CLIENT_KILL_FILTERS = typeof CLIENT_KILL_FILTERS; + +export interface ClientKillFilterCommon { + filter: T; +} + +export interface ClientKillAddress extends ClientKillFilterCommon { + address: `${string}:${number}`; +} + +export interface ClientKillLocalAddress extends ClientKillFilterCommon { + localAddress: `${string}:${number}`; +} + +export interface ClientKillId extends ClientKillFilterCommon { + id: number | `${number}`; +} + +export interface ClientKillType extends ClientKillFilterCommon { + type: 'normal' | 'master' | 'replica' | 'pubsub'; +} + +export interface ClientKillUser extends ClientKillFilterCommon { + username: string; +} + +export type ClientKillSkipMe = CLIENT_KILL_FILTERS['SKIP_ME'] | 
(ClientKillFilterCommon & { + skipMe: boolean; +}); + +export interface ClientKillMaxAge extends ClientKillFilterCommon { + maxAge: number; +} + +export type ClientKillFilter = ClientKillAddress | ClientKillLocalAddress | ClientKillId | ClientKillType | ClientKillUser | ClientKillSkipMe | ClientKillMaxAge; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Closes client connections matching the specified filters + * @param parser - The Redis command parser + * @param filters - One or more filters to match client connections to kill + */ + parseCommand(parser: CommandParser, filters: ClientKillFilter | Array) { + parser.push('CLIENT', 'KILL'); + + if (Array.isArray(filters)) { + for (const filter of filters) { + pushFilter(parser, filter); + } + } else { + pushFilter(parser, filters); + } + + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; + +function pushFilter(parser: CommandParser, filter: ClientKillFilter): void { + if (filter === CLIENT_KILL_FILTERS.SKIP_ME) { + parser.push('SKIPME'); + return; + } + + parser.push(filter.filter); + + switch (filter.filter) { + case CLIENT_KILL_FILTERS.ADDRESS: + parser.push(filter.address); + break; + + case CLIENT_KILL_FILTERS.LOCAL_ADDRESS: + parser.push(filter.localAddress); + break; + + case CLIENT_KILL_FILTERS.ID: + parser.push( + typeof filter.id === 'number' ? + filter.id.toString() : + filter.id + ); + break; + + case CLIENT_KILL_FILTERS.TYPE: + parser.push(filter.type); + break; + + case CLIENT_KILL_FILTERS.USER: + parser.push(filter.username); + break; + + case CLIENT_KILL_FILTERS.SKIP_ME: + parser.push(filter.skipMe ? 
'yes' : 'no'); + break; + + case CLIENT_KILL_FILTERS.MAXAGE: + parser.push(filter.maxAge.toString()); + break; + } +} diff --git a/packages/client/lib/commands/CLIENT_LIST.spec.ts b/packages/client/lib/commands/CLIENT_LIST.spec.ts new file mode 100644 index 00000000000..34709c5f14f --- /dev/null +++ b/packages/client/lib/commands/CLIENT_LIST.spec.ts @@ -0,0 +1,78 @@ +import { strict as assert } from 'node:assert'; +import CLIENT_LIST from './CLIENT_LIST'; +import testUtils, { GLOBAL } from '../test-utils'; +import { parseArgs } from './generic-transformers'; + +describe('CLIENT LIST', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(CLIENT_LIST), + ['CLIENT', 'LIST'] + ); + }); + + it('with TYPE', () => { + assert.deepEqual( + parseArgs(CLIENT_LIST, { + TYPE: 'NORMAL' + }), + ['CLIENT', 'LIST', 'TYPE', 'NORMAL'] + ); + }); + + it('with ID', () => { + assert.deepEqual( + parseArgs(CLIENT_LIST, { + ID: ['1', '2'] + }), + ['CLIENT', 'LIST', 'ID', '1', '2'] + ); + }); + }); + + testUtils.testWithClient('client.clientList', async client => { + const reply = await client.clientList(); + assert.ok(Array.isArray(reply)); + for (const item of reply) { + assert.equal(typeof item.id, 'number'); + assert.equal(typeof item.addr, 'string'); + assert.equal(typeof item.fd, 'number'); + assert.equal(typeof item.name, 'string'); + assert.equal(typeof item.age, 'number'); + assert.equal(typeof item.idle, 'number'); + assert.equal(typeof item.flags, 'string'); + assert.equal(typeof item.db, 'number'); + assert.equal(typeof item.sub, 'number'); + assert.equal(typeof item.psub, 'number'); + assert.equal(typeof item.multi, 'number'); + assert.equal(typeof item.qbuf, 'number'); + assert.equal(typeof item.qbufFree, 'number'); + assert.equal(typeof item.obl, 'number'); + assert.equal(typeof item.oll, 'number'); + assert.equal(typeof item.omem, 'number'); + assert.equal(typeof item.events, 'string'); + assert.equal(typeof item.cmd, 
'string'); + + if (testUtils.isVersionGreaterThan([6, 0])) { + assert.equal(typeof item.argvMem, 'number'); + assert.equal(typeof item.totMem, 'number'); + assert.equal(typeof item.user, 'string'); + + if (testUtils.isVersionGreaterThan([6, 2])) { + assert.equal(typeof item.redir, 'number'); + assert.equal(typeof item.laddr, 'string'); + + if (testUtils.isVersionGreaterThan([7, 0])) { + assert.equal(typeof item.multiMem, 'number'); + assert.equal(typeof item.resp, 'number'); + + if (testUtils.isVersionGreaterThan([7, 0, 3])) { + assert.equal(typeof item.ssub, 'number'); + } + } + } + } + } + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/CLIENT_LIST.ts b/packages/client/lib/commands/CLIENT_LIST.ts new file mode 100644 index 00000000000..1c1d9c3ec50 --- /dev/null +++ b/packages/client/lib/commands/CLIENT_LIST.ts @@ -0,0 +1,46 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, VerbatimStringReply, Command } from '../RESP/types'; +import CLIENT_INFO, { ClientInfoReply } from './CLIENT_INFO'; + +export interface ListFilterType { + TYPE: 'NORMAL' | 'MASTER' | 'REPLICA' | 'PUBSUB'; + ID?: never; +} + +export interface ListFilterId { + ID: Array; + TYPE?: never; +} + +export type ListFilter = ListFilterType | ListFilterId; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Returns information about all client connections. 
Can be filtered by type or ID + * @param parser - The Redis command parser + * @param filter - Optional filter to return only specific client types or IDs + */ + parseCommand(parser: CommandParser, filter?: ListFilter) { + parser.push('CLIENT', 'LIST'); + if (filter) { + if (filter.TYPE !== undefined) { + parser.push('TYPE', filter.TYPE); + } else { + parser.push('ID'); + parser.pushVariadic(filter.ID); + } + } + }, + transformReply(rawReply: VerbatimStringReply): Array { + const split = rawReply.toString().split('\n'), + length = split.length - 1, + reply: Array = []; + for (let i = 0; i < length; i++) { + reply.push(CLIENT_INFO.transformReply(split[i] as unknown as VerbatimStringReply)); + } + + return reply; + } +} as const satisfies Command; diff --git a/packages/client/lib/commands/CLIENT_NO-EVICT.spec.ts b/packages/client/lib/commands/CLIENT_NO-EVICT.spec.ts new file mode 100644 index 00000000000..50afd413492 --- /dev/null +++ b/packages/client/lib/commands/CLIENT_NO-EVICT.spec.ts @@ -0,0 +1,31 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import CLIENT_NO_EVICT from './CLIENT_NO-EVICT'; +import { parseArgs } from './generic-transformers'; + +describe('CLIENT NO-EVICT', () => { + testUtils.isVersionGreaterThanHook([7]); + + describe('transformArguments', () => { + it('true', () => { + assert.deepEqual( + parseArgs(CLIENT_NO_EVICT, true), + ['CLIENT', 'NO-EVICT', 'ON'] + ); + }); + + it('false', () => { + assert.deepEqual( + parseArgs(CLIENT_NO_EVICT, false), + ['CLIENT', 'NO-EVICT', 'OFF'] + ); + }); + }); + + testUtils.testWithClient('client.clientNoEvict', async client => { + assert.equal( + await client.clientNoEvict(true), + 'OK' + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/CLIENT_NO-EVICT.ts b/packages/client/lib/commands/CLIENT_NO-EVICT.ts new file mode 100644 index 00000000000..116d226d036 --- /dev/null +++ b/packages/client/lib/commands/CLIENT_NO-EVICT.ts @@ 
-0,0 +1,20 @@ +import { CommandParser } from '../client/parser'; +import { SimpleStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Controls whether to prevent the client's connections from being evicted + * @param parser - The Redis command parser + * @param value - Whether to enable (true) or disable (false) the no-evict mode + */ + parseCommand(parser: CommandParser, value: boolean) { + parser.push( + 'CLIENT', + 'NO-EVICT', + value ? 'ON' : 'OFF' + ); + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/client/lib/commands/CLIENT_NO-TOUCH.spec.ts b/packages/client/lib/commands/CLIENT_NO-TOUCH.spec.ts new file mode 100644 index 00000000000..ec5c9f18ae9 --- /dev/null +++ b/packages/client/lib/commands/CLIENT_NO-TOUCH.spec.ts @@ -0,0 +1,31 @@ +import { strict as assert } from 'assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import CLIENT_NO_TOUCH from './CLIENT_NO-TOUCH'; +import { parseArgs } from './generic-transformers'; + +describe('CLIENT NO-TOUCH', () => { + testUtils.isVersionGreaterThanHook([7, 2]); + + describe('transformArguments', () => { + it('true', () => { + assert.deepEqual( + parseArgs(CLIENT_NO_TOUCH, true), + ['CLIENT', 'NO-TOUCH', 'ON'] + ); + }); + + it('false', () => { + assert.deepEqual( + parseArgs(CLIENT_NO_TOUCH, false), + ['CLIENT', 'NO-TOUCH', 'OFF'] + ); + }); + }); + + testUtils.testWithClient('client.clientNoTouch', async client => { + assert.equal( + await client.clientNoTouch(true), + 'OK' + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/CLIENT_NO-TOUCH.ts b/packages/client/lib/commands/CLIENT_NO-TOUCH.ts new file mode 100644 index 00000000000..167b31f3600 --- /dev/null +++ b/packages/client/lib/commands/CLIENT_NO-TOUCH.ts @@ -0,0 +1,21 @@ +import { CommandParser } from '../client/parser'; +import { SimpleStringReply, Command } from 
'../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Controls whether to prevent the client from touching the LRU/LFU of keys + * @param parser - The Redis command parser + * @param value - Whether to enable (true) or disable (false) the no-touch mode + */ + parseCommand(parser: CommandParser, value: boolean) { + parser.push( + 'CLIENT', + 'NO-TOUCH', + value ? 'ON' : 'OFF' + ); + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; + diff --git a/packages/client/lib/commands/CLIENT_PAUSE.spec.ts b/packages/client/lib/commands/CLIENT_PAUSE.spec.ts new file mode 100644 index 00000000000..e213433afbe --- /dev/null +++ b/packages/client/lib/commands/CLIENT_PAUSE.spec.ts @@ -0,0 +1,29 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import CLIENT_PAUSE from './CLIENT_PAUSE'; +import { parseArgs } from './generic-transformers'; + +describe('CLIENT PAUSE', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(CLIENT_PAUSE, 0), + ['CLIENT', 'PAUSE', '0'] + ); + }); + + it('with mode', () => { + assert.deepEqual( + parseArgs(CLIENT_PAUSE, 0, 'ALL'), + ['CLIENT', 'PAUSE', '0', 'ALL'] + ); + }); + }); + + testUtils.testWithClient('client.clientPause', async client => { + assert.equal( + await client.clientPause(0), + 'OK' + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/CLIENT_PAUSE.ts b/packages/client/lib/commands/CLIENT_PAUSE.ts new file mode 100644 index 00000000000..4d0638df89d --- /dev/null +++ b/packages/client/lib/commands/CLIENT_PAUSE.ts @@ -0,0 +1,20 @@ +import { CommandParser } from '../client/parser'; +import { SimpleStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Stops the server from processing client commands for the specified duration + * @param parser - 
The Redis command parser + * @param timeout - Time in milliseconds to pause command processing + * @param mode - Optional mode: 'WRITE' to pause only write commands, 'ALL' to pause all commands + */ + parseCommand(parser: CommandParser, timeout: number, mode?: 'WRITE' | 'ALL') { + parser.push('CLIENT', 'PAUSE', timeout.toString()); + if (mode) { + parser.push(mode); + } + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/client/lib/commands/CLIENT_SETNAME.spec.ts b/packages/client/lib/commands/CLIENT_SETNAME.spec.ts new file mode 100644 index 00000000000..b2b339c3d19 --- /dev/null +++ b/packages/client/lib/commands/CLIENT_SETNAME.spec.ts @@ -0,0 +1,21 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; + +import CLIENT_SETNAME from './CLIENT_SETNAME'; +import { parseArgs } from './generic-transformers'; + +describe('CLIENT SETNAME', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(CLIENT_SETNAME, 'name'), + ['CLIENT', 'SETNAME', 'name'] + ); + }); + + testUtils.testWithClient('client.clientSetName', async client => { + assert.equal( + await client.clientSetName('name'), + 'OK' + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/CLIENT_SETNAME.ts b/packages/client/lib/commands/CLIENT_SETNAME.ts new file mode 100644 index 00000000000..7e4fcc17d94 --- /dev/null +++ b/packages/client/lib/commands/CLIENT_SETNAME.ts @@ -0,0 +1,16 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, SimpleStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Assigns a name to the current connection + * @param parser - The Redis command parser + * @param name - The name to assign to the connection + */ + parseCommand(parser: CommandParser, name: RedisArgument) { + parser.push('CLIENT', 'SETNAME', name); + }, + 
transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/client/lib/commands/CLIENT_TRACKING.spec.ts b/packages/client/lib/commands/CLIENT_TRACKING.spec.ts new file mode 100644 index 00000000000..032725635ee --- /dev/null +++ b/packages/client/lib/commands/CLIENT_TRACKING.spec.ts @@ -0,0 +1,102 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import CLIENT_TRACKING from './CLIENT_TRACKING'; +import { parseArgs } from './generic-transformers'; + +describe('CLIENT TRACKING', () => { + testUtils.isVersionGreaterThanHook([6]); + + describe('transformArguments', () => { + describe('true', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(CLIENT_TRACKING, true), + ['CLIENT', 'TRACKING', 'ON'] + ); + }); + + it('with REDIRECT', () => { + assert.deepEqual( + parseArgs(CLIENT_TRACKING, true, { + REDIRECT: 1 + }), + ['CLIENT', 'TRACKING', 'ON', 'REDIRECT', '1'] + ); + }); + + describe('with BCAST', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(CLIENT_TRACKING, true, { + BCAST: true + }), + ['CLIENT', 'TRACKING', 'ON', 'BCAST'] + ); + }); + + describe('with PREFIX', () => { + it('string', () => { + assert.deepEqual( + parseArgs(CLIENT_TRACKING, true, { + BCAST: true, + PREFIX: 'prefix' + }), + ['CLIENT', 'TRACKING', 'ON', 'BCAST', 'PREFIX', 'prefix'] + ); + }); + + it('array', () => { + assert.deepEqual( + parseArgs(CLIENT_TRACKING, true, { + BCAST: true, + PREFIX: ['1', '2'] + }), + ['CLIENT', 'TRACKING', 'ON', 'BCAST', 'PREFIX', '1', 'PREFIX', '2'] + ); + }); + }); + }); + + it('with OPTIN', () => { + assert.deepEqual( + parseArgs(CLIENT_TRACKING, true, { + OPTIN: true + }), + ['CLIENT', 'TRACKING', 'ON', 'OPTIN'] + ); + }); + + it('with OPTOUT', () => { + assert.deepEqual( + parseArgs(CLIENT_TRACKING, true, { + OPTOUT: true + }), + ['CLIENT', 'TRACKING', 'ON', 'OPTOUT'] + ); + }); + + it('with NOLOOP', () => { + 
assert.deepEqual( + parseArgs(CLIENT_TRACKING, true, { + NOLOOP: true + }), + ['CLIENT', 'TRACKING', 'ON', 'NOLOOP'] + ); + }); + }); + + it('false', () => { + assert.deepEqual( + parseArgs(CLIENT_TRACKING, false), + ['CLIENT', 'TRACKING', 'OFF'] + ); + }); + }); + + testUtils.testWithClient('client.clientTracking', async client => { + assert.equal( + await client.clientTracking(false), + 'OK' + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/CLIENT_TRACKING.ts b/packages/client/lib/commands/CLIENT_TRACKING.ts new file mode 100644 index 00000000000..08bed098c20 --- /dev/null +++ b/packages/client/lib/commands/CLIENT_TRACKING.ts @@ -0,0 +1,93 @@ +import { CommandParser } from '../client/parser'; +import { SimpleStringReply, Command } from '../RESP/types'; +import { RedisVariadicArgument } from './generic-transformers'; + +interface CommonOptions { + REDIRECT?: number; + NOLOOP?: boolean; +} + +interface BroadcastOptions { + BCAST?: boolean; + PREFIX?: RedisVariadicArgument; +} + +interface OptInOptions { + OPTIN?: boolean; +} + +interface OptOutOptions { + OPTOUT?: boolean; +} + +export type ClientTrackingOptions = CommonOptions & ( + BroadcastOptions | + OptInOptions | + OptOutOptions +); + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Controls server-assisted client side caching for the current connection + * @param parser - The Redis command parser + * @param mode - Whether to enable (true) or disable (false) tracking + * @param options - Optional configuration including REDIRECT, BCAST, PREFIX, OPTIN, OPTOUT, and NOLOOP options + */ + parseCommand( + parser: CommandParser, + mode: M, + options?: M extends true ? ClientTrackingOptions : never + ) { + parser.push( + 'CLIENT', + 'TRACKING', + mode ? 
'ON' : 'OFF' + ); + + if (mode) { + if (options?.REDIRECT) { + parser.push( + 'REDIRECT', + options.REDIRECT.toString() + ); + } + + if (isBroadcast(options)) { + parser.push('BCAST'); + + if (options?.PREFIX) { + if (Array.isArray(options.PREFIX)) { + for (const prefix of options.PREFIX) { + parser.push('PREFIX', prefix); + } + } else { + parser.push('PREFIX', options.PREFIX); + } + } + } else if (isOptIn(options)) { + parser.push('OPTIN'); + } else if (isOptOut(options)) { + parser.push('OPTOUT'); + } + + if (options?.NOLOOP) { + parser.push('NOLOOP'); + } + } + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; + +function isBroadcast(options?: ClientTrackingOptions): options is BroadcastOptions { + return (options as BroadcastOptions)?.BCAST === true; +} + +function isOptIn(options?: ClientTrackingOptions): options is OptInOptions { + return (options as OptInOptions)?.OPTIN === true; +} + +function isOptOut(options?: ClientTrackingOptions): options is OptOutOptions { + return (options as OptOutOptions)?.OPTOUT === true; +} diff --git a/packages/client/lib/commands/CLIENT_TRACKINGINFO.spec.ts b/packages/client/lib/commands/CLIENT_TRACKINGINFO.spec.ts new file mode 100644 index 00000000000..d776519df22 --- /dev/null +++ b/packages/client/lib/commands/CLIENT_TRACKINGINFO.spec.ts @@ -0,0 +1,26 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import CLIENT_TRACKINGINFO from './CLIENT_TRACKINGINFO'; +import { parseArgs } from './generic-transformers'; + +describe('CLIENT TRACKINGINFO', () => { + testUtils.isVersionGreaterThanHook([6, 2]); + + it('transformArguments', () => { + assert.deepEqual( + parseArgs(CLIENT_TRACKINGINFO), + ['CLIENT', 'TRACKINGINFO'] + ); + }); + + testUtils.testWithClient('client.clientTrackingInfo', async client => { + assert.deepEqual( + await client.clientTrackingInfo(), + { + flags: ['off'], + redirect: -1, + prefixes: [] + } + ); 
+ }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/CLIENT_TRACKINGINFO.ts b/packages/client/lib/commands/CLIENT_TRACKINGINFO.ts new file mode 100644 index 00000000000..1fb19e9e60b --- /dev/null +++ b/packages/client/lib/commands/CLIENT_TRACKINGINFO.ts @@ -0,0 +1,28 @@ +import { CommandParser } from '../client/parser'; +import { TuplesToMapReply, BlobStringReply, SetReply, NumberReply, ArrayReply, UnwrapReply, Resp2Reply, Command } from '../RESP/types'; + +type TrackingInfo = TuplesToMapReply<[ + [BlobStringReply<'flags'>, SetReply], + [BlobStringReply<'redirect'>, NumberReply], + [BlobStringReply<'prefixes'>, ArrayReply] +]>; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Returns information about the current connection's key tracking state + * @param parser - The Redis command parser + */ + parseCommand(parser: CommandParser) { + parser.push('CLIENT', 'TRACKINGINFO'); + }, + transformReply: { + 2: (reply: UnwrapReply>) => ({ + flags: reply[1], + redirect: reply[3], + prefixes: reply[5] + }), + 3: undefined as unknown as () => TrackingInfo + } +} as const satisfies Command; diff --git a/packages/client/lib/commands/CLIENT_UNPAUSE.spec.ts b/packages/client/lib/commands/CLIENT_UNPAUSE.spec.ts new file mode 100644 index 00000000000..0b58cf6517e --- /dev/null +++ b/packages/client/lib/commands/CLIENT_UNPAUSE.spec.ts @@ -0,0 +1,22 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import CLIENT_UNPAUSE from './CLIENT_UNPAUSE'; +import { parseArgs } from './generic-transformers'; + +describe('CLIENT UNPAUSE', () => { + testUtils.isVersionGreaterThanHook([6, 2]); + + it('transformArguments', () => { + assert.deepEqual( + parseArgs(CLIENT_UNPAUSE), + ['CLIENT', 'UNPAUSE'] + ); + }); + + testUtils.testWithClient('client.clientUnpause', async client => { + assert.equal( + await client.clientUnpause(), + 'OK' + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git 
a/packages/client/lib/commands/CLIENT_UNPAUSE.ts b/packages/client/lib/commands/CLIENT_UNPAUSE.ts new file mode 100644 index 00000000000..2ac4fde0112 --- /dev/null +++ b/packages/client/lib/commands/CLIENT_UNPAUSE.ts @@ -0,0 +1,15 @@ +import { CommandParser } from '../client/parser'; +import { SimpleStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Resumes processing of client commands after a CLIENT PAUSE + * @param parser - The Redis command parser + */ + parseCommand(parser: CommandParser) { + parser.push('CLIENT', 'UNPAUSE'); + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/client/lib/commands/CLUSTER_ADDSLOTS.spec.ts b/packages/client/lib/commands/CLUSTER_ADDSLOTS.spec.ts new file mode 100644 index 00000000000..4a9b1839bb4 --- /dev/null +++ b/packages/client/lib/commands/CLUSTER_ADDSLOTS.spec.ts @@ -0,0 +1,21 @@ +import { strict as assert } from 'node:assert'; +import CLUSTER_ADDSLOTS from './CLUSTER_ADDSLOTS'; +import { parseArgs } from './generic-transformers'; + +describe('CLUSTER ADDSLOTS', () => { + describe('transformArguments', () => { + it('single', () => { + assert.deepEqual( + parseArgs(CLUSTER_ADDSLOTS, 0), + ['CLUSTER', 'ADDSLOTS', '0'] + ); + }); + + it('multiple', () => { + assert.deepEqual( + parseArgs(CLUSTER_ADDSLOTS, [0, 1]), + ['CLUSTER', 'ADDSLOTS', '0', '1'] + ); + }); + }); +}); diff --git a/packages/client/lib/commands/CLUSTER_ADDSLOTS.ts b/packages/client/lib/commands/CLUSTER_ADDSLOTS.ts new file mode 100644 index 00000000000..f833a42e5ad --- /dev/null +++ b/packages/client/lib/commands/CLUSTER_ADDSLOTS.ts @@ -0,0 +1,17 @@ +import { CommandParser } from '../client/parser'; +import { SimpleStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Assigns hash slots to the current node in a Redis Cluster + * @param parser 
- The Redis command parser + * @param slots - One or more hash slots to be assigned + */ + parseCommand(parser: CommandParser, slots: number | Array) { + parser.push('CLUSTER', 'ADDSLOTS'); + parser.pushVariadicNumber(slots); + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/client/lib/commands/CLUSTER_ADDSLOTSRANGE.spec.ts b/packages/client/lib/commands/CLUSTER_ADDSLOTSRANGE.spec.ts new file mode 100644 index 00000000000..40706968f93 --- /dev/null +++ b/packages/client/lib/commands/CLUSTER_ADDSLOTSRANGE.spec.ts @@ -0,0 +1,33 @@ +import { strict as assert } from 'node:assert'; +import testUtils from '../test-utils'; +import CLUSTER_ADDSLOTSRANGE from './CLUSTER_ADDSLOTSRANGE'; +import { parseArgs } from './generic-transformers'; + +describe('CLUSTER ADDSLOTSRANGE', () => { + testUtils.isVersionGreaterThanHook([7, 0]); + + describe('transformArguments', () => { + it('single', () => { + assert.deepEqual( + parseArgs(CLUSTER_ADDSLOTSRANGE, { + start: 0, + end: 1 + }), + ['CLUSTER', 'ADDSLOTSRANGE', '0', '1'] + ); + }); + + it('multiple', () => { + assert.deepEqual( + parseArgs(CLUSTER_ADDSLOTSRANGE, [{ + start: 0, + end: 1 + }, { + start: 2, + end: 3 + }]), + ['CLUSTER', 'ADDSLOTSRANGE', '0', '1', '2', '3'] + ); + }); + }); +}); diff --git a/packages/client/lib/commands/CLUSTER_ADDSLOTSRANGE.ts b/packages/client/lib/commands/CLUSTER_ADDSLOTSRANGE.ts new file mode 100644 index 00000000000..e3e7c8b4988 --- /dev/null +++ b/packages/client/lib/commands/CLUSTER_ADDSLOTSRANGE.ts @@ -0,0 +1,18 @@ +import { CommandParser } from '../client/parser'; +import { SimpleStringReply, Command } from '../RESP/types'; +import { parseSlotRangesArguments, SlotRange } from './generic-transformers'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Assigns hash slot ranges to the current node in a Redis Cluster + * @param parser - The Redis command parser + * @param ranges - One or 
more slot ranges to be assigned, each specified as [start, end] + */ + parseCommand(parser: CommandParser, ranges: SlotRange | Array) { + parser.push('CLUSTER', 'ADDSLOTSRANGE'); + parseSlotRangesArguments(parser, ranges); + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/client/lib/commands/CLUSTER_BUMPEPOCH.spec.ts b/packages/client/lib/commands/CLUSTER_BUMPEPOCH.spec.ts new file mode 100644 index 00000000000..f3ecde9f6a8 --- /dev/null +++ b/packages/client/lib/commands/CLUSTER_BUMPEPOCH.spec.ts @@ -0,0 +1,21 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import CLUSTER_BUMPEPOCH from './CLUSTER_BUMPEPOCH'; +import { parseArgs } from './generic-transformers'; + +describe('CLUSTER BUMPEPOCH', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(CLUSTER_BUMPEPOCH), + ['CLUSTER', 'BUMPEPOCH'] + ); + }); + + testUtils.testWithCluster('clusterNode.clusterBumpEpoch', async cluster => { + const client = await cluster.nodeClient(cluster.masters[0]); + assert.equal( + typeof await client.clusterBumpEpoch(), + 'string' + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/CLUSTER_BUMPEPOCH.ts b/packages/client/lib/commands/CLUSTER_BUMPEPOCH.ts new file mode 100644 index 00000000000..3cf9f3fb207 --- /dev/null +++ b/packages/client/lib/commands/CLUSTER_BUMPEPOCH.ts @@ -0,0 +1,15 @@ +import { CommandParser } from '../client/parser'; +import { SimpleStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Advances the cluster config epoch + * @param parser - The Redis command parser + */ + parseCommand(parser: CommandParser) { + parser.push('CLUSTER', 'BUMPEPOCH'); + }, + transformReply: undefined as unknown as () => SimpleStringReply<'BUMPED' | 'STILL'> +} as const satisfies Command; diff --git 
a/packages/client/lib/commands/CLUSTER_COUNT-FAILURE-REPORTS.spec.ts b/packages/client/lib/commands/CLUSTER_COUNT-FAILURE-REPORTS.spec.ts new file mode 100644 index 00000000000..06a901ef301 --- /dev/null +++ b/packages/client/lib/commands/CLUSTER_COUNT-FAILURE-REPORTS.spec.ts @@ -0,0 +1,22 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import CLUSTER_COUNT_FAILURE_REPORTS from './CLUSTER_COUNT-FAILURE-REPORTS'; +import { parseArgs } from './generic-transformers'; + +describe('CLUSTER COUNT-FAILURE-REPORTS', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(CLUSTER_COUNT_FAILURE_REPORTS, '0'), + ['CLUSTER', 'COUNT-FAILURE-REPORTS', '0'] + ); + }); + + testUtils.testWithCluster('clusterNode.clusterCountFailureReports', async cluster => { + const [master] = cluster.masters, + client = await cluster.nodeClient(master); + assert.equal( + typeof await client.clusterCountFailureReports(master.id), + 'number' + ); + }, GLOBAL.CLUSTERS.OPEN); +}); diff --git a/packages/client/lib/commands/CLUSTER_COUNT-FAILURE-REPORTS.ts b/packages/client/lib/commands/CLUSTER_COUNT-FAILURE-REPORTS.ts new file mode 100644 index 00000000000..b4421b1d8fa --- /dev/null +++ b/packages/client/lib/commands/CLUSTER_COUNT-FAILURE-REPORTS.ts @@ -0,0 +1,16 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Returns the number of failure reports for a given node + * @param parser - The Redis command parser + * @param nodeId - The ID of the node to check + */ + parseCommand(parser: CommandParser, nodeId: RedisArgument) { + parser.push('CLUSTER', 'COUNT-FAILURE-REPORTS', nodeId); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/CLUSTER_COUNTKEYSINSLOT.spec.ts 
b/packages/client/lib/commands/CLUSTER_COUNTKEYSINSLOT.spec.ts new file mode 100644 index 00000000000..52848409465 --- /dev/null +++ b/packages/client/lib/commands/CLUSTER_COUNTKEYSINSLOT.spec.ts @@ -0,0 +1,21 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import CLUSTER_COUNTKEYSINSLOT from './CLUSTER_COUNTKEYSINSLOT'; +import { parseArgs } from './generic-transformers'; + +describe('CLUSTER COUNTKEYSINSLOT', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(CLUSTER_COUNTKEYSINSLOT, 0), + ['CLUSTER', 'COUNTKEYSINSLOT', '0'] + ); + }); + + testUtils.testWithCluster('clusterNode.clusterCountKeysInSlot', async cluster => { + const client = await cluster.nodeClient(cluster.masters[0]); + assert.equal( + typeof await client.clusterCountKeysInSlot(0), + 'number' + ); + }, GLOBAL.CLUSTERS.OPEN); +}); diff --git a/packages/client/lib/commands/CLUSTER_COUNTKEYSINSLOT.ts b/packages/client/lib/commands/CLUSTER_COUNTKEYSINSLOT.ts new file mode 100644 index 00000000000..df97c79161e --- /dev/null +++ b/packages/client/lib/commands/CLUSTER_COUNTKEYSINSLOT.ts @@ -0,0 +1,16 @@ +import { CommandParser } from '../client/parser'; +import { NumberReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Returns the number of keys in the specified hash slot + * @param parser - The Redis command parser + * @param slot - The hash slot to check + */ + parseCommand(parser: CommandParser, slot: number) { + parser.push('CLUSTER', 'COUNTKEYSINSLOT', slot.toString()); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/CLUSTER_DELSLOTS.spec.ts b/packages/client/lib/commands/CLUSTER_DELSLOTS.spec.ts new file mode 100644 index 00000000000..2937fdd4d79 --- /dev/null +++ b/packages/client/lib/commands/CLUSTER_DELSLOTS.spec.ts @@ -0,0 +1,21 @@ +import { strict as assert } 
from 'node:assert'; +import CLUSTER_DELSLOTS from './CLUSTER_DELSLOTS'; +import { parseArgs } from './generic-transformers'; + +describe('CLUSTER DELSLOTS', () => { + describe('transformArguments', () => { + it('single', () => { + assert.deepEqual( + parseArgs(CLUSTER_DELSLOTS, 0), + ['CLUSTER', 'DELSLOTS', '0'] + ); + }); + + it('multiple', () => { + assert.deepEqual( + parseArgs(CLUSTER_DELSLOTS, [0, 1]), + ['CLUSTER', 'DELSLOTS', '0', '1'] + ); + }); + }); +}); diff --git a/packages/client/lib/commands/CLUSTER_DELSLOTS.ts b/packages/client/lib/commands/CLUSTER_DELSLOTS.ts new file mode 100644 index 00000000000..eea7bcb699e --- /dev/null +++ b/packages/client/lib/commands/CLUSTER_DELSLOTS.ts @@ -0,0 +1,17 @@ +import { CommandParser } from '../client/parser'; +import { SimpleStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Removes hash slots from the current node in a Redis Cluster + * @param parser - The Redis command parser + * @param slots - One or more hash slots to be removed + */ + parseCommand(parser: CommandParser, slots: number | Array) { + parser.push('CLUSTER', 'DELSLOTS'); + parser.pushVariadicNumber(slots); + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/client/lib/commands/CLUSTER_DELSLOTSRANGE.spec.ts b/packages/client/lib/commands/CLUSTER_DELSLOTSRANGE.spec.ts new file mode 100644 index 00000000000..6007421d11b --- /dev/null +++ b/packages/client/lib/commands/CLUSTER_DELSLOTSRANGE.spec.ts @@ -0,0 +1,30 @@ +import { strict as assert } from 'node:assert'; +import CLUSTER_DELSLOTSRANGE from './CLUSTER_DELSLOTSRANGE'; +import { parseArgs } from './generic-transformers'; + +describe('CLUSTER DELSLOTSRANGE', () => { + describe('transformArguments', () => { + it('single', () => { + assert.deepEqual( + parseArgs(CLUSTER_DELSLOTSRANGE, { + start: 0, + end: 1 + }), + ['CLUSTER', 'DELSLOTSRANGE', '0', 
'1'] + ); + }); + + it('multiple', () => { + assert.deepEqual( + parseArgs(CLUSTER_DELSLOTSRANGE, [{ + start: 0, + end: 1 + }, { + start: 2, + end: 3 + }]), + ['CLUSTER', 'DELSLOTSRANGE', '0', '1', '2', '3'] + ); + }); + }); +}); diff --git a/packages/client/lib/commands/CLUSTER_DELSLOTSRANGE.ts b/packages/client/lib/commands/CLUSTER_DELSLOTSRANGE.ts new file mode 100644 index 00000000000..32b27d8ea14 --- /dev/null +++ b/packages/client/lib/commands/CLUSTER_DELSLOTSRANGE.ts @@ -0,0 +1,18 @@ +import { CommandParser } from '../client/parser'; +import { SimpleStringReply, Command } from '../RESP/types'; +import { parseSlotRangesArguments, SlotRange } from './generic-transformers'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Removes hash slot ranges from the current node in a Redis Cluster + * @param parser - The Redis command parser + * @param ranges - One or more slot ranges to be removed, each specified as [start, end] + */ + parseCommand(parser:CommandParser, ranges: SlotRange | Array) { + parser.push('CLUSTER', 'DELSLOTSRANGE'); + parseSlotRangesArguments(parser, ranges); + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/client/lib/commands/CLUSTER_FAILOVER.spec.ts b/packages/client/lib/commands/CLUSTER_FAILOVER.spec.ts new file mode 100644 index 00000000000..f8e4b986048 --- /dev/null +++ b/packages/client/lib/commands/CLUSTER_FAILOVER.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import CLUSTER_FAILOVER, { FAILOVER_MODES } from './CLUSTER_FAILOVER'; +import { parseArgs } from './generic-transformers'; + +describe('CLUSTER FAILOVER', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(CLUSTER_FAILOVER), + ['CLUSTER', 'FAILOVER'] + ); + }); + + it('with mode', () => { + assert.deepEqual( + parseArgs(CLUSTER_FAILOVER, { + mode: FAILOVER_MODES.FORCE + }), + ['CLUSTER', 
'FAILOVER', 'FORCE'] + ); + }); + }); +}); diff --git a/packages/client/lib/commands/CLUSTER_FAILOVER.ts b/packages/client/lib/commands/CLUSTER_FAILOVER.ts new file mode 100644 index 00000000000..8a228c07349 --- /dev/null +++ b/packages/client/lib/commands/CLUSTER_FAILOVER.ts @@ -0,0 +1,31 @@ +import { CommandParser } from '../client/parser'; +import { SimpleStringReply, Command } from '../RESP/types'; + +export const FAILOVER_MODES = { + FORCE: 'FORCE', + TAKEOVER: 'TAKEOVER' +} as const; + +export type FailoverMode = typeof FAILOVER_MODES[keyof typeof FAILOVER_MODES]; + +export interface ClusterFailoverOptions { + mode?: FailoverMode; +} + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Forces a replica to perform a manual failover of its master + * @param parser - The Redis command parser + * @param options - Optional configuration with FORCE or TAKEOVER mode + */ + parseCommand(parser:CommandParser, options?: ClusterFailoverOptions) { + parser.push('CLUSTER', 'FAILOVER'); + + if (options?.mode) { + parser.push(options.mode); + } + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/client/lib/commands/CLUSTER_FLUSHSLOTS.spec.ts b/packages/client/lib/commands/CLUSTER_FLUSHSLOTS.spec.ts new file mode 100644 index 00000000000..43701adfe6a --- /dev/null +++ b/packages/client/lib/commands/CLUSTER_FLUSHSLOTS.spec.ts @@ -0,0 +1,12 @@ +import { strict as assert } from 'node:assert'; +import CLUSTER_FLUSHSLOTS from './CLUSTER_FLUSHSLOTS'; +import { parseArgs } from './generic-transformers'; + +describe('CLUSTER FLUSHSLOTS', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(CLUSTER_FLUSHSLOTS), + ['CLUSTER', 'FLUSHSLOTS'] + ); + }); +}); diff --git a/packages/client/lib/commands/CLUSTER_FLUSHSLOTS.ts b/packages/client/lib/commands/CLUSTER_FLUSHSLOTS.ts new file mode 100644 index 00000000000..dac1f24c697 --- /dev/null +++ 
b/packages/client/lib/commands/CLUSTER_FLUSHSLOTS.ts @@ -0,0 +1,15 @@ +import { CommandParser } from '../client/parser'; +import { SimpleStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Deletes all hash slots from the current node in a Redis Cluster + * @param parser - The Redis command parser + */ + parseCommand(parser: CommandParser) { + parser.push('CLUSTER', 'FLUSHSLOTS'); + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/client/lib/commands/CLUSTER_FORGET.spec.ts b/packages/client/lib/commands/CLUSTER_FORGET.spec.ts new file mode 100644 index 00000000000..8d02374cf87 --- /dev/null +++ b/packages/client/lib/commands/CLUSTER_FORGET.spec.ts @@ -0,0 +1,12 @@ +import { strict as assert } from 'node:assert'; +import CLUSTER_FORGET from './CLUSTER_FORGET'; +import { parseArgs } from './generic-transformers'; + +describe('CLUSTER FORGET', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(CLUSTER_FORGET, '0'), + ['CLUSTER', 'FORGET', '0'] + ); + }); +}); diff --git a/packages/client/lib/commands/CLUSTER_FORGET.ts b/packages/client/lib/commands/CLUSTER_FORGET.ts new file mode 100644 index 00000000000..ff7cb952121 --- /dev/null +++ b/packages/client/lib/commands/CLUSTER_FORGET.ts @@ -0,0 +1,16 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, SimpleStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Removes a node from the cluster + * @param parser - The Redis command parser + * @param nodeId - The ID of the node to remove + */ + parseCommand(parser: CommandParser, nodeId: RedisArgument) { + parser.push('CLUSTER', 'FORGET', nodeId); + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git 
a/packages/client/lib/commands/CLUSTER_GETKEYSINSLOT.spec.ts b/packages/client/lib/commands/CLUSTER_GETKEYSINSLOT.spec.ts new file mode 100644 index 00000000000..468eecc74a9 --- /dev/null +++ b/packages/client/lib/commands/CLUSTER_GETKEYSINSLOT.spec.ts @@ -0,0 +1,26 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import CLUSTER_GETKEYSINSLOT from './CLUSTER_GETKEYSINSLOT'; +import { parseArgs } from './generic-transformers'; + +describe('CLUSTER GETKEYSINSLOT', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(CLUSTER_GETKEYSINSLOT, 0, 10), + ['CLUSTER', 'GETKEYSINSLOT', '0', '10'] + ); + }); + + testUtils.testWithCluster('clusterNode.clusterGetKeysInSlot', async cluster => { + const slot = 12539, // "key" slot + client = await cluster.nodeClient(cluster.slots[slot].master), + [, reply] = await Promise.all([ + client.set('key', 'value'), + client.clusterGetKeysInSlot(slot, 1), + ]) + assert.ok(Array.isArray(reply)); + for (const item of reply) { + assert.equal(typeof item, 'string'); + } + }, GLOBAL.CLUSTERS.OPEN); +}); diff --git a/packages/client/lib/commands/CLUSTER_GETKEYSINSLOT.ts b/packages/client/lib/commands/CLUSTER_GETKEYSINSLOT.ts new file mode 100644 index 00000000000..90756cf6302 --- /dev/null +++ b/packages/client/lib/commands/CLUSTER_GETKEYSINSLOT.ts @@ -0,0 +1,17 @@ +import { CommandParser } from '../client/parser'; +import { ArrayReply, BlobStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Returns a number of keys from the specified hash slot + * @param parser - The Redis command parser + * @param slot - The hash slot to get keys from + * @param count - Maximum number of keys to return + */ + parseCommand(parser: CommandParser, slot: number, count: number) { + parser.push('CLUSTER', 'GETKEYSINSLOT', slot.toString(), count.toString()); + }, + transformReply: undefined as unknown as () => 
ArrayReply<BlobStringReply>
+} as const satisfies Command;
diff --git a/packages/client/lib/commands/CLUSTER_INFO.spec.ts b/packages/client/lib/commands/CLUSTER_INFO.spec.ts
new file mode 100644
index 00000000000..01dafce8d53
--- /dev/null
+++ b/packages/client/lib/commands/CLUSTER_INFO.spec.ts
@@ -0,0 +1,21 @@
+import { strict as assert } from 'node:assert';
+import testUtils, { GLOBAL } from '../test-utils';
+import CLUSTER_INFO from './CLUSTER_INFO';
+import { parseArgs } from './generic-transformers';
+
+describe('CLUSTER INFO', () => {
+  it('transformArguments', () => {
+    assert.deepEqual(
+      parseArgs(CLUSTER_INFO),
+      ['CLUSTER', 'INFO']
+    );
+  });
+
+  testUtils.testWithCluster('clusterNode.clusterInfo', async cluster => {
+    const client = await cluster.nodeClient(cluster.masters[0]);
+    assert.equal(
+      typeof await client.clusterInfo(),
+      'string'
+    );
+  }, GLOBAL.CLUSTERS.OPEN);
+});
diff --git a/packages/client/lib/commands/CLUSTER_INFO.ts b/packages/client/lib/commands/CLUSTER_INFO.ts
new file mode 100644
index 00000000000..fa220b9f645
--- /dev/null
+++ b/packages/client/lib/commands/CLUSTER_INFO.ts
@@ -0,0 +1,15 @@
+import { CommandParser } from '../client/parser';
+import { VerbatimStringReply, Command } from '../RESP/types';
+
+export default {
+  NOT_KEYED_COMMAND: true,
+  IS_READ_ONLY: true,
+  /**
+   * Returns information about the state of a Redis Cluster
+   * @param parser - The Redis command parser
+   */
+  parseCommand(parser: CommandParser) {
+    parser.push('CLUSTER', 'INFO');
+  },
+  transformReply: undefined as unknown as () => VerbatimStringReply
+} as const satisfies Command;
diff --git a/packages/client/lib/commands/CLUSTER_KEYSLOT.spec.ts b/packages/client/lib/commands/CLUSTER_KEYSLOT.spec.ts
new file mode 100644
index 00000000000..188c403abb5
--- /dev/null
+++ b/packages/client/lib/commands/CLUSTER_KEYSLOT.spec.ts
@@ -0,0 +1,21 @@
+import { strict as assert } from 'node:assert';
+import testUtils, { GLOBAL } from '../test-utils';
+import CLUSTER_KEYSLOT from
'./CLUSTER_KEYSLOT'; +import { parseArgs } from './generic-transformers'; + +describe('CLUSTER KEYSLOT', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(CLUSTER_KEYSLOT, 'key'), + ['CLUSTER', 'KEYSLOT', 'key'] + ); + }); + + testUtils.testWithCluster('clusterNode.clusterKeySlot', async cluster => { + const client = await cluster.nodeClient(cluster.masters[0]); + assert.equal( + typeof await client.clusterKeySlot('key'), + 'number' + ); + }, GLOBAL.CLUSTERS.OPEN); +}); diff --git a/packages/client/lib/commands/CLUSTER_KEYSLOT.ts b/packages/client/lib/commands/CLUSTER_KEYSLOT.ts new file mode 100644 index 00000000000..1add4cb4f4a --- /dev/null +++ b/packages/client/lib/commands/CLUSTER_KEYSLOT.ts @@ -0,0 +1,16 @@ +import { CommandParser } from '../client/parser'; +import { Command, NumberReply, RedisArgument } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Returns the hash slot number for a given key + * @param parser - The Redis command parser + * @param key - The key to get the hash slot for + */ + parseCommand(parser: CommandParser, key: RedisArgument) { + parser.push('CLUSTER', 'KEYSLOT', key); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/CLUSTER_LINKS.spec.ts b/packages/client/lib/commands/CLUSTER_LINKS.spec.ts new file mode 100644 index 00000000000..609ecfd3da9 --- /dev/null +++ b/packages/client/lib/commands/CLUSTER_LINKS.spec.ts @@ -0,0 +1,29 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import CLUSTER_LINKS from './CLUSTER_LINKS'; +import { parseArgs } from './generic-transformers'; + +describe('CLUSTER LINKS', () => { + testUtils.isVersionGreaterThanHook([7]); + + it('transformArguments', () => { + assert.deepEqual( + parseArgs(CLUSTER_LINKS), + ['CLUSTER', 'LINKS'] + ); + }); + + 
testUtils.testWithCluster('clusterNode.clusterLinks', async cluster => {
+    const client = await cluster.nodeClient(cluster.masters[0]),
+      links = await client.clusterLinks();
+    assert.ok(Array.isArray(links));
+    for (const link of links) {
+      assert.equal(typeof link.direction, 'string');
+      assert.equal(typeof link.node, 'string');
+      assert.equal(typeof link['create-time'], 'number');
+      assert.equal(typeof link.events, 'string');
+      assert.equal(typeof link['send-buffer-allocated'], 'number');
+      assert.equal(typeof link['send-buffer-used'], 'number');
+    }
+  }, GLOBAL.CLUSTERS.OPEN);
+});
diff --git a/packages/client/lib/commands/CLUSTER_LINKS.ts b/packages/client/lib/commands/CLUSTER_LINKS.ts
new file mode 100644
index 00000000000..d35f4712650
--- /dev/null
+++ b/packages/client/lib/commands/CLUSTER_LINKS.ts
@@ -0,0 +1,37 @@
+import { CommandParser } from '../client/parser';
+import { ArrayReply, TuplesToMapReply, BlobStringReply, NumberReply, UnwrapReply, Resp2Reply, Command } from '../RESP/types';
+
+type ClusterLinksReply = ArrayReply<TuplesToMapReply<[
+  [BlobStringReply<'direction'>, BlobStringReply],
+  [BlobStringReply<'node'>, BlobStringReply],
+  [BlobStringReply<'create-time'>, NumberReply],
+  [BlobStringReply<'events'>, BlobStringReply],
+  [BlobStringReply<'send-buffer-allocated'>, NumberReply],
+  [BlobStringReply<'send-buffer-used'>, NumberReply],
+]>>;
+
+export default {
+  NOT_KEYED_COMMAND: true,
+  IS_READ_ONLY: true,
+  /**
+   * Returns information about all cluster links (lower level connections to other nodes)
+   * @param parser - The Redis command parser
+   */
+  parseCommand(parser: CommandParser) {
+    parser.push('CLUSTER', 'LINKS');
+  },
+  transformReply: {
+    // RESP2 returns each link as a flat [field, value, field, value, ...] array;
+    // values therefore live at the odd indexes (1, 3, 5, ...).
+    2: (reply: UnwrapReply<Resp2Reply<ClusterLinksReply>>) => reply.map(link => {
+      const unwrapped = link as unknown as UnwrapReply<typeof link>;
+      return {
+        direction: unwrapped[1],
+        node: unwrapped[3],
+        'create-time': unwrapped[5],
+        events: unwrapped[7],
+        'send-buffer-allocated': unwrapped[9],
+        'send-buffer-used': unwrapped[11]
+      };
+    }),
+    3: undefined as unknown as () =>
ClusterLinksReply + } +} as const satisfies Command; diff --git a/packages/client/lib/commands/CLUSTER_MEET.spec.ts b/packages/client/lib/commands/CLUSTER_MEET.spec.ts new file mode 100644 index 00000000000..6c063f34e45 --- /dev/null +++ b/packages/client/lib/commands/CLUSTER_MEET.spec.ts @@ -0,0 +1,12 @@ +import { strict as assert } from 'node:assert'; +import CLUSTER_MEET from './CLUSTER_MEET'; +import { parseArgs } from './generic-transformers'; + +describe('CLUSTER MEET', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(CLUSTER_MEET, '127.0.0.1', 6379), + ['CLUSTER', 'MEET', '127.0.0.1', '6379'] + ); + }); +}); diff --git a/packages/client/lib/commands/CLUSTER_MEET.ts b/packages/client/lib/commands/CLUSTER_MEET.ts new file mode 100644 index 00000000000..e555250942c --- /dev/null +++ b/packages/client/lib/commands/CLUSTER_MEET.ts @@ -0,0 +1,17 @@ +import { CommandParser } from '../client/parser'; +import { SimpleStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Initiates a handshake with another node in the cluster + * @param parser - The Redis command parser + * @param host - Host name or IP address of the node + * @param port - TCP port of the node + */ + parseCommand(parser: CommandParser, host: string, port: number) { + parser.push('CLUSTER', 'MEET', host, port.toString()); + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/client/lib/commands/CLUSTER_MYID.spec.ts b/packages/client/lib/commands/CLUSTER_MYID.spec.ts new file mode 100644 index 00000000000..78bb4495e3c --- /dev/null +++ b/packages/client/lib/commands/CLUSTER_MYID.spec.ts @@ -0,0 +1,22 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import CLUSTER_MYID from './CLUSTER_MYID'; +import { parseArgs } from './generic-transformers'; + +describe('CLUSTER MYID', () => { + 
it('transformArguments', () => { + assert.deepEqual( + parseArgs(CLUSTER_MYID), + ['CLUSTER', 'MYID'] + ); + }); + + testUtils.testWithCluster('clusterNode.clusterMyId', async cluster => { + const [master] = cluster.masters, + client = await cluster.nodeClient(master); + assert.equal( + await client.clusterMyId(), + master.id + ); + }, GLOBAL.CLUSTERS.OPEN); +}); diff --git a/packages/client/lib/commands/CLUSTER_MYID.ts b/packages/client/lib/commands/CLUSTER_MYID.ts new file mode 100644 index 00000000000..ce4ad9c5ff2 --- /dev/null +++ b/packages/client/lib/commands/CLUSTER_MYID.ts @@ -0,0 +1,15 @@ +import { CommandParser } from '../client/parser'; +import { BlobStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Returns the node ID of the current Redis Cluster node + * @param parser - The Redis command parser + */ + parseCommand(parser: CommandParser) { + parser.push('CLUSTER', 'MYID'); + }, + transformReply: undefined as unknown as () => BlobStringReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/CLUSTER_MYSHARDID.spec.ts b/packages/client/lib/commands/CLUSTER_MYSHARDID.spec.ts new file mode 100644 index 00000000000..6c2a61801bc --- /dev/null +++ b/packages/client/lib/commands/CLUSTER_MYSHARDID.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import CLUSTER_MYSHARDID from './CLUSTER_MYSHARDID'; +import { parseArgs } from './generic-transformers'; + +describe('CLUSTER MYSHARDID', () => { + testUtils.isVersionGreaterThanHook([7, 2]); + + it('transformArguments', () => { + assert.deepEqual( + parseArgs(CLUSTER_MYSHARDID), + ['CLUSTER', 'MYSHARDID'] + ); + }); + + testUtils.testWithCluster('clusterNode.clusterMyShardId', async cluster => { + const client = await cluster.nodeClient(cluster.masters[0]); + assert.equal( + typeof await client.clusterMyShardId(), + 'string' + ); + }, 
GLOBAL.CLUSTERS.OPEN); +}); diff --git a/packages/client/lib/commands/CLUSTER_MYSHARDID.ts b/packages/client/lib/commands/CLUSTER_MYSHARDID.ts new file mode 100644 index 00000000000..72e38e4a901 --- /dev/null +++ b/packages/client/lib/commands/CLUSTER_MYSHARDID.ts @@ -0,0 +1,16 @@ +import { CommandParser } from '../client/parser'; +import { BlobStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Returns the shard ID of the current Redis Cluster node + * @param parser - The Redis command parser + */ + parseCommand(parser: CommandParser) { + parser.push('CLUSTER', 'MYSHARDID'); + }, + transformReply: undefined as unknown as () => BlobStringReply +} as const satisfies Command; + diff --git a/packages/client/lib/commands/CLUSTER_NODES.spec.ts b/packages/client/lib/commands/CLUSTER_NODES.spec.ts new file mode 100644 index 00000000000..a49996586b7 --- /dev/null +++ b/packages/client/lib/commands/CLUSTER_NODES.spec.ts @@ -0,0 +1,21 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import CLUSTER_NODES from './CLUSTER_NODES'; +import { parseArgs } from './generic-transformers'; + +describe('CLUSTER NODES', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(CLUSTER_NODES), + ['CLUSTER', 'NODES'] + ); + }); + + testUtils.testWithCluster('clusterNode.clusterNodes', async cluster => { + const client = await cluster.nodeClient(cluster.masters[0]); + assert.equal( + typeof await client.clusterNodes(), + 'string' + ); + }, GLOBAL.CLUSTERS.OPEN); +}); diff --git a/packages/client/lib/commands/CLUSTER_NODES.ts b/packages/client/lib/commands/CLUSTER_NODES.ts new file mode 100644 index 00000000000..5ce8388e9c6 --- /dev/null +++ b/packages/client/lib/commands/CLUSTER_NODES.ts @@ -0,0 +1,15 @@ +import { CommandParser } from '../client/parser'; +import { VerbatimStringReply, Command } from '../RESP/types'; + +export default { + 
NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Returns serialized information about the nodes in a Redis Cluster + * @param parser - The Redis command parser + */ + parseCommand(parser: CommandParser) { + parser.push('CLUSTER', 'NODES'); + }, + transformReply: undefined as unknown as () => VerbatimStringReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/CLUSTER_REPLICAS.spec.ts b/packages/client/lib/commands/CLUSTER_REPLICAS.spec.ts new file mode 100644 index 00000000000..11bf086bb66 --- /dev/null +++ b/packages/client/lib/commands/CLUSTER_REPLICAS.spec.ts @@ -0,0 +1,22 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import CLUSTER_REPLICAS from './CLUSTER_REPLICAS'; +import { parseArgs } from './generic-transformers'; + +describe('CLUSTER REPLICAS', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(CLUSTER_REPLICAS, '0'), + ['CLUSTER', 'REPLICAS', '0'] + ); + }); + + testUtils.testWithCluster('clusterNode.clusterReplicas', async cluster => { + const client = await cluster.nodeClient(cluster.masters[0]), + reply = await client.clusterReplicas(cluster.masters[0].id); + assert.ok(Array.isArray(reply)); + for (const replica of reply) { + assert.equal(typeof replica, 'string'); + } + }, GLOBAL.CLUSTERS.OPEN); +}); diff --git a/packages/client/lib/commands/CLUSTER_REPLICAS.ts b/packages/client/lib/commands/CLUSTER_REPLICAS.ts new file mode 100644 index 00000000000..6c205eb73c6 --- /dev/null +++ b/packages/client/lib/commands/CLUSTER_REPLICAS.ts @@ -0,0 +1,16 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, ArrayReply, BlobStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Returns the replica nodes replicating from the specified primary node + * @param parser - The Redis command parser + * @param nodeId - Node ID of the primary node + */ + 
parseCommand(parser: CommandParser, nodeId: RedisArgument) { + parser.push('CLUSTER', 'REPLICAS', nodeId); + }, + transformReply: undefined as unknown as () => ArrayReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/CLUSTER_REPLICATE.spec.ts b/packages/client/lib/commands/CLUSTER_REPLICATE.spec.ts new file mode 100644 index 00000000000..3f130d360bf --- /dev/null +++ b/packages/client/lib/commands/CLUSTER_REPLICATE.spec.ts @@ -0,0 +1,12 @@ +import { strict as assert } from 'node:assert'; +import CLUSTER_REPLICATE from './CLUSTER_REPLICATE'; +import { parseArgs } from './generic-transformers'; + +describe('CLUSTER REPLICATE', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(CLUSTER_REPLICATE, '0'), + ['CLUSTER', 'REPLICATE', '0'] + ); + }); +}); diff --git a/packages/client/lib/commands/CLUSTER_REPLICATE.ts b/packages/client/lib/commands/CLUSTER_REPLICATE.ts new file mode 100644 index 00000000000..54fb19f460f --- /dev/null +++ b/packages/client/lib/commands/CLUSTER_REPLICATE.ts @@ -0,0 +1,16 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, SimpleStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Reconfigures a node as a replica of the specified primary node + * @param parser - The Redis command parser + * @param nodeId - Node ID of the primary node to replicate + */ + parseCommand(parser: CommandParser, nodeId: RedisArgument) { + parser.push('CLUSTER', 'REPLICATE', nodeId); + }, + transformReply: undefined as unknown as () => SimpleStringReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/CLUSTER_RESET.spec.ts b/packages/client/lib/commands/CLUSTER_RESET.spec.ts new file mode 100644 index 00000000000..1ef55e3f572 --- /dev/null +++ b/packages/client/lib/commands/CLUSTER_RESET.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import CLUSTER_RESET from 
'./CLUSTER_RESET'; +import { parseArgs } from './generic-transformers'; + +describe('CLUSTER RESET', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(CLUSTER_RESET), + ['CLUSTER', 'RESET'] + ); + }); + + it('with mode', () => { + assert.deepEqual( + parseArgs(CLUSTER_RESET, { + mode: 'HARD' + }), + ['CLUSTER', 'RESET', 'HARD'] + ); + }); + }); +}); diff --git a/packages/client/lib/commands/CLUSTER_RESET.ts b/packages/client/lib/commands/CLUSTER_RESET.ts new file mode 100644 index 00000000000..baa29e9e8b2 --- /dev/null +++ b/packages/client/lib/commands/CLUSTER_RESET.ts @@ -0,0 +1,24 @@ +import { CommandParser } from '../client/parser'; +import { SimpleStringReply, Command } from '../RESP/types'; + +export interface ClusterResetOptions { + mode?: 'HARD' | 'SOFT'; +} + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Resets a Redis Cluster node, clearing all information and returning it to a brand new state + * @param parser - The Redis command parser + * @param options - Options for the reset operation + */ + parseCommand(parser: CommandParser, options?: ClusterResetOptions) { + parser.push('CLUSTER', 'RESET'); + + if (options?.mode) { + parser.push(options.mode); + } + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/client/lib/commands/CLUSTER_SAVECONFIG.spec.ts b/packages/client/lib/commands/CLUSTER_SAVECONFIG.spec.ts new file mode 100644 index 00000000000..a0d317ffae4 --- /dev/null +++ b/packages/client/lib/commands/CLUSTER_SAVECONFIG.spec.ts @@ -0,0 +1,21 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import CLUSTER_SAVECONFIG from './CLUSTER_SAVECONFIG'; +import { parseArgs } from './generic-transformers'; + +describe('CLUSTER SAVECONFIG', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(CLUSTER_SAVECONFIG), + 
['CLUSTER', 'SAVECONFIG'] + ); + }); + + testUtils.testWithCluster('clusterNode.clusterSaveConfig', async cluster => { + const client = await cluster.nodeClient(cluster.masters[0]); + assert.equal( + await client.clusterSaveConfig(), + 'OK' + ); + }, GLOBAL.CLUSTERS.OPEN); +}); diff --git a/packages/client/lib/commands/CLUSTER_SAVECONFIG.ts b/packages/client/lib/commands/CLUSTER_SAVECONFIG.ts new file mode 100644 index 00000000000..8a0529b2c6b --- /dev/null +++ b/packages/client/lib/commands/CLUSTER_SAVECONFIG.ts @@ -0,0 +1,16 @@ +import { CommandParser } from '../client/parser'; +import { SimpleStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Forces a Redis Cluster node to save the cluster configuration to disk + * @param parser - The Redis command parser + */ + parseCommand(parser: CommandParser) { + parser.push('CLUSTER', 'SAVECONFIG'); + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; + diff --git a/packages/client/lib/commands/CLUSTER_SET-CONFIG-EPOCH.spec.ts b/packages/client/lib/commands/CLUSTER_SET-CONFIG-EPOCH.spec.ts new file mode 100644 index 00000000000..fb02ee2fe65 --- /dev/null +++ b/packages/client/lib/commands/CLUSTER_SET-CONFIG-EPOCH.spec.ts @@ -0,0 +1,12 @@ +import { strict as assert } from 'node:assert'; +import CLUSTER_SET_CONFIG_EPOCH from './CLUSTER_SET-CONFIG-EPOCH'; +import { parseArgs } from './generic-transformers'; + +describe('CLUSTER SET-CONFIG-EPOCH', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(CLUSTER_SET_CONFIG_EPOCH, 0), + ['CLUSTER', 'SET-CONFIG-EPOCH', '0'] + ); + }); +}); diff --git a/packages/client/lib/commands/CLUSTER_SET-CONFIG-EPOCH.ts b/packages/client/lib/commands/CLUSTER_SET-CONFIG-EPOCH.ts new file mode 100644 index 00000000000..bcd89f52f8c --- /dev/null +++ b/packages/client/lib/commands/CLUSTER_SET-CONFIG-EPOCH.ts @@ -0,0 +1,16 @@ +import { CommandParser } 
from '../client/parser'; +import { SimpleStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Sets the configuration epoch for a Redis Cluster node + * @param parser - The Redis command parser + * @param configEpoch - The configuration epoch to set + */ + parseCommand(parser: CommandParser, configEpoch: number) { + parser.push('CLUSTER', 'SET-CONFIG-EPOCH', configEpoch.toString()); + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/client/lib/commands/CLUSTER_SETSLOT.spec.ts b/packages/client/lib/commands/CLUSTER_SETSLOT.spec.ts new file mode 100644 index 00000000000..fac496c3afb --- /dev/null +++ b/packages/client/lib/commands/CLUSTER_SETSLOT.spec.ts @@ -0,0 +1,21 @@ +import { strict as assert } from 'node:assert'; +import CLUSTER_SETSLOT, { CLUSTER_SLOT_STATES } from './CLUSTER_SETSLOT'; +import { parseArgs } from './generic-transformers'; + +describe('CLUSTER SETSLOT', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(CLUSTER_SETSLOT, 0, CLUSTER_SLOT_STATES.IMPORTING), + ['CLUSTER', 'SETSLOT', '0', 'IMPORTING'] + ); + }); + + it('with nodeId', () => { + assert.deepEqual( + parseArgs(CLUSTER_SETSLOT, 0, CLUSTER_SLOT_STATES.IMPORTING, 'nodeId'), + ['CLUSTER', 'SETSLOT', '0', 'IMPORTING', 'nodeId'] + ); + }); + }); +}); diff --git a/packages/client/lib/commands/CLUSTER_SETSLOT.ts b/packages/client/lib/commands/CLUSTER_SETSLOT.ts new file mode 100644 index 00000000000..d50982e2d11 --- /dev/null +++ b/packages/client/lib/commands/CLUSTER_SETSLOT.ts @@ -0,0 +1,31 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, SimpleStringReply, Command } from '../RESP/types'; + +export const CLUSTER_SLOT_STATES = { + IMPORTING: 'IMPORTING', + MIGRATING: 'MIGRATING', + STABLE: 'STABLE', + NODE: 'NODE' +} as const; + +export type ClusterSlotState = typeof 
CLUSTER_SLOT_STATES[keyof typeof CLUSTER_SLOT_STATES]; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Assigns a hash slot to a specific Redis Cluster node + * @param parser - The Redis command parser + * @param slot - The slot number to assign + * @param state - The state to set for the slot (IMPORTING, MIGRATING, STABLE, NODE) + * @param nodeId - Node ID (required for IMPORTING, MIGRATING, and NODE states) + */ + parseCommand(parser: CommandParser, slot: number, state: ClusterSlotState, nodeId?: RedisArgument) { + parser.push('CLUSTER', 'SETSLOT', slot.toString(), state); + + if (nodeId) { + parser.push(nodeId); + } + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/client/lib/commands/CLUSTER_SLOTS.spec.ts b/packages/client/lib/commands/CLUSTER_SLOTS.spec.ts new file mode 100644 index 00000000000..28879b036ae --- /dev/null +++ b/packages/client/lib/commands/CLUSTER_SLOTS.spec.ts @@ -0,0 +1,31 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import CLUSTER_SLOTS from './CLUSTER_SLOTS'; +import { parseArgs } from './generic-transformers'; + +describe('CLUSTER SLOTS', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(CLUSTER_SLOTS), + ['CLUSTER', 'SLOTS'] + ); + }); + + testUtils.testWithCluster('clusterNode.clusterSlots', async cluster => { + const client = await cluster.nodeClient(cluster.masters[0]), + slots = await client.clusterSlots(); + assert.ok(Array.isArray(slots)); + for (const { from, to, master, replicas } of slots) { + assert.equal(typeof from, 'number'); + assert.equal(typeof to, 'number'); + assert.equal(typeof master.host, 'string'); + assert.equal(typeof master.port, 'number'); + assert.equal(typeof master.id, 'string'); + for (const replica of replicas) { + assert.equal(typeof replica.host, 'string'); + assert.equal(typeof replica.port, 'number'); + 
assert.equal(typeof replica.id, 'string');
+      }
+    }
+  }, GLOBAL.CLUSTERS.WITH_REPLICAS);
+});
diff --git a/packages/client/lib/commands/CLUSTER_SLOTS.ts b/packages/client/lib/commands/CLUSTER_SLOTS.ts
new file mode 100644
index 00000000000..ca6ad9cb9e2
--- /dev/null
+++ b/packages/client/lib/commands/CLUSTER_SLOTS.ts
@@ -0,0 +1,46 @@
+import { CommandParser } from '../client/parser';
+import { TuplesReply, BlobStringReply, NumberReply, ArrayReply, UnwrapReply, Command } from '../RESP/types';
+
+type RawNode = TuplesReply<[
+  host: BlobStringReply,
+  port: NumberReply,
+  id: BlobStringReply
+]>;
+
+type ClusterSlotsRawReply = ArrayReply<[
+  from: NumberReply,
+  to: NumberReply,
+  master: RawNode,
+  ...replicas: Array<RawNode>
+]>;
+
+export type ClusterSlotsNode = ReturnType<typeof transformNode>;
+
+export default {
+  NOT_KEYED_COMMAND: true,
+  IS_READ_ONLY: true,
+  /**
+   * Returns information about which Redis Cluster node handles which hash slots
+   * @param parser - The Redis command parser
+   */
+  parseCommand(parser: CommandParser) {
+    parser.push('CLUSTER', 'SLOTS');
+  },
+  transformReply(reply: UnwrapReply<ClusterSlotsRawReply>) {
+    return reply.map(([from, to, master, ...replicas]) => ({
+      from,
+      to,
+      master: transformNode(master),
+      replicas: replicas.map(transformNode)
+    }));
+  }
+} as const satisfies Command;
+
+function transformNode(node: RawNode) {
+  const [host, port, id] = node as unknown as UnwrapReply<typeof node>;
+  return {
+    host,
+    port,
+    id
+  };
+}
diff --git a/packages/client/lib/commands/COMMAND.spec.ts b/packages/client/lib/commands/COMMAND.spec.ts
new file mode 100644
index 00000000000..860ffc30685
--- /dev/null
+++ b/packages/client/lib/commands/COMMAND.spec.ts
@@ -0,0 +1,17 @@
+// import { strict as assert } from 'node:assert';
+// import testUtils, { GLOBAL } from '../test-utils';
+// import { transformArguments } from './COMMAND';
+// import { assertPingCommand } from './COMMAND_INFO.spec';
+
+// describe('COMMAND', () => {
+//   it('transformArguments', () => {
+//     assert.deepEqual(
+//
transformArguments(), +// ['COMMAND'] +// ); +// }); + +// testUtils.testWithClient('client.command', async client => { +// assertPingCommand((await client.command()).find(command => command.name === 'ping')); +// }, GLOBAL.SERVERS.OPEN); +// }); diff --git a/packages/client/lib/commands/COMMAND.ts b/packages/client/lib/commands/COMMAND.ts new file mode 100644 index 00000000000..3d24b716a0d --- /dev/null +++ b/packages/client/lib/commands/COMMAND.ts @@ -0,0 +1,19 @@ +import { CommandParser } from '../client/parser'; +import { ArrayReply, Command, UnwrapReply } from '../RESP/types'; +import { CommandRawReply, CommandReply, transformCommandReply } from './generic-transformers'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Returns an array with details about all Redis commands + * @param parser - The Redis command parser + */ + parseCommand(parser: CommandParser) { + parser.push('COMMAND'); + }, + // TODO: This works, as we don't currently handle any of the items returned as a map + transformReply(reply: UnwrapReply>): Array { + return reply.map(transformCommandReply); + } +} as const satisfies Command; \ No newline at end of file diff --git a/packages/client/lib/commands/COMMAND_COUNT.spec.ts b/packages/client/lib/commands/COMMAND_COUNT.spec.ts new file mode 100644 index 00000000000..a36091df482 --- /dev/null +++ b/packages/client/lib/commands/COMMAND_COUNT.spec.ts @@ -0,0 +1,20 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import COMMAND_COUNT from './COMMAND_COUNT'; +import { parseArgs } from './generic-transformers'; + +describe('COMMAND COUNT', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(COMMAND_COUNT), + ['COMMAND', 'COUNT'] + ); + }); + + testUtils.testWithClient('client.commandCount', async client => { + assert.equal( + typeof await client.commandCount(), + 'number' + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git 
a/packages/client/lib/commands/COMMAND_COUNT.ts b/packages/client/lib/commands/COMMAND_COUNT.ts new file mode 100644 index 00000000000..36b35a58d7b --- /dev/null +++ b/packages/client/lib/commands/COMMAND_COUNT.ts @@ -0,0 +1,15 @@ +import { CommandParser } from '../client/parser'; +import { NumberReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Returns the total number of commands available in the Redis server + * @param parser - The Redis command parser + */ + parseCommand(parser: CommandParser) { + parser.push('COMMAND', 'COUNT'); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/COMMAND_GETKEYS.spec.ts b/packages/client/lib/commands/COMMAND_GETKEYS.spec.ts new file mode 100644 index 00000000000..332e2d51fbd --- /dev/null +++ b/packages/client/lib/commands/COMMAND_GETKEYS.spec.ts @@ -0,0 +1,20 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import COMMAND_GETKEYS from './COMMAND_GETKEYS'; +import { parseArgs } from './generic-transformers'; + +describe('COMMAND GETKEYS', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(COMMAND_GETKEYS, ['GET', 'key']), + ['COMMAND', 'GETKEYS', 'GET', 'key'] + ); + }); + + testUtils.testWithClient('client.commandGetKeys', async client => { + assert.deepEqual( + await client.commandGetKeys(['GET', 'key']), + ['key'] + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/COMMAND_GETKEYS.ts b/packages/client/lib/commands/COMMAND_GETKEYS.ts new file mode 100644 index 00000000000..6f447c4d4d6 --- /dev/null +++ b/packages/client/lib/commands/COMMAND_GETKEYS.ts @@ -0,0 +1,17 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, ArrayReply, BlobStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: 
true, + /** + * Extracts the key names from a Redis command + * @param parser - The Redis command parser + * @param args - Command arguments to analyze + */ + parseCommand(parser: CommandParser, args: Array) { + parser.push('COMMAND', 'GETKEYS'); + parser.push(...args); + }, + transformReply: undefined as unknown as () => ArrayReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/COMMAND_GETKEYSANDFLAGS.spec.ts b/packages/client/lib/commands/COMMAND_GETKEYSANDFLAGS.spec.ts new file mode 100644 index 00000000000..49652762d65 --- /dev/null +++ b/packages/client/lib/commands/COMMAND_GETKEYSANDFLAGS.spec.ts @@ -0,0 +1,24 @@ +// import { strict as assert } from 'node:assert'; +// import testUtils, { GLOBAL } from '../test-utils'; +// import { transformArguments } from './COMMAND_GETKEYSANDFLAGS'; + +// describe('COMMAND GETKEYSANDFLAGS', () => { +// testUtils.isVersionGreaterThanHook([7]); + +// it('transformArguments', () => { +// assert.deepEqual( +// transformArguments(['GET', 'key']), +// ['COMMAND', 'GETKEYSANDFLAGS', 'GET', 'key'] +// ); +// }); + +// testUtils.testWithClient('client.commandGetKeysAndFlags', async client => { +// assert.deepEqual( +// await client.commandGetKeysAndFlags(['GET', 'key']), +// [{ +// key: 'key', +// flags: ['RO', 'access'] +// }] +// ); +// }, GLOBAL.SERVERS.OPEN); +// }); diff --git a/packages/client/lib/commands/COMMAND_GETKEYSANDFLAGS.ts b/packages/client/lib/commands/COMMAND_GETKEYSANDFLAGS.ts new file mode 100644 index 00000000000..1677adb43ec --- /dev/null +++ b/packages/client/lib/commands/COMMAND_GETKEYSANDFLAGS.ts @@ -0,0 +1,30 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, ArrayReply, TuplesReply, BlobStringReply, SetReply, UnwrapReply, Command } from '../RESP/types'; + +export type CommandGetKeysAndFlagsRawReply = ArrayReply +]>>; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Extracts the key names and access flags from a Redis 
command + * @param parser - The Redis command parser + * @param args - Command arguments to analyze + */ + parseCommand(parser: CommandParser, args: Array) { + parser.push('COMMAND', 'GETKEYSANDFLAGS'); + parser.push(...args); + }, + transformReply(reply: UnwrapReply) { + return reply.map(entry => { + const [key, flags] = entry as unknown as UnwrapReply; + return { + key, + flags + }; + }); + } +} as const satisfies Command; diff --git a/packages/client/lib/commands/COMMAND_INFO.spec.ts b/packages/client/lib/commands/COMMAND_INFO.spec.ts new file mode 100644 index 00000000000..fd8c22ae803 --- /dev/null +++ b/packages/client/lib/commands/COMMAND_INFO.spec.ts @@ -0,0 +1,49 @@ +// import { strict as assert } from 'node:assert'; +// import testUtils, { GLOBAL } from '../test-utils'; +// import { transformArguments } from './COMMAND_INFO'; +// import { CommandCategories, CommandFlags, CommandReply } from './generic-transformers'; + +// export function assertPingCommand(commandInfo: CommandReply | null | undefined): void { +// assert.deepEqual( +// commandInfo, +// { +// name: 'ping', +// arity: -1, +// flags: new Set( +// testUtils.isVersionGreaterThan([7]) ? +// [CommandFlags.FAST] : +// [CommandFlags.STALE, CommandFlags.FAST] +// ), +// firstKeyIndex: 0, +// lastKeyIndex: 0, +// step: 0, +// categories: new Set( +// testUtils.isVersionGreaterThan([6]) ? 
+// [CommandCategories.FAST, CommandCategories.CONNECTION] : +// [] +// ) +// } +// ); +// } + +// describe('COMMAND INFO', () => { +// it('transformArguments', () => { +// assert.deepEqual( +// transformArguments(['PING']), +// ['COMMAND', 'INFO', 'PING'] +// ); +// }); + +// describe('client.commandInfo', () => { +// testUtils.testWithClient('PING', async client => { +// assertPingCommand((await client.commandInfo(['PING']))[0]); +// }, GLOBAL.SERVERS.OPEN); + +// testUtils.testWithClient('DOSE_NOT_EXISTS', async client => { +// assert.deepEqual( +// await client.commandInfo(['DOSE_NOT_EXISTS']), +// [null] +// ); +// }, GLOBAL.SERVERS.OPEN); +// }); +// }); diff --git a/packages/client/lib/commands/COMMAND_INFO.ts b/packages/client/lib/commands/COMMAND_INFO.ts new file mode 100644 index 00000000000..19629f0ccae --- /dev/null +++ b/packages/client/lib/commands/COMMAND_INFO.ts @@ -0,0 +1,20 @@ +import { CommandParser } from '../client/parser'; +import { ArrayReply, Command, UnwrapReply } from '../RESP/types'; +import { CommandRawReply, CommandReply, transformCommandReply } from './generic-transformers'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Returns details about specific Redis commands + * @param parser - The Redis command parser + * @param commands - Array of command names to get information about + */ + parseCommand(parser: CommandParser, commands: Array) { + parser.push('COMMAND', 'INFO', ...commands); + }, + // TODO: This works, as we don't currently handle any of the items returned as a map + transformReply(reply: UnwrapReply>): Array { + return reply.map(command => command ? 
transformCommandReply(command) : null); + } +} as const satisfies Command; \ No newline at end of file diff --git a/packages/client/lib/commands/COMMAND_LIST.spec.ts b/packages/client/lib/commands/COMMAND_LIST.spec.ts new file mode 100644 index 00000000000..d2ee9e66161 --- /dev/null +++ b/packages/client/lib/commands/COMMAND_LIST.spec.ts @@ -0,0 +1,63 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import COMMAND_LIST from './COMMAND_LIST'; +import { parseArgs } from './generic-transformers'; + +describe('COMMAND LIST', () => { + testUtils.isVersionGreaterThanHook([7]); + + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(COMMAND_LIST), + ['COMMAND', 'LIST'] + ); + }); + + describe('with FILTERBY', () => { + it('MODULE', () => { + assert.deepEqual( + parseArgs(COMMAND_LIST, { + FILTERBY: { + type: 'MODULE', + value: 'JSON' + } + }), + ['COMMAND', 'LIST', 'FILTERBY', 'MODULE', 'JSON'] + ); + }); + + it('ACLCAT', () => { + assert.deepEqual( + parseArgs(COMMAND_LIST, { + FILTERBY: { + type: 'ACLCAT', + value: 'admin' + } + }), + ['COMMAND', 'LIST', 'FILTERBY', 'ACLCAT', 'admin'] + ); + }); + + it('PATTERN', () => { + assert.deepEqual( + parseArgs(COMMAND_LIST, { + FILTERBY: { + type: 'PATTERN', + value: 'a*' + } + }), + ['COMMAND', 'LIST', 'FILTERBY', 'PATTERN', 'a*'] + ); + }); + }); + }); + + testUtils.testWithClient('client.commandList', async client => { + const commandList = await client.commandList(); + assert.ok(Array.isArray(commandList)); + for (const command of commandList) { + assert.ok(typeof command === 'string'); + } + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/COMMAND_LIST.ts b/packages/client/lib/commands/COMMAND_LIST.ts new file mode 100644 index 00000000000..fa218d86aa7 --- /dev/null +++ b/packages/client/lib/commands/COMMAND_LIST.ts @@ -0,0 +1,39 @@ +import { CommandParser } from '../client/parser'; +import { 
RedisArgument, ArrayReply, BlobStringReply, Command } from '../RESP/types'; + +export const COMMAND_LIST_FILTER_BY = { + MODULE: 'MODULE', + ACLCAT: 'ACLCAT', + PATTERN: 'PATTERN' +} as const; + +export type CommandListFilterBy = typeof COMMAND_LIST_FILTER_BY[keyof typeof COMMAND_LIST_FILTER_BY]; + +export interface CommandListOptions { + FILTERBY?: { + type: CommandListFilterBy; + value: RedisArgument; + }; +} + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Returns a list of all commands supported by the Redis server + * @param parser - The Redis command parser + * @param options - Options for filtering the command list + */ + parseCommand(parser: CommandParser, options?: CommandListOptions) { + parser.push('COMMAND', 'LIST'); + + if (options?.FILTERBY) { + parser.push( + 'FILTERBY', + options.FILTERBY.type, + options.FILTERBY.value + ); + } + }, + transformReply: undefined as unknown as () => ArrayReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/CONFIG_GET.spec.ts b/packages/client/lib/commands/CONFIG_GET.spec.ts new file mode 100644 index 00000000000..c3f0eac76dd --- /dev/null +++ b/packages/client/lib/commands/CONFIG_GET.spec.ts @@ -0,0 +1,60 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import CONFIG_GET from './CONFIG_GET'; +import { parseArgs } from './generic-transformers'; + +describe('CONFIG GET', () => { + describe('transformArguments', () => { + it('string', () => { + assert.deepEqual( + parseArgs(CONFIG_GET, '*'), + ['CONFIG', 'GET', '*'] + ); + }); + + it('Array', () => { + assert.deepEqual( + parseArgs(CONFIG_GET, ['1', '2']), + ['CONFIG', 'GET', '1', '2'] + ); + }); + }); + + testUtils.testWithClient('client.configGet', async client => { + const config = await client.configGet('*'); + assert.equal(typeof config, 'object'); + for (const [key, value] of Object.entries(config)) { + assert.equal(typeof key, 'string'); + 
assert.equal(typeof value, 'string'); + } + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithClient('client.configSet.getSearchConfigSettingTest | Redis >= 8', async client => { + assert.ok( + await client.configGet('search-timeout'), + 'OK' + ); + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithClient('client.configSet.getTSConfigSettingTest | Redis >= 8', async client => { + assert.ok( + await client.configGet('ts-retention-policy'), + 'OK' + ); + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithClient('client.configSet.getBFConfigSettingTest | Redis >= 8', async client => { + assert.ok( + await client.configGet('bf-error-rate'), + 'OK' + ); + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithClient('client.configSet.getCFConfigSettingTest | Redis >= 8', async client => { + assert.ok( + await client.configGet('cf-initial-size'), + 'OK' + ); + }, GLOBAL.SERVERS.OPEN); + +}); diff --git a/packages/client/lib/commands/CONFIG_GET.ts b/packages/client/lib/commands/CONFIG_GET.ts new file mode 100644 index 00000000000..d0c80297fc4 --- /dev/null +++ b/packages/client/lib/commands/CONFIG_GET.ts @@ -0,0 +1,21 @@ +import { CommandParser } from '../client/parser'; +import { MapReply, BlobStringReply, Command } from '../RESP/types'; +import { RedisVariadicArgument, transformTuplesReply } from './generic-transformers'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Gets the values of configuration parameters + * @param parser - The Redis command parser + * @param parameters - Pattern or specific configuration parameter names + */ + parseCommand(parser: CommandParser, parameters: RedisVariadicArgument) { + parser.push('CONFIG', 'GET'); + parser.pushVariadic(parameters); + }, + transformReply: { + 2: transformTuplesReply, + 3: undefined as unknown as () => MapReply + } +} as const satisfies Command; diff --git a/packages/client/lib/commands/CONFIG_RESETSTAT.spec.ts b/packages/client/lib/commands/CONFIG_RESETSTAT.spec.ts new file mode 100644 index 
00000000000..f2f573df0dc --- /dev/null +++ b/packages/client/lib/commands/CONFIG_RESETSTAT.spec.ts @@ -0,0 +1,12 @@ +import { strict as assert } from 'node:assert'; +import CONFIG_RESETSTAT from './CONFIG_RESETSTAT'; +import { parseArgs } from './generic-transformers'; + +describe('CONFIG RESETSTAT', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(CONFIG_RESETSTAT), + ['CONFIG', 'RESETSTAT'] + ); + }); +}); diff --git a/packages/client/lib/commands/CONFIG_RESETSTAT.ts b/packages/client/lib/commands/CONFIG_RESETSTAT.ts new file mode 100644 index 00000000000..356a9b29a79 --- /dev/null +++ b/packages/client/lib/commands/CONFIG_RESETSTAT.ts @@ -0,0 +1,15 @@ +import { CommandParser } from '../client/parser'; +import { SimpleStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Resets the statistics reported by Redis using the INFO command + * @param parser - The Redis command parser + */ + parseCommand(parser: CommandParser) { + parser.push('CONFIG', 'RESETSTAT'); + }, + transformReply: undefined as unknown as () => SimpleStringReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/CONFIG_REWRITE.spec.ts b/packages/client/lib/commands/CONFIG_REWRITE.spec.ts new file mode 100644 index 00000000000..bc006e84c80 --- /dev/null +++ b/packages/client/lib/commands/CONFIG_REWRITE.spec.ts @@ -0,0 +1,12 @@ +import { strict as assert } from 'node:assert'; +import CONFIG_REWRITE from './CONFIG_REWRITE'; +import { parseArgs } from './generic-transformers'; + +describe('CONFIG REWRITE', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(CONFIG_REWRITE), + ['CONFIG', 'REWRITE'] + ); + }); +}); diff --git a/packages/client/lib/commands/CONFIG_REWRITE.ts b/packages/client/lib/commands/CONFIG_REWRITE.ts new file mode 100644 index 00000000000..a9f2e0a41ba --- /dev/null +++ b/packages/client/lib/commands/CONFIG_REWRITE.ts @@ -0,0 +1,15 @@ +import { 
CommandParser } from '../client/parser'; +import { SimpleStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Rewrites the Redis configuration file with the current configuration + * @param parser - The Redis command parser + */ + parseCommand(parser: CommandParser) { + parser.push('CONFIG', 'REWRITE'); + }, + transformReply: undefined as unknown as () => SimpleStringReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/CONFIG_SET.spec.ts b/packages/client/lib/commands/CONFIG_SET.spec.ts new file mode 100644 index 00000000000..f9f34dec937 --- /dev/null +++ b/packages/client/lib/commands/CONFIG_SET.spec.ts @@ -0,0 +1,42 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import CONFIG_SET from './CONFIG_SET'; +import { parseArgs } from './generic-transformers'; + +describe('CONFIG SET', () => { + describe('transformArguments', () => { + it('set one parameter (old version)', () => { + assert.deepEqual( + parseArgs(CONFIG_SET, 'parameter', 'value'), + ['CONFIG', 'SET', 'parameter', 'value'] + ); + }); + + it('set muiltiple parameters', () => { + assert.deepEqual( + parseArgs(CONFIG_SET, { + 1: 'a', + 2: 'b', + 3: 'c' + }), + ['CONFIG', 'SET', '1', 'a', '2', 'b', '3', 'c'] + ); + }); + }); + + testUtils.testWithClient('client.configSet', async client => { + assert.equal( + await client.configSet('maxmemory', '0'), + 'OK' + ); + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithClient('client.configSet.setReadOnlySearchConfigTest | Redis >= 8', + async client => { + assert.rejects( + client.configSet('search-max-doctablesize', '0'), + new Error('ERR CONFIG SET failed (possibly related to argument \'search-max-doctablesize\') - can\'t set immutable config') + ); + }, GLOBAL.SERVERS.OPEN); + +}); diff --git a/packages/client/lib/commands/CONFIG_SET.ts b/packages/client/lib/commands/CONFIG_SET.ts new file mode 100644 index 
00000000000..81b4c65c1d9 --- /dev/null +++ b/packages/client/lib/commands/CONFIG_SET.ts @@ -0,0 +1,32 @@ +import { CommandParser } from '../client/parser'; +import { SimpleStringReply, Command, RedisArgument } from '../RESP/types'; + +type SingleParameter = [parameter: RedisArgument, value: RedisArgument]; + +type MultipleParameters = [config: Record]; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Sets configuration parameters to the specified values + * @param parser - The Redis command parser + * @param parameterOrConfig - Either a single parameter name or a configuration object + * @param value - Value for the parameter (when using single parameter format) + */ + parseCommand( + parser: CommandParser, + ...[parameterOrConfig, value]: SingleParameter | MultipleParameters + ) { + parser.push('CONFIG', 'SET'); + + if (typeof parameterOrConfig === 'string' || parameterOrConfig instanceof Buffer) { + parser.push(parameterOrConfig, value!); + } else { + for (const [key, value] of Object.entries(parameterOrConfig)) { + parser.push(key, value); + } + } + }, + transformReply: undefined as unknown as () => SimpleStringReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/COPY.spec.ts b/packages/client/lib/commands/COPY.spec.ts new file mode 100644 index 00000000000..cd0c6ec9fbe --- /dev/null +++ b/packages/client/lib/commands/COPY.spec.ts @@ -0,0 +1,55 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import COPY from './COPY'; +import { parseArgs } from './generic-transformers'; + +describe('COPY', () => { + testUtils.isVersionGreaterThanHook([6, 2]); + + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(COPY, 'source', 'destination'), + ['COPY', 'source', 'destination'] + ); + }); + + it('with destination DB flag', () => { + assert.deepEqual( + parseArgs(COPY, 'source', 'destination', { + DB: 1 + }), + ['COPY', 
'source', 'destination', 'DB', '1'] + ); + }); + + it('with replace flag', () => { + assert.deepEqual( + parseArgs(COPY, 'source', 'destination', { + REPLACE: true + }), + ['COPY', 'source', 'destination', 'REPLACE'] + ); + }); + + it('with both flags', () => { + assert.deepEqual( + parseArgs(COPY, 'source', 'destination', { + DB: 1, + REPLACE: true + }), + ['COPY', 'source', 'destination', 'DB', '1', 'REPLACE'] + ); + }); + }); + + testUtils.testAll('copy', async client => { + assert.equal( + await client.copy('{tag}source', '{tag}destination'), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/COPY.ts b/packages/client/lib/commands/COPY.ts new file mode 100644 index 00000000000..0d8af5636df --- /dev/null +++ b/packages/client/lib/commands/COPY.ts @@ -0,0 +1,31 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; + +export interface CopyCommandOptions { + DB?: number; + REPLACE?: boolean; +} + +export default { + IS_READ_ONLY: false, + /** + * Copies the value stored at the source key to the destination key + * @param parser - The Redis command parser + * @param source - Source key + * @param destination - Destination key + * @param options - Options for the copy operation + */ + parseCommand(parser: CommandParser, source: RedisArgument, destination: RedisArgument, options?: CopyCommandOptions) { + parser.push('COPY'); + parser.pushKeys([source, destination]); + + if (options?.DB) { + parser.push('DB', options.DB.toString()); + } + + if (options?.REPLACE) { + parser.push('REPLACE'); + } + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/DBSIZE.spec.ts b/packages/client/lib/commands/DBSIZE.spec.ts new file mode 100644 index 00000000000..5778e30de3e --- /dev/null +++ b/packages/client/lib/commands/DBSIZE.spec.ts @@ -0,0 +1,20 @@ 
+import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import DBSIZE from './DBSIZE'; +import { parseArgs } from './generic-transformers'; + +describe('DBSIZE', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(DBSIZE), + ['DBSIZE'] + ); + }); + + testUtils.testWithClient('client.dbSize', async client => { + assert.equal( + await client.dbSize(), + 0 + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/DBSIZE.ts b/packages/client/lib/commands/DBSIZE.ts new file mode 100644 index 00000000000..b5777b63f7d --- /dev/null +++ b/packages/client/lib/commands/DBSIZE.ts @@ -0,0 +1,15 @@ +import { CommandParser } from '../client/parser'; +import { NumberReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Returns the number of keys in the current database + * @param parser - The Redis command parser + */ + parseCommand(parser: CommandParser) { + parser.push('DBSIZE'); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/DECR.spec.ts b/packages/client/lib/commands/DECR.spec.ts new file mode 100644 index 00000000000..69ff5a5391f --- /dev/null +++ b/packages/client/lib/commands/DECR.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import DECR from './DECR'; +import { parseArgs } from './generic-transformers'; + +describe('DECR', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(DECR, 'key'), + ['DECR', 'key'] + ); + }); + + testUtils.testAll('decr', async client => { + assert.equal( + await client.decr('key'), + -1 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/DECR.ts b/packages/client/lib/commands/DECR.ts new file mode 100644 index 
00000000000..5155fba81f5 --- /dev/null +++ b/packages/client/lib/commands/DECR.ts @@ -0,0 +1,15 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; + +export default { + /** + * Decrements the integer value of a key by one + * @param parser - The Redis command parser + * @param key - Key to decrement + */ + parseCommand(parser: CommandParser, key: RedisArgument) { + parser.push('DECR'); + parser.pushKey(key); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/DECRBY.spec.ts b/packages/client/lib/commands/DECRBY.spec.ts new file mode 100644 index 00000000000..ae80fd714e0 --- /dev/null +++ b/packages/client/lib/commands/DECRBY.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import DECRBY from './DECRBY'; +import { parseArgs } from './generic-transformers'; + +describe('DECRBY', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(DECRBY, 'key', 2), + ['DECRBY', 'key', '2'] + ); + }); + + testUtils.testAll('decrBy', async client => { + assert.equal( + await client.decrBy('key', 2), + -2 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/DECRBY.ts b/packages/client/lib/commands/DECRBY.ts new file mode 100644 index 00000000000..9f35ee15a26 --- /dev/null +++ b/packages/client/lib/commands/DECRBY.ts @@ -0,0 +1,17 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; + +export default { + /** + * Decrements the integer value of a key by the given number + * @param parser - The Redis command parser + * @param key - Key to decrement + * @param decrement - Decrement amount + */ + parseCommand(parser: CommandParser, key: RedisArgument, decrement: number) { + 
parser.push('DECRBY'); + parser.pushKey(key); + parser.push(decrement.toString()); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/DEL.spec.ts b/packages/client/lib/commands/DEL.spec.ts new file mode 100644 index 00000000000..3d0364e7523 --- /dev/null +++ b/packages/client/lib/commands/DEL.spec.ts @@ -0,0 +1,32 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import DEL from './DEL'; +import { parseArgs } from './generic-transformers'; + +describe('DEL', () => { + describe('transformArguments', () => { + it('string', () => { + assert.deepEqual( + parseArgs(DEL, 'key'), + ['DEL', 'key'] + ); + }); + + it('array', () => { + assert.deepEqual( + parseArgs(DEL, ['key1', 'key2']), + ['DEL', 'key1', 'key2'] + ); + }); + }); + + testUtils.testAll('del', async client => { + assert.equal( + await client.del('key'), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/DEL.ts b/packages/client/lib/commands/DEL.ts new file mode 100644 index 00000000000..7ad1b1160e7 --- /dev/null +++ b/packages/client/lib/commands/DEL.ts @@ -0,0 +1,17 @@ +import { CommandParser } from '../client/parser'; +import { NumberReply, Command } from '../RESP/types'; +import { RedisVariadicArgument } from './generic-transformers'; + +export default { + IS_READ_ONLY: false, + /** + * Removes the specified keys. 
A key is ignored if it does not exist + * @param parser - The Redis command parser + * @param keys - One or more keys to delete + */ + parseCommand(parser: CommandParser, keys: RedisVariadicArgument) { + parser.push('DEL'); + parser.pushKeys(keys); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/DISCARD.spec.ts b/packages/client/lib/commands/DISCARD.spec.ts new file mode 100644 index 00000000000..7aa769fc2ee --- /dev/null +++ b/packages/client/lib/commands/DISCARD.spec.ts @@ -0,0 +1,12 @@ +import { strict as assert } from 'node:assert'; +import DISCARD from './DISCARD'; +import { parseArgs } from './generic-transformers'; + +describe('DISCARD', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(DISCARD), + ['DISCARD'] + ); + }); +}); diff --git a/packages/client/lib/commands/DISCARD.ts b/packages/client/lib/commands/DISCARD.ts new file mode 100644 index 00000000000..d8c8c83791e --- /dev/null +++ b/packages/client/lib/commands/DISCARD.ts @@ -0,0 +1,13 @@ +import { CommandParser } from '../client/parser'; +import { SimpleStringReply, Command } from '../RESP/types'; + +export default { + /** + * Discards a transaction, forgetting all queued commands + * @param parser - The Redis command parser + */ + parseCommand(parser: CommandParser) { + parser.push('DISCARD'); + }, + transformReply: undefined as unknown as () => SimpleStringReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/DUMP.spec.ts b/packages/client/lib/commands/DUMP.spec.ts new file mode 100644 index 00000000000..76fb2ec7c18 --- /dev/null +++ b/packages/client/lib/commands/DUMP.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import DUMP from './DUMP'; +import { parseArgs } from './generic-transformers'; + +describe('DUMP', () => { + it('transformArguments', () => { + assert.deepEqual( + 
parseArgs(DUMP, 'key'), + ['DUMP', 'key'] + ); + }); + + testUtils.testAll('client.dump', async client => { + assert.equal( + await client.dump('key'), + null + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/DUMP.ts b/packages/client/lib/commands/DUMP.ts new file mode 100644 index 00000000000..c4905cc71c4 --- /dev/null +++ b/packages/client/lib/commands/DUMP.ts @@ -0,0 +1,16 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, BlobStringReply, Command } from '../RESP/types'; + +export default { + IS_READ_ONLY: true, + /** + * Returns a serialized version of the value stored at the key + * @param parser - The Redis command parser + * @param key - Key to dump + */ + parseCommand(parser: CommandParser, key: RedisArgument) { + parser.push('DUMP'); + parser.pushKey(key); + }, + transformReply: undefined as unknown as () => BlobStringReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/ECHO.spec.ts b/packages/client/lib/commands/ECHO.spec.ts new file mode 100644 index 00000000000..38fd1d4270e --- /dev/null +++ b/packages/client/lib/commands/ECHO.spec.ts @@ -0,0 +1,20 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ECHO from './ECHO'; +import { parseArgs } from './generic-transformers'; + +describe('ECHO', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(ECHO, 'message'), + ['ECHO', 'message'] + ); + }); + + testUtils.testWithClient('client.echo', async client => { + assert.equal( + await client.echo('message'), + 'message' + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/ECHO.ts b/packages/client/lib/commands/ECHO.ts new file mode 100644 index 00000000000..b346ade50b1 --- /dev/null +++ b/packages/client/lib/commands/ECHO.ts @@ -0,0 +1,16 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, 
BlobStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Returns the given string + * @param parser - The Redis command parser + * @param message - Message to echo back + */ + parseCommand(parser: CommandParser, message: RedisArgument) { + parser.push('ECHO', message); + }, + transformReply: undefined as unknown as () => BlobStringReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/EVAL.spec.ts b/packages/client/lib/commands/EVAL.spec.ts new file mode 100644 index 00000000000..8ef16eed835 --- /dev/null +++ b/packages/client/lib/commands/EVAL.spec.ts @@ -0,0 +1,26 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import EVAL from './EVAL'; +import { parseArgs } from './generic-transformers'; + +describe('EVAL', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(EVAL, 'return KEYS[1] + ARGV[1]', { + keys: ['key'], + arguments: ['argument'] + }), + ['EVAL', 'return KEYS[1] + ARGV[1]', '1', 'key', 'argument'] + ); + }); + + testUtils.testAll('eval', async client => { + assert.equal( + await client.eval('return 1'), + 1 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/EVAL.ts b/packages/client/lib/commands/EVAL.ts new file mode 100644 index 00000000000..ff244c82aaf --- /dev/null +++ b/packages/client/lib/commands/EVAL.ts @@ -0,0 +1,39 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, ReplyUnion, Command } from '../RESP/types'; + +export interface EvalOptions { + keys?: Array; + arguments?: Array; +} + +export function parseEvalArguments( + parser: CommandParser, + script: RedisArgument, + options?: EvalOptions +) { + parser.push(script); + if (options?.keys) { + parser.pushKeysLength(options.keys); + } else { + parser.push('0'); + } + + if (options?.arguments) { + 
parser.push(...options.arguments) + } +} + +export default { + IS_READ_ONLY: false, + /** + * Executes a Lua script server side + * @param parser - The Redis command parser + * @param script - Lua script to execute + * @param options - Script execution options including keys and arguments + */ + parseCommand(...args: Parameters) { + args[0].push('EVAL'); + parseEvalArguments(...args); + }, + transformReply: undefined as unknown as () => ReplyUnion +} as const satisfies Command; diff --git a/packages/client/lib/commands/EVALSHA.spec.ts b/packages/client/lib/commands/EVALSHA.spec.ts new file mode 100644 index 00000000000..c491d6e2308 --- /dev/null +++ b/packages/client/lib/commands/EVALSHA.spec.ts @@ -0,0 +1,15 @@ +import { strict as assert } from 'node:assert'; +import EVALSHA from './EVALSHA'; +import { parseArgs } from './generic-transformers'; + +describe('EVALSHA', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(EVALSHA, 'sha1', { + keys: ['key'], + arguments: ['argument'] + }), + ['EVALSHA', 'sha1', '1', 'key', 'argument'] + ); + }); +}); diff --git a/packages/client/lib/commands/EVALSHA.ts b/packages/client/lib/commands/EVALSHA.ts new file mode 100644 index 00000000000..29bb6ffdfcb --- /dev/null +++ b/packages/client/lib/commands/EVALSHA.ts @@ -0,0 +1,17 @@ +import { Command } from '../RESP/types'; +import EVAL, { parseEvalArguments } from './EVAL'; + +export default { + IS_READ_ONLY: false, + /** + * Executes a Lua script server side using the script's SHA1 digest + * @param parser - The Redis command parser + * @param sha1 - SHA1 digest of the script + * @param options - Script execution options including keys and arguments + */ + parseCommand(...args: Parameters) { + args[0].push('EVALSHA'); + parseEvalArguments(...args); + }, + transformReply: EVAL.transformReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/EVALSHA_RO.spec.ts b/packages/client/lib/commands/EVALSHA_RO.spec.ts new file mode 100644 index 
00000000000..d3debe933fe --- /dev/null +++ b/packages/client/lib/commands/EVALSHA_RO.spec.ts @@ -0,0 +1,18 @@ +import { strict as assert } from 'node:assert'; +import testUtils from '../test-utils'; +import EVALSHA_RO from './EVALSHA_RO'; +import { parseArgs } from './generic-transformers'; + +describe('EVALSHA_RO', () => { + testUtils.isVersionGreaterThanHook([7]); + + it('transformArguments', () => { + assert.deepEqual( + parseArgs(EVALSHA_RO, 'sha1', { + keys: ['key'], + arguments: ['argument'] + }), + ['EVALSHA_RO', 'sha1', '1', 'key', 'argument'] + ); + }); +}); diff --git a/packages/client/lib/commands/EVALSHA_RO.ts b/packages/client/lib/commands/EVALSHA_RO.ts new file mode 100644 index 00000000000..628ca3dee53 --- /dev/null +++ b/packages/client/lib/commands/EVALSHA_RO.ts @@ -0,0 +1,17 @@ +import { Command } from '../RESP/types'; +import EVAL, { parseEvalArguments } from './EVAL'; + +export default { + IS_READ_ONLY: true, + /** + * Executes a read-only Lua script server side using the script's SHA1 digest + * @param parser - The Redis command parser + * @param sha1 - SHA1 digest of the script + * @param options - Script execution options including keys and arguments + */ + parseCommand(...args: Parameters) { + args[0].push('EVALSHA_RO'); + parseEvalArguments(...args); + }, + transformReply: EVAL.transformReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/EVAL_RO.spec.ts b/packages/client/lib/commands/EVAL_RO.spec.ts new file mode 100644 index 00000000000..b5cf1e4e926 --- /dev/null +++ b/packages/client/lib/commands/EVAL_RO.spec.ts @@ -0,0 +1,28 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import EVAL_RO from './EVAL_RO'; +import { parseArgs } from './generic-transformers'; + +describe('EVAL_RO', () => { + testUtils.isVersionGreaterThanHook([7]); + + it('transformArguments', () => { + assert.deepEqual( + parseArgs(EVAL_RO, 'return KEYS[1] + ARGV[1]', { + keys: ['key'], + 
arguments: ['argument'] + }), + ['EVAL_RO', 'return KEYS[1] + ARGV[1]', '1', 'key', 'argument'] + ); + }); + + testUtils.testAll('evalRo', async cluster => { + assert.equal( + await cluster.evalRo('return 1'), + 1 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/EVAL_RO.ts b/packages/client/lib/commands/EVAL_RO.ts new file mode 100644 index 00000000000..803c4f840c5 --- /dev/null +++ b/packages/client/lib/commands/EVAL_RO.ts @@ -0,0 +1,17 @@ +import { Command } from '../RESP/types'; +import EVAL, { parseEvalArguments } from './EVAL'; + +export default { + IS_READ_ONLY: true, + /** + * Executes a read-only Lua script server side + * @param parser - The Redis command parser + * @param script - Lua script to execute + * @param options - Script execution options including keys and arguments + */ + parseCommand(...args: Parameters) { + args[0].push('EVAL_RO'); + parseEvalArguments(...args); + }, + transformReply: EVAL.transformReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/EXISTS.spec.ts b/packages/client/lib/commands/EXISTS.spec.ts new file mode 100644 index 00000000000..d2802dd49b3 --- /dev/null +++ b/packages/client/lib/commands/EXISTS.spec.ts @@ -0,0 +1,32 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import EXISTS from './EXISTS'; +import { parseArgs } from './generic-transformers'; + +describe('EXISTS', () => { + describe('parseCommand', () => { + it('string', () => { + assert.deepEqual( + parseArgs(EXISTS, 'key'), + ['EXISTS', 'key'] + ); + }); + + it('array', () => { + assert.deepEqual( + parseArgs(EXISTS, ['1', '2']), + ['EXISTS', '1', '2'] + ); + }); + }); + + testUtils.testAll('exists', async client => { + assert.equal( + await client.exists('key'), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/EXISTS.ts 
b/packages/client/lib/commands/EXISTS.ts new file mode 100644 index 00000000000..ea6ea8cb0cd --- /dev/null +++ b/packages/client/lib/commands/EXISTS.ts @@ -0,0 +1,18 @@ +import { CommandParser } from '../client/parser'; +import { NumberReply, Command } from '../RESP/types'; +import { RedisVariadicArgument } from './generic-transformers'; + +export default { + CACHEABLE: true, + IS_READ_ONLY: true, + /** + * Determines if the specified keys exist + * @param parser - The Redis command parser + * @param keys - One or more keys to check + */ + parseCommand(parser: CommandParser, keys: RedisVariadicArgument) { + parser.push('EXISTS'); + parser.pushKeys(keys); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/EXPIRE.spec.ts b/packages/client/lib/commands/EXPIRE.spec.ts new file mode 100644 index 00000000000..f3d197b5c69 --- /dev/null +++ b/packages/client/lib/commands/EXPIRE.spec.ts @@ -0,0 +1,32 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import EXPIRE from './EXPIRE'; +import { parseArgs } from './generic-transformers'; + +describe('EXPIRE', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(EXPIRE, 'key', 1), + ['EXPIRE', 'key', '1'] + ); + }); + + it('with set option', () => { + assert.deepEqual( + parseArgs(EXPIRE, 'key', 1, 'NX'), + ['EXPIRE', 'key', '1', 'NX'] + ); + }); + }); + + testUtils.testAll('expire', async client => { + assert.equal( + await client.expire('key', 0), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/EXPIRE.ts b/packages/client/lib/commands/EXPIRE.ts new file mode 100644 index 00000000000..985b81071a3 --- /dev/null +++ b/packages/client/lib/commands/EXPIRE.ts @@ -0,0 +1,26 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, 
Command } from '../RESP/types'; + +export default { + /** + * Sets a timeout on key. After the timeout has expired, the key will be automatically deleted + * @param parser - The Redis command parser + * @param key - Key to set expiration on + * @param seconds - Number of seconds until key expiration + * @param mode - Expiration mode: NX (only if key has no expiry), XX (only if key has existing expiry), GT (only if new expiry is greater than current), LT (only if new expiry is less than current) + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + seconds: number, + mode?: 'NX' | 'XX' | 'GT' | 'LT' + ) { + parser.push('EXPIRE'); + parser.pushKey(key); + parser.push(seconds.toString()); + if (mode) { + parser.push(mode); + } + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/EXPIREAT.spec.ts b/packages/client/lib/commands/EXPIREAT.spec.ts new file mode 100644 index 00000000000..1949fb051bb --- /dev/null +++ b/packages/client/lib/commands/EXPIREAT.spec.ts @@ -0,0 +1,40 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import EXPIREAT from './EXPIREAT'; +import { parseArgs } from './generic-transformers'; + +describe('EXPIREAT', () => { + describe('transformArguments', () => { + it('number', () => { + assert.deepEqual( + parseArgs(EXPIREAT, 'key', 1), + ['EXPIREAT', 'key', '1'] + ); + }); + + it('date', () => { + const d = new Date(); + assert.deepEqual( + parseArgs(EXPIREAT, 'key', d), + ['EXPIREAT', 'key', Math.floor(d.getTime() / 1000).toString()] + ); + }); + + it('with set option', () => { + assert.deepEqual( + parseArgs(EXPIREAT, 'key', 1, 'GT'), + ['EXPIREAT', 'key', '1', 'GT'] + ); + }); + }); + + testUtils.testAll('expireAt', async client => { + assert.equal( + await client.expireAt('key', 1), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git 
a/packages/client/lib/commands/EXPIREAT.ts b/packages/client/lib/commands/EXPIREAT.ts new file mode 100644 index 00000000000..a20407aa78e --- /dev/null +++ b/packages/client/lib/commands/EXPIREAT.ts @@ -0,0 +1,27 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; +import { transformEXAT } from './generic-transformers'; + +export default { + /** + * Sets the expiration for a key at a specific Unix timestamp + * @param parser - The Redis command parser + * @param key - Key to set expiration on + * @param timestamp - Unix timestamp (seconds since January 1, 1970) or Date object + * @param mode - Expiration mode: NX (only if key has no expiry), XX (only if key has existing expiry), GT (only if new expiry is greater than current), LT (only if new expiry is less than current) + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + timestamp: number | Date, + mode?: 'NX' | 'XX' | 'GT' | 'LT' + ) { + parser.push('EXPIREAT'); + parser.pushKey(key); + parser.push(transformEXAT(timestamp)); + if (mode) { + parser.push(mode); + } + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/EXPIRETIME.spec.ts b/packages/client/lib/commands/EXPIRETIME.spec.ts new file mode 100644 index 00000000000..f2c8d3d4521 --- /dev/null +++ b/packages/client/lib/commands/EXPIRETIME.spec.ts @@ -0,0 +1,25 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import EXPIRETIME from './EXPIRETIME'; +import { parseArgs } from './generic-transformers'; + +describe('EXPIRETIME', () => { + testUtils.isVersionGreaterThanHook([7]); + + it('transformArguments', () => { + assert.deepEqual( + parseArgs(EXPIRETIME, 'key'), + ['EXPIRETIME', 'key'] + ); + }); + + testUtils.testAll('expireTime', async client => { + assert.equal( + await client.expireTime('key'), + -2 + ); + }, { + client: 
GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/EXPIRETIME.ts b/packages/client/lib/commands/EXPIRETIME.ts new file mode 100644 index 00000000000..faa8571eca3 --- /dev/null +++ b/packages/client/lib/commands/EXPIRETIME.ts @@ -0,0 +1,16 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; + +export default { + IS_READ_ONLY: true, + /** + * Returns the absolute Unix timestamp (since January 1, 1970) at which the given key will expire + * @param parser - The Redis command parser + * @param key - Key to check expiration time + */ + parseCommand(parser: CommandParser, key: RedisArgument) { + parser.push('EXPIRETIME'); + parser.pushKey(key); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/FAILOVER.spec.ts b/packages/client/lib/commands/FAILOVER.spec.ts new file mode 100644 index 00000000000..b23c3516f03 --- /dev/null +++ b/packages/client/lib/commands/FAILOVER.spec.ts @@ -0,0 +1,73 @@ +import { strict as assert } from 'node:assert'; +import FAILOVER from './FAILOVER'; +import { parseArgs } from './generic-transformers'; + +describe('FAILOVER', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(FAILOVER), + ['FAILOVER'] + ); + }); + + describe('with TO', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(FAILOVER, { + TO: { + host: 'host', + port: 6379 + } + }), + ['FAILOVER', 'TO', 'host', '6379'] + ); + }); + + it('with FORCE', () => { + assert.deepEqual( + parseArgs(FAILOVER, { + TO: { + host: 'host', + port: 6379, + FORCE: true + } + }), + ['FAILOVER', 'TO', 'host', '6379', 'FORCE'] + ); + }); + }); + + it('with ABORT', () => { + assert.deepEqual( + parseArgs(FAILOVER, { + ABORT: true + }), + ['FAILOVER', 'ABORT'] + ); + }); + + it('with TIMEOUT', () => { + assert.deepEqual( + 
parseArgs(FAILOVER, { + TIMEOUT: 1 + }), + ['FAILOVER', 'TIMEOUT', '1'] + ); + }); + + it('with TO, ABORT, TIMEOUT', () => { + assert.deepEqual( + parseArgs(FAILOVER, { + TO: { + host: 'host', + port: 6379 + }, + ABORT: true, + TIMEOUT: 1 + }), + ['FAILOVER', 'TO', 'host', '6379', 'ABORT', 'TIMEOUT', '1'] + ); + }); + }); +}); diff --git a/packages/client/lib/commands/FAILOVER.ts b/packages/client/lib/commands/FAILOVER.ts new file mode 100644 index 00000000000..24fa7a0347b --- /dev/null +++ b/packages/client/lib/commands/FAILOVER.ts @@ -0,0 +1,40 @@ +import { CommandParser } from '../client/parser'; +import { SimpleStringReply, Command } from '../RESP/types'; + +interface FailoverOptions { + TO?: { + host: string; + port: number; + FORCE?: true; + }; + ABORT?: true; + TIMEOUT?: number; +} + +export default { + /** + * Starts a coordinated failover between the primary and a replica + * @param parser - The Redis command parser + * @param options - Failover options including target host, abort flag, and timeout + */ + parseCommand(parser: CommandParser, options?: FailoverOptions) { + parser.push('FAILOVER'); + + if (options?.TO) { + parser.push('TO', options.TO.host, options.TO.port.toString()); + + if (options.TO.FORCE) { + parser.push('FORCE'); + } + } + + if (options?.ABORT) { + parser.push('ABORT'); + } + + if (options?.TIMEOUT) { + parser.push('TIMEOUT', options.TIMEOUT.toString()); + } + }, + transformReply: undefined as unknown as () => SimpleStringReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/FCALL.spec.ts b/packages/client/lib/commands/FCALL.spec.ts new file mode 100644 index 00000000000..6c3a65c1448 --- /dev/null +++ b/packages/client/lib/commands/FCALL.spec.ts @@ -0,0 +1,31 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import { MATH_FUNCTION, loadMathFunction } from './FUNCTION_LOAD.spec'; +import FCALL from './FCALL'; +import { parseArgs } from 
'./generic-transformers'; + +describe('FCALL', () => { + testUtils.isVersionGreaterThanHook([7]); + + it('transformArguments', () => { + assert.deepEqual( + parseArgs(FCALL, 'function', { + keys: ['key'], + arguments: ['argument'] + }), + ['FCALL', 'function', '1', 'key', 'argument'] + ); + }); + + testUtils.testWithClient('client.fCall', async client => { + const [,, reply] = await Promise.all([ + loadMathFunction(client), + client.set('key', '2'), + client.fCall(MATH_FUNCTION.library.square.NAME, { + keys: ['key'] + }) + ]); + + assert.equal(reply, 4); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/FCALL.ts b/packages/client/lib/commands/FCALL.ts new file mode 100644 index 00000000000..8fa56d4258e --- /dev/null +++ b/packages/client/lib/commands/FCALL.ts @@ -0,0 +1,17 @@ +import { Command } from '../RESP/types'; +import EVAL, { parseEvalArguments } from './EVAL'; + +export default { + IS_READ_ONLY: false, + /** + * Invokes a Redis function + * @param parser - The Redis command parser + * @param functionName - Name of the function to call + * @param options - Function execution options including keys and arguments + */ + parseCommand(...args: Parameters) { + args[0].push('FCALL'); + parseEvalArguments(...args); + }, + transformReply: EVAL.transformReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/FCALL_RO.spec.ts b/packages/client/lib/commands/FCALL_RO.spec.ts new file mode 100644 index 00000000000..447e00072be --- /dev/null +++ b/packages/client/lib/commands/FCALL_RO.spec.ts @@ -0,0 +1,31 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import { MATH_FUNCTION, loadMathFunction } from './FUNCTION_LOAD.spec'; +import FCALL_RO from './FCALL_RO'; +import { parseArgs } from './generic-transformers'; + +describe('FCALL_RO', () => { + testUtils.isVersionGreaterThanHook([7]); + + it('transformArguments', () => { + assert.deepEqual( + parseArgs(FCALL_RO, 
'function', { + keys: ['key'], + arguments: ['argument'] + }), + ['FCALL_RO', 'function', '1', 'key', 'argument'] + ); + }); + + testUtils.testWithClient('client.fCallRo', async client => { + const [,, reply] = await Promise.all([ + loadMathFunction(client), + client.set('key', '2'), + client.fCallRo(MATH_FUNCTION.library.square.NAME, { + keys: ['key'] + }) + ]); + + assert.equal(reply, 4); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/FCALL_RO.ts b/packages/client/lib/commands/FCALL_RO.ts new file mode 100644 index 00000000000..5aac38aed0b --- /dev/null +++ b/packages/client/lib/commands/FCALL_RO.ts @@ -0,0 +1,17 @@ +import { Command } from '../RESP/types'; +import EVAL, { parseEvalArguments } from './EVAL'; + +export default { + IS_READ_ONLY: false, + /** + * Invokes a read-only Redis function + * @param parser - The Redis command parser + * @param functionName - Name of the function to call + * @param options - Function execution options including keys and arguments + */ + parseCommand(...args: Parameters) { + args[0].push('FCALL_RO'); + parseEvalArguments(...args); + }, + transformReply: EVAL.transformReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/FLUSHALL.spec.ts b/packages/client/lib/commands/FLUSHALL.spec.ts new file mode 100644 index 00000000000..86daff1973a --- /dev/null +++ b/packages/client/lib/commands/FLUSHALL.spec.ts @@ -0,0 +1,36 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import FLUSHALL, { REDIS_FLUSH_MODES } from './FLUSHALL'; +import { parseArgs } from './generic-transformers'; + +describe('FLUSHALL', () => { + describe('transformArguments', () => { + it('default', () => { + assert.deepEqual( + parseArgs(FLUSHALL), + ['FLUSHALL'] + ); + }); + + it('ASYNC', () => { + assert.deepEqual( + parseArgs(FLUSHALL,REDIS_FLUSH_MODES.ASYNC), + ['FLUSHALL', 'ASYNC'] + ); + }); + + it('SYNC', () => { + assert.deepEqual( + parseArgs(FLUSHALL, 
REDIS_FLUSH_MODES.SYNC), + ['FLUSHALL', 'SYNC'] + ); + }); + }); + + testUtils.testWithClient('client.flushAll', async client => { + assert.equal( + await client.flushAll(), + 'OK' + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/FLUSHALL.ts b/packages/client/lib/commands/FLUSHALL.ts new file mode 100644 index 00000000000..de6852d57e0 --- /dev/null +++ b/packages/client/lib/commands/FLUSHALL.ts @@ -0,0 +1,26 @@ +import { CommandParser } from '../client/parser'; +import { SimpleStringReply, Command } from '../RESP/types'; + +export const REDIS_FLUSH_MODES = { + ASYNC: 'ASYNC', + SYNC: 'SYNC' +} as const; + +export type RedisFlushMode = typeof REDIS_FLUSH_MODES[keyof typeof REDIS_FLUSH_MODES]; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: false, + /** + * Removes all keys from all databases + * @param parser - The Redis command parser + * @param mode - Optional flush mode (ASYNC or SYNC) + */ + parseCommand(parser: CommandParser, mode?: RedisFlushMode) { + parser.push('FLUSHALL'); + if (mode) { + parser.push(mode); + } + }, + transformReply: undefined as unknown as () => SimpleStringReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/FLUSHDB.spec.ts b/packages/client/lib/commands/FLUSHDB.spec.ts new file mode 100644 index 00000000000..795df637cb4 --- /dev/null +++ b/packages/client/lib/commands/FLUSHDB.spec.ts @@ -0,0 +1,37 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import FLUSHDB from './FLUSHDB'; +import { REDIS_FLUSH_MODES } from './FLUSHALL'; +import { parseArgs } from './generic-transformers'; + +describe('FLUSHDB', () => { + describe('transformArguments', () => { + it('default', () => { + assert.deepEqual( + parseArgs(FLUSHDB), + ['FLUSHDB'] + ); + }); + + it('ASYNC', () => { + assert.deepEqual( + parseArgs(FLUSHDB, REDIS_FLUSH_MODES.ASYNC), + ['FLUSHDB', 'ASYNC'] + ); + }); + + it('SYNC', () => { + assert.deepEqual( + 
parseArgs(FLUSHDB, REDIS_FLUSH_MODES.SYNC), + ['FLUSHDB', 'SYNC'] + ); + }); + }); + + testUtils.testWithClient('client.flushDb', async client => { + assert.equal( + await client.flushDb(), + 'OK' + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/FLUSHDB.ts b/packages/client/lib/commands/FLUSHDB.ts new file mode 100644 index 00000000000..cd1ac201fce --- /dev/null +++ b/packages/client/lib/commands/FLUSHDB.ts @@ -0,0 +1,20 @@ +import { CommandParser } from '../client/parser'; +import { SimpleStringReply, Command } from '../RESP/types'; +import { RedisFlushMode } from './FLUSHALL'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: false, + /** + * Removes all keys from the current database + * @param parser - The Redis command parser + * @param mode - Optional flush mode (ASYNC or SYNC) + */ + parseCommand(parser: CommandParser, mode?: RedisFlushMode) { + parser.push('FLUSHDB'); + if (mode) { + parser.push(mode); + } + }, + transformReply: undefined as unknown as () => SimpleStringReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/FUNCTION_DELETE.spec.ts b/packages/client/lib/commands/FUNCTION_DELETE.spec.ts new file mode 100644 index 00000000000..b33ea25916b --- /dev/null +++ b/packages/client/lib/commands/FUNCTION_DELETE.spec.ts @@ -0,0 +1,25 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import FUNCTION_DELETE from './FUNCTION_DELETE'; +import { MATH_FUNCTION, loadMathFunction } from './FUNCTION_LOAD.spec'; +import { parseArgs } from './generic-transformers'; + +describe('FUNCTION DELETE', () => { + testUtils.isVersionGreaterThanHook([7]); + + it('transformArguments', () => { + assert.deepEqual( + parseArgs(FUNCTION_DELETE, 'library'), + ['FUNCTION', 'DELETE', 'library'] + ); + }); + + testUtils.testWithClient('client.functionDelete', async client => { + await loadMathFunction(client); + + assert.equal( + await 
client.functionDelete(MATH_FUNCTION.name), + 'OK' + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/FUNCTION_DELETE.ts b/packages/client/lib/commands/FUNCTION_DELETE.ts new file mode 100644 index 00000000000..e7b59ecb0cc --- /dev/null +++ b/packages/client/lib/commands/FUNCTION_DELETE.ts @@ -0,0 +1,16 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, SimpleStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: false, + /** + * Deletes a library and all its functions + * @param parser - The Redis command parser + * @param library - Name of the library to delete + */ + parseCommand(parser: CommandParser, library: RedisArgument) { + parser.push('FUNCTION', 'DELETE', library); + }, + transformReply: undefined as unknown as () => SimpleStringReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/FUNCTION_DUMP.spec.ts b/packages/client/lib/commands/FUNCTION_DUMP.spec.ts new file mode 100644 index 00000000000..bbd6302bb6a --- /dev/null +++ b/packages/client/lib/commands/FUNCTION_DUMP.spec.ts @@ -0,0 +1,22 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import FUNCTION_DUMP from './FUNCTION_DUMP'; +import { parseArgs } from './generic-transformers'; + +describe('FUNCTION DUMP', () => { + testUtils.isVersionGreaterThanHook([7]); + + it('transformArguments', () => { + assert.deepEqual( + parseArgs(FUNCTION_DUMP), + ['FUNCTION', 'DUMP'] + ); + }); + + testUtils.testWithClient('client.functionDump', async client => { + assert.equal( + typeof await client.functionDump(), + 'string' + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/FUNCTION_DUMP.ts b/packages/client/lib/commands/FUNCTION_DUMP.ts new file mode 100644 index 00000000000..73d6986b707 --- /dev/null +++ b/packages/client/lib/commands/FUNCTION_DUMP.ts @@ -0,0 +1,15 @@ +import { CommandParser } from 
'../client/parser'; +import { BlobStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Returns a serialized payload representing the current functions loaded in the server + * @param parser - The Redis command parser + */ + parseCommand(parser: CommandParser) { + parser.push('FUNCTION', 'DUMP') + }, + transformReply: undefined as unknown as () => BlobStringReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/FUNCTION_FLUSH.spec.ts b/packages/client/lib/commands/FUNCTION_FLUSH.spec.ts new file mode 100644 index 00000000000..4fe90bdb607 --- /dev/null +++ b/packages/client/lib/commands/FUNCTION_FLUSH.spec.ts @@ -0,0 +1,31 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import FUNCTION_FLUSH from './FUNCTION_FLUSH'; +import { parseArgs } from './generic-transformers'; + +describe('FUNCTION FLUSH', () => { + testUtils.isVersionGreaterThanHook([7]); + + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(FUNCTION_FLUSH), + ['FUNCTION', 'FLUSH'] + ); + }); + + it('with mode', () => { + assert.deepEqual( + parseArgs(FUNCTION_FLUSH, 'SYNC'), + ['FUNCTION', 'FLUSH', 'SYNC'] + ); + }); + }); + + testUtils.testWithClient('client.functionFlush', async client => { + assert.equal( + await client.functionFlush(), + 'OK' + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/FUNCTION_FLUSH.ts b/packages/client/lib/commands/FUNCTION_FLUSH.ts new file mode 100644 index 00000000000..8019fc0c215 --- /dev/null +++ b/packages/client/lib/commands/FUNCTION_FLUSH.ts @@ -0,0 +1,21 @@ +import { CommandParser } from '../client/parser'; +import { SimpleStringReply, Command } from '../RESP/types'; +import { RedisFlushMode } from './FLUSHALL'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: false, + /** + * Deletes all the libraries and functions from a Redis server 
+ * @param parser - The Redis command parser + * @param mode - Optional flush mode (ASYNC or SYNC) + */ + parseCommand(parser: CommandParser, mode?: RedisFlushMode) { + parser.push('FUNCTION', 'FLUSH'); + + if (mode) { + parser.push(mode); + } + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/client/lib/commands/FUNCTION_KILL.spec.ts b/packages/client/lib/commands/FUNCTION_KILL.spec.ts new file mode 100644 index 00000000000..c4dbd124d30 --- /dev/null +++ b/packages/client/lib/commands/FUNCTION_KILL.spec.ts @@ -0,0 +1,15 @@ +import { strict as assert } from 'node:assert'; +import testUtils from '../test-utils'; +import FUNCTION_KILL from './FUNCTION_KILL'; +import { parseArgs } from './generic-transformers'; + +describe('FUNCTION KILL', () => { + testUtils.isVersionGreaterThanHook([7]); + + it('transformArguments', () => { + assert.deepEqual( + parseArgs(FUNCTION_KILL), + ['FUNCTION', 'KILL'] + ); + }); +}); diff --git a/packages/client/lib/commands/FUNCTION_KILL.ts b/packages/client/lib/commands/FUNCTION_KILL.ts new file mode 100644 index 00000000000..b1626684b62 --- /dev/null +++ b/packages/client/lib/commands/FUNCTION_KILL.ts @@ -0,0 +1,15 @@ +import { CommandParser } from '../client/parser'; +import { SimpleStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Kills a function that is currently executing + * @param parser - The Redis command parser + */ + parseCommand(parser: CommandParser) { + parser.push('FUNCTION', 'KILL'); + }, + transformReply: undefined as unknown as () => SimpleStringReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/FUNCTION_LIST.spec.ts b/packages/client/lib/commands/FUNCTION_LIST.spec.ts new file mode 100644 index 00000000000..6d9b28acf90 --- /dev/null +++ b/packages/client/lib/commands/FUNCTION_LIST.spec.ts @@ -0,0 +1,46 @@ +import { strict as assert } from 
'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import FUNCTION_LIST from './FUNCTION_LIST'; +import { MATH_FUNCTION, loadMathFunction } from './FUNCTION_LOAD.spec'; +import { parseArgs } from './generic-transformers'; + +describe('FUNCTION LIST', () => { + testUtils.isVersionGreaterThanHook([7]); + + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(FUNCTION_LIST), + ['FUNCTION', 'LIST'] + ); + }); + + it('with LIBRARYNAME', () => { + assert.deepEqual( + parseArgs(FUNCTION_LIST, { + LIBRARYNAME: 'patter*' + }), + ['FUNCTION', 'LIST', 'LIBRARYNAME', 'patter*'] + ); + }); + }); + + testUtils.testWithClient('client.functionList', async client => { + const [, reply] = await Promise.all([ + loadMathFunction(client), + client.functionList() + ]); + + reply[0].library_name; + + assert.deepEqual(reply, [{ + library_name: MATH_FUNCTION.name, + engine: MATH_FUNCTION.engine, + functions: [{ + name: MATH_FUNCTION.library.square.NAME, + description: null, + flags: ['no-writes'] + }] + }]); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/FUNCTION_LIST.ts b/packages/client/lib/commands/FUNCTION_LIST.ts new file mode 100644 index 00000000000..64ebaea8f85 --- /dev/null +++ b/packages/client/lib/commands/FUNCTION_LIST.ts @@ -0,0 +1,55 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, TuplesToMapReply, BlobStringReply, ArrayReply, NullReply, SetReply, UnwrapReply, Resp2Reply, Command } from '../RESP/types'; + +export interface FunctionListOptions { + LIBRARYNAME?: RedisArgument; +} + +export type FunctionListReplyItem = [ + [BlobStringReply<'library_name'>, BlobStringReply | NullReply], + [BlobStringReply<'engine'>, BlobStringReply], + [BlobStringReply<'functions'>, ArrayReply, BlobStringReply], + [BlobStringReply<'description'>, BlobStringReply | NullReply], + [BlobStringReply<'flags'>, SetReply], + ]>>] +]; + +export type FunctionListReply = ArrayReply>; + +export 
default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: false, + /** + * Returns all libraries and functions + * @param parser - The Redis command parser + * @param options - Options for listing functions + */ + parseCommand(parser: CommandParser, options?: FunctionListOptions) { + parser.push('FUNCTION', 'LIST'); + + if (options?.LIBRARYNAME) { + parser.push('LIBRARYNAME', options.LIBRARYNAME); + } + }, + transformReply: { + 2: (reply: UnwrapReply>) => { + return reply.map(library => { + const unwrapped = library as unknown as UnwrapReply; + return { + library_name: unwrapped[1], + engine: unwrapped[3], + functions: (unwrapped[5] as unknown as UnwrapReply).map(fn => { + const unwrapped = fn as unknown as UnwrapReply; + return { + name: unwrapped[1], + description: unwrapped[3], + flags: unwrapped[5] + }; + }) + }; + }); + }, + 3: undefined as unknown as () => FunctionListReply + } +} as const satisfies Command; diff --git a/packages/client/lib/commands/FUNCTION_LIST_WITHCODE.spec.ts b/packages/client/lib/commands/FUNCTION_LIST_WITHCODE.spec.ts new file mode 100644 index 00000000000..f44db9ba037 --- /dev/null +++ b/packages/client/lib/commands/FUNCTION_LIST_WITHCODE.spec.ts @@ -0,0 +1,49 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import FUNCTION_LIST_WITHCODE from './FUNCTION_LIST_WITHCODE'; +import { MATH_FUNCTION, loadMathFunction } from './FUNCTION_LOAD.spec'; +import { parseArgs } from './generic-transformers'; + +describe('FUNCTION LIST WITHCODE', () => { + testUtils.isVersionGreaterThanHook([7]); + + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(FUNCTION_LIST_WITHCODE), + ['FUNCTION', 'LIST', 'WITHCODE'] + ); + }); + + it('with LIBRARYNAME', () => { + assert.deepEqual( + parseArgs(FUNCTION_LIST_WITHCODE, { + LIBRARYNAME: 'patter*' + }), + ['FUNCTION', 'LIST', 'LIBRARYNAME', 'patter*', 'WITHCODE'] + ); + }); + }); + + 
testUtils.testWithClient('client.functionListWithCode', async client => { + const [, reply] = await Promise.all([ + loadMathFunction(client), + client.functionListWithCode() + ]); + + const a = reply[0]; + + const b = a.functions[0].description; + + assert.deepEqual(reply, [{ + library_name: MATH_FUNCTION.name, + engine: MATH_FUNCTION.engine, + functions: [{ + name: MATH_FUNCTION.library.square.NAME, + description: null, + flags: ['no-writes'] + }], + library_code: MATH_FUNCTION.code + }]); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/FUNCTION_LIST_WITHCODE.ts b/packages/client/lib/commands/FUNCTION_LIST_WITHCODE.ts new file mode 100644 index 00000000000..d5fe312d3c2 --- /dev/null +++ b/packages/client/lib/commands/FUNCTION_LIST_WITHCODE.ts @@ -0,0 +1,42 @@ +import { TuplesToMapReply, BlobStringReply, ArrayReply, UnwrapReply, Resp2Reply, Command } from '../RESP/types'; +import FUNCTION_LIST, { FunctionListReplyItem } from './FUNCTION_LIST'; + +export type FunctionListWithCodeReply = ArrayReply, BlobStringReply], +]>>; + +export default { + NOT_KEYED_COMMAND: FUNCTION_LIST.NOT_KEYED_COMMAND, + IS_READ_ONLY: FUNCTION_LIST.IS_READ_ONLY, + /** + * Returns all libraries and functions including their source code + * @param parser - The Redis command parser + * @param options - Options for listing functions + */ + parseCommand(...args: Parameters) { + FUNCTION_LIST.parseCommand(...args); + args[0].push('WITHCODE'); + }, + transformReply: { + 2: (reply: UnwrapReply>) => { + return reply.map(library => { + const unwrapped = library as unknown as UnwrapReply; + return { + library_name: unwrapped[1], + engine: unwrapped[3], + functions: (unwrapped[5] as unknown as UnwrapReply).map(fn => { + const unwrapped = fn as unknown as UnwrapReply; + return { + name: unwrapped[1], + description: unwrapped[3], + flags: unwrapped[5] + }; + }), + library_code: unwrapped[7] + }; + }); + }, + 3: undefined as unknown as () => FunctionListWithCodeReply + } +} as 
const satisfies Command; diff --git a/packages/client/lib/commands/FUNCTION_LOAD.spec.ts b/packages/client/lib/commands/FUNCTION_LOAD.spec.ts new file mode 100644 index 00000000000..c0a511bffc9 --- /dev/null +++ b/packages/client/lib/commands/FUNCTION_LOAD.spec.ts @@ -0,0 +1,79 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import FUNCTION_LOAD from './FUNCTION_LOAD'; +import { RedisClientType } from '../client'; +import { NumberReply, RedisFunctions, RedisModules, RedisScripts, RespVersions } from '../RESP/types'; +import { parseArgs } from './generic-transformers'; +import { CommandParser } from '../client/parser'; + + + +export const MATH_FUNCTION = { + name: 'math', + engine: 'LUA', + code: + `#!LUA name=math + redis.register_function { + function_name = "square", + callback = function(keys, args) + local number = redis.call('GET', keys[1]) + return number * number + end, + flags = { "no-writes" } + }`, + library: { + square: { + NAME: 'square', + IS_READ_ONLY: true, + NUMBER_OF_KEYS: 1, + FIRST_KEY_INDEX: 0, + parseCommand(parser: CommandParser, key: string) { + parser.pushKey(key); + }, + transformReply: undefined as unknown as () => NumberReply + } + } +}; + +export function loadMathFunction< + M extends RedisModules, + F extends RedisFunctions, + S extends RedisScripts, + RESP extends RespVersions +>( + client: RedisClientType +) { + return client.functionLoad( + MATH_FUNCTION.code, + { REPLACE: true } + ); +} + +describe('FUNCTION LOAD', () => { + testUtils.isVersionGreaterThanHook([7]); + + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(FUNCTION_LOAD, 'code'), + ['FUNCTION', 'LOAD', 'code'] + ); + }); + + it('with REPLACE', () => { + assert.deepEqual( + parseArgs(FUNCTION_LOAD, 'code', { + REPLACE: true + }), + ['FUNCTION', 'LOAD', 'REPLACE', 'code'] + ); + }); + }); + + testUtils.testWithClient('client.functionLoad', async client => { + assert.equal( 
+ await loadMathFunction(client), + MATH_FUNCTION.name + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/FUNCTION_LOAD.ts b/packages/client/lib/commands/FUNCTION_LOAD.ts new file mode 100644 index 00000000000..0766a124afb --- /dev/null +++ b/packages/client/lib/commands/FUNCTION_LOAD.ts @@ -0,0 +1,27 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, BlobStringReply, Command } from '../RESP/types'; + +export interface FunctionLoadOptions { + REPLACE?: boolean; +} + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: false, + /** + * Loads a library to Redis + * @param parser - The Redis command parser + * @param code - Library code to load + * @param options - Function load options + */ + parseCommand(parser: CommandParser, code: RedisArgument, options?: FunctionLoadOptions) { + parser.push('FUNCTION', 'LOAD'); + + if (options?.REPLACE) { + parser.push('REPLACE'); + } + + parser.push(code); + }, + transformReply: undefined as unknown as () => BlobStringReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/FUNCTION_RESTORE.spec.ts b/packages/client/lib/commands/FUNCTION_RESTORE.spec.ts new file mode 100644 index 00000000000..72d7d1d6204 --- /dev/null +++ b/packages/client/lib/commands/FUNCTION_RESTORE.spec.ts @@ -0,0 +1,41 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import FUNCTION_RESTORE from './FUNCTION_RESTORE'; +import { RESP_TYPES } from '../RESP/decoder'; +import { parseArgs } from './generic-transformers'; + +describe('FUNCTION RESTORE', () => { + testUtils.isVersionGreaterThanHook([7]); + + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(FUNCTION_RESTORE, 'dump'), + ['FUNCTION', 'RESTORE', 'dump'] + ); + }); + + it('with mode', () => { + assert.deepEqual( + parseArgs(FUNCTION_RESTORE, 'dump', { + mode: 'APPEND' + }), + ['FUNCTION', 'RESTORE', 'dump', 'APPEND'] + 
); + }); + }); + + testUtils.testWithClient('client.functionRestore', async client => { + assert.equal( + await client.functionRestore( + await client.withTypeMapping({ + [RESP_TYPES.BLOB_STRING]: Buffer + }).functionDump(), + { + mode: 'REPLACE' + } + ), + 'OK' + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/FUNCTION_RESTORE.ts b/packages/client/lib/commands/FUNCTION_RESTORE.ts new file mode 100644 index 00000000000..f18541a614a --- /dev/null +++ b/packages/client/lib/commands/FUNCTION_RESTORE.ts @@ -0,0 +1,25 @@ +import { CommandParser } from '../client/parser'; +import { SimpleStringReply, Command, RedisArgument } from '../RESP/types'; + +export interface FunctionRestoreOptions { + mode?: 'FLUSH' | 'APPEND' | 'REPLACE'; +} + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: false, + /** + * Restores libraries from the dump payload + * @param parser - The Redis command parser + * @param dump - Serialized payload of functions to restore + * @param options - Options for the restore operation + */ + parseCommand(parser: CommandParser, dump: RedisArgument, options?: FunctionRestoreOptions) { + parser.push('FUNCTION', 'RESTORE', dump); + + if (options?.mode) { + parser.push(options.mode); + } + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/client/lib/commands/FUNCTION_STATS.spec.ts b/packages/client/lib/commands/FUNCTION_STATS.spec.ts new file mode 100644 index 00000000000..a3c5e00fe72 --- /dev/null +++ b/packages/client/lib/commands/FUNCTION_STATS.spec.ts @@ -0,0 +1,26 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import FUNCTION_STATS from './FUNCTION_STATS'; +import { parseArgs } from './generic-transformers'; + +describe('FUNCTION STATS', () => { + testUtils.isVersionGreaterThanHook([7]); + + it('transformArguments', () => { + assert.deepEqual( + parseArgs(FUNCTION_STATS), + 
['FUNCTION', 'STATS'] + ); + }); + + testUtils.testWithClient('client.functionStats', async client => { + const stats = await client.functionStats(); + assert.equal(stats.running_script, null); + assert.equal(typeof stats.engines, 'object'); + for (const [engine, { libraries_count, functions_count }] of Object.entries(stats.engines)) { + assert.equal(typeof engine, 'string'); + assert.equal(typeof libraries_count, 'number'); + assert.equal(typeof functions_count, 'number'); + } + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/FUNCTION_STATS.ts b/packages/client/lib/commands/FUNCTION_STATS.ts new file mode 100644 index 00000000000..77eccf916bd --- /dev/null +++ b/packages/client/lib/commands/FUNCTION_STATS.ts @@ -0,0 +1,75 @@ +import { CommandParser } from '../client/parser'; +import { Command, TuplesToMapReply, BlobStringReply, NullReply, NumberReply, MapReply, Resp2Reply, UnwrapReply } from '../RESP/types'; +import { isNullReply } from './generic-transformers'; + +type RunningScript = NullReply | TuplesToMapReply<[ + [BlobStringReply<'name'>, BlobStringReply], + [BlobStringReply<'command'>, BlobStringReply], + [BlobStringReply<'duration_ms'>, NumberReply] +]>; + +type Engine = TuplesToMapReply<[ + [BlobStringReply<'libraries_count'>, NumberReply], + [BlobStringReply<'functions_count'>, NumberReply] +]>; + +type Engines = MapReply; + +type FunctionStatsReply = TuplesToMapReply<[ + [BlobStringReply<'running_script'>, RunningScript], + [BlobStringReply<'engines'>, Engines] +]>; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Returns information about the function that is currently running and information about the available execution engines + * @param parser - The Redis command parser + */ + parseCommand(parser: CommandParser) { + parser.push('FUNCTION', 'STATS'); + }, + transformReply: { + 2: (reply: UnwrapReply>) => { + return { + running_script: transformRunningScript(reply[1]), + engines: 
transformEngines(reply[3]) + }; + }, + 3: undefined as unknown as () => FunctionStatsReply + } +} as const satisfies Command; + +function transformRunningScript(reply: Resp2Reply) { + if (isNullReply(reply)) { + return null; + } + + const unwraped = reply as unknown as UnwrapReply; + return { + name: unwraped[1], + command: unwraped[3], + duration_ms: unwraped[5] + }; +} + +function transformEngines(reply: Resp2Reply) { + const unwraped = reply as unknown as UnwrapReply; + + const engines: Record = Object.create(null); + for (let i = 0; i < unwraped.length; i++) { + const name = unwraped[i] as BlobStringReply, + stats = unwraped[++i] as Resp2Reply, + unwrapedStats = stats as unknown as UnwrapReply; + engines[name.toString()] = { + libraries_count: unwrapedStats[1], + functions_count: unwrapedStats[3] + }; + } + + return engines; +} diff --git a/packages/client/lib/commands/GEOADD.spec.ts b/packages/client/lib/commands/GEOADD.spec.ts new file mode 100644 index 00000000000..d947141a318 --- /dev/null +++ b/packages/client/lib/commands/GEOADD.spec.ts @@ -0,0 +1,101 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import GEOADD from './GEOADD'; +import { parseArgs } from './generic-transformers'; + +describe('GEOADD', () => { + describe('transformArguments', () => { + it('one member', () => { + assert.deepEqual( + parseArgs(GEOADD, 'key', { + member: 'member', + longitude: 1, + latitude: 2 + }), + ['GEOADD', 'key', '1', '2', 'member'] + ); + }); + + it('multiple members', () => { + assert.deepEqual( + parseArgs(GEOADD, 'key', [{ + longitude: 1, + latitude: 2, + member: '3', + }, { + longitude: 4, + latitude: 5, + member: '6', + }]), + ['GEOADD', 'key', '1', '2', '3', '4', '5', '6'] + ); + }); + + it('with condition', () => { + assert.deepEqual( + parseArgs(GEOADD, 'key', { + longitude: 1, + latitude: 2, + member: 'member' + }, { + condition: 'NX' + }), + ['GEOADD', 'key', 'NX', '1', '2', 'member'] + ); + }); + + 
it('with NX (backwards compatibility)', () => { + assert.deepEqual( + parseArgs(GEOADD, 'key', { + longitude: 1, + latitude: 2, + member: 'member' + }, { + NX: true + }), + ['GEOADD', 'key', 'NX', '1', '2', 'member'] + ); + }); + + it('with CH', () => { + assert.deepEqual( + parseArgs(GEOADD, 'key', { + longitude: 1, + latitude: 2, + member: 'member' + }, { + CH: true + }), + ['GEOADD', 'key', 'CH', '1', '2', 'member'] + ); + }); + + it('with condition, CH', () => { + assert.deepEqual( + parseArgs(GEOADD, 'key', { + longitude: 1, + latitude: 2, + member: 'member' + }, { + condition: 'XX', + CH: true + }), + ['GEOADD', 'key', 'XX', 'CH', '1', '2', 'member'] + ); + }); + }); + + testUtils.testAll('geoAdd', async client => { + assert.equal( + await client.geoAdd('key', { + member: 'member', + longitude: 1, + latitude: 2 + }), + 1 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/GEOADD.ts b/packages/client/lib/commands/GEOADD.ts new file mode 100644 index 00000000000..3da7b0e74b6 --- /dev/null +++ b/packages/client/lib/commands/GEOADD.ts @@ -0,0 +1,73 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; +import { GeoCoordinates } from './GEOSEARCH'; + +export interface GeoMember extends GeoCoordinates { + member: RedisArgument; +} + +export interface GeoAddOptions { + condition?: 'NX' | 'XX'; + /** + * @deprecated Use `{ condition: 'NX' }` instead. + */ + NX?: boolean; + /** + * @deprecated Use `{ condition: 'XX' }` instead. 
+ */ + XX?: boolean; + CH?: boolean; +} + +export default { + IS_READ_ONLY: false, + /** + * Adds geospatial items to the specified key + * @param parser - The Redis command parser + * @param key - Key to add the geospatial items to + * @param toAdd - Geospatial member(s) to add + * @param options - Options for the GEOADD command + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + toAdd: GeoMember | Array, + options?: GeoAddOptions + ) { + parser.push('GEOADD') + parser.pushKey(key); + + if (options?.condition) { + parser.push(options.condition); + } else if (options?.NX) { + parser.push('NX'); + } else if (options?.XX) { + parser.push('XX'); + } + + if (options?.CH) { + parser.push('CH'); + } + + if (Array.isArray(toAdd)) { + for (const member of toAdd) { + pushMember(parser, member); + } + } else { + pushMember(parser, toAdd); + } + + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; + +function pushMember( + parser: CommandParser, + { longitude, latitude, member }: GeoMember +) { + parser.push( + longitude.toString(), + latitude.toString(), + member + ); +} diff --git a/packages/client/lib/commands/GEODIST.spec.ts b/packages/client/lib/commands/GEODIST.spec.ts new file mode 100644 index 00000000000..a23df405d1d --- /dev/null +++ b/packages/client/lib/commands/GEODIST.spec.ts @@ -0,0 +1,55 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import GEODIST from './GEODIST'; +import { parseArgs } from './generic-transformers'; + +describe('GEODIST', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(GEODIST, 'key', '1', '2'), + ['GEODIST', 'key', '1', '2'] + ); + }); + + it('with unit', () => { + assert.deepEqual( + parseArgs(GEODIST, 'key', '1', '2', 'm'), + ['GEODIST', 'key', '1', '2', 'm'] + ); + }); + }); + + testUtils.testAll('geoDist null', async client => { + assert.equal( + await 
client.geoDist('key', '1', '2'), + null + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); + + testUtils.testAll('geoDist with member', async client => { + const [, dist] = await Promise.all([ + client.geoAdd('key', [{ + member: '1', + longitude: 1, + latitude: 1 + }, { + member: '2', + longitude: 2, + latitude: 2 + }]), + client.geoDist('key', '1', '2') + ]); + + assert.equal( + dist, + 157270.0561 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/GEODIST.ts b/packages/client/lib/commands/GEODIST.ts new file mode 100644 index 00000000000..f86d8156ebf --- /dev/null +++ b/packages/client/lib/commands/GEODIST.ts @@ -0,0 +1,33 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, BlobStringReply, NullReply, Command } from '../RESP/types'; +import { GeoUnits } from './GEOSEARCH'; + +export default { + CACHEABLE: true, + IS_READ_ONLY: true, + /** + * Returns the distance between two members in a geospatial index + * @param parser - The Redis command parser + * @param key - Key of the geospatial index + * @param member1 - First member in the geospatial index + * @param member2 - Second member in the geospatial index + * @param unit - Unit of distance (m, km, ft, mi) + */ + parseCommand(parser: CommandParser, + key: RedisArgument, + member1: RedisArgument, + member2: RedisArgument, + unit?: GeoUnits + ) { + parser.push('GEODIST'); + parser.pushKey(key); + parser.push(member1, member2); + + if (unit) { + parser.push(unit); + } + }, + transformReply(reply: BlobStringReply | NullReply) { + return reply === null ? 
null : Number(reply); + } +} as const satisfies Command; diff --git a/packages/client/lib/commands/GEOHASH.spec.ts b/packages/client/lib/commands/GEOHASH.spec.ts new file mode 100644 index 00000000000..ad26dff8434 --- /dev/null +++ b/packages/client/lib/commands/GEOHASH.spec.ts @@ -0,0 +1,32 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import GEOHASH from './GEOHASH'; +import { parseArgs } from './generic-transformers'; + +describe('GEOHASH', () => { + describe('transformArguments', () => { + it('single member', () => { + assert.deepEqual( + parseArgs(GEOHASH, 'key', 'member'), + ['GEOHASH', 'key', 'member'] + ); + }); + + it('multiple members', () => { + assert.deepEqual( + parseArgs(GEOHASH, 'key', ['1', '2']), + ['GEOHASH', 'key', '1', '2'] + ); + }); + }); + + testUtils.testAll('geoHash', async client => { + assert.deepEqual( + await client.geoHash('key', 'member'), + [null] + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/GEOHASH.ts b/packages/client/lib/commands/GEOHASH.ts new file mode 100644 index 00000000000..bddc7a1fc0d --- /dev/null +++ b/packages/client/lib/commands/GEOHASH.ts @@ -0,0 +1,20 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, ArrayReply, BlobStringReply, Command } from '../RESP/types'; +import { RedisVariadicArgument } from './generic-transformers'; + +export default { + CACHEABLE: true, + IS_READ_ONLY: true, + /** + * Returns the Geohash string representation of one or more position members + * @param parser - The Redis command parser + * @param key - Key of the geospatial index + * @param member - One or more members in the geospatial index + */ + parseCommand(parser: CommandParser, key: RedisArgument, member: RedisVariadicArgument) { + parser.push('GEOHASH'); + parser.pushKey(key); + parser.pushVariadic(member); + }, + transformReply: undefined as unknown as () => 
ArrayReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/GEOPOS.spec.ts b/packages/client/lib/commands/GEOPOS.spec.ts new file mode 100644 index 00000000000..002d16d0256 --- /dev/null +++ b/packages/client/lib/commands/GEOPOS.spec.ts @@ -0,0 +1,166 @@ +import { strict as assert, fail } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import GEOPOS from './GEOPOS'; +import { parseArgs } from './generic-transformers'; + +describe('GEOPOS', () => { + describe('transformArguments', () => { + it('single member', () => { + assert.deepEqual( + parseArgs(GEOPOS, 'key', 'member'), + ['GEOPOS', 'key', 'member'] + ); + }); + + it('multiple members', () => { + assert.deepEqual( + parseArgs(GEOPOS, 'key', ['1', '2']), + ['GEOPOS', 'key', '1', '2'] + ); + }); + }); + + testUtils.testAll('geoPos null', async client => { + assert.deepEqual( + await client.geoPos('key', 'member'), + [null] + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); + + testUtils.testAll('geoPos with member', async client => { + const coordinates = { + longitude: '-122.06429868936538696', + latitude: '37.37749628831998194' + }; + + await client.geoAdd('key', { + member: 'member', + ...coordinates + }); + + const result = await client.geoPos('key', 'member'); + + /** + * - Redis < 8: Returns coordinates with 14 decimal places (e.g., "-122.06429868936539") + * - Redis 8+: Returns coordinates with 17 decimal places (e.g., "-122.06429868936538696") + * + */ + const PRECISION = 13; // Number of decimal places to compare + + if (result && result.length === 1 && result[0] != null) { + const { longitude, latitude } = result[0]; + + assert.ok( + compareWithPrecision(longitude, coordinates.longitude, PRECISION), + `Longitude mismatch: ${longitude} vs ${coordinates.longitude}` + ); + assert.ok( + compareWithPrecision(latitude, coordinates.latitude, PRECISION), + `Latitude mismatch: ${latitude} vs ${coordinates.latitude}` + ); + + } else { 
+ assert.fail('Expected a valid result'); + } + + + + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); + +describe('compareWithPrecision', () => { + it('should match exact same numbers', () => { + assert.strictEqual( + compareWithPrecision('123.456789', '123.456789', 6), + true + ); + }); + + it('should match when actual has more precision than needed', () => { + assert.strictEqual( + compareWithPrecision('123.456789123456', '123.456789', 6), + true + ); + }); + + it('should match when expected has more precision than needed', () => { + assert.strictEqual( + compareWithPrecision('123.456789', '123.456789123456', 6), + true + ); + }); + + it('should fail when decimals differ within precision', () => { + assert.strictEqual( + compareWithPrecision('123.456689', '123.456789', 6), + false + ); + }); + + it('should handle negative numbers', () => { + assert.strictEqual( + compareWithPrecision('-122.06429868936538', '-122.06429868936539', 13), + true + ); + }); + + it('should fail when integer parts differ', () => { + assert.strictEqual( + compareWithPrecision('124.456789', '123.456789', 6), + false + ); + }); + + it('should handle zero decimal places', () => { + assert.strictEqual( + compareWithPrecision('123.456789', '123.456789', 0), + true + ); + }); + + it('should handle numbers without decimal points', () => { + assert.strictEqual( + compareWithPrecision('123', '123', 6), + true + ); + }); + + it('should handle one number without decimal point', () => { + assert.strictEqual( + compareWithPrecision('123', '123.000', 3), + true + ); + }); + + it('should match Redis coordinates with different precision', () => { + assert.strictEqual( + compareWithPrecision( + '-122.06429868936538696', + '-122.06429868936539', + 13 + ), + true + ); + }); + + it('should match Redis latitude with different precision', () => { + assert.strictEqual( + compareWithPrecision( + '37.37749628831998194', + '37.37749628831998', + 14 + ), + true + ); + }); +}); + 
+export const compareWithPrecision = (actual: string, expected: string, decimals: number): boolean => { + return Math.abs(Number(actual) - Number(expected)) < Math.pow(10, -decimals); +}; diff --git a/packages/client/lib/commands/GEOPOS.ts b/packages/client/lib/commands/GEOPOS.ts new file mode 100644 index 00000000000..6fed2a8abc8 --- /dev/null +++ b/packages/client/lib/commands/GEOPOS.ts @@ -0,0 +1,28 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, ArrayReply, TuplesReply, BlobStringReply, NullReply, UnwrapReply, Command } from '../RESP/types'; +import { RedisVariadicArgument } from './generic-transformers'; + +export default { + CACHEABLE: true, + IS_READ_ONLY: true, + /** + * Returns the longitude and latitude of one or more members in a geospatial index + * @param parser - The Redis command parser + * @param key - Key of the geospatial index + * @param member - One or more members in the geospatial index + */ + parseCommand(parser: CommandParser, key: RedisArgument, member: RedisVariadicArgument) { + parser.push('GEOPOS'); + parser.pushKey(key); + parser.pushVariadic(member); + }, + transformReply(reply: UnwrapReply | NullReply>>) { + return reply.map(item => { + const unwrapped = item as unknown as UnwrapReply; + return unwrapped === null ? 
null : { + longitude: unwrapped[0], + latitude: unwrapped[1] + }; + }); + } +} as const satisfies Command; diff --git a/packages/client/lib/commands/GEORADIUS.spec.ts b/packages/client/lib/commands/GEORADIUS.spec.ts new file mode 100644 index 00000000000..3c33395c5f6 --- /dev/null +++ b/packages/client/lib/commands/GEORADIUS.spec.ts @@ -0,0 +1,29 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import GEORADIUS from './GEORADIUS'; +import { parseArgs } from './generic-transformers'; + +describe('GEORADIUS', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(GEORADIUS, 'key', { + longitude: 1, + latitude: 2 + }, 3, 'm'), + ['GEORADIUS', 'key', '1', '2', '3', 'm'] + ); + }); + + testUtils.testAll('geoRadius', async client => { + assert.deepEqual( + await client.geoRadius('key', { + longitude: 1, + latitude: 2 + }, 3, 'm'), + [] + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/GEORADIUS.ts b/packages/client/lib/commands/GEORADIUS.ts new file mode 100644 index 00000000000..2f622415064 --- /dev/null +++ b/packages/client/lib/commands/GEORADIUS.ts @@ -0,0 +1,36 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, ArrayReply, BlobStringReply, Command } from '../RESP/types'; +import { GeoCoordinates, GeoUnits, GeoSearchOptions, parseGeoSearchOptions } from './GEOSEARCH'; + +export function parseGeoRadiusArguments( + parser: CommandParser, + key: RedisArgument, + from: GeoCoordinates, + radius: number, + unit: GeoUnits, + options?: GeoSearchOptions +) { + parser.pushKey(key); + parser.push(from.longitude.toString(), from.latitude.toString(), radius.toString(), unit); + + parseGeoSearchOptions(parser, options) +} + +export default { + IS_READ_ONLY: false, + /** + * Queries members in a geospatial index based on a radius from a center point + * @param parser - The Redis command parser + * 
@param key - Key of the geospatial index + * @param from - Center coordinates for the search + * @param radius - Radius of the search area + * @param unit - Unit of distance (m, km, ft, mi) + * @param options - Additional search options + */ + parseCommand(...args: Parameters) { + args[0].push('GEORADIUS'); + return parseGeoRadiusArguments(...args); + }, + transformReply: undefined as unknown as () => ArrayReply +} as const satisfies Command; + diff --git a/packages/client/lib/commands/GEORADIUSBYMEMBER.spec.ts b/packages/client/lib/commands/GEORADIUSBYMEMBER.spec.ts new file mode 100644 index 00000000000..c81c3d75815 --- /dev/null +++ b/packages/client/lib/commands/GEORADIUSBYMEMBER.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import GEORADIUSBYMEMBER from './GEORADIUSBYMEMBER'; +import { parseArgs } from './generic-transformers'; + +describe('GEORADIUSBYMEMBER', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(GEORADIUSBYMEMBER, 'key', 'member', 3, 'm'), + ['GEORADIUSBYMEMBER', 'key', 'member', '3', 'm'] + ); + }); + + testUtils.testAll('geoRadiusByMember', async client => { + assert.deepEqual( + await client.geoRadiusByMember('key', 'member', 3, 'm'), + [] + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/GEORADIUSBYMEMBER.ts b/packages/client/lib/commands/GEORADIUSBYMEMBER.ts new file mode 100644 index 00000000000..ee29ab84115 --- /dev/null +++ b/packages/client/lib/commands/GEORADIUSBYMEMBER.ts @@ -0,0 +1,42 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, ArrayReply, BlobStringReply, Command } from '../RESP/types'; +import { GeoUnits, GeoSearchOptions, parseGeoSearchOptions } from './GEOSEARCH'; + +export function parseGeoRadiusByMemberArguments( + parser: CommandParser, + key: RedisArgument, + from: RedisArgument, + radius: number, + unit: 
GeoUnits, + options?: GeoSearchOptions +) { + parser.pushKey(key); + parser.push(from, radius.toString(), unit); + + parseGeoSearchOptions(parser, options); +} + +export default { + IS_READ_ONLY: false, + /** + * Queries members in a geospatial index based on a radius from a member + * @param parser - The Redis command parser + * @param key - Key of the geospatial index + * @param from - Member name to use as center point + * @param radius - Radius of the search area + * @param unit - Unit of distance (m, km, ft, mi) + * @param options - Additional search options + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + from: RedisArgument, + radius: number, + unit: GeoUnits, + options?: GeoSearchOptions + ) { + parser.push('GEORADIUSBYMEMBER'); + parseGeoRadiusByMemberArguments(parser, key, from, radius, unit, options); + }, + transformReply: undefined as unknown as () => ArrayReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/GEORADIUSBYMEMBER_RO.spec.ts b/packages/client/lib/commands/GEORADIUSBYMEMBER_RO.spec.ts new file mode 100644 index 00000000000..bd4aa86dec1 --- /dev/null +++ b/packages/client/lib/commands/GEORADIUSBYMEMBER_RO.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import GEORADIUSBYMEMBER_RO from './GEORADIUSBYMEMBER_RO'; +import { parseArgs } from './generic-transformers'; + +describe('GEORADIUSBYMEMBER_RO', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(GEORADIUSBYMEMBER_RO, 'key', 'member', 3, 'm'), + ['GEORADIUSBYMEMBER_RO', 'key', 'member', '3', 'm'] + ); + }); + + testUtils.testAll('geoRadiusByMemberRo', async client => { + assert.deepEqual( + await client.geoRadiusByMemberRo('key', 'member', 3, 'm'), + [] + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/GEORADIUSBYMEMBER_RO.ts 
b/packages/client/lib/commands/GEORADIUSBYMEMBER_RO.ts new file mode 100644 index 00000000000..8629694588a --- /dev/null +++ b/packages/client/lib/commands/GEORADIUSBYMEMBER_RO.ts @@ -0,0 +1,22 @@ +import { Command } from '../RESP/types'; +import GEORADIUSBYMEMBER, { parseGeoRadiusByMemberArguments } from './GEORADIUSBYMEMBER'; + +export default { + CACHEABLE: true, + IS_READ_ONLY: true, + /** + * Read-only variant that queries members in a geospatial index based on a radius from a member + * @param parser - The Redis command parser + * @param key - Key of the geospatial index + * @param from - Member name to use as center point + * @param radius - Radius of the search area + * @param unit - Unit of distance (m, km, ft, mi) + * @param options - Additional search options + */ + parseCommand(...args: Parameters) { + const parser = args[0]; + parser.push('GEORADIUSBYMEMBER_RO'); + parseGeoRadiusByMemberArguments(...args); + }, + transformReply: GEORADIUSBYMEMBER.transformReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/GEORADIUSBYMEMBER_RO_WITH.spec.ts b/packages/client/lib/commands/GEORADIUSBYMEMBER_RO_WITH.spec.ts new file mode 100644 index 00000000000..52b31b03594 --- /dev/null +++ b/packages/client/lib/commands/GEORADIUSBYMEMBER_RO_WITH.spec.ts @@ -0,0 +1,45 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import GEORADIUSBYMEMBER_RO_WITH from './GEORADIUSBYMEMBER_RO_WITH'; +import { CommandArguments } from '../RESP/types'; +import { GEO_REPLY_WITH } from './GEOSEARCH_WITH'; +import { parseArgs } from './generic-transformers'; + +describe('GEORADIUSBYMEMBER_RO WITH', () => { + it('transformArguments', () => { + const expectedReply: CommandArguments = ['GEORADIUSBYMEMBER_RO', 'key', 'member', '3', 'm', 'WITHDIST']; + expectedReply.preserve = ['WITHDIST']; + + assert.deepEqual( + parseArgs(GEORADIUSBYMEMBER_RO_WITH, 'key', 'member', 3, 'm', [ + GEO_REPLY_WITH.DISTANCE + ]), + 
expectedReply + ); + }); + + testUtils.testAll('geoRadiusByMemberRoWith', async client => { + const [, reply] = await Promise.all([ + client.geoAdd('key', { + member: 'member', + longitude: 1, + latitude: 2 + }), + client.geoRadiusByMemberRoWith('key', 'member', 1, 'm', [ + GEO_REPLY_WITH.HASH, + GEO_REPLY_WITH.DISTANCE, + GEO_REPLY_WITH.COORDINATES + ]) + ]); + + assert.equal(reply.length, 1); + assert.equal(reply[0].member, 'member'); + assert.equal(typeof reply[0].distance, 'string'); + assert.equal(typeof reply[0].hash, 'number'); + assert.equal(typeof reply[0].coordinates?.longitude, 'string'); + assert.equal(typeof reply[0].coordinates?.latitude, 'string'); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/GEORADIUSBYMEMBER_RO_WITH.ts b/packages/client/lib/commands/GEORADIUSBYMEMBER_RO_WITH.ts new file mode 100644 index 00000000000..239c6cf2444 --- /dev/null +++ b/packages/client/lib/commands/GEORADIUSBYMEMBER_RO_WITH.ts @@ -0,0 +1,22 @@ +import { Command } from '../RESP/types'; +import GEORADIUSBYMEMBER_WITH, { parseGeoRadiusByMemberWithArguments } from './GEORADIUSBYMEMBER_WITH'; + +export default { + CACHEABLE: true, + IS_READ_ONLY: true, + /** + * Read-only variant that queries members in a geospatial index based on a radius from a member with additional information + * @param parser - The Redis command parser + * @param key - Key of the geospatial index + * @param from - Member name to use as center point + * @param radius - Radius of the search area + * @param unit - Unit of distance (m, km, ft, mi) + * @param withValues - Information to include with each returned member + */ + parseCommand(...args: Parameters) { + const parser = args[0]; + parser.push('GEORADIUSBYMEMBER_RO'); + parseGeoRadiusByMemberWithArguments(...args); + }, + transformReply: GEORADIUSBYMEMBER_WITH.transformReply +} as const satisfies Command; diff --git 
a/packages/client/lib/commands/GEORADIUSBYMEMBER_STORE.spec.ts b/packages/client/lib/commands/GEORADIUSBYMEMBER_STORE.spec.ts new file mode 100644 index 00000000000..9edb08d1eae --- /dev/null +++ b/packages/client/lib/commands/GEORADIUSBYMEMBER_STORE.spec.ts @@ -0,0 +1,40 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import GEORADIUSBYMEMBER_STORE from './GEORADIUSBYMEMBER_STORE'; +import { parseArgs } from './generic-transformers'; + +describe('GEORADIUSBYMEMBER STORE', () => { + describe('transformArguments', () => { + it('STORE', () => { + assert.deepEqual( + parseArgs(GEORADIUSBYMEMBER_STORE, 'key', 'member', 3, 'm', 'destination'), + ['GEORADIUSBYMEMBER', 'key', 'member', '3', 'm', 'STORE', 'destination'] + ); + }); + + it('STOREDIST', () => { + assert.deepEqual( + parseArgs(GEORADIUSBYMEMBER_STORE, 'key', 'member', 3, 'm', 'destination', { + STOREDIST: true + }), + ['GEORADIUSBYMEMBER', 'key', 'member', '3', 'm', 'STOREDIST', 'destination'] + ); + }); + }); + + testUtils.testAll('geoRadiusByMemberStore', async client => { + const [, reply] = await Promise.all([ + client.geoAdd('{tag}source', { + longitude: 1, + latitude: 2, + member: 'member' + }), + client.geoRadiusByMemberStore('{tag}source', 'member', 3, 'm', '{tag}destination') + ]); + + assert.equal(reply, 1); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/GEORADIUSBYMEMBER_STORE.ts b/packages/client/lib/commands/GEORADIUSBYMEMBER_STORE.ts new file mode 100644 index 00000000000..20a1e0b6699 --- /dev/null +++ b/packages/client/lib/commands/GEORADIUSBYMEMBER_STORE.ts @@ -0,0 +1,43 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; +import GEORADIUSBYMEMBER, { parseGeoRadiusByMemberArguments } from './GEORADIUSBYMEMBER'; +import { GeoSearchOptions, GeoUnits } from './GEOSEARCH'; + +export interface 
GeoRadiusStoreOptions extends GeoSearchOptions { + STOREDIST?: boolean; +} + +export default { + IS_READ_ONLY: GEORADIUSBYMEMBER.IS_READ_ONLY, + /** + * Queries members in a geospatial index based on a radius from a member and stores the results + * @param parser - The Redis command parser + * @param key - Key of the geospatial index + * @param from - Member name to use as center point + * @param radius - Radius of the search area + * @param unit - Unit of distance (m, km, ft, mi) + * @param destination - Key to store the results + * @param options - Additional search and storage options + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + from: RedisArgument, + radius: number, + unit: GeoUnits, + destination: RedisArgument, + options?: GeoRadiusStoreOptions + ) { + parser.push('GEORADIUSBYMEMBER') + parseGeoRadiusByMemberArguments(parser, key, from, radius, unit, options); + + if (options?.STOREDIST) { + parser.push('STOREDIST'); + parser.pushKey(destination); + } else { + parser.push('STORE'); + parser.pushKey(destination); + } + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/GEORADIUSBYMEMBER_WITH.spec.ts b/packages/client/lib/commands/GEORADIUSBYMEMBER_WITH.spec.ts new file mode 100644 index 00000000000..9d634d60656 --- /dev/null +++ b/packages/client/lib/commands/GEORADIUSBYMEMBER_WITH.spec.ts @@ -0,0 +1,45 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import GEORADIUSBYMEMBER_WITH from './GEORADIUSBYMEMBER_WITH'; +import { CommandArguments } from '../RESP/types'; +import { GEO_REPLY_WITH } from './GEOSEARCH_WITH'; +import { parseArgs } from './generic-transformers'; + +describe('GEORADIUSBYMEMBER WITH', () => { + it('transformArguments', () => { + const expectedReply: CommandArguments = ['GEORADIUSBYMEMBER', 'key', 'member', '3', 'm', 'WITHDIST']; + expectedReply.preserve = ['WITHDIST']; + + 
assert.deepEqual( + parseArgs(GEORADIUSBYMEMBER_WITH, 'key', 'member', 3, 'm', [ + GEO_REPLY_WITH.DISTANCE + ]), + expectedReply + ); + }); + + testUtils.testAll('geoRadiusByMemberWith', async client => { + const [, reply] = await Promise.all([ + client.geoAdd('key', { + member: 'member', + longitude: 1, + latitude: 2 + }), + client.geoRadiusByMemberWith('key', 'member', 1, 'm', [ + GEO_REPLY_WITH.HASH, + GEO_REPLY_WITH.DISTANCE, + GEO_REPLY_WITH.COORDINATES + ]) + ]); + + assert.equal(reply.length, 1); + assert.equal(reply[0].member, 'member'); + assert.equal(typeof reply[0].distance, 'string'); + assert.equal(typeof reply[0].hash, 'number'); + assert.equal(typeof reply[0].coordinates!.longitude, 'string'); + assert.equal(typeof reply[0].coordinates!.latitude, 'string'); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/GEORADIUSBYMEMBER_WITH.ts b/packages/client/lib/commands/GEORADIUSBYMEMBER_WITH.ts new file mode 100644 index 00000000000..9f7a01bb525 --- /dev/null +++ b/packages/client/lib/commands/GEORADIUSBYMEMBER_WITH.ts @@ -0,0 +1,49 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, Command } from '../RESP/types'; +import GEORADIUSBYMEMBER from './GEORADIUSBYMEMBER'; +import { GeoSearchOptions, GeoUnits, parseGeoSearchOptions } from './GEOSEARCH'; +import GEOSEARCH_WITH, { GeoReplyWith } from './GEOSEARCH_WITH'; + +export function parseGeoRadiusByMemberWithArguments( + parser: CommandParser, + key: RedisArgument, + from: RedisArgument, + radius: number, + unit: GeoUnits, + replyWith: Array, + options?: GeoSearchOptions +) { + parser.pushKey(key); + parser.push(from, radius.toString(), unit); + parseGeoSearchOptions(parser, options); + + parser.push(...replyWith); + parser.preserve = replyWith; +} + +export default { + IS_READ_ONLY: GEORADIUSBYMEMBER.IS_READ_ONLY, + /** + * Queries members in a geospatial index based on a radius from a member with additional 
information + * @param parser - The Redis command parser + * @param key - Key of the geospatial index + * @param from - Member name to use as center point + * @param radius - Radius of the search area + * @param unit - Unit of distance (m, km, ft, mi) + * @param replyWith - Information to include with each returned member + * @param options - Additional search options + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + from: RedisArgument, + radius: number, + unit: GeoUnits, + replyWith: Array, + options?: GeoSearchOptions + ) { + parser.push('GEORADIUSBYMEMBER'); + parseGeoRadiusByMemberWithArguments(parser, key, from, radius, unit, replyWith, options); + }, + transformReply: GEOSEARCH_WITH.transformReply +} as const satisfies Command; \ No newline at end of file diff --git a/packages/client/lib/commands/GEORADIUS_RO.spec.ts b/packages/client/lib/commands/GEORADIUS_RO.spec.ts new file mode 100644 index 00000000000..917eba3ab8e --- /dev/null +++ b/packages/client/lib/commands/GEORADIUS_RO.spec.ts @@ -0,0 +1,29 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import GEORADIUS_RO from './GEORADIUS_RO'; +import { parseArgs } from './generic-transformers'; + +describe('GEORADIUS_RO', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(GEORADIUS_RO, 'key', { + longitude: 1, + latitude: 2 + }, 3, 'm'), + ['GEORADIUS_RO', 'key', '1', '2', '3', 'm'] + ); + }); + + testUtils.testAll('geoRadiusRo', async client => { + assert.deepEqual( + await client.geoRadiusRo('key', { + longitude: 1, + latitude: 2 + }, 3, 'm'), + [] + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/GEORADIUS_RO.ts b/packages/client/lib/commands/GEORADIUS_RO.ts new file mode 100644 index 00000000000..29cf6f8ccd7 --- /dev/null +++ b/packages/client/lib/commands/GEORADIUS_RO.ts @@ -0,0 +1,21 @@ +import { Command } from '../RESP/types'; 
+import GEORADIUS, { parseGeoRadiusArguments } from './GEORADIUS'; + +export default { + CACHEABLE: true, + IS_READ_ONLY: true, + /** + * Read-only variant that queries members in a geospatial index based on a radius from a center point + * @param parser - The Redis command parser + * @param key - Key of the geospatial index + * @param from - Center coordinates for the search + * @param radius - Radius of the search area + * @param unit - Unit of distance (m, km, ft, mi) + * @param options - Additional search options + */ + parseCommand(...args: Parameters<typeof GEORADIUS.parseCommand>) { + args[0].push('GEORADIUS_RO'); + parseGeoRadiusArguments(...args); + }, + transformReply: GEORADIUS.transformReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/GEORADIUS_RO_WITH.spec.ts b/packages/client/lib/commands/GEORADIUS_RO_WITH.spec.ts new file mode 100644 index 00000000000..01d79954b64 --- /dev/null +++ b/packages/client/lib/commands/GEORADIUS_RO_WITH.spec.ts @@ -0,0 +1,49 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import GEORADIUS_RO_WITH from './GEORADIUS_RO_WITH'; +import { GEO_REPLY_WITH } from './GEOSEARCH_WITH'; +import { CommandArguments } from '../RESP/types'; +import { parseArgs } from './generic-transformers'; + +describe('GEORADIUS_RO WITH', () => { + it('transformArguments', () => { + const expectedReply: CommandArguments = ['GEORADIUS_RO', 'key', '1', '2', '3', 'm', 'WITHDIST']; + expectedReply.preserve = ['WITHDIST']; + + assert.deepEqual( + parseArgs(GEORADIUS_RO_WITH, 'key', { + longitude: 1, + latitude: 2 + }, 3, 'm', [GEO_REPLY_WITH.DISTANCE]), + expectedReply + ); + }); + + testUtils.testAll('geoRadiusRoWith', async client => { + const [, reply] = await Promise.all([ + client.geoAdd('key', { + member: 'member', + longitude: 1, + latitude: 2 + }), + client.geoRadiusRoWith('key', { + longitude: 1, + latitude: 2 + }, 1, 'm', [ + GEO_REPLY_WITH.HASH, + GEO_REPLY_WITH.DISTANCE, + 
GEO_REPLY_WITH.COORDINATES + ]) + ]); + + assert.equal(reply.length, 1); + assert.equal(reply[0].member, 'member'); + assert.equal(typeof reply[0].distance, 'string'); + assert.equal(typeof reply[0].hash, 'number'); + assert.equal(typeof reply[0].coordinates!.longitude, 'string'); + assert.equal(typeof reply[0].coordinates!.latitude, 'string'); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/GEORADIUS_RO_WITH.ts b/packages/client/lib/commands/GEORADIUS_RO_WITH.ts new file mode 100644 index 00000000000..aaaef482f05 --- /dev/null +++ b/packages/client/lib/commands/GEORADIUS_RO_WITH.ts @@ -0,0 +1,23 @@ +import { Command } from '../RESP/types'; +import { parseGeoRadiusWithArguments } from './GEORADIUS_WITH'; +import GEORADIUS_WITH from './GEORADIUS_WITH'; + +export default { + CACHEABLE: true, + IS_READ_ONLY: true, + /** + * Read-only variant that queries members in a geospatial index based on a radius from a center point with additional information + * @param parser - The Redis command parser + * @param key - Key of the geospatial index + * @param from - Center coordinates for the search + * @param radius - Radius of the search area + * @param unit - Unit of distance (m, km, ft, mi) + * @param replyWith - Information to include with each returned member + * @param options - Additional search options + */ + parseCommand(...args: Parameters<typeof GEORADIUS_WITH.parseCommand>) { + args[0].push('GEORADIUS_RO'); + parseGeoRadiusWithArguments(...args); + }, + transformReply: GEORADIUS_WITH.transformReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/GEORADIUS_STORE.spec.ts b/packages/client/lib/commands/GEORADIUS_STORE.spec.ts new file mode 100644 index 00000000000..9a9bcf37bcf --- /dev/null +++ b/packages/client/lib/commands/GEORADIUS_STORE.spec.ts @@ -0,0 +1,49 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import GEORADIUS_STORE from 
'./GEORADIUS_STORE'; +import { parseArgs } from './generic-transformers'; + +describe('GEORADIUS STORE', () => { + describe('transformArguments', () => { + it('STORE', () => { + assert.deepEqual( + parseArgs(GEORADIUS_STORE, 'key', { + longitude: 1, + latitude: 2 + }, 3, 'm', 'destination'), + ['GEORADIUS', 'key', '1', '2', '3', 'm', 'STORE', 'destination'] + ); + }); + + it('STOREDIST', () => { + assert.deepEqual( + parseArgs(GEORADIUS_STORE, 'key', { + longitude: 1, + latitude: 2 + }, 3, 'm', 'destination', { + STOREDIST: true + }), + ['GEORADIUS', 'key', '1', '2', '3', 'm', 'STOREDIST', 'destination'] + ); + }); + }); + + testUtils.testAll('geoRadiusStore', async client => { + const [, reply] = await Promise.all([ + client.geoAdd('{tag}source', { + longitude: 1, + latitude: 2, + member: 'member' + }), + client.geoRadiusStore('{tag}source', { + longitude: 1, + latitude: 2 + }, 1, 'm', '{tag}destination') + ]); + + assert.equal(reply, 1); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/GEORADIUS_STORE.ts b/packages/client/lib/commands/GEORADIUS_STORE.ts new file mode 100644 index 00000000000..b2db8ca9882 --- /dev/null +++ b/packages/client/lib/commands/GEORADIUS_STORE.ts @@ -0,0 +1,42 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; +import GEORADIUS, { parseGeoRadiusArguments } from './GEORADIUS'; +import { GeoCoordinates, GeoSearchOptions, GeoUnits } from './GEOSEARCH'; + +export interface GeoRadiusStoreOptions extends GeoSearchOptions { + STOREDIST?: boolean; +} + +export default { + IS_READ_ONLY: GEORADIUS.IS_READ_ONLY, + /** + * Queries members in a geospatial index based on a radius from a center point and stores the results + * @param parser - The Redis command parser + * @param key - Key of the geospatial index + * @param from - Center coordinates for the search + * @param radius - Radius of the search area + 
* @param unit - Unit of distance (m, km, ft, mi) + * @param destination - Key to store the results + * @param options - Additional search and storage options + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + from: GeoCoordinates, + radius: number, + unit: GeoUnits, + destination: RedisArgument, + options?: GeoRadiusStoreOptions + ) { + parser.push('GEORADIUS'); + parseGeoRadiusArguments(parser, key, from, radius, unit, options); + if (options?.STOREDIST) { + parser.push('STOREDIST'); + parser.pushKey(destination); + } else { + parser.push('STORE'); + parser.pushKey(destination); + } + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/GEORADIUS_WITH.spec.ts b/packages/client/lib/commands/GEORADIUS_WITH.spec.ts new file mode 100644 index 00000000000..f514c9be96f --- /dev/null +++ b/packages/client/lib/commands/GEORADIUS_WITH.spec.ts @@ -0,0 +1,49 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import GEORADIUS_WITH from './GEORADIUS_WITH'; +import { GEO_REPLY_WITH } from './GEOSEARCH_WITH'; +import { CommandArguments } from '../RESP/types'; +import { parseArgs } from './generic-transformers'; + +describe('GEORADIUS WITH', () => { + it('transformArguments', () => { + const expectedReply: CommandArguments = ['GEORADIUS', 'key', '1', '2', '3', 'm', 'WITHDIST']; + expectedReply.preserve = ['WITHDIST']; + + assert.deepEqual( + parseArgs(GEORADIUS_WITH, 'key', { + longitude: 1, + latitude: 2 + }, 3, 'm', [GEO_REPLY_WITH.DISTANCE]), + expectedReply + ); + }); + + testUtils.testAll('geoRadiusWith', async client => { + const [, reply] = await Promise.all([ + client.geoAdd('key', { + member: 'member', + longitude: 1, + latitude: 2 + }), + client.geoRadiusWith('key', { + longitude: 1, + latitude: 2 + }, 1, 'm', [ + GEO_REPLY_WITH.HASH, + GEO_REPLY_WITH.DISTANCE, + GEO_REPLY_WITH.COORDINATES + ]) + ]); + + 
assert.equal(reply.length, 1); + assert.equal(reply[0].member, 'member'); + assert.equal(typeof reply[0].distance, 'string'); + assert.equal(typeof reply[0].hash, 'number'); + assert.equal(typeof reply[0].coordinates?.longitude, 'string'); + assert.equal(typeof reply[0].coordinates?.latitude, 'string'); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/GEORADIUS_WITH.ts b/packages/client/lib/commands/GEORADIUS_WITH.ts new file mode 100644 index 00000000000..5028a926145 --- /dev/null +++ b/packages/client/lib/commands/GEORADIUS_WITH.ts @@ -0,0 +1,46 @@ +import { CommandParser } from '../client/parser'; +import { Command, RedisArgument } from '../RESP/types'; +import GEORADIUS, { parseGeoRadiusArguments } from './GEORADIUS'; +import { GeoCoordinates, GeoSearchOptions, GeoUnits } from './GEOSEARCH'; +import GEOSEARCH_WITH, { GeoReplyWith } from './GEOSEARCH_WITH'; + +export function parseGeoRadiusWithArguments( + parser: CommandParser, + key: RedisArgument, + from: GeoCoordinates, + radius: number, + unit: GeoUnits, + replyWith: Array, + options?: GeoSearchOptions, +) { + parseGeoRadiusArguments(parser, key, from, radius, unit, options) + parser.pushVariadic(replyWith); + parser.preserve = replyWith; +} + +export default { + IS_READ_ONLY: GEORADIUS.IS_READ_ONLY, + /** + * Queries members in a geospatial index based on a radius from a center point with additional information + * @param parser - The Redis command parser + * @param key - Key of the geospatial index + * @param from - Center coordinates for the search + * @param radius - Radius of the search area + * @param unit - Unit of distance (m, km, ft, mi) + * @param replyWith - Information to include with each returned member + * @param options - Additional search options + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + from: GeoCoordinates, + radius: number, + unit: GeoUnits, + replyWith: Array, + options?: 
GeoSearchOptions + ) { + parser.push('GEORADIUS'); + parseGeoRadiusWithArguments(parser, key, from, radius, unit, replyWith, options); + }, + transformReply: GEOSEARCH_WITH.transformReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/GEOSEARCH.spec.ts b/packages/client/lib/commands/GEOSEARCH.spec.ts new file mode 100644 index 00000000000..4cd7e61a0ac --- /dev/null +++ b/packages/client/lib/commands/GEOSEARCH.spec.ts @@ -0,0 +1,88 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import GEOSEARCH from './GEOSEARCH'; +import { parseArgs } from './generic-transformers'; + +describe('GEOSEARCH', () => { + testUtils.isVersionGreaterThanHook([6, 2]); + + describe('transformArguments', () => { + it('FROMMEMBER, BYRADIUS, without options', () => { + assert.deepEqual( + parseArgs(GEOSEARCH, 'key', 'member', { + radius: 1, + unit: 'm' + }), + ['GEOSEARCH', 'key', 'FROMMEMBER', 'member', 'BYRADIUS', '1', 'm'] + ); + }); + + it('FROMLONLAT, BYBOX, without options', () => { + assert.deepEqual( + parseArgs(GEOSEARCH, 'key', { + longitude: 1, + latitude: 2 + }, { + width: 1, + height: 2, + unit: 'm' + }), + ['GEOSEARCH', 'key', 'FROMLONLAT', '1', '2', 'BYBOX', '1', '2', 'm'] + ); + }); + + it('with SORT', () => { + assert.deepEqual( + parseArgs(GEOSEARCH, 'key', 'member', { + radius: 1, + unit: 'm' + }, { + SORT: 'ASC' + }), + ['GEOSEARCH', 'key', 'FROMMEMBER', 'member', 'BYRADIUS', '1', 'm', 'ASC'] + ); + }); + + describe('with COUNT', () => { + it('number', () => { + assert.deepEqual( + parseArgs(GEOSEARCH, 'key', 'member', { + radius: 1, + unit: 'm' + }, { + COUNT: 1 + }), + ['GEOSEARCH', 'key', 'FROMMEMBER', 'member', 'BYRADIUS', '1', 'm', 'COUNT', '1'] + ); + }); + + it('with ANY', () => { + assert.deepEqual( + parseArgs(GEOSEARCH, 'key', 'member', { + radius: 1, + unit: 'm' + }, { + COUNT: { + value: 1, + ANY: true + } + }), + ['GEOSEARCH', 'key', 'FROMMEMBER', 'member', 'BYRADIUS', '1', 'm', 
'COUNT', '1', 'ANY'] + ); + }); + }); + }); + + testUtils.testAll('geoSearch', async client => { + assert.deepEqual( + await client.geoSearch('key', 'member', { + radius: 1, + unit: 'm' + }), + [] + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/GEOSEARCH.ts b/packages/client/lib/commands/GEOSEARCH.ts new file mode 100644 index 00000000000..a26ccec23eb --- /dev/null +++ b/packages/client/lib/commands/GEOSEARCH.ts @@ -0,0 +1,102 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, ArrayReply, BlobStringReply, Command } from '../RESP/types'; + +export type GeoUnits = 'm' | 'km' | 'mi' | 'ft'; + +export interface GeoCoordinates { + longitude: RedisArgument | number; + latitude: RedisArgument | number; +} + +export type GeoSearchFrom = RedisArgument | GeoCoordinates; + +export interface GeoSearchByRadius { + radius: number; + unit: GeoUnits; +} + +export interface GeoSearchByBox { + width: number; + height: number; + unit: GeoUnits; +} + +export type GeoSearchBy = GeoSearchByRadius | GeoSearchByBox; + +export function parseGeoSearchArguments( + parser: CommandParser, + key: RedisArgument, + from: GeoSearchFrom, + by: GeoSearchBy, + options?: GeoSearchOptions, +) { + parser.pushKey(key); + + if (typeof from === 'string' || from instanceof Buffer) { + parser.push('FROMMEMBER', from); + } else { + parser.push('FROMLONLAT', from.longitude.toString(), from.latitude.toString()); + } + + if ('radius' in by) { + parser.push('BYRADIUS', by.radius.toString(), by.unit); + } else { + parser.push('BYBOX', by.width.toString(), by.height.toString(), by.unit); + } + + parseGeoSearchOptions(parser, options); +} + +export type GeoCountArgument = number | { + value: number; + ANY?: boolean; +}; + +export interface GeoSearchOptions { + SORT?: 'ASC' | 'DESC'; + COUNT?: GeoCountArgument; +} + +export function parseGeoSearchOptions( + parser: CommandParser, + options?: GeoSearchOptions +) 
{ + if (options?.SORT) { + parser.push(options.SORT); + } + + if (options?.COUNT) { + if (typeof options.COUNT === 'number') { + parser.push('COUNT', options.COUNT.toString()); + } else { + parser.push('COUNT', options.COUNT.value.toString()); + + if (options.COUNT.ANY) { + parser.push('ANY'); + } + } + } +} + +export default { + IS_READ_ONLY: true, + /** + * Queries members inside an area of a geospatial index + * @param parser - The Redis command parser + * @param key - Key of the geospatial index + * @param from - Center point of the search (member name or coordinates) + * @param by - Search area specification (radius or box dimensions) + * @param options - Additional search options + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + from: GeoSearchFrom, + by: GeoSearchBy, + options?: GeoSearchOptions + ) { + parser.push('GEOSEARCH'); + parseGeoSearchArguments(parser, key, from, by, options); + }, + transformReply: undefined as unknown as () => ArrayReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/GEOSEARCHSTORE.spec.ts b/packages/client/lib/commands/GEOSEARCHSTORE.spec.ts new file mode 100644 index 00000000000..b8427ae0412 --- /dev/null +++ b/packages/client/lib/commands/GEOSEARCHSTORE.spec.ts @@ -0,0 +1,45 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import GEOSEARCHSTORE from './GEOSEARCHSTORE'; +import { parseArgs } from './generic-transformers'; + +describe('GEOSEARCHSTORE', () => { + testUtils.isVersionGreaterThanHook([6, 2]); + + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(GEOSEARCHSTORE, 'source', 'destination', 'member', { + radius: 1, + unit: 'm' + }), + ['GEOSEARCHSTORE', 'source', 'destination', 'FROMMEMBER', 'member', 'BYRADIUS', '1', 'm'] + ); + }); + + it('with STOREDIST', () => { + assert.deepEqual( + parseArgs(GEOSEARCHSTORE, 'destination', 'source', 'member', { + radius: 1, + unit: 'm' + 
}, { + STOREDIST: true + }), + ['GEOSEARCHSTORE', 'destination', 'source', 'FROMMEMBER', 'member', 'BYRADIUS', '1', 'm', 'STOREDIST'] + ); + }); + }); + + testUtils.testAll('geoSearchStore', async client => { + assert.equal( + await client.geoSearchStore('{tag}destination', '{tag}source', 'member', { + radius: 1, + unit: 'm' + }), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/GEOSEARCHSTORE.ts b/packages/client/lib/commands/GEOSEARCHSTORE.ts new file mode 100644 index 00000000000..194eafda818 --- /dev/null +++ b/packages/client/lib/commands/GEOSEARCHSTORE.ts @@ -0,0 +1,41 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; +import { GeoSearchFrom, GeoSearchBy, GeoSearchOptions, parseGeoSearchArguments } from './GEOSEARCH'; + +export interface GeoSearchStoreOptions extends GeoSearchOptions { + STOREDIST?: boolean; +} + +export default { + IS_READ_ONLY: false, + /** + * Searches a geospatial index and stores the results in a new sorted set + * @param parser - The Redis command parser + * @param destination - Key to store the results + * @param source - Key of the geospatial index to search + * @param from - Center point of the search (member name or coordinates) + * @param by - Search area specification (radius or box dimensions) + * @param options - Additional search and storage options + */ + parseCommand( + parser: CommandParser, + destination: RedisArgument, + source: RedisArgument, + from: GeoSearchFrom, + by: GeoSearchBy, + options?: GeoSearchStoreOptions + ) { + parser.push('GEOSEARCHSTORE'); + + if (destination !== undefined) { + parser.pushKey(destination); + } + + parseGeoSearchArguments(parser, source, from, by, options); + + if (options?.STOREDIST) { + parser.push('STOREDIST'); + } + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git 
a/packages/client/lib/commands/GEOSEARCH_WITH.spec.ts b/packages/client/lib/commands/GEOSEARCH_WITH.spec.ts new file mode 100644 index 00000000000..973e5d5827f --- /dev/null +++ b/packages/client/lib/commands/GEOSEARCH_WITH.spec.ts @@ -0,0 +1,50 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import GEOSEARCH_WITH, { GEO_REPLY_WITH } from './GEOSEARCH_WITH'; +import { CommandArguments } from '../RESP/types'; +import { parseArgs } from './generic-transformers'; + +describe('GEOSEARCH WITH', () => { + testUtils.isVersionGreaterThanHook([6, 2]); + + it('transformArguments', () => { + const expectedReply: CommandArguments = ['GEOSEARCH', 'key', 'FROMMEMBER', 'member', 'BYRADIUS', '1', 'm', 'WITHDIST']; + expectedReply.preserve = ['WITHDIST']; + + assert.deepEqual( + parseArgs(GEOSEARCH_WITH, 'key', 'member', { + radius: 1, + unit: 'm' + }, [GEO_REPLY_WITH.DISTANCE]), + expectedReply + ); + }); + + testUtils.testAll('.geoSearchWith', async client => { + const [ , reply ] = await Promise.all([ + client.geoAdd('key', { + member: 'member', + longitude: 1, + latitude: 2 + }), + client.geoSearchWith('key', 'member', { + radius: 1, + unit: 'm' + }, [ + GEO_REPLY_WITH.HASH, + GEO_REPLY_WITH.DISTANCE, + GEO_REPLY_WITH.COORDINATES + ]) + ]); + + assert.equal(reply.length, 1); + assert.equal(reply[0].member, 'member'); + assert.equal(typeof reply[0].distance, 'string'); + assert.equal(typeof reply[0].hash, 'number'); + assert.equal(typeof reply[0].coordinates!.longitude, 'string'); + assert.equal(typeof reply[0].coordinates!.latitude, 'string'); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/GEOSEARCH_WITH.ts b/packages/client/lib/commands/GEOSEARCH_WITH.ts new file mode 100644 index 00000000000..dca125a816e --- /dev/null +++ b/packages/client/lib/commands/GEOSEARCH_WITH.ts @@ -0,0 +1,82 @@ +import { CommandParser } from '../client/parser'; +import 
{ RedisArgument, ArrayReply, TuplesReply, BlobStringReply, NumberReply, DoubleReply, UnwrapReply, Command } from '../RESP/types'; +import GEOSEARCH, { GeoSearchBy, GeoSearchFrom, GeoSearchOptions } from './GEOSEARCH'; + +export const GEO_REPLY_WITH = { + DISTANCE: 'WITHDIST', + HASH: 'WITHHASH', + COORDINATES: 'WITHCOORD' +} as const; + +export type GeoReplyWith = typeof GEO_REPLY_WITH[keyof typeof GEO_REPLY_WITH]; + +export interface GeoReplyWithMember { + member: BlobStringReply; + distance?: BlobStringReply; + hash?: NumberReply; + coordinates?: { + longitude: DoubleReply; + latitude: DoubleReply; + }; +} + +export default { + IS_READ_ONLY: GEOSEARCH.IS_READ_ONLY, + /** + * Queries members inside an area of a geospatial index with additional information + * @param parser - The Redis command parser + * @param key - Key of the geospatial index + * @param from - Center point of the search (member name or coordinates) + * @param by - Search area specification (radius or box dimensions) + * @param replyWith - Information to include with each returned member + * @param options - Additional search options + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + from: GeoSearchFrom, + by: GeoSearchBy, + replyWith: Array, + options?: GeoSearchOptions + ) { + GEOSEARCH.parseCommand(parser, key, from, by, options); + parser.push(...replyWith); + parser.preserve = replyWith; + }, + transformReply( + reply: UnwrapReply]>>>, + replyWith: Array + ) { + const replyWithSet = new Set(replyWith); + let index = 0; + const distanceIndex = replyWithSet.has(GEO_REPLY_WITH.DISTANCE) && ++index, + hashIndex = replyWithSet.has(GEO_REPLY_WITH.HASH) && ++index, + coordinatesIndex = replyWithSet.has(GEO_REPLY_WITH.COORDINATES) && ++index; + + return reply.map(raw => { + const unwrapped = raw as unknown as UnwrapReply; + + const item: GeoReplyWithMember = { + member: unwrapped[0] + }; + + if (distanceIndex) { + item.distance = unwrapped[distanceIndex]; + } + + if (hashIndex) { + 
item.hash = unwrapped[hashIndex]; + } + + if (coordinatesIndex) { + const [longitude, latitude] = unwrapped[coordinatesIndex]; + item.coordinates = { + longitude, + latitude + }; + } + + return item; + }); + } +} as const satisfies Command; diff --git a/packages/client/lib/commands/GET.spec.ts b/packages/client/lib/commands/GET.spec.ts new file mode 100644 index 00000000000..3e630d03e0b --- /dev/null +++ b/packages/client/lib/commands/GET.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import { parseArgs } from './generic-transformers'; +import GET from './GET'; + +describe('GET', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(GET, 'key'), + ['GET', 'key'] + ); + }); + + testUtils.testAll('get', async client => { + assert.equal( + await client.get('key'), + null + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/GET.ts b/packages/client/lib/commands/GET.ts new file mode 100644 index 00000000000..e55c900eea2 --- /dev/null +++ b/packages/client/lib/commands/GET.ts @@ -0,0 +1,17 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, BlobStringReply, NullReply, Command } from '../RESP/types'; + +export default { + CACHEABLE: true, + IS_READ_ONLY: true, + /** + * Gets the value of a key + * @param parser - The Redis command parser + * @param key - Key to get the value of + */ + parseCommand(parser: CommandParser, key: RedisArgument) { + parser.push('GET'); + parser.pushKey(key); + }, + transformReply: undefined as unknown as () => BlobStringReply | NullReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/GETBIT.spec.ts b/packages/client/lib/commands/GETBIT.spec.ts new file mode 100644 index 00000000000..66d2798313c --- /dev/null +++ b/packages/client/lib/commands/GETBIT.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 
'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import GETBIT from './GETBIT'; +import { parseArgs } from './generic-transformers'; + +describe('GETBIT', () => { + it('processCommand', () => { + assert.deepEqual( + parseArgs(GETBIT, 'key', 0), + ['GETBIT', 'key', '0'] + ); + }); + + testUtils.testAll('getBit', async client => { + assert.equal( + await client.getBit('key', 0), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/GETBIT.ts b/packages/client/lib/commands/GETBIT.ts new file mode 100644 index 00000000000..7d4a240473f --- /dev/null +++ b/packages/client/lib/commands/GETBIT.ts @@ -0,0 +1,20 @@ +import { CommandParser } from '../client/parser'; +import { NumberReply, Command, RedisArgument } from '../RESP/types'; +import { BitValue } from './generic-transformers'; + +export default { + CACHEABLE: true, + IS_READ_ONLY: true, + /** + * Returns the bit value at a given offset in a string value + * @param parser - The Redis command parser + * @param key - Key to retrieve the bit from + * @param offset - Bit offset + */ + parseCommand(parser: CommandParser, key: RedisArgument, offset: number) { + parser.push('GETBIT'); + parser.pushKey(key); + parser.push(offset.toString()); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/GETDEL.spec.ts b/packages/client/lib/commands/GETDEL.spec.ts new file mode 100644 index 00000000000..15ad5918008 --- /dev/null +++ b/packages/client/lib/commands/GETDEL.spec.ts @@ -0,0 +1,25 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import GETDEL from './GETDEL'; +import { parseArgs } from './generic-transformers'; + +describe('GETDEL', () => { + testUtils.isVersionGreaterThanHook([6, 2]); + + it('transformArguments', () => { + assert.deepEqual( + parseArgs(GETDEL, 'key'), + ['GETDEL', 'key'] + 
); + }); + + testUtils.testAll('getDel', async client => { + assert.equal( + await client.getDel('key'), + null + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/GETDEL.ts b/packages/client/lib/commands/GETDEL.ts new file mode 100644 index 00000000000..7dbdc7d2535 --- /dev/null +++ b/packages/client/lib/commands/GETDEL.ts @@ -0,0 +1,16 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, BlobStringReply, NullReply, Command } from '../RESP/types'; + +export default { + IS_READ_ONLY: false, + /** + * Gets the value of a key and deletes the key + * @param parser - The Redis command parser + * @param key - Key to get and delete + */ + parseCommand(parser: CommandParser, key: RedisArgument) { + parser.push('GETDEL'); + parser.pushKey(key); + }, + transformReply: undefined as unknown as () => BlobStringReply | NullReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/GETEX.spec.ts b/packages/client/lib/commands/GETEX.spec.ts new file mode 100644 index 00000000000..5965d8f196f --- /dev/null +++ b/packages/client/lib/commands/GETEX.spec.ts @@ -0,0 +1,123 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import GETEX from './GETEX'; +import { parseArgs } from './generic-transformers'; + +describe('GETEX', () => { + testUtils.isVersionGreaterThanHook([6, 2]); + + describe('transformArguments', () => { + it('EX | PX', () => { + assert.deepEqual( + parseArgs(GETEX, 'key', { + type: 'EX', + value: 1 + }), + ['GETEX', 'key', 'EX', '1'] + ); + }); + + it('EX (backwards compatibility)', () => { + assert.deepEqual( + parseArgs(GETEX, 'key', { + EX: 1 + }), + ['GETEX', 'key', 'EX', '1'] + ); + }); + + it('PX (backwards compatibility)', () => { + assert.deepEqual( + parseArgs(GETEX, 'key', { + PX: 1 + }), + ['GETEX', 'key', 'PX', '1'] + ); + }); + + describe('EXAT | PXAT', () => { + it('number', () => { 
+ assert.deepEqual( + parseArgs(GETEX, 'key', { + type: 'EXAT', + value: 1 + }), + ['GETEX', 'key', 'EXAT', '1'] + ); + }); + + it('date', () => { + const d = new Date(); + assert.deepEqual( + parseArgs(GETEX, 'key', { + EXAT: d + }), + ['GETEX', 'key', 'EXAT', Math.floor(d.getTime() / 1000).toString()] + ); + }); + }); + + describe('EXAT (backwards compatibility)', () => { + it('number', () => { + assert.deepEqual( + parseArgs(GETEX, 'key', { + EXAT: 1 + }), + ['GETEX', 'key', 'EXAT', '1'] + ); + }); + + it('date', () => { + const d = new Date(); + assert.deepEqual( + parseArgs(GETEX, 'key', { + EXAT: d + }), + ['GETEX', 'key', 'EXAT', Math.floor(d.getTime() / 1000).toString()] + ); + }); + }); + + describe('PXAT (backwards compatibility)', () => { + it('number', () => { + assert.deepEqual( + parseArgs(GETEX, 'key', { + PXAT: 1 + }), + ['GETEX', 'key', 'PXAT', '1'] + ); + }); + + it('date', () => { + const d = new Date(); + assert.deepEqual( + parseArgs(GETEX, 'key', { + PXAT: d + }), + ['GETEX', 'key', 'PXAT', d.getTime().toString()] + ); + }); + }); + + it('PERSIST (backwards compatibility)', () => { + assert.deepEqual( + parseArgs(GETEX, 'key', { + PERSIST: true + }), + ['GETEX', 'key', 'PERSIST'] + ); + }); + }); + + testUtils.testAll('getEx', async client => { + assert.equal( + await client.getEx('key', { + type: 'PERSIST' + }), + null + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/GETEX.ts b/packages/client/lib/commands/GETEX.ts new file mode 100644 index 00000000000..836c2b5effe --- /dev/null +++ b/packages/client/lib/commands/GETEX.ts @@ -0,0 +1,83 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, BlobStringReply, NullReply, Command } from '../RESP/types'; +import { transformEXAT, transformPXAT } from './generic-transformers'; + +export type GetExOptions = { + type: 'EX' | 'PX'; + value: number; +} | { + type: 'EXAT' | 'PXAT'; + value: number | 
Date; +} | { + type: 'PERSIST'; +} | { + /** + * @deprecated Use `{ type: 'EX', value: number }` instead. + */ + EX: number; +} | { + /** + * @deprecated Use `{ type: 'PX', value: number }` instead. + */ + PX: number; +} | { + /** + * @deprecated Use `{ type: 'EXAT', value: number | Date }` instead. + */ + EXAT: number | Date; +} | { + /** + * @deprecated Use `{ type: 'PXAT', value: number | Date }` instead. + */ + PXAT: number | Date; +} | { + /** + * @deprecated Use `{ type: 'PERSIST' }` instead. + */ + PERSIST: true; +}; + +export default { + IS_READ_ONLY: false, + /** + * Gets the value of a key and optionally sets its expiration + * @param parser - The Redis command parser + * @param key - Key to get value from + * @param options - Options for setting expiration + */ + parseCommand(parser: CommandParser, key: RedisArgument, options: GetExOptions) { + parser.push('GETEX'); + parser.pushKey(key); + + if ('type' in options) { + switch (options.type) { + case 'EX': + case 'PX': + parser.push(options.type, options.value.toString()); + break; + + case 'EXAT': + case 'PXAT': + parser.push(options.type, transformEXAT(options.value)); + break; + + case 'PERSIST': + parser.push('PERSIST'); + break; + } + } else { + if ('EX' in options) { + parser.push('EX', options.EX.toString()); + } else if ('PX' in options) { + parser.push('PX', options.PX.toString()); + } else if ('EXAT' in options) { + parser.push('EXAT', transformEXAT(options.EXAT)); + } else if ('PXAT' in options) { + parser.push('PXAT', transformPXAT(options.PXAT)); + } else { // PERSIST + parser.push('PERSIST'); + } + } + }, + transformReply: undefined as unknown as () => BlobStringReply | NullReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/GETRANGE.spec.ts b/packages/client/lib/commands/GETRANGE.spec.ts new file mode 100644 index 00000000000..8a8e7dde038 --- /dev/null +++ b/packages/client/lib/commands/GETRANGE.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 
'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import GETRANGE from './GETRANGE'; +import { parseArgs } from './generic-transformers'; + +describe('GETRANGE', () => { + it('processCommand', () => { + assert.deepEqual( + parseArgs(GETRANGE, 'key', 0, -1), + ['GETRANGE', 'key', '0', '-1'] + ); + }); + + testUtils.testAll('getRange', async client => { + assert.equal( + await client.getRange('key', 0, -1), + '' + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/GETRANGE.ts b/packages/client/lib/commands/GETRANGE.ts new file mode 100644 index 00000000000..f5f1586f0a9 --- /dev/null +++ b/packages/client/lib/commands/GETRANGE.ts @@ -0,0 +1,20 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, BlobStringReply, NullReply, Command } from '../RESP/types'; + +export default { + CACHEABLE: true, + IS_READ_ONLY: true, + /** + * Returns a substring of the string stored at a key + * @param parser - The Redis command parser + * @param key - Key to get substring from + * @param start - Start position of the substring + * @param end - End position of the substring + */ + parseCommand(parser: CommandParser, key: RedisArgument, start: number, end: number) { + parser.push('GETRANGE'); + parser.pushKey(key); + parser.push(start.toString(), end.toString()); + }, + transformReply: undefined as unknown as () => BlobStringReply | NullReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/GETSET.spec.ts b/packages/client/lib/commands/GETSET.spec.ts new file mode 100644 index 00000000000..5b162c16cc4 --- /dev/null +++ b/packages/client/lib/commands/GETSET.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import GETSET from './GETSET'; +import { parseArgs } from './generic-transformers'; + +describe('GETSET', () => { + it('transformArguments', () => { + assert.deepEqual( 
+ parseArgs(GETSET, 'key', 'value'), + ['GETSET', 'key', 'value'] + ); + }); + + testUtils.testAll('getSet', async client => { + assert.equal( + await client.getSet('key', 'value'), + null + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/GETSET.ts b/packages/client/lib/commands/GETSET.ts new file mode 100644 index 00000000000..1b9d98f90d7 --- /dev/null +++ b/packages/client/lib/commands/GETSET.ts @@ -0,0 +1,18 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, BlobStringReply, NullReply, Command } from '../RESP/types'; + +export default { + IS_READ_ONLY: false, + /** + * Sets a key to a new value and returns its old value + * @param parser - The Redis command parser + * @param key - Key to set + * @param value - Value to set + */ + parseCommand(parser: CommandParser, key: RedisArgument, value: RedisArgument) { + parser.push('GETSET'); + parser.pushKey(key); + parser.push(value); + }, + transformReply: undefined as unknown as () => BlobStringReply | NullReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/HDEL.spec.ts b/packages/client/lib/commands/HDEL.spec.ts new file mode 100644 index 00000000000..767d916e147 --- /dev/null +++ b/packages/client/lib/commands/HDEL.spec.ts @@ -0,0 +1,32 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import HDEL from './HDEL'; +import { parseArgs } from './generic-transformers'; + +describe('HDEL', () => { + describe('transformArguments', () => { + it('string', () => { + assert.deepEqual( + parseArgs(HDEL, 'key', 'field'), + ['HDEL', 'key', 'field'] + ); + }); + + it('array', () => { + assert.deepEqual( + parseArgs(HDEL, 'key', ['1', '2']), + ['HDEL', 'key', '1', '2'] + ); + }); + }); + + testUtils.testAll('hDel', async client => { + assert.equal( + await client.hDel('key', 'field'), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: 
GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/HDEL.ts b/packages/client/lib/commands/HDEL.ts new file mode 100644 index 00000000000..cc5c4dab1b9 --- /dev/null +++ b/packages/client/lib/commands/HDEL.ts @@ -0,0 +1,18 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; +import { RedisVariadicArgument } from './generic-transformers'; + +export default { + /** + * Removes one or more fields from a hash + * @param parser - The Redis command parser + * @param key - Key of the hash + * @param field - Field(s) to remove + */ + parseCommand(parser: CommandParser, key: RedisArgument, field: RedisVariadicArgument) { + parser.push('HDEL'); + parser.pushKey(key); + parser.pushVariadic(field); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/HELLO.spec.ts b/packages/client/lib/commands/HELLO.spec.ts new file mode 100644 index 00000000000..5d11be344c1 --- /dev/null +++ b/packages/client/lib/commands/HELLO.spec.ts @@ -0,0 +1,72 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import HELLO from './HELLO'; +import { parseArgs } from './generic-transformers'; + +describe('HELLO', () => { + testUtils.isVersionGreaterThanHook([6]); + + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(HELLO), + ['HELLO'] + ); + }); + + it('with protover', () => { + assert.deepEqual( + parseArgs(HELLO, 3), + ['HELLO', '3'] + ); + }); + + it('with protover, AUTH', () => { + assert.deepEqual( + parseArgs(HELLO, 3, { + AUTH: { + username: 'username', + password: 'password' + } + }), + ['HELLO', '3', 'AUTH', 'username', 'password'] + ); + }); + + it('with protover, SETNAME', () => { + assert.deepEqual( + parseArgs(HELLO, 3, { + SETNAME: 'name' + }), + ['HELLO', '3', 'SETNAME', 'name'] + ); + }); + + it('with protover, AUTH, 
SETNAME', () => { + assert.deepEqual( + parseArgs(HELLO, 3, { + AUTH: { + username: 'username', + password: 'password' + }, + SETNAME: 'name' + }), + ['HELLO', '3', 'AUTH', 'username', 'password', 'SETNAME', 'name'] + ); + }); + }); + + testUtils.testWithClient('client.hello', async client => { + const reply = await client.hello(); + assert.equal(reply.server, 'redis'); + assert.equal(typeof reply.version, 'string'); + assert.equal(reply.proto, 2); + assert.equal(typeof reply.id, 'number'); + assert.equal(reply.mode, 'standalone'); + assert.equal(reply.role, 'master'); + assert.ok(reply.modules instanceof Array); + }, { + ...GLOBAL.SERVERS.OPEN, + minimumDockerVersion: [6, 2] + }); +}); diff --git a/packages/client/lib/commands/HELLO.ts b/packages/client/lib/commands/HELLO.ts new file mode 100644 index 00000000000..23feaad554a --- /dev/null +++ b/packages/client/lib/commands/HELLO.ts @@ -0,0 +1,64 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, RespVersions, TuplesToMapReply, BlobStringReply, NumberReply, ArrayReply, UnwrapReply, Resp2Reply, Command } from '../RESP/types'; + +export interface HelloOptions { + protover?: RespVersions; + AUTH?: { + username: RedisArgument; + password: RedisArgument; + }; + SETNAME?: string; +} + +export type HelloReply = TuplesToMapReply<[ + [BlobStringReply<'server'>, BlobStringReply], + [BlobStringReply<'version'>, BlobStringReply], + [BlobStringReply<'proto'>, NumberReply], + [BlobStringReply<'id'>, NumberReply], + [BlobStringReply<'mode'>, BlobStringReply], + [BlobStringReply<'role'>, BlobStringReply], + [BlobStringReply<'modules'>, ArrayReply] +]>; + +export default { + /** + * Handshakes with the Redis server and switches to the specified protocol version + * @param parser - The Redis command parser + * @param protover - Protocol version to use + * @param options - Additional options for authentication and connection naming + */ + parseCommand(parser: CommandParser, protover?: RespVersions, 
options?: HelloOptions) { + parser.push('HELLO'); + + if (protover) { + parser.push(protover.toString()); + + if (options?.AUTH) { + parser.push( + 'AUTH', + options.AUTH.username, + options.AUTH.password + ); + } + + if (options?.SETNAME) { + parser.push( + 'SETNAME', + options.SETNAME + ); + } + } + }, + transformReply: { + 2: (reply: UnwrapReply>) => ({ + server: reply[1], + version: reply[3], + proto: reply[5], + id: reply[7], + mode: reply[9], + role: reply[11], + modules: reply[13] + }), + 3: undefined as unknown as () => HelloReply + } +} as const satisfies Command; diff --git a/packages/client/lib/commands/HEXISTS.spec.ts b/packages/client/lib/commands/HEXISTS.spec.ts new file mode 100644 index 00000000000..acd462ab7e2 --- /dev/null +++ b/packages/client/lib/commands/HEXISTS.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import HEXISTS from './HEXISTS'; +import { parseArgs } from './generic-transformers'; + +describe('HEXISTS', () => { + it('processCommand', () => { + assert.deepEqual( + parseArgs(HEXISTS, 'key', 'field'), + ['HEXISTS', 'key', 'field'] + ); + }); + + testUtils.testAll('hExists', async client => { + assert.equal( + await client.hExists('key', 'field'), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/HEXISTS.ts b/packages/client/lib/commands/HEXISTS.ts new file mode 100644 index 00000000000..50b8f1ae8c2 --- /dev/null +++ b/packages/client/lib/commands/HEXISTS.ts @@ -0,0 +1,19 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; + +export default { + CACHEABLE: true, + IS_READ_ONLY: true, + /** + * Determines whether a field exists in a hash + * @param parser - The Redis command parser + * @param key - Key of the hash + * @param field - Field to check + */ + parseCommand(parser: CommandParser, key: RedisArgument, 
field: RedisArgument) { + parser.push('HEXISTS'); + parser.pushKey(key); + parser.push(field); + }, + transformReply: undefined as unknown as () => NumberReply<0 | 1> +} as const satisfies Command; diff --git a/packages/client/lib/commands/HEXPIRE.spec.ts b/packages/client/lib/commands/HEXPIRE.spec.ts new file mode 100644 index 00000000000..d28cc065ec9 --- /dev/null +++ b/packages/client/lib/commands/HEXPIRE.spec.ts @@ -0,0 +1,41 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import HEXPIRE from './HEXPIRE'; +import { parseArgs } from './generic-transformers'; +import { HASH_EXPIRATION_TIME } from './HEXPIRETIME'; + +describe('HEXPIRE', () => { + testUtils.isVersionGreaterThanHook([7, 4]); + + describe('transformArguments', () => { + it('string', () => { + assert.deepEqual( + parseArgs(HEXPIRE, 'key', 'field', 1), + ['HEXPIRE', 'key', '1', 'FIELDS', '1', 'field'] + ); + }); + + it('array', () => { + assert.deepEqual( + parseArgs(HEXPIRE, 'key', ['field1', 'field2'], 1), + ['HEXPIRE', 'key', '1', 'FIELDS', '2', 'field1', 'field2'] + ); + }); + + it('with set option', () => { + assert.deepEqual( + parseArgs(HEXPIRE, 'key', ['field1'], 1, 'NX'), + ['HEXPIRE', 'key', '1', 'NX', 'FIELDS', '1', 'field1'] + ); + }); + }); + + testUtils.testWithClient('hexpire', async client => { + assert.deepEqual( + await client.hExpire('key', ['field1'], 0), + [HASH_EXPIRATION_TIME.FIELD_NOT_EXISTS] + ); + }, { + ...GLOBAL.SERVERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/HEXPIRE.ts b/packages/client/lib/commands/HEXPIRE.ts new file mode 100644 index 00000000000..95ee58eac1d --- /dev/null +++ b/packages/client/lib/commands/HEXPIRE.ts @@ -0,0 +1,47 @@ +import { CommandParser } from '../client/parser'; +import { ArrayReply, Command, RedisArgument } from '../RESP/types'; +import { RedisVariadicArgument } from './generic-transformers'; + +export const HASH_EXPIRATION = { + /** The field does not exist */ + 
FIELD_NOT_EXISTS: -2, + /** Specified NX | XX | GT | LT condition not met */ + CONDITION_NOT_MET: 0, + /** Expiration time was set or updated */ + UPDATED: 1, + /** Field deleted because the specified expiration time is in the past */ + DELETED: 2 +} as const; + +export type HashExpiration = typeof HASH_EXPIRATION[keyof typeof HASH_EXPIRATION]; + +export default { + /** + * Sets a timeout on hash fields. After the timeout has expired, the fields will be automatically deleted + * @param parser - The Redis command parser + * @param key - Key of the hash + * @param fields - Fields to set expiration on + * @param seconds - Number of seconds until field expiration + * @param mode - Expiration mode: NX (only if field has no expiry), XX (only if field has existing expiry), GT (only if new expiry is greater than current), LT (only if new expiry is less than current) + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + fields: RedisVariadicArgument, + seconds: number, + mode?: 'NX' | 'XX' | 'GT' | 'LT' + ) { + parser.push('HEXPIRE'); + parser.pushKey(key); + parser.push(seconds.toString()); + + if (mode) { + parser.push(mode); + } + + parser.push('FIELDS'); + + parser.pushVariadicWithLength(fields); + }, + transformReply: undefined as unknown as () => ArrayReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/HEXPIREAT.spec.ts b/packages/client/lib/commands/HEXPIREAT.spec.ts new file mode 100644 index 00000000000..c7cc9fe749b --- /dev/null +++ b/packages/client/lib/commands/HEXPIREAT.spec.ts @@ -0,0 +1,50 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import HEXPIREAT from './HEXPIREAT'; +import { HASH_EXPIRATION_TIME } from './HEXPIRETIME'; +import { parseArgs } from './generic-transformers'; + +describe('HEXPIREAT', () => { + testUtils.isVersionGreaterThanHook([7, 4]); + + describe('transformArguments', () => { + it('string + number', () => { + assert.deepEqual( + 
parseArgs(HEXPIREAT, 'key', 'field', 1), + ['HEXPIREAT', 'key', '1', 'FIELDS', '1', 'field'] + ); + }); + + it('array + number', () => { + assert.deepEqual( + parseArgs(HEXPIREAT, 'key', ['field1', 'field2'], 1), + ['HEXPIREAT', 'key', '1', 'FIELDS', '2', 'field1', 'field2'] + ); + }); + + it('date', () => { + const d = new Date(); + + assert.deepEqual( + parseArgs(HEXPIREAT, 'key', ['field1'], d), + ['HEXPIREAT', 'key', Math.floor(d.getTime() / 1000).toString(), 'FIELDS', '1', 'field1'] + ); + }); + + it('with set option', () => { + assert.deepEqual( + parseArgs(HEXPIREAT, 'key', 'field1', 1, 'GT'), + ['HEXPIREAT', 'key', '1', 'GT', 'FIELDS', '1', 'field1'] + ); + }); + }); + + testUtils.testWithClient('expireAt', async client => { + assert.deepEqual( + await client.hExpireAt('key', 'field1', 1), + [HASH_EXPIRATION_TIME.FIELD_NOT_EXISTS] + ); + }, { + ...GLOBAL.SERVERS.OPEN, + }); +}); diff --git a/packages/client/lib/commands/HEXPIREAT.ts b/packages/client/lib/commands/HEXPIREAT.ts new file mode 100644 index 00000000000..c09efd4aa34 --- /dev/null +++ b/packages/client/lib/commands/HEXPIREAT.ts @@ -0,0 +1,34 @@ +import { CommandParser } from '../client/parser'; +import { RedisVariadicArgument, transformEXAT } from './generic-transformers'; +import { ArrayReply, Command, NumberReply, RedisArgument } from '../RESP/types'; + +export default { + /** + * Sets the expiration for hash fields at a specific Unix timestamp + * @param parser - The Redis command parser + * @param key - Key of the hash + * @param fields - Fields to set expiration on + * @param timestamp - Unix timestamp (seconds since January 1, 1970) or Date object + * @param mode - Expiration mode: NX (only if field has no expiry), XX (only if field has existing expiry), GT (only if new expiry is greater than current), LT (only if new expiry is less than current) + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + fields: RedisVariadicArgument, + timestamp: number | Date, + mode?: 'NX' | 
'XX' | 'GT' | 'LT' + ) { + parser.push('HEXPIREAT'); + parser.pushKey(key); + parser.push(transformEXAT(timestamp)); + + if (mode) { + parser.push(mode); + } + + parser.push('FIELDS') + + parser.pushVariadicWithLength(fields); + }, + transformReply: undefined as unknown as () => ArrayReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/HEXPIRETIME.spec.ts b/packages/client/lib/commands/HEXPIRETIME.spec.ts new file mode 100644 index 00000000000..32a8730e8a9 --- /dev/null +++ b/packages/client/lib/commands/HEXPIRETIME.spec.ts @@ -0,0 +1,33 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import HEXPIRETIME, { HASH_EXPIRATION_TIME } from './HEXPIRETIME'; +import { parseArgs } from './generic-transformers'; + +describe('HEXPIRETIME', () => { + testUtils.isVersionGreaterThanHook([7, 4]); + + describe('transformArguments', () => { + it('string', () => { + assert.deepEqual( + parseArgs(HEXPIRETIME, 'key', 'field'), + ['HEXPIRETIME', 'key', 'FIELDS', '1', 'field'] + ); + }); + + it('array', () => { + assert.deepEqual( + parseArgs(HEXPIRETIME, 'key', ['field1', 'field2']), + ['HEXPIRETIME', 'key', 'FIELDS', '2', 'field1', 'field2'] + ); + }); + }) + + testUtils.testWithClient('hExpireTime', async client => { + assert.deepEqual( + await client.hExpireTime('key', 'field1'), + [HASH_EXPIRATION_TIME.FIELD_NOT_EXISTS] + ); + }, { + ...GLOBAL.SERVERS.OPEN, + }); +}); diff --git a/packages/client/lib/commands/HEXPIRETIME.ts b/packages/client/lib/commands/HEXPIRETIME.ts new file mode 100644 index 00000000000..94504935090 --- /dev/null +++ b/packages/client/lib/commands/HEXPIRETIME.ts @@ -0,0 +1,31 @@ +import { CommandParser } from '../client/parser'; +import { ArrayReply, Command, NumberReply, RedisArgument } from '../RESP/types'; +import { RedisVariadicArgument } from './generic-transformers'; + +export const HASH_EXPIRATION_TIME = { + /** The field does not exist */ + FIELD_NOT_EXISTS: -2, + /** The 
field exists but has no associated expire */ + NO_EXPIRATION: -1, +} as const; + +export default { + IS_READ_ONLY: true, + /** + * Returns the absolute Unix timestamp (since January 1, 1970) at which the given hash fields will expire + * @param parser - The Redis command parser + * @param key - Key of the hash + * @param fields - Fields to check expiration time + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + fields: RedisVariadicArgument + ) { + parser.push('HEXPIRETIME'); + parser.pushKey(key); + parser.push('FIELDS'); + parser.pushVariadicWithLength(fields); + }, + transformReply: undefined as unknown as () => ArrayReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/HGET.spec.ts b/packages/client/lib/commands/HGET.spec.ts new file mode 100644 index 00000000000..47061876aea --- /dev/null +++ b/packages/client/lib/commands/HGET.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import HGET from './HGET'; +import { parseArgs } from './generic-transformers'; + +describe('HGET', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(HGET, 'key', 'field'), + ['HGET', 'key', 'field'] + ); + }); + + testUtils.testAll('hGet', async client => { + assert.equal( + await client.hGet('key', 'field'), + null + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/HGET.ts b/packages/client/lib/commands/HGET.ts new file mode 100644 index 00000000000..8c4d690992b --- /dev/null +++ b/packages/client/lib/commands/HGET.ts @@ -0,0 +1,19 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, BlobStringReply, NullReply, Command } from '../RESP/types'; + +export default { + CACHEABLE: true, + IS_READ_ONLY: true, + /** + * Gets the value of a field in a hash + * @param parser - The Redis command parser + * @param key - Key of the hash + * @param field 
- Field to get the value of + */ + parseCommand(parser: CommandParser, key: RedisArgument, field: RedisArgument) { + parser.push('HGET'); + parser.pushKey(key); + parser.push(field); + }, + transformReply: undefined as unknown as () => BlobStringReply | NullReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/HGETALL.spec.ts b/packages/client/lib/commands/HGETALL.spec.ts new file mode 100644 index 00000000000..93d122bae07 --- /dev/null +++ b/packages/client/lib/commands/HGETALL.spec.ts @@ -0,0 +1,34 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; + +describe('HGETALL', () => { + + testUtils.testAll('hGetAll empty', async client => { + assert.deepEqual( + await client.hGetAll('key'), + Object.create(null) + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); + + testUtils.testAll('hGetAll with value', async client => { + const [, reply] = await Promise.all([ + client.hSet('key', 'field', 'value'), + client.hGetAll('key') + ]); + assert.deepEqual( + reply, + Object.create(null, { + field: { + value: 'value', + enumerable: true + } + }) + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/HGETALL.ts b/packages/client/lib/commands/HGETALL.ts new file mode 100644 index 00000000000..13238ab6ea3 --- /dev/null +++ b/packages/client/lib/commands/HGETALL.ts @@ -0,0 +1,22 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, MapReply, BlobStringReply, Command } from '../RESP/types'; +import { transformTuplesReply } from './generic-transformers'; + +export default { + CACHEABLE: true, + IS_READ_ONLY: true, + /** + * Gets all fields and values in a hash + * @param parser - The Redis command parser + * @param key - Key of the hash + */ + parseCommand(parser: CommandParser, key: RedisArgument) { + parser.push('HGETALL'); + parser.pushKey(key); + }, + TRANSFORM_LEGACY_REPLY: 
true, + transformReply: { + 2: transformTuplesReply, + 3: undefined as unknown as () => MapReply + } +} as const satisfies Command; diff --git a/packages/client/lib/commands/HGETDEL.spec.ts b/packages/client/lib/commands/HGETDEL.spec.ts new file mode 100644 index 00000000000..b2e19967f1d --- /dev/null +++ b/packages/client/lib/commands/HGETDEL.spec.ts @@ -0,0 +1,48 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import { BasicCommandParser } from '../client/parser'; +import HGETDEL from './HGETDEL'; + +describe('HGETDEL parseCommand', () => { + it('hGetDel parseCommand base', () => { + const parser = new BasicCommandParser; + HGETDEL.parseCommand(parser, 'key', 'field'); + assert.deepEqual(parser.redisArgs, ['HGETDEL', 'key', 'FIELDS', '1', 'field']); + }); + + it('hGetDel parseCommand variadic', () => { + const parser = new BasicCommandParser; + HGETDEL.parseCommand(parser, 'key', ['field1', 'field2']); + assert.deepEqual(parser.redisArgs, ['HGETDEL', 'key', 'FIELDS', '2', 'field1', 'field2']); + }); +}); + + +describe('HGETDEL call', () => { + testUtils.testWithClientIfVersionWithinRange([[8], 'LATEST'], 'hGetDel empty single field', async client => { + assert.deepEqual( + await client.hGetDel('key', 'field1'), + [null] + ); + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithClientIfVersionWithinRange([[8], 'LATEST'], 'hGetDel empty multiple fields', async client => { + assert.deepEqual( + await client.hGetDel('key', ['field1', 'field2']), + [null, null] + ); + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithClientIfVersionWithinRange([[8], 'LATEST'], 'hGetDel partially populated multiple fields', async client => { + await client.hSet('key', 'field1', 'value1') + assert.deepEqual( + await client.hGetDel('key', ['field1', 'field2']), + ['value1', null] + ); + + assert.deepEqual( + await client.hGetDel('key', 'field1'), + [null] + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git 
a/packages/client/lib/commands/HGETDEL.ts b/packages/client/lib/commands/HGETDEL.ts new file mode 100644 index 00000000000..8b55cae3ed5 --- /dev/null +++ b/packages/client/lib/commands/HGETDEL.ts @@ -0,0 +1,19 @@ +import { CommandParser } from '../client/parser'; +import { RedisVariadicArgument } from './generic-transformers'; +import { RedisArgument, ArrayReply, BlobStringReply, NullReply, Command } from '../RESP/types'; + +export default { + /** + * Gets and deletes the specified fields from a hash + * @param parser - The Redis command parser + * @param key - Key of the hash + * @param fields - Fields to get and delete + */ + parseCommand(parser: CommandParser, key: RedisArgument, fields: RedisVariadicArgument) { + parser.push('HGETDEL'); + parser.pushKey(key); + parser.push('FIELDS') + parser.pushVariadicWithLength(fields); + }, + transformReply: undefined as unknown as () => ArrayReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/HGETEX.spec.ts b/packages/client/lib/commands/HGETEX.spec.ts new file mode 100644 index 00000000000..2625a0ac023 --- /dev/null +++ b/packages/client/lib/commands/HGETEX.spec.ts @@ -0,0 +1,78 @@ +import { strict as assert } from 'node:assert'; +import testUtils,{ GLOBAL } from '../test-utils'; +import { BasicCommandParser } from '../client/parser'; +import HGETEX from './HGETEX'; +import { setTimeout } from 'timers/promises'; + +describe('HGETEX parseCommand', () => { + it('hGetEx parseCommand base', () => { + const parser = new BasicCommandParser; + HGETEX.parseCommand(parser, 'key', 'field'); + assert.deepEqual(parser.redisArgs, ['HGETEX', 'key', 'FIELDS', '1', 'field']); + }); + + it('hGetEx parseCommand expiration PERSIST string', () => { + const parser = new BasicCommandParser; + HGETEX.parseCommand(parser, 'key', 'field', {expiration: 'PERSIST'}); + assert.deepEqual(parser.redisArgs, ['HGETEX', 'key', 'PERSIST', 'FIELDS', '1', 'field']); + }); + + it('hGetEx parseCommand expiration PERSIST obj', () => { 
+ const parser = new BasicCommandParser; + HGETEX.parseCommand(parser, 'key', 'field', {expiration: {type: 'PERSIST'}}); + assert.deepEqual(parser.redisArgs, ['HGETEX', 'key', 'PERSIST', 'FIELDS', '1', 'field']); + }); + + it('hGetEx parseCommand expiration EX obj', () => { + const parser = new BasicCommandParser; + HGETEX.parseCommand(parser, 'key', 'field', {expiration: {type: 'EX', value: 1000}}); + assert.deepEqual(parser.redisArgs, ['HGETEX', 'key', 'EX', '1000', 'FIELDS', '1', 'field']); + }); + + it('hGetEx parseCommand expiration EXAT obj variadic', () => { + const parser = new BasicCommandParser; + HGETEX.parseCommand(parser, 'key', ['field1', 'field2'], {expiration: {type: 'EXAT', value: 1000}}); + assert.deepEqual(parser.redisArgs, ['HGETEX', 'key', 'EXAT', '1000', 'FIELDS', '2', 'field1', 'field2']); + }); +}); + + +describe('HGETEX call', () => { + testUtils.testWithClientIfVersionWithinRange([[8], 'LATEST'], 'hGetEx empty single field', async client => { + assert.deepEqual( + await client.hGetEx('key', 'field1', {expiration: 'PERSIST'}), + [null] + ); + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithClientIfVersionWithinRange([[8], 'LATEST'], 'hGetEx empty multiple fields', async client => { + assert.deepEqual( + await client.hGetEx('key', ['field1', 'field2'], {expiration: 'PERSIST'}), + [null, null] + ); + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithClientIfVersionWithinRange([[8], 'LATEST'], 'hGetEx set expiry', async client => { + await client.hSet('key', 'field', 'value') + assert.deepEqual( + await client.hGetEx('key', 'field', {expiration: {type: 'PX', value: 50}}), + ['value'] + ); + await setTimeout(100) + assert.deepEqual( + await client.hGet('key', 'field'), + null + ); + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithClientIfVersionWithinRange([[8], 'LATEST'], 'hGetEx set expiry PERSIST', async client => { + await client.hSet('key', 'field', 'value') + await client.hGetEx('key', 'field', {expiration: {type: 'PX', value: 50}}) + await 
client.hGetEx('key', 'field', {expiration: 'PERSIST'}) + await setTimeout(100) + assert.deepEqual( + await client.hGet('key', 'field'), + 'value' + ) + }, GLOBAL.SERVERS.OPEN); +}); \ No newline at end of file diff --git a/packages/client/lib/commands/HGETEX.ts b/packages/client/lib/commands/HGETEX.ts new file mode 100644 index 00000000000..6b039575a27 --- /dev/null +++ b/packages/client/lib/commands/HGETEX.ts @@ -0,0 +1,49 @@ +import { CommandParser } from '../client/parser'; +import { RedisVariadicArgument } from './generic-transformers'; +import { ArrayReply, Command, BlobStringReply, NullReply, RedisArgument } from '../RESP/types'; + +export interface HGetExOptions { + expiration?: { + type: 'EX' | 'PX' | 'EXAT' | 'PXAT'; + value: number; + } | { + type: 'PERSIST'; + } | 'PERSIST'; +} + +export default { + /** + * Gets the values of the specified fields in a hash and optionally sets their expiration + * @param parser - The Redis command parser + * @param key - Key of the hash + * @param fields - Fields to get values from + * @param options - Options for setting expiration + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + fields: RedisVariadicArgument, + options?: HGetExOptions + ) { + parser.push('HGETEX'); + parser.pushKey(key); + + if (options?.expiration) { + if (typeof options.expiration === 'string') { + parser.push(options.expiration); + } else if (options.expiration.type === 'PERSIST') { + parser.push('PERSIST'); + } else { + parser.push( + options.expiration.type, + options.expiration.value.toString() + ); + } + } + + parser.push('FIELDS') + + parser.pushVariadicWithLength(fields); + }, + transformReply: undefined as unknown as () => ArrayReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/HINCRBY.spec.ts b/packages/client/lib/commands/HINCRBY.spec.ts new file mode 100644 index 00000000000..ad382d97a99 --- /dev/null +++ b/packages/client/lib/commands/HINCRBY.spec.ts @@ -0,0 +1,23 @@ +import { strict as 
assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import HINCRBY from './HINCRBY'; +import { parseArgs } from './generic-transformers'; + +describe('HINCRBY', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(HINCRBY, 'key', 'field', 1), + ['HINCRBY', 'key', 'field', '1'] + ); + }); + + testUtils.testAll('hIncrBy', async client => { + assert.equal( + await client.hIncrBy('key', 'field', 1), + 1 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/HINCRBY.ts b/packages/client/lib/commands/HINCRBY.ts new file mode 100644 index 00000000000..cb028315f4c --- /dev/null +++ b/packages/client/lib/commands/HINCRBY.ts @@ -0,0 +1,23 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; + +export default { + /** + * Increments the integer value of a field in a hash by the given number + * @param parser - The Redis command parser + * @param key - Key of the hash + * @param field - Field to increment + * @param increment - Increment amount + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + field: RedisArgument, + increment: number + ) { + parser.push('HINCRBY'); + parser.pushKey(key); + parser.push(field, increment.toString()); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/HINCRBYFLOAT.spec.ts b/packages/client/lib/commands/HINCRBYFLOAT.spec.ts new file mode 100644 index 00000000000..2edbd6f9477 --- /dev/null +++ b/packages/client/lib/commands/HINCRBYFLOAT.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import HINCRBYFLOAT from './HINCRBYFLOAT'; +import { parseArgs } from './generic-transformers'; + +describe('HINCRBYFLOAT', () => { + it('transformArguments', () => { + assert.deepEqual( + 
parseArgs(HINCRBYFLOAT, 'key', 'field', 1.5), + ['HINCRBYFLOAT', 'key', 'field', '1.5'] + ); + }); + + testUtils.testAll('hIncrByFloat', async client => { + assert.equal( + await client.hIncrByFloat('key', 'field', 1.5), + '1.5' + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/HINCRBYFLOAT.ts b/packages/client/lib/commands/HINCRBYFLOAT.ts new file mode 100644 index 00000000000..6d85fa50432 --- /dev/null +++ b/packages/client/lib/commands/HINCRBYFLOAT.ts @@ -0,0 +1,23 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, BlobStringReply, Command } from '../RESP/types'; + +export default { + /** + * Increments the float value of a field in a hash by the given amount + * @param parser - The Redis command parser + * @param key - Key of the hash + * @param field - Field to increment + * @param increment - Increment amount (float) + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + field: RedisArgument, + increment: number + ) { + parser.push('HINCRBYFLOAT'); + parser.pushKey(key); + parser.push(field, increment.toString()); + }, + transformReply: undefined as unknown as () => BlobStringReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/HKEYS.spec.ts b/packages/client/lib/commands/HKEYS.spec.ts new file mode 100644 index 00000000000..58445696d20 --- /dev/null +++ b/packages/client/lib/commands/HKEYS.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import HKEYS from './HKEYS'; +import { parseArgs } from './generic-transformers'; + +describe('HKEYS', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(HKEYS, 'key'), + ['HKEYS', 'key'] + ); + }); + + testUtils.testAll('hKeys', async client => { + assert.deepEqual( + await client.hKeys('key'), + [] + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); 
+}); diff --git a/packages/client/lib/commands/HKEYS.ts b/packages/client/lib/commands/HKEYS.ts new file mode 100644 index 00000000000..bf2783eb2dc --- /dev/null +++ b/packages/client/lib/commands/HKEYS.ts @@ -0,0 +1,17 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, ArrayReply, BlobStringReply, Command } from '../RESP/types'; + +export default { + CACHEABLE: true, + IS_READ_ONLY: true, + /** + * Gets all field names in a hash + * @param parser - The Redis command parser + * @param key - Key of the hash + */ + parseCommand(parser: CommandParser, key: RedisArgument) { + parser.push('HKEYS') + parser.pushKey(key); + }, + transformReply: undefined as unknown as () => ArrayReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/HLEN.spec.ts b/packages/client/lib/commands/HLEN.spec.ts new file mode 100644 index 00000000000..640e461ad07 --- /dev/null +++ b/packages/client/lib/commands/HLEN.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import HLEN from './HLEN'; +import { parseArgs } from './generic-transformers'; + +describe('HLEN', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(HLEN, 'key'), + ['HLEN', 'key'] + ); + }); + + testUtils.testAll('hLen', async client => { + assert.equal( + await client.hLen('key'), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/HLEN.ts b/packages/client/lib/commands/HLEN.ts new file mode 100644 index 00000000000..7ffbdeee9d6 --- /dev/null +++ b/packages/client/lib/commands/HLEN.ts @@ -0,0 +1,17 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; + +export default { + CACHEABLE: true, + IS_READ_ONLY: true, + /** + * Gets the number of fields in a hash. + * @param parser - The Redis command parser. 
import { CommandParser } from '../client/parser';
import { RedisArgument, NumberReply, Command } from '../RESP/types';

export default {
  CACHEABLE: true,
  IS_READ_ONLY: true,
  /**
   * Gets the number of fields in a hash.
   * Returns 0 when the key does not exist.
   * @param parser - The Redis command parser.
   * @param key - Key of the hash.
   * @see https://redis.io/commands/hlen/
   */
  parseCommand(parser: CommandParser, key: RedisArgument) {
    parser.push('HLEN');
    parser.pushKey(key);
  },
  transformReply: undefined as unknown as () => NumberReply
} as const satisfies Command;
+ */ + parseCommand(parser: CommandParser, key: RedisArgument, fields: RedisVariadicArgument) { + parser.push('HMGET'); + parser.pushKey(key); + parser.pushVariadic(fields); + }, + transformReply: undefined as unknown as () => ArrayReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/HPERSIST.spec.ts b/packages/client/lib/commands/HPERSIST.spec.ts new file mode 100644 index 00000000000..0b317977cbf --- /dev/null +++ b/packages/client/lib/commands/HPERSIST.spec.ts @@ -0,0 +1,34 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import HPERSIST from './HPERSIST'; +import { HASH_EXPIRATION_TIME } from './HEXPIRETIME'; +import { parseArgs } from './generic-transformers'; + +describe('HPERSIST', () => { + testUtils.isVersionGreaterThanHook([7, 4]); + + describe('transformArguments', () => { + it('string', () => { + assert.deepEqual( + parseArgs(HPERSIST, 'key', 'field'), + ['HPERSIST', 'key', 'FIELDS', '1', 'field'] + ); + }); + + it('array', () => { + assert.deepEqual( + parseArgs(HPERSIST, 'key', ['field1', 'field2']), + ['HPERSIST', 'key', 'FIELDS', '2', 'field1', 'field2'] + ); + }); + }) + + testUtils.testWithClient('hPersist', async client => { + assert.deepEqual( + await client.hPersist('key', 'field1'), + [HASH_EXPIRATION_TIME.FIELD_NOT_EXISTS] + ); + }, { + ...GLOBAL.SERVERS.OPEN, + }); +}); diff --git a/packages/client/lib/commands/HPERSIST.ts b/packages/client/lib/commands/HPERSIST.ts new file mode 100644 index 00000000000..00ab1f4b4b5 --- /dev/null +++ b/packages/client/lib/commands/HPERSIST.ts @@ -0,0 +1,23 @@ +import { CommandParser } from '../client/parser'; +import { ArrayReply, Command, NullReply, NumberReply, RedisArgument } from '../RESP/types'; +import { RedisVariadicArgument } from './generic-transformers'; + +export default { + /** + * Removes the expiration from the specified fields in a hash. + * @param parser - The Redis command parser. 
+ * @param key - Key of the hash. + * @param fields - Fields to remove expiration from. + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + fields: RedisVariadicArgument + ) { + parser.push('HPERSIST'); + parser.pushKey(key); + parser.push('FIELDS'); + parser.pushVariadicWithLength(fields); + }, + transformReply: undefined as unknown as () => ArrayReply | NullReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/HPEXPIRE.spec.ts b/packages/client/lib/commands/HPEXPIRE.spec.ts new file mode 100644 index 00000000000..2f68fb9b7f3 --- /dev/null +++ b/packages/client/lib/commands/HPEXPIRE.spec.ts @@ -0,0 +1,41 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import HPEXPIRE from './HPEXPIRE'; +import { HASH_EXPIRATION_TIME } from './HEXPIRETIME'; +import { parseArgs } from './generic-transformers'; + +describe('HEXPIRE', () => { + testUtils.isVersionGreaterThanHook([7, 4]); + + describe('transformArguments', () => { + it('string', () => { + assert.deepEqual( + parseArgs(HPEXPIRE, 'key', 'field', 1), + ['HPEXPIRE', 'key', '1', 'FIELDS', '1', 'field'] + ); + }); + + it('array', () => { + assert.deepEqual( + parseArgs(HPEXPIRE, 'key', ['field1', 'field2'], 1), + ['HPEXPIRE', 'key', '1', 'FIELDS', '2', 'field1', 'field2'] + ); + }); + + it('with set option', () => { + assert.deepEqual( + parseArgs(HPEXPIRE, 'key', ['field1'], 1, 'NX'), + ['HPEXPIRE', 'key', '1', 'NX', 'FIELDS', '1', 'field1'] + ); + }); + }); + + testUtils.testWithClient('hexpire', async client => { + assert.deepEqual( + await client.hpExpire('key', ['field1'], 0), + [HASH_EXPIRATION_TIME.FIELD_NOT_EXISTS] + ); + }, { + ...GLOBAL.SERVERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/HPEXPIRE.ts b/packages/client/lib/commands/HPEXPIRE.ts new file mode 100644 index 00000000000..2e20c96bb13 --- /dev/null +++ b/packages/client/lib/commands/HPEXPIRE.ts @@ -0,0 +1,36 @@ +import { CommandParser } from 
'../client/parser'; +import { ArrayReply, Command, NullReply, RedisArgument } from '../RESP/types'; +import { RedisVariadicArgument } from './generic-transformers'; +import { HashExpiration } from './HEXPIRE'; + +export default { + /** + * Parses the arguments for the `HPEXPIRE` command. + * + * @param parser - The command parser instance. + * @param key - The key of the hash. + * @param fields - The fields to set the expiration for. + * @param ms - The expiration time in milliseconds. + * @param mode - Optional mode for the command ('NX', 'XX', 'GT', 'LT'). + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + fields: RedisVariadicArgument, + ms: number, + mode?: 'NX' | 'XX' | 'GT' | 'LT', + ) { + parser.push('HPEXPIRE'); + parser.pushKey(key); + parser.push(ms.toString()); + + if (mode) { + parser.push(mode); + } + + parser.push('FIELDS') + + parser.pushVariadicWithLength(fields); + }, + transformReply: undefined as unknown as () => ArrayReply | NullReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/HPEXPIREAT.spec.ts b/packages/client/lib/commands/HPEXPIREAT.spec.ts new file mode 100644 index 00000000000..7c369980bf4 --- /dev/null +++ b/packages/client/lib/commands/HPEXPIREAT.spec.ts @@ -0,0 +1,49 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import HPEXPIREAT from './HPEXPIREAT'; +import { HASH_EXPIRATION_TIME } from './HEXPIRETIME'; +import { parseArgs } from './generic-transformers'; + +describe('HPEXPIREAT', () => { + testUtils.isVersionGreaterThanHook([7, 4]); + + describe('transformArguments', () => { + it('string + number', () => { + assert.deepEqual( + parseArgs(HPEXPIREAT, 'key', 'field', 1), + ['HPEXPIREAT', 'key', '1', 'FIELDS', '1', 'field'] + ); + }); + + it('array + number', () => { + assert.deepEqual( + parseArgs(HPEXPIREAT, 'key', ['field1', 'field2'], 1), + ['HPEXPIREAT', 'key', '1', 'FIELDS', '2', 'field1', 'field2'] + ); + }); + + it('date', 
() => { + const d = new Date(); + assert.deepEqual( + parseArgs(HPEXPIREAT, 'key', ['field1'], d), + ['HPEXPIREAT', 'key', d.getTime().toString(), 'FIELDS', '1', 'field1'] + ); + }); + + it('with set option', () => { + assert.deepEqual( + parseArgs(HPEXPIREAT, 'key', ['field1'], 1, 'XX'), + ['HPEXPIREAT', 'key', '1', 'XX', 'FIELDS', '1', 'field1'] + ); + }); + }); + + testUtils.testWithClient('hpExpireAt', async client => { + assert.deepEqual( + await client.hpExpireAt('key', ['field1'], 1), + [HASH_EXPIRATION_TIME.FIELD_NOT_EXISTS] + ); + }, { + ...GLOBAL.SERVERS.OPEN, + }); +}); diff --git a/packages/client/lib/commands/HPEXPIREAT.ts b/packages/client/lib/commands/HPEXPIREAT.ts new file mode 100644 index 00000000000..58fedc84765 --- /dev/null +++ b/packages/client/lib/commands/HPEXPIREAT.ts @@ -0,0 +1,37 @@ +import { CommandParser } from '../client/parser'; +import { ArrayReply, Command, NullReply, RedisArgument } from '../RESP/types'; +import { RedisVariadicArgument, transformPXAT } from './generic-transformers'; +import { HashExpiration } from './HEXPIRE'; + +export default { + IS_READ_ONLY: true, + /** + * Parses the arguments for the `HPEXPIREAT` command. + * + * @param parser - The command parser instance. + * @param key - The key of the hash. + * @param fields - The fields to set the expiration for. + * @param timestamp - The expiration timestamp (Unix timestamp or Date object). + * @param mode - Optional mode for the command ('NX', 'XX', 'GT', 'LT'). 
+ */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + fields: RedisVariadicArgument, + timestamp: number | Date, + mode?: 'NX' | 'XX' | 'GT' | 'LT' + ) { + parser.push('HPEXPIREAT'); + parser.pushKey(key); + parser.push(transformPXAT(timestamp)); + + if (mode) { + parser.push(mode); + } + + parser.push('FIELDS') + + parser.pushVariadicWithLength(fields); + }, + transformReply: undefined as unknown as () => ArrayReply | NullReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/HPEXPIRETIME.spec.ts b/packages/client/lib/commands/HPEXPIRETIME.spec.ts new file mode 100644 index 00000000000..5673a725afc --- /dev/null +++ b/packages/client/lib/commands/HPEXPIRETIME.spec.ts @@ -0,0 +1,34 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import HPEXPIRETIME from './HPEXPIRETIME'; +import { HASH_EXPIRATION_TIME } from './HEXPIRETIME'; +import { parseArgs } from './generic-transformers'; + +describe('HPEXPIRETIME', () => { + testUtils.isVersionGreaterThanHook([7, 4]); + + describe('transformArguments', () => { + it('string', () => { + assert.deepEqual( + parseArgs(HPEXPIRETIME, 'key', 'field'), + ['HPEXPIRETIME', 'key', 'FIELDS', '1', 'field'] + ); + }); + + it('array', () => { + assert.deepEqual( + parseArgs(HPEXPIRETIME, 'key', ['field1', 'field2']), + ['HPEXPIRETIME', 'key', 'FIELDS', '2', 'field1', 'field2'] + ); + }); + }); + + testUtils.testWithClient('hpExpireTime', async client => { + assert.deepEqual( + await client.hpExpireTime('key', 'field1'), + [HASH_EXPIRATION_TIME.FIELD_NOT_EXISTS] + ); + }, { + ...GLOBAL.SERVERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/HPEXPIRETIME.ts b/packages/client/lib/commands/HPEXPIRETIME.ts new file mode 100644 index 00000000000..d27a15749ae --- /dev/null +++ b/packages/client/lib/commands/HPEXPIRETIME.ts @@ -0,0 +1,26 @@ +import { CommandParser } from '../client/parser'; +import { ArrayReply, Command, NullReply, 
NumberReply, RedisArgument } from '../RESP/types'; +import { RedisVariadicArgument } from './generic-transformers'; + +export default { + IS_READ_ONLY: true, + /** + * Constructs the HPEXPIRETIME command + * + * @param parser - The command parser + * @param key - The key to retrieve expiration time for + * @param fields - The fields to retrieve expiration time for + * @see https://redis.io/commands/hpexpiretime/ + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + fields: RedisVariadicArgument, + ) { + parser.push('HPEXPIRETIME'); + parser.pushKey(key); + parser.push('FIELDS'); + parser.pushVariadicWithLength(fields); + }, + transformReply: undefined as unknown as () => ArrayReply | NullReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/HPTTL.spec.ts b/packages/client/lib/commands/HPTTL.spec.ts new file mode 100644 index 00000000000..baaa11b19c8 --- /dev/null +++ b/packages/client/lib/commands/HPTTL.spec.ts @@ -0,0 +1,34 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import HPTTL from './HPTTL'; +import { HASH_EXPIRATION_TIME } from './HEXPIRETIME'; +import { parseArgs } from './generic-transformers'; + +describe('HPTTL', () => { + testUtils.isVersionGreaterThanHook([7, 4]); + + describe('transformArguments', () => { + it('string', () => { + assert.deepEqual( + parseArgs(HPTTL, 'key', 'field'), + ['HPTTL', 'key', 'FIELDS', '1', 'field'] + ); + }); + + it('array', () => { + assert.deepEqual( + parseArgs(HPTTL, 'key', ['field1', 'field2']), + ['HPTTL', 'key', 'FIELDS', '2', 'field1', 'field2'] + ); + }); + }); + + testUtils.testWithClient('hpTTL', async client => { + assert.deepEqual( + await client.hpTTL('key', 'field1'), + [HASH_EXPIRATION_TIME.FIELD_NOT_EXISTS] + ); + }, { + ...GLOBAL.SERVERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/HPTTL.ts b/packages/client/lib/commands/HPTTL.ts new file mode 100644 index 00000000000..f177e6b5a02 --- /dev/null 
+++ b/packages/client/lib/commands/HPTTL.ts @@ -0,0 +1,26 @@ +import { CommandParser } from '../client/parser'; +import { ArrayReply, Command, NullReply, NumberReply, RedisArgument } from '../RESP/types'; +import { RedisVariadicArgument } from './generic-transformers'; + +export default { + IS_READ_ONLY: true, + /** + * Constructs the HPTTL command + * + * @param parser - The command parser + * @param key - The key to check time-to-live for + * @param fields - The fields to check time-to-live for + * @see https://redis.io/commands/hpttl/ + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + fields: RedisVariadicArgument + ) { + parser.push('HPTTL'); + parser.pushKey(key); + parser.push('FIELDS'); + parser.pushVariadicWithLength(fields); + }, + transformReply: undefined as unknown as () => ArrayReply | NullReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/HRANDFIELD.spec.ts b/packages/client/lib/commands/HRANDFIELD.spec.ts new file mode 100644 index 00000000000..151636057a0 --- /dev/null +++ b/packages/client/lib/commands/HRANDFIELD.spec.ts @@ -0,0 +1,25 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import HRANDFIELD from './HRANDFIELD'; +import { parseArgs } from './generic-transformers'; + +describe('HRANDFIELD', () => { + testUtils.isVersionGreaterThanHook([6, 2]); + + it('transformArguments', () => { + assert.deepEqual( + parseArgs(HRANDFIELD, 'key'), + ['HRANDFIELD', 'key'] + ); + }); + + testUtils.testAll('hRandField', async client => { + assert.equal( + await client.hRandField('key'), + null + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/HRANDFIELD.ts b/packages/client/lib/commands/HRANDFIELD.ts new file mode 100644 index 00000000000..38cc34dcee5 --- /dev/null +++ b/packages/client/lib/commands/HRANDFIELD.ts @@ -0,0 +1,18 @@ +import { CommandParser } from '../client/parser'; 
import { RedisArgument, BlobStringReply, NullReply, Command } from '../RESP/types';

export default {
  IS_READ_ONLY: true,
  /**
   * Constructs the HRANDFIELD command.
   * Returns a single random field name, or null when the key does not exist.
   *
   * @param parser - The command parser
   * @param key - The key of the hash to get a random field from
   * @see https://redis.io/commands/hrandfield/
   */
  parseCommand(parser: CommandParser, key: RedisArgument) {
    parser.push('HRANDFIELD');
    parser.pushKey(key);
  },
  transformReply: undefined as unknown as () => BlobStringReply | NullReply
} as const satisfies Command;
parser + * @param key - The key of the hash to get random fields from + * @param count - The number of fields to return (positive: unique fields, negative: may repeat fields) + * @see https://redis.io/commands/hrandfield/ + */ + parseCommand(parser: CommandParser, key: RedisArgument, count: number) { + parser.push('HRANDFIELD'); + parser.pushKey(key); + parser.push(count.toString()); + }, + transformReply: undefined as unknown as () => ArrayReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/HRANDFIELD_COUNT_WITHVALUES.spec.ts b/packages/client/lib/commands/HRANDFIELD_COUNT_WITHVALUES.spec.ts new file mode 100644 index 00000000000..e69de29bb2d diff --git a/packages/client/lib/commands/HRANDFIELD_COUNT_WITHVALUES.ts b/packages/client/lib/commands/HRANDFIELD_COUNT_WITHVALUES.ts new file mode 100644 index 00000000000..e247006c6a7 --- /dev/null +++ b/packages/client/lib/commands/HRANDFIELD_COUNT_WITHVALUES.ts @@ -0,0 +1,49 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, ArrayReply, TuplesReply, BlobStringReply, UnwrapReply, Command } from '../RESP/types'; + +export type HRandFieldCountWithValuesReply = Array<{ + field: BlobStringReply; + value: BlobStringReply; +}>; + +export default { + IS_READ_ONLY: true, + /** + * Constructs the HRANDFIELD command with count parameter and WITHVALUES option + * + * @param parser - The command parser + * @param key - The key of the hash to get random fields from + * @param count - The number of fields to return (positive: unique fields, negative: may repeat fields) + * @see https://redis.io/commands/hrandfield/ + */ + parseCommand(parser: CommandParser, key: RedisArgument, count: number) { + parser.push('HRANDFIELD'); + parser.pushKey(key); + parser.push(count.toString(), 'WITHVALUES'); + }, + transformReply: { + 2: (rawReply: UnwrapReply>) => { + const reply: HRandFieldCountWithValuesReply = []; + + let i = 0; + while (i < rawReply.length) { + reply.push({ + field: 
rawReply[i++], + value: rawReply[i++] + }); + } + + return reply; + }, + 3: (reply: UnwrapReply>>) => { + return reply.map(entry => { + const [field, value] = entry as unknown as UnwrapReply; + return { + field, + value + }; + }) satisfies HRandFieldCountWithValuesReply; + } + } +} as const satisfies Command; + \ No newline at end of file diff --git a/packages/client/lib/commands/HSCAN.spec.ts b/packages/client/lib/commands/HSCAN.spec.ts new file mode 100644 index 00000000000..9e489f6190d --- /dev/null +++ b/packages/client/lib/commands/HSCAN.spec.ts @@ -0,0 +1,83 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import { parseArgs } from './generic-transformers'; +import HSCAN from './HSCAN'; + +describe('HSCAN', () => { + describe('transformArguments', () => { + it('cusror only', () => { + assert.deepEqual( + parseArgs(HSCAN, 'key', '0'), + ['HSCAN', 'key', '0'] + ); + }); + + it('with MATCH', () => { + assert.deepEqual( + parseArgs(HSCAN, 'key', '0', { + MATCH: 'pattern' + }), + ['HSCAN', 'key', '0', 'MATCH', 'pattern'] + ); + }); + + it('with COUNT', () => { + assert.deepEqual( + parseArgs(HSCAN, 'key', '0', { + COUNT: 1 + }), + ['HSCAN', 'key', '0', 'COUNT', '1'] + ); + }); + + it('with MATCH & COUNT', () => { + assert.deepEqual( + parseArgs(HSCAN, 'key', '0', { + MATCH: 'pattern', + COUNT: 1 + }), + ['HSCAN', 'key', '0', 'MATCH', 'pattern', 'COUNT', '1'] + ); + }); + }); + + describe('transformReply', () => { + it('without tuples', () => { + assert.deepEqual( + HSCAN.transformReply(['0' as any, []]), + { + cursor: '0', + entries: [] + } + ); + }); + + it('with tuples', () => { + assert.deepEqual( + HSCAN.transformReply(['0' as any, ['field', 'value'] as any]), + { + cursor: '0', + entries: [{ + field: 'field', + value: 'value' + }] + } + ); + }); + }); + + testUtils.testWithClient('client.hScan', async client => { + const [, reply] = await Promise.all([ + client.hSet('key', 'field', 'value'), + 
client.hScan('key', '0') + ]); + + assert.deepEqual(reply, { + cursor: '0', + entries: [{ + field: 'field', + value: 'value' + }] + }); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/HSCAN.ts b/packages/client/lib/commands/HSCAN.ts new file mode 100644 index 00000000000..78141814ff1 --- /dev/null +++ b/packages/client/lib/commands/HSCAN.ts @@ -0,0 +1,46 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, BlobStringReply, Command } from '../RESP/types'; +import { ScanCommonOptions, parseScanArguments } from './SCAN'; + +export interface HScanEntry { + field: BlobStringReply; + value: BlobStringReply; +} + +export default { + IS_READ_ONLY: true, + /** + * Constructs the HSCAN command + * + * @param parser - The command parser + * @param key - The key of the hash to scan + * @param cursor - The cursor position to start scanning from + * @param options - Options for the scan (COUNT, MATCH, TYPE) + * @see https://redis.io/commands/hscan/ + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + cursor: RedisArgument, + options?: ScanCommonOptions + ) { + parser.push('HSCAN'); + parser.pushKey(key); + parseScanArguments(parser, cursor, options); + }, + transformReply([cursor, rawEntries]: [BlobStringReply, Array]) { + const entries = []; + let i = 0; + while (i < rawEntries.length) { + entries.push({ + field: rawEntries[i++], + value: rawEntries[i++] + } satisfies HScanEntry); + } + + return { + cursor, + entries + }; + } +} as const satisfies Command; diff --git a/packages/client/lib/commands/HSCAN_NOVALUES.spec.ts b/packages/client/lib/commands/HSCAN_NOVALUES.spec.ts new file mode 100644 index 00000000000..83a452a6897 --- /dev/null +++ b/packages/client/lib/commands/HSCAN_NOVALUES.spec.ts @@ -0,0 +1,82 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import HSCAN_NOVALUES from './HSCAN_NOVALUES'; +import { parseArgs } from './generic-transformers'; 
+ +describe('HSCAN_NOVALUES', () => { + testUtils.isVersionGreaterThanHook([7,4]); + + describe('transformArguments', () => { + it('cusror only', () => { + assert.deepEqual( + parseArgs(HSCAN_NOVALUES, 'key', '0'), + ['HSCAN', 'key', '0', 'NOVALUES'] + ); + }); + + it('with MATCH', () => { + assert.deepEqual( + parseArgs(HSCAN_NOVALUES, 'key', '0', { + MATCH: 'pattern' + }), + ['HSCAN', 'key', '0', 'MATCH', 'pattern', 'NOVALUES'] + ); + }); + + it('with COUNT', () => { + assert.deepEqual( + parseArgs(HSCAN_NOVALUES, 'key', '0', { + COUNT: 1 + }), + ['HSCAN', 'key', '0', 'COUNT', '1', 'NOVALUES'] + ); + }); + + it('with MATCH & COUNT', () => { + assert.deepEqual( + parseArgs(HSCAN_NOVALUES, 'key', '0', { + MATCH: 'pattern', + COUNT: 1 + }), + ['HSCAN', 'key', '0', 'MATCH', 'pattern', 'COUNT', '1', 'NOVALUES'] + ); + }); + }); + + describe('transformReply', () => { + it('without keys', () => { + assert.deepEqual( + HSCAN_NOVALUES.transformReply(['0' as any, []]), + { + cursor: '0', + fields: [] + } + ); + }); + + it('with keys', () => { + assert.deepEqual( + HSCAN_NOVALUES.transformReply(['0' as any, ['key1', 'key2'] as any]), + { + cursor: '0', + fields: ['key1', 'key2'] + } + ); + }); + }); + + + testUtils.testWithClient('client.hScanNoValues', async client => { + const [, reply] = await Promise.all([ + client.hSet('key', 'field', 'value'), + client.hScanNoValues('key', '0') + ]); + + assert.deepEqual(reply, { + cursor: '0', + fields: [ + 'field', + ] + }); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/HSCAN_NOVALUES.ts b/packages/client/lib/commands/HSCAN_NOVALUES.ts new file mode 100644 index 00000000000..8f7afe52b8e --- /dev/null +++ b/packages/client/lib/commands/HSCAN_NOVALUES.ts @@ -0,0 +1,24 @@ +import { BlobStringReply, Command } from '../RESP/types'; +import HSCAN from './HSCAN'; + +export default { + IS_READ_ONLY: true, + /** + * Constructs the HSCAN command with NOVALUES option + * + * @param args - The same parameters as 
HSCAN command + * @see https://redis.io/commands/hscan/ + */ + parseCommand(...args: Parameters) { + const parser = args[0]; + + HSCAN.parseCommand(...args); + parser.push('NOVALUES'); + }, + transformReply([cursor, fields]: [BlobStringReply, Array]) { + return { + cursor, + fields + }; + } +} as const satisfies Command; diff --git a/packages/client/lib/commands/HSET.spec.ts b/packages/client/lib/commands/HSET.spec.ts new file mode 100644 index 00000000000..2cb53e6485a --- /dev/null +++ b/packages/client/lib/commands/HSET.spec.ts @@ -0,0 +1,71 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import HSET from './HSET'; +import { parseArgs } from './generic-transformers'; + +describe('HSET', () => { + describe('transformArguments', () => { + describe('field, value', () => { + it('string', () => { + assert.deepEqual( + parseArgs(HSET, 'key', 'field', 'value'), + ['HSET', 'key', 'field', 'value'] + ); + }); + + it('number', () => { + assert.deepEqual( + parseArgs(HSET, 'key', 1, 2), + ['HSET', 'key', '1', '2'] + ); + }); + + it('Buffer', () => { + assert.deepEqual( + parseArgs(HSET, Buffer.from('key'), Buffer.from('field'), Buffer.from('value')), + ['HSET', Buffer.from('key'), Buffer.from('field'), Buffer.from('value')] + ); + }); + }); + + it('Map', () => { + assert.deepEqual( + parseArgs(HSET, 'key', new Map([['field', 'value']])), + ['HSET', 'key', 'field', 'value'] + ); + }); + + it('Array', () => { + assert.deepEqual( + parseArgs(HSET, 'key', [['field', 'value']]), + ['HSET', 'key', 'field', 'value'] + ); + }); + + describe('Object', () => { + it('string', () => { + assert.deepEqual( + parseArgs(HSET, 'key', { field: 'value' }), + ['HSET', 'key', 'field', 'value'] + ); + }); + + it('Buffer', () => { + assert.deepEqual( + parseArgs(HSET, 'key', { field: Buffer.from('value') }), + ['HSET', 'key', 'field', Buffer.from('value')] + ); + }); + }); + }); + + testUtils.testAll('hSet', async client => { + 
assert.equal( + await client.hSet('key', 'field', 'value'), + 1 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/HSET.ts b/packages/client/lib/commands/HSET.ts new file mode 100644 index 00000000000..7dc4da8d3cf --- /dev/null +++ b/packages/client/lib/commands/HSET.ts @@ -0,0 +1,83 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; + +export type HashTypes = RedisArgument | number; + +type HSETObject = Record; + +type HSETMap = Map; + +type HSETTuples = Array<[HashTypes, HashTypes]> | Array; + +type GenericArguments = [key: RedisArgument]; + +type SingleFieldArguments = [...generic: GenericArguments, field: HashTypes, value: HashTypes]; + +type MultipleFieldsArguments = [...generic: GenericArguments, value: HSETObject | HSETMap | HSETTuples]; + +export type HSETArguments = SingleFieldArguments | MultipleFieldsArguments; + +export default { + /** + * Constructs the HSET command + * + * @param parser - The command parser + * @param key - The key of the hash + * @param value - Either the field name (when using single field) or an object/map/array of field-value pairs + * @param fieldValue - The value to set (only used with single field variant) + * @see https://redis.io/commands/hset/ + */ + parseCommand(parser: CommandParser, ...[key, value, fieldValue]: SingleFieldArguments | MultipleFieldsArguments) { + parser.push('HSET'); + parser.pushKey(key); + + if (typeof value === 'string' || typeof value === 'number' || value instanceof Buffer) { + parser.push( + convertValue(value), + convertValue(fieldValue!) 
+ ); + } else if (value instanceof Map) { + pushMap(parser, value); + } else if (Array.isArray(value)) { + pushTuples(parser, value); + } else { + pushObject(parser, value); + } + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; + +function pushMap(parser: CommandParser, map: HSETMap): void { + for (const [key, value] of map.entries()) { + parser.push( + convertValue(key), + convertValue(value) + ); + } +} + +function pushTuples(parser: CommandParser, tuples: HSETTuples): void { + for (const tuple of tuples) { + if (Array.isArray(tuple)) { + pushTuples(parser, tuple); + continue; + } + + parser.push(convertValue(tuple)); + } +} + +function pushObject(parser: CommandParser, object: HSETObject): void { + for (const key of Object.keys(object)) { + parser.push( + convertValue(key), + convertValue(object[key]) + ); + } +} + +function convertValue(value: HashTypes): RedisArgument { + return typeof value === 'number' ? + value.toString() : + value; +} diff --git a/packages/client/lib/commands/HSETEX.spec.ts b/packages/client/lib/commands/HSETEX.spec.ts new file mode 100644 index 00000000000..fc38e0f0f45 --- /dev/null +++ b/packages/client/lib/commands/HSETEX.spec.ts @@ -0,0 +1,98 @@ +import { strict as assert } from 'node:assert'; +import testUtils,{ GLOBAL } from '../test-utils'; +import { BasicCommandParser } from '../client/parser'; +import HSETEX from './HSETEX'; + +describe('HSETEX parseCommand', () => { + it('hSetEx parseCommand base', () => { + const parser = new BasicCommandParser; + HSETEX.parseCommand(parser, 'key', ['field', 'value']); + assert.deepEqual(parser.redisArgs, ['HSETEX', 'key', 'FIELDS', '1', 'field', 'value']); + }); + + it('hSetEx parseCommand base empty obj', () => { + const parser = new BasicCommandParser; + assert.throws(() => {HSETEX.parseCommand(parser, 'key', {})}); + }); + + it('hSetEx parseCommand base one key obj', () => { + const parser = new BasicCommandParser; + HSETEX.parseCommand(parser, 
'key', {'k': 'v'}); + assert.deepEqual(parser.redisArgs, ['HSETEX', 'key', 'FIELDS', '1', 'k', 'v']); + }); + + it('hSetEx parseCommand array', () => { + const parser = new BasicCommandParser; + HSETEX.parseCommand(parser, 'key', ['field1', 'value1', 'field2', 'value2']); + assert.deepEqual(parser.redisArgs, ['HSETEX', 'key', 'FIELDS', '2', 'field1', 'value1', 'field2', 'value2']); + }); + + it('hSetEx parseCommand array invalid args, throws an error', () => { + const parser = new BasicCommandParser; + assert.throws(() => {HSETEX.parseCommand(parser, 'key', ['field1', 'value1', 'field2'])}); + }); + + it('hSetEx parseCommand array in array', () => { + const parser1 = new BasicCommandParser; + HSETEX.parseCommand(parser1, 'key', [['field1', 'value1'], ['field2', 'value2']]); + assert.deepEqual(parser1.redisArgs, ['HSETEX', 'key', 'FIELDS', '2', 'field1', 'value1', 'field2', 'value2']); + + const parser2 = new BasicCommandParser; + HSETEX.parseCommand(parser2, 'key', [['field1', 'value1'], ['field2', 'value2'], ['field3', 'value3']]); + assert.deepEqual(parser2.redisArgs, ['HSETEX', 'key', 'FIELDS', '3', 'field1', 'value1', 'field2', 'value2', 'field3', 'value3']); + }); + + it('hSetEx parseCommand map', () => { + const parser1 = new BasicCommandParser; + HSETEX.parseCommand(parser1, 'key', new Map([['field1', 'value1'], ['field2', 'value2']])); + assert.deepEqual(parser1.redisArgs, ['HSETEX', 'key', 'FIELDS', '2', 'field1', 'value1', 'field2', 'value2']); + }); + + it('hSetEx parseCommand obj', () => { + const parser1 = new BasicCommandParser; + HSETEX.parseCommand(parser1, 'key', {field1: "value1", field2: "value2"}); + assert.deepEqual(parser1.redisArgs, ['HSETEX', 'key', 'FIELDS', '2', 'field1', 'value1', 'field2', 'value2']); + }); + + it('hSetEx parseCommand options FNX KEEPTTL', () => { + const parser = new BasicCommandParser; + HSETEX.parseCommand(parser, 'key', ['field', 'value'], {mode: 'FNX', expiration: 'KEEPTTL'}); + assert.deepEqual(parser.redisArgs, 
['HSETEX', 'key', 'FNX', 'KEEPTTL', 'FIELDS', '1', 'field', 'value']); + }); + + it('hSetEx parseCommand options FXX EX 500', () => { + const parser = new BasicCommandParser; + HSETEX.parseCommand(parser, 'key', ['field', 'value'], {mode: 'FXX', expiration: {type: 'EX', value: 500}}); + assert.deepEqual(parser.redisArgs, ['HSETEX', 'key', 'FXX', 'EX', '500', 'FIELDS', '1', 'field', 'value']); + }); +}); + + +describe('HSETEX call', () => { + testUtils.testWithClientIfVersionWithinRange([[8], 'LATEST'], 'hSetEx calls', async client => { + assert.deepEqual( + await client.hSetEx('key_hsetex_call', ['field1', 'value1'], {expiration: {type: "EX", value: 500}, mode: "FNX"}), + 1 + ); + + assert.deepEqual( + await client.hSetEx('key_hsetex_call', ['field1', 'value1', 'field2', 'value2'], {expiration: {type: "EX", value: 500}, mode: "FXX"}), + 0 + ); + + assert.deepEqual( + await client.hSetEx('key_hsetex_call', ['field1', 'value1', 'field2', 'value2'], {expiration: {type: "EX", value: 500}, mode: "FNX"}), + 0 + ); + + assert.deepEqual( + await client.hSetEx('key_hsetex_call', ['field2', 'value2'], {expiration: {type: "EX", value: 500}, mode: "FNX"}), + 1 + ); + + assert.deepEqual( + await client.hSetEx('key_hsetex_call', ['field1', 'value1', 'field2', 'value2'], {expiration: {type: "EX", value: 500}, mode: "FXX"}), + 1 + ); + }, GLOBAL.SERVERS.OPEN); +}); \ No newline at end of file diff --git a/packages/client/lib/commands/HSETEX.ts b/packages/client/lib/commands/HSETEX.ts new file mode 100644 index 00000000000..316b95a91c3 --- /dev/null +++ b/packages/client/lib/commands/HSETEX.ts @@ -0,0 +1,119 @@ +import { BasicCommandParser, CommandParser } from '../client/parser'; +import { Command, NumberReply, RedisArgument } from '../RESP/types'; + +export interface HSetExOptions { + expiration?: { + type: 'EX' | 'PX' | 'EXAT' | 'PXAT'; + value: number; + } | { + type: 'KEEPTTL'; + } | 'KEEPTTL'; + mode?: 'FNX' | 'FXX' + } + +export type HashTypes = RedisArgument | number; + 
+type HSETEXObject = Record; + +type HSETEXMap = Map; + +type HSETEXTuples = Array<[HashTypes, HashTypes]> | Array; + +export default { + /** + * Constructs the HSETEX command + * + * @param parser - The command parser + * @param key - The key of the hash + * @param fields - Object, Map, or Array of field-value pairs to set + * @param options - Optional configuration for expiration and mode settings + * @see https://redis.io/commands/hsetex/ + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + fields: HSETEXObject | HSETEXMap | HSETEXTuples, + options?: HSetExOptions + ) { + parser.push('HSETEX'); + parser.pushKey(key); + + if (options?.mode) { + parser.push(options.mode) + } + if (options?.expiration) { + if (typeof options.expiration === 'string') { + parser.push(options.expiration); + } else if (options.expiration.type === 'KEEPTTL') { + parser.push('KEEPTTL'); + } else { + parser.push( + options.expiration.type, + options.expiration.value.toString() + ); + } + } + + parser.push('FIELDS') + if (fields instanceof Map) { + pushMap(parser, fields); + } else if (Array.isArray(fields)) { + pushTuples(parser, fields); + } else { + pushObject(parser, fields); + } + }, + transformReply: undefined as unknown as () => NumberReply<0 | 1> +} as const satisfies Command; + + +function pushMap(parser: CommandParser, map: HSETEXMap): void { + parser.push(map.size.toString()) + for (const [key, value] of map.entries()) { + parser.push( + convertValue(key), + convertValue(value) + ); + } +} + +function pushTuples(parser: CommandParser, tuples: HSETEXTuples): void { + const tmpParser = new BasicCommandParser + _pushTuples(tmpParser, tuples) + + if (tmpParser.redisArgs.length%2 != 0) { + throw Error('invalid number of arguments, expected key value ....[key value] pairs, got key without value') + } + + parser.push((tmpParser.redisArgs.length/2).toString()) + parser.push(...tmpParser.redisArgs) +} + +function _pushTuples(parser: CommandParser, tuples: HSETEXTuples): 
void { + for (const tuple of tuples) { + if (Array.isArray(tuple)) { + _pushTuples(parser, tuple); + continue; + } + parser.push(convertValue(tuple)); + } +} + +function pushObject(parser: CommandParser, object: HSETEXObject): void { + const len = Object.keys(object).length + if (len == 0) { + throw Error('object without keys') + } + + parser.push(len.toString()) + for (const key of Object.keys(object)) { + parser.push( + convertValue(key), + convertValue(object[key]) + ); + } +} + +function convertValue(value: HashTypes): RedisArgument { + return typeof value === 'number' ? value.toString() : value; +} \ No newline at end of file diff --git a/packages/client/lib/commands/HSETNX.spec.ts b/packages/client/lib/commands/HSETNX.spec.ts new file mode 100644 index 00000000000..e65f9fb219c --- /dev/null +++ b/packages/client/lib/commands/HSETNX.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import HSETNX from './HSETNX'; +import { parseArgs } from './generic-transformers'; + +describe('HSETNX', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(HSETNX, 'key', 'field', 'value'), + ['HSETNX', 'key', 'field', 'value'] + ); + }); + + testUtils.testAll('hSetNX', async client => { + assert.equal( + await client.hSetNX('key', 'field', 'value'), + 1 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/HSETNX.ts b/packages/client/lib/commands/HSETNX.ts new file mode 100644 index 00000000000..dc10b6c5e00 --- /dev/null +++ b/packages/client/lib/commands/HSETNX.ts @@ -0,0 +1,26 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, Command, NumberReply } from '../RESP/types'; + +export default { + IS_READ_ONLY: true, + /** + * Constructs the HSETNX command + * + * @param parser - The command parser + * @param key - The key of the hash + * @param field - The field to set if it does not 
exist + * @param value - The value to set + * @see https://redis.io/commands/hsetnx/ + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + field: RedisArgument, + value: RedisArgument + ) { + parser.push('HSETNX'); + parser.pushKey(key); + parser.push(field, value); + }, + transformReply: undefined as unknown as () => NumberReply<0 | 1> +} as const satisfies Command; diff --git a/packages/client/lib/commands/HSTRLEN.spec.ts b/packages/client/lib/commands/HSTRLEN.spec.ts new file mode 100644 index 00000000000..47dd0eaf795 --- /dev/null +++ b/packages/client/lib/commands/HSTRLEN.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import HSTRLEN from './HSTRLEN'; +import { parseArgs } from './generic-transformers'; + +describe('HSTRLEN', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(HSTRLEN, 'key', 'field'), + ['HSTRLEN', 'key', 'field'] + ); + }); + + testUtils.testAll('hStrLen', async client => { + assert.equal( + await client.hStrLen('key', 'field'), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/HSTRLEN.ts b/packages/client/lib/commands/HSTRLEN.ts new file mode 100644 index 00000000000..016c14e27a8 --- /dev/null +++ b/packages/client/lib/commands/HSTRLEN.ts @@ -0,0 +1,21 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, ArrayReply, BlobStringReply, Command } from '../RESP/types'; + +export default { + CACHEABLE: true, + IS_READ_ONLY: true, + /** + * Constructs the HSTRLEN command + * + * @param parser - The command parser + * @param key - The key of the hash + * @param field - The field to get the string length of + * @see https://redis.io/commands/hstrlen/ + */ + parseCommand(parser: CommandParser, key: RedisArgument, field: RedisArgument) { + parser.push('HSTRLEN'); + parser.pushKey(key); + parser.push(field); + }, + transformReply: 
undefined as unknown as () => ArrayReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/HTTL.spec.ts b/packages/client/lib/commands/HTTL.spec.ts new file mode 100644 index 00000000000..a79500e4d06 --- /dev/null +++ b/packages/client/lib/commands/HTTL.spec.ts @@ -0,0 +1,34 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import HTTL from './HTTL'; +import { HASH_EXPIRATION_TIME } from './HEXPIRETIME'; +import { parseArgs } from './generic-transformers'; + +describe('HTTL', () => { + testUtils.isVersionGreaterThanHook([7, 4]); + + describe('transformArguments', () => { + it('string', () => { + assert.deepEqual( + parseArgs(HTTL, 'key', 'field'), + ['HTTL', 'key', 'FIELDS', '1', 'field'] + ); + }); + + it('array', () => { + assert.deepEqual( + parseArgs(HTTL, 'key', ['field1', 'field2']), + ['HTTL', 'key', 'FIELDS', '2', 'field1', 'field2'] + ); + }); + }); + + testUtils.testWithClient('hTTL', async client => { + assert.deepEqual( + await client.hTTL('key', 'field1'), + [HASH_EXPIRATION_TIME.FIELD_NOT_EXISTS] + ); + }, { + ...GLOBAL.SERVERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/HTTL.ts b/packages/client/lib/commands/HTTL.ts new file mode 100644 index 00000000000..710b4c7c1ff --- /dev/null +++ b/packages/client/lib/commands/HTTL.ts @@ -0,0 +1,24 @@ +import { CommandParser } from '../client/parser'; +import { ArrayReply, Command, NullReply, NumberReply, RedisArgument } from '../RESP/types'; +import { RedisVariadicArgument } from './generic-transformers'; + +export default { + IS_READ_ONLY: true, + /** + * Returns the remaining time to live of field(s) in a hash. + * @param parser - The Redis command parser. + * @param key - Key of the hash. + * @param fields - Fields to check time to live. 
+ */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + fields: RedisVariadicArgument + ) { + parser.push('HTTL'); + parser.pushKey(key); + parser.push('FIELDS'); + parser.pushVariadicWithLength(fields); + }, + transformReply: undefined as unknown as () => ArrayReply | NullReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/HVALS.spec.ts b/packages/client/lib/commands/HVALS.spec.ts new file mode 100644 index 00000000000..89cbb52861c --- /dev/null +++ b/packages/client/lib/commands/HVALS.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import HVALS from './HVALS'; +import { parseArgs } from './generic-transformers'; + +describe('HVALS', () => { + it('processCommand', () => { + assert.deepEqual( + parseArgs(HVALS, 'key'), + ['HVALS', 'key'] + ); + }); + + testUtils.testAll('hVals', async client => { + assert.deepEqual( + await client.hVals('key'), + [] + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/HVALS.ts b/packages/client/lib/commands/HVALS.ts new file mode 100644 index 00000000000..faa5fe43442 --- /dev/null +++ b/packages/client/lib/commands/HVALS.ts @@ -0,0 +1,17 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, ArrayReply, BlobStringReply, Command } from '../RESP/types'; + +export default { + CACHEABLE: true, + IS_READ_ONLY: true, + /** + * Gets all values in a hash. + * @param parser - The Redis command parser. + * @param key - Key of the hash. 
+ */ + parseCommand(parser: CommandParser, key: RedisArgument) { + parser.push('HVALS'); + parser.pushKey(key); + }, + transformReply: undefined as unknown as () => ArrayReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/INCR.spec.ts b/packages/client/lib/commands/INCR.spec.ts new file mode 100644 index 00000000000..0fe7ed7f8e6 --- /dev/null +++ b/packages/client/lib/commands/INCR.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import INCR from './INCR'; +import { parseArgs } from './generic-transformers'; + +describe('INCR', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(INCR, 'key'), + ['INCR', 'key'] + ); + }); + + testUtils.testAll('incr', async client => { + assert.equal( + await client.incr('key'), + 1 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/INCR.ts b/packages/client/lib/commands/INCR.ts new file mode 100644 index 00000000000..0a294ccdc5a --- /dev/null +++ b/packages/client/lib/commands/INCR.ts @@ -0,0 +1,17 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; + +export default { + /** + * Constructs the INCR command + * + * @param parser - The command parser + * @param key - The key to increment + * @see https://redis.io/commands/incr/ + */ + parseCommand(parser: CommandParser, key: RedisArgument) { + parser.push('INCR'); + parser.pushKey(key); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/INCRBY.spec.ts b/packages/client/lib/commands/INCRBY.spec.ts new file mode 100644 index 00000000000..e2a5842f20a --- /dev/null +++ b/packages/client/lib/commands/INCRBY.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; 
+import INCRBY from './INCRBY'; +import { parseArgs } from './generic-transformers'; + +describe('INCRBY', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(INCRBY, 'key', 1), + ['INCRBY', 'key', '1'] + ); + }); + + testUtils.testAll('incrBy', async client => { + assert.equal( + await client.incrBy('key', 1), + 1 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/INCRBY.ts b/packages/client/lib/commands/INCRBY.ts new file mode 100644 index 00000000000..f23ec1a74a4 --- /dev/null +++ b/packages/client/lib/commands/INCRBY.ts @@ -0,0 +1,19 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; + +export default { + /** + * Constructs the INCRBY command + * + * @param parser - The command parser + * @param key - The key to increment + * @param increment - The amount to increment by + * @see https://redis.io/commands/incrby/ + */ + parseCommand(parser: CommandParser, key: RedisArgument, increment: number) { + parser.push('INCRBY'); + parser.pushKey(key); + parser.push(increment.toString()); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/INCRBYFLOAT.spec.ts b/packages/client/lib/commands/INCRBYFLOAT.spec.ts new file mode 100644 index 00000000000..57596970708 --- /dev/null +++ b/packages/client/lib/commands/INCRBYFLOAT.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import INCRBYFLOAT from './INCRBYFLOAT'; +import { parseArgs } from './generic-transformers'; + +describe('INCRBYFLOAT', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(INCRBYFLOAT, 'key', 1.5), + ['INCRBYFLOAT', 'key', '1.5'] + ); + }); + + testUtils.testAll('incrByFloat', async client => { + assert.equal( + await client.incrByFloat('key', 1.5), + 
'1.5' + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/INCRBYFLOAT.ts b/packages/client/lib/commands/INCRBYFLOAT.ts new file mode 100644 index 00000000000..9effa756db5 --- /dev/null +++ b/packages/client/lib/commands/INCRBYFLOAT.ts @@ -0,0 +1,19 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, BlobStringReply, Command } from '../RESP/types'; + +export default { + /** + * Constructs the INCRBYFLOAT command + * + * @param parser - The command parser + * @param key - The key to increment + * @param increment - The floating-point value to increment by + * @see https://redis.io/commands/incrbyfloat/ + */ + parseCommand(parser: CommandParser, key: RedisArgument, increment: number) { + parser.push('INCRBYFLOAT'); + parser.pushKey(key); + parser.push(increment.toString()); + }, + transformReply: undefined as unknown as () => BlobStringReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/INFO.spec.ts b/packages/client/lib/commands/INFO.spec.ts new file mode 100644 index 00000000000..7ee8a95c137 --- /dev/null +++ b/packages/client/lib/commands/INFO.spec.ts @@ -0,0 +1,29 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import INFO from './INFO'; +import { parseArgs } from './generic-transformers'; + +describe('INFO', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(INFO), + ['INFO'] + ); + }); + + it('server section', () => { + assert.deepEqual( + parseArgs(INFO, 'server'), + ['INFO', 'server'] + ); + }); + }); + + testUtils.testWithClient('client.info', async client => { + assert.equal( + typeof await client.info(), + 'string' + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/INFO.ts b/packages/client/lib/commands/INFO.ts new file mode 100644 index 00000000000..799fcb1825a --- /dev/null +++ 
b/packages/client/lib/commands/INFO.ts @@ -0,0 +1,22 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, VerbatimStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Constructs the INFO command + * + * @param parser - The command parser + * @param section - Optional specific section of information to retrieve + * @see https://redis.io/commands/info/ + */ + parseCommand(parser: CommandParser, section?: RedisArgument) { + parser.push('INFO'); + + if (section) { + parser.push(section); + } + }, + transformReply: undefined as unknown as () => VerbatimStringReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/KEYS.spec.ts b/packages/client/lib/commands/KEYS.spec.ts new file mode 100644 index 00000000000..8100559a7e9 --- /dev/null +++ b/packages/client/lib/commands/KEYS.spec.ts @@ -0,0 +1,11 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; + +describe('KEYS', () => { + testUtils.testWithClient('keys', async client => { + assert.deepEqual( + await client.keys('pattern'), + [] + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/KEYS.ts b/packages/client/lib/commands/KEYS.ts new file mode 100644 index 00000000000..eb240c26ceb --- /dev/null +++ b/packages/client/lib/commands/KEYS.ts @@ -0,0 +1,18 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, ArrayReply, BlobStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Constructs the KEYS command + * + * @param parser - The command parser + * @param pattern - The pattern to match keys against + * @see https://redis.io/commands/keys/ + */ + parseCommand(parser: CommandParser, pattern: RedisArgument) { + parser.push('KEYS', pattern); + }, + transformReply: undefined as unknown as () => ArrayReply +} as const satisfies Command; diff 
--git a/packages/client/lib/commands/LASTSAVE.spec.ts b/packages/client/lib/commands/LASTSAVE.spec.ts new file mode 100644 index 00000000000..fba26811170 --- /dev/null +++ b/packages/client/lib/commands/LASTSAVE.spec.ts @@ -0,0 +1,20 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import LASTSAVE from './LASTSAVE'; +import { parseArgs } from './generic-transformers'; + +describe('LASTSAVE', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(LASTSAVE), + ['LASTSAVE'] + ); + }); + + testUtils.testWithClient('client.lastSave', async client => { + assert.equal( + typeof await client.lastSave(), + 'number' + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/LASTSAVE.ts b/packages/client/lib/commands/LASTSAVE.ts new file mode 100644 index 00000000000..fbbc6a0046a --- /dev/null +++ b/packages/client/lib/commands/LASTSAVE.ts @@ -0,0 +1,17 @@ +import { CommandParser } from '../client/parser'; +import { NumberReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Constructs the LASTSAVE command + * + * @param parser - The command parser + * @see https://redis.io/commands/lastsave/ + */ + parseCommand(parser: CommandParser) { + parser.push('LASTSAVE'); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/LATENCY_DOCTOR.spec.ts b/packages/client/lib/commands/LATENCY_DOCTOR.spec.ts new file mode 100644 index 00000000000..654751b5b57 --- /dev/null +++ b/packages/client/lib/commands/LATENCY_DOCTOR.spec.ts @@ -0,0 +1,20 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import LATENCY_DOCTOR from './LATENCY_DOCTOR'; +import { parseArgs } from './generic-transformers'; + +describe('LATENCY DOCTOR', () => { + it('transformArguments', () => { + assert.deepEqual( + 
parseArgs(LATENCY_DOCTOR), + ['LATENCY', 'DOCTOR'] + ); + }); + + testUtils.testWithClient('client.latencyDoctor', async client => { + assert.equal( + typeof await client.latencyDoctor(), + 'string' + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/LATENCY_DOCTOR.ts b/packages/client/lib/commands/LATENCY_DOCTOR.ts new file mode 100644 index 00000000000..5ba7ee6a7bf --- /dev/null +++ b/packages/client/lib/commands/LATENCY_DOCTOR.ts @@ -0,0 +1,17 @@ +import { CommandParser } from '../client/parser'; +import { BlobStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Constructs the LATENCY DOCTOR command + * + * @param parser - The command parser + * @see https://redis.io/commands/latency-doctor/ + */ + parseCommand(parser: CommandParser) { + parser.push('LATENCY', 'DOCTOR'); + }, + transformReply: undefined as unknown as () => BlobStringReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/LATENCY_GRAPH.spec.ts b/packages/client/lib/commands/LATENCY_GRAPH.spec.ts new file mode 100644 index 00000000000..7135dc1c420 --- /dev/null +++ b/packages/client/lib/commands/LATENCY_GRAPH.spec.ts @@ -0,0 +1,27 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import LATENCY_GRAPH from './LATENCY_GRAPH'; +import { parseArgs } from './generic-transformers'; + +describe('LATENCY GRAPH', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(LATENCY_GRAPH, 'command'), + [ + 'LATENCY', + 'GRAPH', + 'command' + ] + ); + }); + + testUtils.testWithClient('client.latencyGraph', async client => { + const [,, reply] = await Promise.all([ + client.configSet('latency-monitor-threshold', '1'), + client.sendCommand(['DEBUG', 'SLEEP', '0.001']), + client.latencyGraph('command') + ]); + + assert.equal(typeof reply, 'string'); + }, GLOBAL.SERVERS.OPEN); +}); diff --git 
a/packages/client/lib/commands/LATENCY_GRAPH.ts b/packages/client/lib/commands/LATENCY_GRAPH.ts new file mode 100644 index 00000000000..8c53624c741 --- /dev/null +++ b/packages/client/lib/commands/LATENCY_GRAPH.ts @@ -0,0 +1,39 @@ +import { CommandParser } from '../client/parser'; +import { BlobStringReply, Command } from '../RESP/types'; + +export const LATENCY_EVENTS = { + ACTIVE_DEFRAG_CYCLE: 'active-defrag-cycle', + AOF_FSYNC_ALWAYS: 'aof-fsync-always', + AOF_STAT: 'aof-stat', + AOF_REWRITE_DIFF_WRITE: 'aof-rewrite-diff-write', + AOF_RENAME: 'aof-rename', + AOF_WRITE: 'aof-write', + AOF_WRITE_ACTIVE_CHILD: 'aof-write-active-child', + AOF_WRITE_ALONE: 'aof-write-alone', + AOF_WRITE_PENDING_FSYNC: 'aof-write-pending-fsync', + COMMAND: 'command', + EXPIRE_CYCLE: 'expire-cycle', + EVICTION_CYCLE: 'eviction-cycle', + EVICTION_DEL: 'eviction-del', + FAST_COMMAND: 'fast-command', + FORK: 'fork', + RDB_UNLINK_TEMP_FILE: 'rdb-unlink-temp-file' +} as const; + +export type LatencyEvent = typeof LATENCY_EVENTS[keyof typeof LATENCY_EVENTS]; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Constructs the LATENCY GRAPH command + * + * @param parser - The command parser + * @param event - The latency event to get the graph for + * @see https://redis.io/commands/latency-graph/ + */ + parseCommand(parser: CommandParser, event: LatencyEvent) { + parser.push('LATENCY', 'GRAPH', event); + }, + transformReply: undefined as unknown as () => BlobStringReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/LATENCY_HISTORY.spec.ts b/packages/client/lib/commands/LATENCY_HISTORY.spec.ts new file mode 100644 index 00000000000..64f94d0d1a3 --- /dev/null +++ b/packages/client/lib/commands/LATENCY_HISTORY.spec.ts @@ -0,0 +1,27 @@ +import { strict as assert } from 'assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import LATENCY_HISTORY from './LATENCY_HISTORY'; +import { parseArgs } from './generic-transformers'; + 
+describe('LATENCY HISTORY', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(LATENCY_HISTORY, 'command'), + ['LATENCY', 'HISTORY', 'command'] + ); + }); + + testUtils.testWithClient('client.latencyHistory', async client => { + const [,, reply] = await Promise.all([ + client.configSet('latency-monitor-threshold', '100'), + client.sendCommand(['DEBUG', 'SLEEP', '1']), + client.latencyHistory('command') + ]); + + assert.ok(Array.isArray(reply)); + for (const [timestamp, latency] of reply) { + assert.equal(typeof timestamp, 'number'); + assert.equal(typeof latency, 'number'); + } + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/LATENCY_HISTORY.ts b/packages/client/lib/commands/LATENCY_HISTORY.ts new file mode 100644 index 00000000000..dec7129befa --- /dev/null +++ b/packages/client/lib/commands/LATENCY_HISTORY.ts @@ -0,0 +1,41 @@ +import { CommandParser } from '../client/parser'; +import { ArrayReply, TuplesReply, NumberReply, Command } from '../RESP/types'; + +export type LatencyEventType = ( + 'active-defrag-cycle' | + 'aof-fsync-always' | + 'aof-stat' | + 'aof-rewrite-diff-write' | + 'aof-rename' | + 'aof-write' | + 'aof-write-active-child' | + 'aof-write-alone' | + 'aof-write-pending-fsync' | + 'command' | + 'expire-cycle' | + 'eviction-cycle' | + 'eviction-del' | + 'fast-command' | + 'fork' | + 'rdb-unlink-temp-file' +); + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Constructs the LATENCY HISTORY command + * + * @param parser - The command parser + * @param event - The latency event to get the history for + * @see https://redis.io/commands/latency-history/ + */ + parseCommand(parser: CommandParser, event: LatencyEventType) { + parser.push('LATENCY', 'HISTORY', event); + }, + transformReply: undefined as unknown as () => ArrayReply> +} as const satisfies Command; + diff --git a/packages/client/lib/commands/LATENCY_LATEST.spec.ts b/packages/client/lib/commands/LATENCY_LATEST.spec.ts 
new file mode 100644 index 00000000000..2cd2d9a5e06 --- /dev/null +++ b/packages/client/lib/commands/LATENCY_LATEST.spec.ts @@ -0,0 +1,28 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import LATENCY_LATEST from './LATENCY_LATEST'; +import { parseArgs } from './generic-transformers'; + +describe('LATENCY LATEST', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(LATENCY_LATEST), + ['LATENCY', 'LATEST'] + ); + }); + + testUtils.testWithClient('client.latencyLatest', async client => { + const [,, reply] = await Promise.all([ + client.configSet('latency-monitor-threshold', '100'), + client.sendCommand(['DEBUG', 'SLEEP', '1']), + client.latencyLatest() + ]); + assert.ok(Array.isArray(reply)); + for (const [name, timestamp, latestLatency, allTimeLatency] of reply) { + assert.equal(typeof name, 'string'); + assert.equal(typeof timestamp, 'number'); + assert.equal(typeof latestLatency, 'number'); + assert.equal(typeof allTimeLatency, 'number'); + } + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/LATENCY_LATEST.ts b/packages/client/lib/commands/LATENCY_LATEST.ts new file mode 100644 index 00000000000..8fbdd46a13a --- /dev/null +++ b/packages/client/lib/commands/LATENCY_LATEST.ts @@ -0,0 +1,23 @@ +import { CommandParser } from '../client/parser'; +import { ArrayReply, BlobStringReply, NumberReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Constructs the LATENCY LATEST command + * + * @param parser - The command parser + * @see https://redis.io/commands/latency-latest/ + */ + parseCommand(parser: CommandParser) { + parser.push('LATENCY', 'LATEST'); + }, + transformReply: undefined as unknown as () => ArrayReply<[ + name: BlobStringReply, + timestamp: NumberReply, + latestLatency: NumberReply, + allTimeLatency: NumberReply + ]> +} as const satisfies Command; + diff --git 
a/packages/client/lib/commands/LATENCY_RESET.spec.ts b/packages/client/lib/commands/LATENCY_RESET.spec.ts new file mode 100644 index 00000000000..030d0d78e0a --- /dev/null +++ b/packages/client/lib/commands/LATENCY_RESET.spec.ts @@ -0,0 +1,104 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import LATENCY_RESET, { LATENCY_EVENTS } from './LATENCY_RESET'; +import { parseArgs } from './generic-transformers'; + +describe('LATENCY RESET', function () { + + + it('transformArguments with no events', () => { + assert.deepEqual( + parseArgs(LATENCY_RESET), + [ + 'LATENCY', + 'RESET' + ] + ); + }); + + it('transformArguments with one event', () => { + assert.deepEqual( + parseArgs(LATENCY_RESET, LATENCY_EVENTS.COMMAND), + [ + 'LATENCY', + 'RESET', + 'command' + ] + ); + }); + + it('transformArguments with multiple events', () => { + assert.deepEqual( + parseArgs(LATENCY_RESET, LATENCY_EVENTS.COMMAND, LATENCY_EVENTS.FORK), + [ + 'LATENCY', + 'RESET', + 'command', + 'fork' + ] + ); + }); + + + testUtils.testWithClient('client.latencyReset', async client => { + + await client.configSet('latency-monitor-threshold', '1'); + + + await client.sendCommand(['DEBUG', 'SLEEP', '0.1']); + + + const latestLatencyBeforeReset = await client.latencyLatest(); + assert.ok(latestLatencyBeforeReset.length > 0, 'Expected latency events to be recorded before first reset.'); + assert.equal(latestLatencyBeforeReset[0][0], 'command', 'Expected "command" event to be recorded.'); + assert.ok(Number(latestLatencyBeforeReset[0][2]) >= 100, 'Expected latest latency for "command" to be at least 100ms.'); + + + const replyAll = await client.latencyReset(); + + assert.equal(typeof replyAll, 'number'); + assert.ok(replyAll >= 0); + + + const latestLatencyAfterAllReset = await client.latencyLatest(); + assert.deepEqual(latestLatencyAfterAllReset, [], 'Expected no latency events after resetting all.'); + + + await client.sendCommand(['DEBUG', 'SLEEP', 
'0.05']); + const latestLatencyBeforeSpecificReset = await client.latencyLatest(); + assert.ok(latestLatencyBeforeSpecificReset.length > 0, 'Expected latency events before specific reset.'); + + + const replySpecific = await client.latencyReset(LATENCY_EVENTS.COMMAND); + assert.equal(typeof replySpecific, 'number'); + assert.ok(replySpecific >= 0); + + + const latestLatencyAfterSpecificReset = await client.latencyLatest(); + assert.deepEqual(latestLatencyAfterSpecificReset, [], 'Expected no latency events after specific reset of "command".'); + + + await client.sendCommand(['DEBUG', 'SLEEP', '0.02']); + + + const latestLatencyBeforeMultipleReset = await client.latencyLatest(); + assert.ok(latestLatencyBeforeMultipleReset.length > 0, 'Expected latency events before multiple reset.'); + + + const replyMultiple = await client.latencyReset(LATENCY_EVENTS.COMMAND, LATENCY_EVENTS.FORK); + assert.equal(typeof replyMultiple, 'number'); + assert.ok(replyMultiple >= 0); + + const latestLatencyAfterMultipleReset = await client.latencyLatest(); + assert.deepEqual(latestLatencyAfterMultipleReset, [], 'Expected no latency events after multiple specified resets.'); + + }, { + + ...GLOBAL.SERVERS.OPEN, + clientOptions: { + socket: { + connectTimeout: 300000 + } + } + }); +}); diff --git a/packages/client/lib/commands/LATENCY_RESET.ts b/packages/client/lib/commands/LATENCY_RESET.ts new file mode 100644 index 00000000000..0efa5767630 --- /dev/null +++ b/packages/client/lib/commands/LATENCY_RESET.ts @@ -0,0 +1,24 @@ +import { CommandParser } from '../client/parser'; +import { Command } from '../RESP/types'; +import { LATENCY_EVENTS, LatencyEvent } from './LATENCY_GRAPH'; + +export { LATENCY_EVENTS, LatencyEvent }; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: false, + /** + * Constructs the LATENCY RESET command + * * @param parser - The command parser + * @param events - The latency events to reset. If not specified, all events are reset. 
+ * @see https://redis.io/commands/latency-reset/ + */ + parseCommand(parser: CommandParser, ...events: Array) { + const args = ['LATENCY', 'RESET']; + if (events.length > 0) { + args.push(...events); + } + parser.push(...args); + }, + transformReply: undefined as unknown as () => number +} as const satisfies Command; diff --git a/packages/client/lib/commands/LCS.spec.ts b/packages/client/lib/commands/LCS.spec.ts new file mode 100644 index 00000000000..aedbb1b34e3 --- /dev/null +++ b/packages/client/lib/commands/LCS.spec.ts @@ -0,0 +1,25 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import LCS from './LCS'; +import { parseArgs } from './generic-transformers'; + +describe('LCS', () => { + testUtils.isVersionGreaterThanHook([7]); + + it('transformArguments', () => { + assert.deepEqual( + parseArgs(LCS, '1', '2'), + ['LCS', '1', '2'] + ); + }); + + testUtils.testAll('lcs', async client => { + assert.equal( + await client.lcs('{tag}1', '{tag}2'), + '' + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/LCS.ts b/packages/client/lib/commands/LCS.ts new file mode 100644 index 00000000000..9b2317e147f --- /dev/null +++ b/packages/client/lib/commands/LCS.ts @@ -0,0 +1,23 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, BlobStringReply, Command } from '../RESP/types'; + +export default { + IS_READ_ONLY: true, + /** + * Constructs the LCS command (Longest Common Substring) + * + * @param parser - The command parser + * @param key1 - First key containing the first string + * @param key2 - Second key containing the second string + * @see https://redis.io/commands/lcs/ + */ + parseCommand( + parser: CommandParser, + key1: RedisArgument, + key2: RedisArgument + ) { + parser.push('LCS'); + parser.pushKeys([key1, key2]); + }, + transformReply: undefined as unknown as () => BlobStringReply +} as const satisfies Command; 
diff --git a/packages/client/lib/commands/LCS_IDX.spec.ts b/packages/client/lib/commands/LCS_IDX.spec.ts new file mode 100644 index 00000000000..c4cc6681d85 --- /dev/null +++ b/packages/client/lib/commands/LCS_IDX.spec.ts @@ -0,0 +1,35 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import LCS_IDX from './LCS_IDX'; +import { parseArgs } from './generic-transformers'; + +describe('LCS IDX', () => { + testUtils.isVersionGreaterThanHook([7]); + + it('transformArguments', () => { + assert.deepEqual( + parseArgs(LCS_IDX, '1', '2'), + ['LCS', '1', '2', 'IDX'] + ); + }); + + testUtils.testWithClient('client.lcsIdx', async client => { + const [, reply] = await Promise.all([ + client.mSet({ + '1': 'abc', + '2': 'bc' + }), + client.lcsIdx('1', '2') + ]); + + assert.deepEqual( + reply, + { + matches: [ + [[1, 2], [0, 1]] + ], + len: 2 + } + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/LCS_IDX.ts b/packages/client/lib/commands/LCS_IDX.ts new file mode 100644 index 00000000000..684aa99efb0 --- /dev/null +++ b/packages/client/lib/commands/LCS_IDX.ts @@ -0,0 +1,58 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, TuplesToMapReply, BlobStringReply, ArrayReply, NumberReply, UnwrapReply, Resp2Reply, Command, TuplesReply } from '../RESP/types'; +import LCS from './LCS'; + +export interface LcsIdxOptions { + MINMATCHLEN?: number; +} + +export type LcsIdxRange = TuplesReply<[ + start: NumberReply, + end: NumberReply +]>; + +export type LcsIdxMatches = ArrayReply< + TuplesReply<[ + key1: LcsIdxRange, + key2: LcsIdxRange + ]> +>; + +export type LcsIdxReply = TuplesToMapReply<[ + [BlobStringReply<'matches'>, LcsIdxMatches], + [BlobStringReply<'len'>, NumberReply] +]>; + +export default { + IS_READ_ONLY: LCS.IS_READ_ONLY, + /** + * Constructs the LCS command with IDX option + * + * @param parser - The command parser + * @param key1 - First key containing the first string + 
* @param key2 - Second key containing the second string + * @param options - Additional options for the LCS IDX command + * @see https://redis.io/commands/lcs/ + */ + parseCommand( + parser: CommandParser, + key1: RedisArgument, + key2: RedisArgument, + options?: LcsIdxOptions + ) { + LCS.parseCommand(parser, key1, key2); + + parser.push('IDX'); + + if (options?.MINMATCHLEN) { + parser.push('MINMATCHLEN', options.MINMATCHLEN.toString()); + } + }, + transformReply: { + 2: (reply: UnwrapReply>) => ({ + matches: reply[1], + len: reply[3] + }), + 3: undefined as unknown as () => LcsIdxReply + } +} as const satisfies Command; diff --git a/packages/client/lib/commands/LCS_IDX_WITHMATCHLEN.spec.ts b/packages/client/lib/commands/LCS_IDX_WITHMATCHLEN.spec.ts new file mode 100644 index 00000000000..92ecad4761c --- /dev/null +++ b/packages/client/lib/commands/LCS_IDX_WITHMATCHLEN.spec.ts @@ -0,0 +1,35 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import LCS_IDX_WITHMATCHLEN from './LCS_IDX_WITHMATCHLEN'; +import { parseArgs } from './generic-transformers'; + +describe('LCS IDX WITHMATCHLEN', () => { + testUtils.isVersionGreaterThanHook([7]); + + it('transformArguments', () => { + assert.deepEqual( + parseArgs(LCS_IDX_WITHMATCHLEN, '1', '2'), + ['LCS', '1', '2', 'IDX', 'WITHMATCHLEN'] + ); + }); + + testUtils.testWithClient('client.lcsIdxWithMatchLen', async client => { + const [, reply] = await Promise.all([ + client.mSet({ + '1': 'abc', + '2': 'bc' + }), + client.lcsIdxWithMatchLen('1', '2') + ]); + + assert.deepEqual( + reply, + { + matches: [ + [[1, 2], [0, 1], 2] + ], + len: 2 + } + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/LCS_IDX_WITHMATCHLEN.ts b/packages/client/lib/commands/LCS_IDX_WITHMATCHLEN.ts new file mode 100644 index 00000000000..f3578b789fc --- /dev/null +++ b/packages/client/lib/commands/LCS_IDX_WITHMATCHLEN.ts @@ -0,0 +1,37 @@ +import { TuplesToMapReply, 
BlobStringReply, ArrayReply, TuplesReply, NumberReply, UnwrapReply, Resp2Reply, Command } from '../RESP/types'; +import LCS_IDX, { LcsIdxRange } from './LCS_IDX'; + +export type LcsIdxWithMatchLenMatches = ArrayReply< + TuplesReply<[ + key1: LcsIdxRange, + key2: LcsIdxRange, + len: NumberReply + ]> +>; + +export type LcsIdxWithMatchLenReply = TuplesToMapReply<[ + [BlobStringReply<'matches'>, LcsIdxWithMatchLenMatches], + [BlobStringReply<'len'>, NumberReply] +]>; + +export default { + IS_READ_ONLY: LCS_IDX.IS_READ_ONLY, + /** + * Constructs the LCS command with IDX and WITHMATCHLEN options + * + * @param args - The same parameters as LCS_IDX command + * @see https://redis.io/commands/lcs/ + */ + parseCommand(...args: Parameters) { + const parser = args[0]; + LCS_IDX.parseCommand(...args); + parser.push('WITHMATCHLEN'); + }, + transformReply: { + 2: (reply: UnwrapReply>) => ({ + matches: reply[1], + len: reply[3] + }), + 3: undefined as unknown as () => LcsIdxWithMatchLenReply + } +} as const satisfies Command; diff --git a/packages/client/lib/commands/LCS_LEN.spec.ts b/packages/client/lib/commands/LCS_LEN.spec.ts new file mode 100644 index 00000000000..53a2e83c326 --- /dev/null +++ b/packages/client/lib/commands/LCS_LEN.spec.ts @@ -0,0 +1,25 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import LCS_LEN from './LCS_LEN'; +import { parseArgs } from './generic-transformers'; + +describe('LCS_LEN', () => { + testUtils.isVersionGreaterThanHook([7]); + + it('transformArguments', () => { + assert.deepEqual( + parseArgs(LCS_LEN, '1', '2'), + ['LCS', '1', '2', 'LEN'] + ); + }); + + testUtils.testAll('lcsLen', async client => { + assert.equal( + await client.lcsLen('{tag}1', '{tag}2'), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/LCS_LEN.ts b/packages/client/lib/commands/LCS_LEN.ts new file mode 100644 index 00000000000..bb35c3d9209 
--- /dev/null +++ b/packages/client/lib/commands/LCS_LEN.ts @@ -0,0 +1,19 @@ +import { NumberReply, Command } from '../RESP/types'; +import LCS from './LCS'; + +export default { + IS_READ_ONLY: LCS.IS_READ_ONLY, + /** + * Constructs the LCS command with LEN option + * + * @param args - The same parameters as LCS command + * @see https://redis.io/commands/lcs/ + */ + parseCommand(...args: Parameters) { + const parser = args[0]; + + LCS.parseCommand(...args); + parser.push('LEN'); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/LINDEX.spec.ts b/packages/client/lib/commands/LINDEX.spec.ts new file mode 100644 index 00000000000..41eff474a1a --- /dev/null +++ b/packages/client/lib/commands/LINDEX.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import LINDEX from './LINDEX'; +import { parseArgs } from './generic-transformers'; + +describe('LINDEX', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(LINDEX, 'key', 0), + ['LINDEX', 'key', '0'] + ); + }); + + testUtils.testAll('lIndex', async client => { + assert.equal( + await client.lIndex('key', 0), + null + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); \ No newline at end of file diff --git a/packages/client/lib/commands/LINDEX.ts b/packages/client/lib/commands/LINDEX.ts new file mode 100644 index 00000000000..dd7671a41c6 --- /dev/null +++ b/packages/client/lib/commands/LINDEX.ts @@ -0,0 +1,21 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, BlobStringReply, NullReply, Command } from '../RESP/types'; + +export default { + CACHEABLE: true, + IS_READ_ONLY: true, + /** + * Constructs the LINDEX command + * + * @param parser - The command parser + * @param key - The key of the list + * @param index - The index of the element to retrieve + * @see 
https://redis.io/commands/lindex/ + */ + parseCommand(parser: CommandParser, key: RedisArgument, index: number) { + parser.push('LINDEX'); + parser.pushKey(key); + parser.push(index.toString()); + }, + transformReply: undefined as unknown as () => BlobStringReply | NullReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/LINSERT.spec.ts b/packages/client/lib/commands/LINSERT.spec.ts new file mode 100644 index 00000000000..c3c89d56c12 --- /dev/null +++ b/packages/client/lib/commands/LINSERT.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import LINSERT from './LINSERT'; +import { parseArgs } from './generic-transformers'; + +describe('LINSERT', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(LINSERT, 'key', 'BEFORE', 'pivot', 'element'), + ['LINSERT', 'key', 'BEFORE', 'pivot', 'element'] + ); + }); + + testUtils.testAll('lInsert', async client => { + assert.equal( + await client.lInsert('key', 'BEFORE', 'pivot', 'element'), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/LINSERT.ts b/packages/client/lib/commands/LINSERT.ts new file mode 100644 index 00000000000..ede230191ba --- /dev/null +++ b/packages/client/lib/commands/LINSERT.ts @@ -0,0 +1,30 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; + +type LInsertPosition = 'BEFORE' | 'AFTER'; + +export default { + IS_READ_ONLY: true, + /** + * Constructs the LINSERT command + * + * @param parser - The command parser + * @param key - The key of the list + * @param position - The position where to insert (BEFORE or AFTER) + * @param pivot - The element to find in the list + * @param element - The element to insert + * @see https://redis.io/commands/linsert/ + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + position: 
LInsertPosition, + pivot: RedisArgument, + element: RedisArgument + ) { + parser.push('LINSERT'); + parser.pushKey(key); + parser.push(position, pivot, element); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/LLEN.spec.ts b/packages/client/lib/commands/LLEN.spec.ts new file mode 100644 index 00000000000..d86078d0b48 --- /dev/null +++ b/packages/client/lib/commands/LLEN.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import LLEN from './LLEN'; +import { parseArgs } from './generic-transformers'; + +describe('LLEN', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(LLEN, 'key'), + ['LLEN', 'key'] + ); + }); + + testUtils.testAll('lLen', async client => { + assert.equal( + await client.lLen('key'), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/LLEN.ts b/packages/client/lib/commands/LLEN.ts new file mode 100644 index 00000000000..7ece6823bb5 --- /dev/null +++ b/packages/client/lib/commands/LLEN.ts @@ -0,0 +1,19 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; + +export default { + CACHEABLE: true, + IS_READ_ONLY: true, + /** + * Constructs the LLEN command + * + * @param parser - The command parser + * @param key - The key of the list to get the length of + * @see https://redis.io/commands/llen/ + */ + parseCommand(parser: CommandParser, key: RedisArgument) { + parser.push('LLEN'); + parser.pushKey(key); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/LMOVE.spec.ts b/packages/client/lib/commands/LMOVE.spec.ts new file mode 100644 index 00000000000..bed3ff8eab0 --- /dev/null +++ b/packages/client/lib/commands/LMOVE.spec.ts @@ -0,0 
+1,25 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import LMOVE from './LMOVE'; +import { parseArgs } from './generic-transformers'; + +describe('LMOVE', () => { + testUtils.isVersionGreaterThanHook([6, 2]); + + it('transformArguments', () => { + assert.deepEqual( + parseArgs(LMOVE, 'source', 'destination', 'LEFT', 'RIGHT'), + ['LMOVE', 'source', 'destination', 'LEFT', 'RIGHT'] + ); + }); + + testUtils.testAll('lMove', async client => { + assert.equal( + await client.lMove('{tag}source', '{tag}destination', 'LEFT', 'RIGHT'), + null + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/LMOVE.ts b/packages/client/lib/commands/LMOVE.ts new file mode 100644 index 00000000000..9ed0003b23d --- /dev/null +++ b/packages/client/lib/commands/LMOVE.ts @@ -0,0 +1,29 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, BlobStringReply, NullReply, Command } from '../RESP/types'; +import { ListSide } from './generic-transformers'; + +export default { + IS_READ_ONLY: false, + /** + * Constructs the LMOVE command + * + * @param parser - The command parser + * @param source - The source list key + * @param destination - The destination list key + * @param sourceSide - The side to pop from (LEFT or RIGHT) + * @param destinationSide - The side to push to (LEFT or RIGHT) + * @see https://redis.io/commands/lmove/ + */ + parseCommand( + parser: CommandParser, + source: RedisArgument, + destination: RedisArgument, + sourceSide: ListSide, + destinationSide: ListSide + ) { + parser.push('LMOVE'); + parser.pushKeys([source, destination]); + parser.push(sourceSide, destinationSide); + }, + transformReply: undefined as unknown as () => BlobStringReply | NullReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/LMPOP.spec.ts b/packages/client/lib/commands/LMPOP.spec.ts new file mode 100644 index 
00000000000..bd2cf869e74 --- /dev/null +++ b/packages/client/lib/commands/LMPOP.spec.ts @@ -0,0 +1,51 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import LMPOP from './LMPOP'; +import { parseArgs } from './generic-transformers'; + +describe('LMPOP', () => { + testUtils.isVersionGreaterThanHook([7]); + + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(LMPOP, 'key', 'LEFT'), + ['LMPOP', '1', 'key', 'LEFT'] + ); + }); + + it('with COUNT', () => { + assert.deepEqual( + parseArgs(LMPOP, 'key', 'LEFT', { + COUNT: 2 + }), + ['LMPOP', '1', 'key', 'LEFT', 'COUNT', '2'] + ); + }); + }); + + testUtils.testAll('lmPop - null', async client => { + assert.equal( + await client.lmPop('key', 'RIGHT'), + null + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.SERVERS.OPEN + }); + + testUtils.testAll('lmPop - with member', async client => { + const [, reply] = await Promise.all([ + client.lPush('key', 'element'), + client.lmPop('key', 'RIGHT') + ]); + + assert.deepEqual(reply, [ + 'key', + ['element'] + ]); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.SERVERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/LMPOP.ts b/packages/client/lib/commands/LMPOP.ts new file mode 100644 index 00000000000..54dc40c1c3d --- /dev/null +++ b/packages/client/lib/commands/LMPOP.ts @@ -0,0 +1,42 @@ +import { CommandParser } from '../client/parser'; +import { NullReply, TuplesReply, BlobStringReply, Command } from '../RESP/types'; +import { ListSide, RedisVariadicArgument, Tail } from './generic-transformers'; + +export interface LMPopOptions { + COUNT?: number; +} + +export function parseLMPopArguments( + parser: CommandParser, + keys: RedisVariadicArgument, + side: ListSide, + options?: LMPopOptions +) { + parser.pushKeysLength(keys); + parser.push(side); + + if (options?.COUNT !== undefined) { + parser.push('COUNT', options.COUNT.toString()); + } +} + +export type 
LMPopArguments = Tail>; + +export default { + IS_READ_ONLY: false, + /** + * Constructs the LMPOP command + * + * @param parser - The command parser + * @param args - Arguments including keys, side (LEFT or RIGHT), and options + * @see https://redis.io/commands/lmpop/ + */ + parseCommand(parser: CommandParser, ...args: LMPopArguments) { + parser.push('LMPOP'); + parseLMPopArguments(parser, ...args); + }, + transformReply: undefined as unknown as () => NullReply | TuplesReply<[ + key: BlobStringReply, + elements: Array + ]> +} as const satisfies Command; diff --git a/packages/client/lib/commands/LOLWUT.spec.ts b/packages/client/lib/commands/LOLWUT.spec.ts new file mode 100644 index 00000000000..b06030b0d0e --- /dev/null +++ b/packages/client/lib/commands/LOLWUT.spec.ts @@ -0,0 +1,36 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import LOLWUT from './LOLWUT'; +import { parseArgs } from './generic-transformers'; + +describe('LOLWUT', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(LOLWUT), + ['LOLWUT'] + ); + }); + + it('with version', () => { + assert.deepEqual( + parseArgs(LOLWUT, 5), + ['LOLWUT', 'VERSION', '5'] + ); + }); + + it('with version and optional arguments', () => { + assert.deepEqual( + parseArgs(LOLWUT, 5, 1, 2, 3), + ['LOLWUT', 'VERSION', '5', '1', '2', '3'] + ); + }); + }); + + testUtils.testWithClient('client.LOLWUT', async client => { + assert.equal( + typeof (await client.LOLWUT()), + 'string' + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/LOLWUT.ts b/packages/client/lib/commands/LOLWUT.ts new file mode 100644 index 00000000000..5e07a103720 --- /dev/null +++ b/packages/client/lib/commands/LOLWUT.ts @@ -0,0 +1,26 @@ +import { CommandParser } from '../client/parser'; +import { BlobStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * 
Constructs the LOLWUT command + * + * @param parser - The command parser + * @param version - Optional version parameter + * @param optionalArguments - Additional optional numeric arguments + * @see https://redis.io/commands/lolwut/ + */ + parseCommand(parser: CommandParser, version?: number, ...optionalArguments: Array) { + parser.push('LOLWUT'); + if (version) { + parser.push( + 'VERSION', + version.toString() + ); + parser.pushVariadic(optionalArguments.map(String)); + } + }, + transformReply: undefined as unknown as () => BlobStringReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/LPOP.spec.ts b/packages/client/lib/commands/LPOP.spec.ts new file mode 100644 index 00000000000..93449bdbf5f --- /dev/null +++ b/packages/client/lib/commands/LPOP.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import LPOP from './LPOP'; +import { parseArgs } from './generic-transformers'; + +describe('LPOP', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(LPOP, 'key'), + ['LPOP', 'key'] + ); + }); + + testUtils.testAll('lPop', async client => { + assert.equal( + await client.lPop('key'), + null + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/LPOP.ts b/packages/client/lib/commands/LPOP.ts new file mode 100644 index 00000000000..aaa83be465d --- /dev/null +++ b/packages/client/lib/commands/LPOP.ts @@ -0,0 +1,17 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, BlobStringReply, NullReply, Command } from '../RESP/types'; + +export default { + /** + * Constructs the LPOP command + * + * @param parser - The command parser + * @param key - The key of the list to pop from + * @see https://redis.io/commands/lpop/ + */ + parseCommand(parser: CommandParser, key: RedisArgument) { + parser.push('LPOP'); + parser.pushKey(key); + }, + transformReply: undefined 
as unknown as () => BlobStringReply | NullReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/LPOP_COUNT.spec.ts b/packages/client/lib/commands/LPOP_COUNT.spec.ts new file mode 100644 index 00000000000..04bb3648d0a --- /dev/null +++ b/packages/client/lib/commands/LPOP_COUNT.spec.ts @@ -0,0 +1,25 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import LPOP_COUNT from './LPOP_COUNT'; +import { parseArgs } from './generic-transformers'; + +describe('LPOP COUNT', () => { + testUtils.isVersionGreaterThanHook([6, 2]); + + it('transformArguments', () => { + assert.deepEqual( + parseArgs(LPOP_COUNT, 'key', 1), + ['LPOP', 'key', '1'] + ); + }); + + testUtils.testAll('lPopCount', async client => { + assert.equal( + await client.lPopCount('key', 1), + null + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/LPOP_COUNT.ts b/packages/client/lib/commands/LPOP_COUNT.ts new file mode 100644 index 00000000000..cdc0dc41a22 --- /dev/null +++ b/packages/client/lib/commands/LPOP_COUNT.ts @@ -0,0 +1,20 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NullReply, ArrayReply, BlobStringReply, Command } from '../RESP/types'; +import LPOP from './LPOP'; + +export default { + IS_READ_ONLY: false, + /** + * Constructs the LPOP command with count parameter + * + * @param parser - The command parser + * @param key - The key of the list to pop from + * @param count - The number of elements to pop + * @see https://redis.io/commands/lpop/ + */ + parseCommand(parser: CommandParser, key: RedisArgument, count: number) { + LPOP.parseCommand(parser, key); + parser.push(count.toString()) + }, + transformReply: undefined as unknown as () => NullReply | ArrayReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/LPOS.spec.ts b/packages/client/lib/commands/LPOS.spec.ts new file mode 100644 index 
00000000000..f26af3f540f --- /dev/null +++ b/packages/client/lib/commands/LPOS.spec.ts @@ -0,0 +1,55 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import LPOS from './LPOS'; +import { parseArgs } from './generic-transformers'; + +describe('LPOS', () => { + testUtils.isVersionGreaterThanHook([6, 0, 6]); + + describe('processCommand', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(LPOS, 'key', 'element'), + ['LPOS', 'key', 'element'] + ); + }); + + it('with RANK', () => { + assert.deepEqual( + parseArgs(LPOS, 'key', 'element', { + RANK: 0 + }), + ['LPOS', 'key', 'element', 'RANK', '0'] + ); + }); + + it('with MAXLEN', () => { + assert.deepEqual( + parseArgs(LPOS, 'key', 'element', { + MAXLEN: 10 + }), + ['LPOS', 'key', 'element', 'MAXLEN', '10'] + ); + }); + + it('with RANK, MAXLEN', () => { + assert.deepEqual( + parseArgs(LPOS, 'key', 'element', { + RANK: 0, + MAXLEN: 10 + }), + ['LPOS', 'key', 'element', 'RANK', '0', 'MAXLEN', '10'] + ); + }); + }); + + testUtils.testAll('lPos', async client => { + assert.equal( + await client.lPos('key', 'element'), + null + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/LPOS.ts b/packages/client/lib/commands/LPOS.ts new file mode 100644 index 00000000000..54078b8185f --- /dev/null +++ b/packages/client/lib/commands/LPOS.ts @@ -0,0 +1,40 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, NullReply, Command } from '../RESP/types'; + +export interface LPosOptions { + RANK?: number; + MAXLEN?: number; +} + +export default { + CACHEABLE: true, + IS_READ_ONLY: true, + /** + * Constructs the LPOS command + * + * @param parser - The command parser + * @param key - The key of the list + * @param element - The element to search for + * @param options - Optional parameters for RANK and MAXLEN + * @see https://redis.io/commands/lpos/ + */ + 
parseCommand( + parser: CommandParser, + key: RedisArgument, + element: RedisArgument, + options?: LPosOptions + ) { + parser.push('LPOS'); + parser.pushKey(key); + parser.push(element); + + if (options?.RANK !== undefined) { + parser.push('RANK', options.RANK.toString()); + } + + if (options?.MAXLEN !== undefined) { + parser.push('MAXLEN', options.MAXLEN.toString()); + } + }, + transformReply: undefined as unknown as () => NumberReply | NullReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/LPOS_COUNT.spec.ts b/packages/client/lib/commands/LPOS_COUNT.spec.ts new file mode 100644 index 00000000000..702ef5a746b --- /dev/null +++ b/packages/client/lib/commands/LPOS_COUNT.spec.ts @@ -0,0 +1,55 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import LPOS_COUNT from './LPOS_COUNT'; +import { parseArgs } from './generic-transformers'; + +describe('LPOS COUNT', () => { + testUtils.isVersionGreaterThanHook([6, 0, 6]); + + describe('processCommand', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(LPOS_COUNT, 'key', 'element', 0), + ['LPOS', 'key', 'element', 'COUNT', '0'] + ); + }); + + it('with RANK', () => { + assert.deepEqual( + parseArgs(LPOS_COUNT, 'key', 'element', 0, { + RANK: 0 + }), + ['LPOS', 'key', 'element', 'RANK', '0', 'COUNT', '0'] + ); + }); + + it('with MAXLEN', () => { + assert.deepEqual( + parseArgs(LPOS_COUNT, 'key', 'element', 0, { + MAXLEN: 10 + }), + ['LPOS', 'key', 'element', 'MAXLEN', '10', 'COUNT', '0'] + ); + }); + + it('with RANK, MAXLEN', () => { + assert.deepEqual( + parseArgs(LPOS_COUNT, 'key', 'element', 0, { + RANK: 0, + MAXLEN: 10 + }), + ['LPOS', 'key', 'element', 'RANK', '0', 'MAXLEN', '10', 'COUNT', '0'] + ); + }); + }); + + testUtils.testAll('lPosCount', async client => { + assert.deepEqual( + await client.lPosCount('key', 'element', 0), + [] + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git 
a/packages/client/lib/commands/LPOS_COUNT.ts b/packages/client/lib/commands/LPOS_COUNT.ts new file mode 100644 index 00000000000..ace6e49c1e5 --- /dev/null +++ b/packages/client/lib/commands/LPOS_COUNT.ts @@ -0,0 +1,30 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, ArrayReply, NumberReply, Command } from '../RESP/types'; +import LPOS, { LPosOptions } from './LPOS'; + +export default { + CACHEABLE: LPOS.CACHEABLE, + IS_READ_ONLY: LPOS.IS_READ_ONLY, + /** + * Constructs the LPOS command with COUNT option + * + * @param parser - The command parser + * @param key - The key of the list + * @param element - The element to search for + * @param count - The number of positions to return + * @param options - Optional parameters for RANK and MAXLEN + * @see https://redis.io/commands/lpos/ + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + element: RedisArgument, + count: number, + options?: LPosOptions + ) { + LPOS.parseCommand(parser, key, element, options); + + parser.push('COUNT', count.toString()); + }, + transformReply: undefined as unknown as () => ArrayReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/LPUSH.spec.ts b/packages/client/lib/commands/LPUSH.spec.ts new file mode 100644 index 00000000000..09c7d9da772 --- /dev/null +++ b/packages/client/lib/commands/LPUSH.spec.ts @@ -0,0 +1,32 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import LPUSH from './LPUSH'; +import { parseArgs } from './generic-transformers'; + +describe('LPUSH', () => { + describe('transformArguments', () => { + it('string', () => { + assert.deepEqual( + parseArgs(LPUSH, 'key', 'field'), + ['LPUSH', 'key', 'field'] + ); + }); + + it('array', () => { + assert.deepEqual( + parseArgs(LPUSH, 'key', ['1', '2']), + ['LPUSH', 'key', '1', '2'] + ); + }); + }); + + testUtils.testAll('lPush', async client => { + assert.equal( + await client.lPush('key', 'field'), + 1 + ); 
+ }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/LPUSH.ts b/packages/client/lib/commands/LPUSH.ts new file mode 100644 index 00000000000..89e1e094870 --- /dev/null +++ b/packages/client/lib/commands/LPUSH.ts @@ -0,0 +1,20 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; +import { RedisVariadicArgument } from './generic-transformers'; + +export default { + /** + * Constructs the LPUSH command + * + * @param parser - The command parser + * @param key - The key of the list + * @param elements - One or more elements to push to the list + * @see https://redis.io/commands/lpush/ + */ + parseCommand(parser: CommandParser, key: RedisArgument, elements: RedisVariadicArgument) { + parser.push('LPUSH'); + parser.pushKey(key); + parser.pushVariadic(elements); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/LPUSHX.spec.ts b/packages/client/lib/commands/LPUSHX.spec.ts new file mode 100644 index 00000000000..179a0ddb29e --- /dev/null +++ b/packages/client/lib/commands/LPUSHX.spec.ts @@ -0,0 +1,32 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import LPUSHX from './LPUSHX'; +import { parseArgs } from './generic-transformers'; + +describe('LPUSHX', () => { + describe('transformArguments', () => { + it('string', () => { + assert.deepEqual( + parseArgs(LPUSHX, 'key', 'element'), + ['LPUSHX', 'key', 'element'] + ); + }); + + it('array', () => { + assert.deepEqual( + parseArgs(LPUSHX, 'key', ['1', '2']), + ['LPUSHX', 'key', '1', '2'] + ); + }); + }); + + testUtils.testAll('lPushX', async client => { + assert.equal( + await client.lPushX('key', 'element'), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/LPUSHX.ts 
b/packages/client/lib/commands/LPUSHX.ts new file mode 100644 index 00000000000..e87bd4ff0d5 --- /dev/null +++ b/packages/client/lib/commands/LPUSHX.ts @@ -0,0 +1,20 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; +import { RedisVariadicArgument } from './generic-transformers'; + +export default { + /** + * Constructs the LPUSHX command + * + * @param parser - The command parser + * @param key - The key of the list + * @param elements - One or more elements to push to the list if it exists + * @see https://redis.io/commands/lpushx/ + */ + parseCommand(parser: CommandParser, key: RedisArgument, elements: RedisVariadicArgument) { + parser.push('LPUSHX'); + parser.pushKey(key); + parser.pushVariadic(elements); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/LRANGE.spec.ts b/packages/client/lib/commands/LRANGE.spec.ts new file mode 100644 index 00000000000..c0bb046d898 --- /dev/null +++ b/packages/client/lib/commands/LRANGE.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import LRANGE from './LRANGE'; +import { parseArgs } from './generic-transformers'; + +describe('LRANGE', () => { + it('processCommand', () => { + assert.deepEqual( + parseArgs(LRANGE, 'key', 0, -1), + ['LRANGE', 'key', '0', '-1'] + ); + }); + + testUtils.testAll('lRange', async client => { + assert.deepEqual( + await client.lRange('key', 0, -1), + [] + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/LRANGE.ts b/packages/client/lib/commands/LRANGE.ts new file mode 100644 index 00000000000..040bb6b4498 --- /dev/null +++ b/packages/client/lib/commands/LRANGE.ts @@ -0,0 +1,22 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, ArrayReply, BlobStringReply, 
Command } from '../RESP/types'; + +export default { + CACHEABLE: true, + IS_READ_ONLY: true, + /** + * Constructs the LRANGE command + * + * @param parser - The command parser + * @param key - The key of the list + * @param start - The starting index + * @param stop - The ending index + * @see https://redis.io/commands/lrange/ + */ + parseCommand(parser: CommandParser, key: RedisArgument, start: number, stop: number) { + parser.push('LRANGE'); + parser.pushKey(key); + parser.push(start.toString(), stop.toString()) + }, + transformReply: undefined as unknown as () => ArrayReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/LREM.spec.ts b/packages/client/lib/commands/LREM.spec.ts new file mode 100644 index 00000000000..2a36d8ee2f1 --- /dev/null +++ b/packages/client/lib/commands/LREM.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import LREM from './LREM'; +import { parseArgs } from './generic-transformers'; + +describe('LREM', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(LREM, 'key', 0, 'element'), + ['LREM', 'key', '0', 'element'] + ); + }); + + testUtils.testAll('lRem', async client => { + assert.equal( + await client.lRem('key', 0, 'element'), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/LREM.ts b/packages/client/lib/commands/LREM.ts new file mode 100644 index 00000000000..4e5de0fa78c --- /dev/null +++ b/packages/client/lib/commands/LREM.ts @@ -0,0 +1,22 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; + +export default { + IS_READ_ONLY: true, + /** + * Constructs the LREM command + * + * @param parser - The command parser + * @param key - The key of the list + * @param count - The count of elements to remove (negative: from tail to head, 0: all occurrences, positive: 
from head to tail) + * @param element - The element to remove + * @see https://redis.io/commands/lrem/ + */ + parseCommand(parser: CommandParser, key: RedisArgument, count: number, element: RedisArgument) { + parser.push('LREM'); + parser.pushKey(key); + parser.push(count.toString()); + parser.push(element); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/LSET.spec.ts b/packages/client/lib/commands/LSET.spec.ts new file mode 100644 index 00000000000..c7522942402 --- /dev/null +++ b/packages/client/lib/commands/LSET.spec.ts @@ -0,0 +1,24 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import LSET from './LSET'; +import { parseArgs } from './generic-transformers'; + +describe('LSET', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(LSET, 'key', 0, 'element'), + ['LSET', 'key', '0', 'element'] + ); + }); + + testUtils.testAll('lSet', async client => { + await client.lPush('key', 'element'); + assert.equal( + await client.lSet('key', 0, 'element'), + 'OK' + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/LSET.ts b/packages/client/lib/commands/LSET.ts new file mode 100644 index 00000000000..052961a316e --- /dev/null +++ b/packages/client/lib/commands/LSET.ts @@ -0,0 +1,21 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, SimpleStringReply, Command } from '../RESP/types'; + +export default { + IS_READ_ONLY: true, + /** + * Constructs the LSET command + * + * @param parser - The command parser + * @param key - The key of the list + * @param index - The index of the element to replace + * @param element - The new value to set + * @see https://redis.io/commands/lset/ + */ + parseCommand(parser: CommandParser, key: RedisArgument, index: number, element: RedisArgument) { + parser.push('LSET'); + 
parser.pushKey(key); + parser.push(index.toString(), element); + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/client/lib/commands/LTRIM.spec.ts b/packages/client/lib/commands/LTRIM.spec.ts new file mode 100644 index 00000000000..5b6d77c91de --- /dev/null +++ b/packages/client/lib/commands/LTRIM.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import LTRIM from './LTRIM'; +import { parseArgs } from './generic-transformers'; + +describe('LTRIM', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(LTRIM, 'key', 0, -1), + ['LTRIM', 'key', '0', '-1'] + ); + }); + + testUtils.testAll('lTrim', async client => { + assert.equal( + await client.lTrim('key', 0, -1), + 'OK' + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/LTRIM.ts b/packages/client/lib/commands/LTRIM.ts new file mode 100644 index 00000000000..31c2b66b5a9 --- /dev/null +++ b/packages/client/lib/commands/LTRIM.ts @@ -0,0 +1,20 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, SimpleStringReply, Command } from '../RESP/types'; + +export default { + /** + * Constructs the LTRIM command + * + * @param parser - The command parser + * @param key - The key of the list + * @param start - The starting index + * @param stop - The ending index + * @see https://redis.io/commands/ltrim/ + */ + parseCommand(parser: CommandParser, key: RedisArgument, start: number, stop: number) { + parser.push('LTRIM'); + parser.pushKey(key); + parser.push(start.toString(), stop.toString()); + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/client/lib/commands/MEMORY_DOCTOR.spec.ts b/packages/client/lib/commands/MEMORY_DOCTOR.spec.ts new file mode 100644 index 
00000000000..9d822f8e07e --- /dev/null +++ b/packages/client/lib/commands/MEMORY_DOCTOR.spec.ts @@ -0,0 +1,20 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import MEMORY_DOCTOR from './MEMORY_DOCTOR'; +import { parseArgs } from './generic-transformers'; + +describe('MEMORY DOCTOR', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(MEMORY_DOCTOR), + ['MEMORY', 'DOCTOR'] + ); + }); + + testUtils.testWithClient('client.memoryDoctor', async client => { + assert.equal( + typeof (await client.memoryDoctor()), + 'string' + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/MEMORY_DOCTOR.ts b/packages/client/lib/commands/MEMORY_DOCTOR.ts new file mode 100644 index 00000000000..21e42ccc7ea --- /dev/null +++ b/packages/client/lib/commands/MEMORY_DOCTOR.ts @@ -0,0 +1,17 @@ +import { CommandParser } from '../client/parser'; +import { BlobStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Constructs the MEMORY DOCTOR command + * + * @param parser - The command parser + * @see https://redis.io/commands/memory-doctor/ + */ + parseCommand(parser: CommandParser) { + parser.push('MEMORY', 'DOCTOR'); + }, + transformReply: undefined as unknown as () => BlobStringReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/MEMORY_MALLOC-STATS.spec.ts b/packages/client/lib/commands/MEMORY_MALLOC-STATS.spec.ts new file mode 100644 index 00000000000..a4a85f5b994 --- /dev/null +++ b/packages/client/lib/commands/MEMORY_MALLOC-STATS.spec.ts @@ -0,0 +1,20 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import MEMORY_MALLOC_STATS from './MEMORY_MALLOC-STATS'; +import { parseArgs } from './generic-transformers'; + +describe('MEMORY MALLOC-STATS', () => { + it('transformArguments', () => { + assert.deepEqual( + 
parseArgs(MEMORY_MALLOC_STATS), + ['MEMORY', 'MALLOC-STATS'] + ); + }); + + testUtils.testWithClient('client.memoryMallocStats', async client => { + assert.equal( + typeof (await client.memoryMallocStats()), + 'string' + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/MEMORY_MALLOC-STATS.ts b/packages/client/lib/commands/MEMORY_MALLOC-STATS.ts new file mode 100644 index 00000000000..69ad8c37a85 --- /dev/null +++ b/packages/client/lib/commands/MEMORY_MALLOC-STATS.ts @@ -0,0 +1,18 @@ +import { CommandParser } from '../client/parser'; +import { BlobStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Constructs the MEMORY MALLOC-STATS command + * + * @param parser - The command parser + * @see https://redis.io/commands/memory-malloc-stats/ + */ + parseCommand(parser: CommandParser) { + parser.push('MEMORY', 'MALLOC-STATS'); + }, + transformReply: undefined as unknown as () => BlobStringReply +} as const satisfies Command; + diff --git a/packages/client/lib/commands/MEMORY_PURGE.spec.ts b/packages/client/lib/commands/MEMORY_PURGE.spec.ts new file mode 100644 index 00000000000..be5fb738b0a --- /dev/null +++ b/packages/client/lib/commands/MEMORY_PURGE.spec.ts @@ -0,0 +1,20 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import MEMORY_PURGE from './MEMORY_PURGE'; +import { parseArgs } from './generic-transformers'; + +describe('MEMORY PURGE', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(MEMORY_PURGE), + ['MEMORY', 'PURGE'] + ); + }); + + testUtils.testWithClient('client.memoryPurge', async client => { + assert.equal( + await client.memoryPurge(), + 'OK' + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/MEMORY_PURGE.ts b/packages/client/lib/commands/MEMORY_PURGE.ts new file mode 100644 index 00000000000..39f837016ad --- /dev/null +++ 
b/packages/client/lib/commands/MEMORY_PURGE.ts @@ -0,0 +1,18 @@ +import { CommandParser } from '../client/parser'; +import { SimpleStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: false, + /** + * Constructs the MEMORY PURGE command + * + * @param parser - The command parser + * @see https://redis.io/commands/memory-purge/ + */ + parseCommand(parser: CommandParser) { + parser.push('MEMORY', 'PURGE'); + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; + diff --git a/packages/client/lib/commands/MEMORY_STATS.spec.ts b/packages/client/lib/commands/MEMORY_STATS.spec.ts new file mode 100644 index 00000000000..6aad05116af --- /dev/null +++ b/packages/client/lib/commands/MEMORY_STATS.spec.ts @@ -0,0 +1,47 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import MEMORY_STATS from './MEMORY_STATS'; +import { parseArgs } from './generic-transformers'; + +describe('MEMORY STATS', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(MEMORY_STATS), + ['MEMORY', 'STATS'] + ); + }); + + testUtils.testWithClient('client.memoryStats', async client => { + const memoryStats = await client.memoryStats(); + assert.equal(typeof memoryStats['peak.allocated'], 'number'); + assert.equal(typeof memoryStats['total.allocated'], 'number'); + assert.equal(typeof memoryStats['startup.allocated'], 'number'); + assert.equal(typeof memoryStats['replication.backlog'], 'number'); + assert.equal(typeof memoryStats['clients.slaves'], 'number'); + assert.equal(typeof memoryStats['clients.normal'], 'number'); + assert.equal(typeof memoryStats['aof.buffer'], 'number'); + assert.equal(typeof memoryStats['lua.caches'], 'number'); + assert.equal(typeof memoryStats['overhead.total'], 'number'); + assert.equal(typeof memoryStats['keys.count'], 'number'); + assert.equal(typeof memoryStats['keys.bytes-per-key'], 'number'); 
+ assert.equal(typeof memoryStats['dataset.bytes'], 'number'); + assert.equal(typeof memoryStats['dataset.percentage'], 'number'); + assert.equal(typeof memoryStats['peak.percentage'], 'number'); + assert.equal(typeof memoryStats['allocator.allocated'], 'number'); + assert.equal(typeof memoryStats['allocator.active'], 'number'); + assert.equal(typeof memoryStats['allocator.resident'], 'number'); + assert.equal(typeof memoryStats['allocator-fragmentation.ratio'], 'number', 'allocator-fragmentation.ratio'); + assert.equal(typeof memoryStats['allocator-fragmentation.bytes'], 'number'); + assert.equal(typeof memoryStats['allocator-rss.ratio'], 'number', 'allocator-rss.ratio'); + assert.equal(typeof memoryStats['allocator-rss.bytes'], 'number'); + assert.equal(typeof memoryStats['rss-overhead.ratio'], 'number', 'rss-overhead.ratio'); + assert.equal(typeof memoryStats['rss-overhead.bytes'], 'number'); + assert.equal(typeof memoryStats['fragmentation'], 'number', 'fragmentation'); + assert.equal(typeof memoryStats['fragmentation.bytes'], 'number'); + + if (testUtils.isVersionGreaterThan([7])) { + assert.equal(typeof memoryStats['cluster.links'], 'number'); + assert.equal(typeof memoryStats['functions.caches'], 'number'); + } + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/MEMORY_STATS.ts b/packages/client/lib/commands/MEMORY_STATS.ts new file mode 100644 index 00000000000..9391a91613c --- /dev/null +++ b/packages/client/lib/commands/MEMORY_STATS.ts @@ -0,0 +1,75 @@ +import { CommandParser } from '../client/parser'; +import { TuplesToMapReply, BlobStringReply, NumberReply, DoubleReply, ArrayReply, UnwrapReply, Command, TypeMapping } from '../RESP/types'; +import { transformDoubleReply } from './generic-transformers'; + +export type MemoryStatsReply = TuplesToMapReply<[ + [BlobStringReply<'peak.allocated'>, NumberReply], + [BlobStringReply<'total.allocated'>, NumberReply], + [BlobStringReply<'startup.allocated'>, NumberReply], + 
[BlobStringReply<'replication.backlog'>, NumberReply], + [BlobStringReply<'clients.slaves'>, NumberReply], + [BlobStringReply<'clients.normal'>, NumberReply], + /** added in 7.0 */ + [BlobStringReply<'cluster.links'>, NumberReply], + [BlobStringReply<'aof.buffer'>, NumberReply], + [BlobStringReply<'lua.caches'>, NumberReply], + /** added in 7.0 */ + [BlobStringReply<'functions.caches'>, NumberReply], + // FIXME: 'db.0', and perhaps others' is here and is a map that should be handled? + [BlobStringReply<'overhead.total'>, NumberReply], + [BlobStringReply<'keys.count'>, NumberReply], + [BlobStringReply<'keys.bytes-per-key'>, NumberReply], + [BlobStringReply<'dataset.bytes'>, NumberReply], + [BlobStringReply<'dataset.percentage'>, DoubleReply], + [BlobStringReply<'peak.percentage'>, DoubleReply], + [BlobStringReply<'allocator.allocated'>, NumberReply], + [BlobStringReply<'allocator.active'>, NumberReply], + [BlobStringReply<'allocator.resident'>, NumberReply], + [BlobStringReply<'allocator-fragmentation.ratio'>, DoubleReply], + [BlobStringReply<'allocator-fragmentation.bytes'>, NumberReply], + [BlobStringReply<'allocator-rss.ratio'>, DoubleReply], + [BlobStringReply<'allocator-rss.bytes'>, NumberReply], + [BlobStringReply<'rss-overhead.ratio'>, DoubleReply], + [BlobStringReply<'rss-overhead.bytes'>, NumberReply], + [BlobStringReply<'fragmentation'>, DoubleReply], + [BlobStringReply<'fragmentation.bytes'>, NumberReply] +]>; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Constructs the MEMORY STATS command + * + * @param parser - The command parser + * @see https://redis.io/commands/memory-stats/ + */ + parseCommand(parser: CommandParser) { + parser.push('MEMORY', 'STATS'); + }, + transformReply: { + 2: (rawReply: UnwrapReply>, preserve?: any, typeMapping?: TypeMapping) => { + const reply: any = {}; + + let i = 0; + while (i < rawReply.length) { + switch(rawReply[i].toString()) { + case 'dataset.percentage': + case 'peak.percentage': + 
case 'allocator-fragmentation.ratio': + case 'allocator-rss.ratio': + case 'rss-overhead.ratio': + case 'fragmentation': + reply[rawReply[i++] as any] = transformDoubleReply[2](rawReply[i++] as unknown as BlobStringReply, preserve, typeMapping); + break; + default: + reply[rawReply[i++] as any] = rawReply[i++]; + } + + } + + return reply as MemoryStatsReply; + }, + 3: undefined as unknown as () => MemoryStatsReply + } +} as const satisfies Command; diff --git a/packages/client/lib/commands/MEMORY_USAGE.spec.ts b/packages/client/lib/commands/MEMORY_USAGE.spec.ts new file mode 100644 index 00000000000..edf673564ee --- /dev/null +++ b/packages/client/lib/commands/MEMORY_USAGE.spec.ts @@ -0,0 +1,31 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import MEMORY_USAGE from './MEMORY_USAGE'; +import { parseArgs } from './generic-transformers'; + +describe('MEMORY USAGE', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(MEMORY_USAGE, 'key'), + ['MEMORY', 'USAGE', 'key'] + ); + }); + + it('with SAMPLES', () => { + assert.deepEqual( + parseArgs(MEMORY_USAGE, 'key', { + SAMPLES: 1 + }), + ['MEMORY', 'USAGE', 'key', 'SAMPLES', '1'] + ); + }); + }); + + testUtils.testWithClient('client.memoryUsage', async client => { + assert.equal( + await client.memoryUsage('key'), + null + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/MEMORY_USAGE.ts b/packages/client/lib/commands/MEMORY_USAGE.ts new file mode 100644 index 00000000000..a1fa79f6210 --- /dev/null +++ b/packages/client/lib/commands/MEMORY_USAGE.ts @@ -0,0 +1,27 @@ +import { CommandParser } from '../client/parser'; +import { NumberReply, NullReply, Command, RedisArgument } from '../RESP/types'; + +export interface MemoryUsageOptions { + SAMPLES?: number; +} + +export default { + IS_READ_ONLY: true, + /** + * Constructs the MEMORY USAGE command + * + * @param parser - The command parser + * 
@param key - The key to get memory usage for + * @param options - Optional parameters including SAMPLES + * @see https://redis.io/commands/memory-usage/ + */ + parseCommand(parser: CommandParser, key: RedisArgument, options?: MemoryUsageOptions) { + parser.push('MEMORY', 'USAGE'); + parser.pushKey(key); + + if (options?.SAMPLES) { + parser.push('SAMPLES', options.SAMPLES.toString()); + } + }, + transformReply: undefined as unknown as () => NumberReply | NullReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/MGET.spec.ts b/packages/client/lib/commands/MGET.spec.ts new file mode 100644 index 00000000000..048fa6f0a58 --- /dev/null +++ b/packages/client/lib/commands/MGET.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import MGET from './MGET'; +import { parseArgs } from './generic-transformers'; + +describe('MGET', () => { + it('processCommand', () => { + assert.deepEqual( + parseArgs(MGET, ['1', '2']), + ['MGET', '1', '2'] + ); + }); + + testUtils.testAll('mGet', async client => { + assert.deepEqual( + await client.mGet(['key']), + [null] + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/MGET.ts b/packages/client/lib/commands/MGET.ts new file mode 100644 index 00000000000..22145dd3485 --- /dev/null +++ b/packages/client/lib/commands/MGET.ts @@ -0,0 +1,19 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, BlobStringReply, NullReply, Command } from '../RESP/types'; + +export default { + CACHEABLE: true, + IS_READ_ONLY: true, + /** + * Constructs the MGET command + * + * @param parser - The command parser + * @param keys - Array of keys to get + * @see https://redis.io/commands/mget/ + */ + parseCommand(parser: CommandParser, keys: Array) { + parser.push('MGET'); + parser.pushKeys(keys); + }, + transformReply: undefined as unknown as () => Array +} as const 
satisfies Command; diff --git a/packages/client/lib/commands/MIGRATE.spec.ts b/packages/client/lib/commands/MIGRATE.spec.ts new file mode 100644 index 00000000000..dd2fbdc82ff --- /dev/null +++ b/packages/client/lib/commands/MIGRATE.spec.ts @@ -0,0 +1,77 @@ +import { strict as assert } from 'node:assert'; +import MIGRATE from './MIGRATE'; +import { parseArgs } from './generic-transformers'; + +describe('MIGRATE', () => { + describe('transformArguments', () => { + it('single key', () => { + assert.deepEqual( + parseArgs(MIGRATE, '127.0.0.1', 6379, 'key', 0, 10), + ['MIGRATE', '127.0.0.1', '6379', 'key', '0', '10'] + ); + }); + + it('multiple keys', () => { + assert.deepEqual( + parseArgs(MIGRATE, '127.0.0.1', 6379, ['1', '2'], 0, 10), + ['MIGRATE', '127.0.0.1', '6379', '', '0', '10', 'KEYS', '1', '2'] + ); + }); + + it('with COPY', () => { + assert.deepEqual( + parseArgs(MIGRATE, '127.0.0.1', 6379, 'key', 0, 10, { + COPY: true + }), + ['MIGRATE', '127.0.0.1', '6379', 'key', '0', '10', 'COPY'] + ); + }); + + it('with REPLACE', () => { + assert.deepEqual( + parseArgs(MIGRATE, '127.0.0.1', 6379, 'key', 0, 10, { + REPLACE: true + }), + ['MIGRATE', '127.0.0.1', '6379', 'key', '0', '10', 'REPLACE'] + ); + }); + + describe('with AUTH', () => { + it('password only', () => { + assert.deepEqual( + parseArgs(MIGRATE, '127.0.0.1', 6379, 'key', 0, 10, { + AUTH: { + password: 'password' + } + }), + ['MIGRATE', '127.0.0.1', '6379', 'key', '0', '10', 'AUTH', 'password'] + ); + }); + + it('username & password', () => { + assert.deepEqual( + parseArgs(MIGRATE, '127.0.0.1', 6379, 'key', 0, 10, { + AUTH: { + username: 'username', + password: 'password' + } + }), + ['MIGRATE', '127.0.0.1', '6379', 'key', '0', '10', 'AUTH2', 'username', 'password'] + ); + }); + }); + + it('with COPY, REPLACE, AUTH', () => { + assert.deepEqual( + parseArgs(MIGRATE, '127.0.0.1', 6379, 'key', 0, 10, { + COPY: true, + REPLACE: true, + AUTH: { + password: 'password' + } + }), + ['MIGRATE', '127.0.0.1', 
'6379', 'key', '0', '10', 'COPY', 'REPLACE', 'AUTH', 'password'] + ); + }); + }); +}); diff --git a/packages/client/lib/commands/MIGRATE.ts b/packages/client/lib/commands/MIGRATE.ts new file mode 100644 index 00000000000..ba798e331ab --- /dev/null +++ b/packages/client/lib/commands/MIGRATE.ts @@ -0,0 +1,77 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, SimpleStringReply, Command } from '../RESP/types'; +import { AuthOptions } from './AUTH'; + +export interface MigrateOptions { + COPY?: true; + REPLACE?: true; + AUTH?: AuthOptions; +} + +export default { + IS_READ_ONLY: false, + /** + * Constructs the MIGRATE command + * + * @param parser - The command parser + * @param host - Target Redis instance host + * @param port - Target Redis instance port + * @param key - Key or keys to migrate + * @param destinationDb - Target database index + * @param timeout - Timeout in milliseconds + * @param options - Optional parameters including COPY, REPLACE, and AUTH + * @see https://redis.io/commands/migrate/ + */ + parseCommand( + parser: CommandParser, + host: RedisArgument, + port: number, + key: RedisArgument | Array, + destinationDb: number, + timeout: number, + options?: MigrateOptions + ) { + parser.push('MIGRATE', host, port.toString()); + const isKeyArray = Array.isArray(key); + + if (isKeyArray) { + parser.push(''); + } else { + parser.push(key); + } + + parser.push( + destinationDb.toString(), + timeout.toString() + ); + + if (options?.COPY) { + parser.push('COPY'); + } + + if (options?.REPLACE) { + parser.push('REPLACE'); + } + + if (options?.AUTH) { + if (options.AUTH.username) { + parser.push( + 'AUTH2', + options.AUTH.username, + options.AUTH.password + ); + } else { + parser.push( + 'AUTH', + options.AUTH.password + ); + } + } + + if (isKeyArray) { + parser.push('KEYS'); + parser.pushVariadic(key); + } + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git 
a/packages/client/lib/commands/MODULE_LIST.spec.ts b/packages/client/lib/commands/MODULE_LIST.spec.ts new file mode 100644 index 00000000000..0aab973cf21 --- /dev/null +++ b/packages/client/lib/commands/MODULE_LIST.spec.ts @@ -0,0 +1,12 @@ +import { strict as assert } from 'node:assert'; +import MODULE_LIST from './MODULE_LIST'; +import { parseArgs } from './generic-transformers'; + +describe('MODULE LIST', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(MODULE_LIST), + ['MODULE', 'LIST'] + ); + }); +}); diff --git a/packages/client/lib/commands/MODULE_LIST.ts b/packages/client/lib/commands/MODULE_LIST.ts new file mode 100644 index 00000000000..8183c419a66 --- /dev/null +++ b/packages/client/lib/commands/MODULE_LIST.ts @@ -0,0 +1,33 @@ +import { CommandParser } from '../client/parser'; +import { ArrayReply, TuplesToMapReply, BlobStringReply, NumberReply, UnwrapReply, Resp2Reply, Command } from '../RESP/types'; + +export type ModuleListReply = ArrayReply, BlobStringReply], + [BlobStringReply<'ver'>, NumberReply], +]>>; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Constructs the MODULE LIST command + * + * @param parser - The command parser + * @see https://redis.io/commands/module-list/ + */ + parseCommand(parser: CommandParser) { + parser.push('MODULE', 'LIST'); + }, + transformReply: { + 2: (reply: UnwrapReply>) => { + return reply.map(module => { + const unwrapped = module as unknown as UnwrapReply; + return { + name: unwrapped[1], + ver: unwrapped[3] + }; + }); + }, + 3: undefined as unknown as () => ModuleListReply + } +} as const satisfies Command; diff --git a/packages/client/lib/commands/MODULE_LOAD.spec.ts b/packages/client/lib/commands/MODULE_LOAD.spec.ts new file mode 100644 index 00000000000..418dd9b5daf --- /dev/null +++ b/packages/client/lib/commands/MODULE_LOAD.spec.ts @@ -0,0 +1,21 @@ +import { strict as assert } from 'node:assert'; +import MODULE_LOAD from './MODULE_LOAD'; +import { 
parseArgs } from './generic-transformers'; + +describe('MODULE LOAD', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(MODULE_LOAD, 'path'), + ['MODULE', 'LOAD', 'path'] + ); + }); + + it('with module args', () => { + assert.deepEqual( + parseArgs(MODULE_LOAD, 'path', ['1', '2']), + ['MODULE', 'LOAD', 'path', '1', '2'] + ); + }); + }); +}); diff --git a/packages/client/lib/commands/MODULE_LOAD.ts b/packages/client/lib/commands/MODULE_LOAD.ts new file mode 100644 index 00000000000..9dd6176ebaa --- /dev/null +++ b/packages/client/lib/commands/MODULE_LOAD.ts @@ -0,0 +1,23 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, SimpleStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Constructs the MODULE LOAD command + * + * @param parser - The command parser + * @param path - Path to the module file + * @param moduleArguments - Optional arguments to pass to the module + * @see https://redis.io/commands/module-load/ + */ + parseCommand(parser: CommandParser, path: RedisArgument, moduleArguments?: Array) { + parser.push('MODULE', 'LOAD', path); + + if (moduleArguments) { + parser.push(...moduleArguments); + } + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/client/lib/commands/MODULE_UNLOAD.spec.ts b/packages/client/lib/commands/MODULE_UNLOAD.spec.ts new file mode 100644 index 00000000000..581f41e03c8 --- /dev/null +++ b/packages/client/lib/commands/MODULE_UNLOAD.spec.ts @@ -0,0 +1,12 @@ +import { strict as assert } from 'node:assert'; +import MODULE_UNLOAD from './MODULE_UNLOAD'; +import { parseArgs } from './generic-transformers'; + +describe('MODULE UNLOAD', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(MODULE_UNLOAD, 'name'), + ['MODULE', 'UNLOAD', 'name'] + ); + }); +}); diff --git 
a/packages/client/lib/commands/MODULE_UNLOAD.ts b/packages/client/lib/commands/MODULE_UNLOAD.ts new file mode 100644 index 00000000000..6d19b2b2a73 --- /dev/null +++ b/packages/client/lib/commands/MODULE_UNLOAD.ts @@ -0,0 +1,18 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, SimpleStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Constructs the MODULE UNLOAD command + * + * @param parser - The command parser + * @param name - The name of the module to unload + * @see https://redis.io/commands/module-unload/ + */ + parseCommand(parser: CommandParser, name: RedisArgument) { + parser.push('MODULE', 'UNLOAD', name); + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/client/lib/commands/MOVE.spec.ts b/packages/client/lib/commands/MOVE.spec.ts new file mode 100644 index 00000000000..91a01378b22 --- /dev/null +++ b/packages/client/lib/commands/MOVE.spec.ts @@ -0,0 +1,20 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import MOVE from './MOVE'; +import { parseArgs } from './generic-transformers'; + +describe('MOVE', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(MOVE, 'key', 1), + ['MOVE', 'key', '1'] + ); + }); + + testUtils.testWithClient('client.move', async client => { + assert.equal( + await client.move('key', 1), + 0 + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/MOVE.ts b/packages/client/lib/commands/MOVE.ts new file mode 100644 index 00000000000..0c08a6fa100 --- /dev/null +++ b/packages/client/lib/commands/MOVE.ts @@ -0,0 +1,19 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; + +export default { + /** + * Constructs the MOVE command + * + * @param parser - The command parser + * @param key - The 
key to move + * @param db - The destination database index + * @see https://redis.io/commands/move/ + */ + parseCommand(parser: CommandParser, key: RedisArgument, db: number) { + parser.push('MOVE'); + parser.pushKey(key); + parser.push(db.toString()); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/MSET.spec.ts b/packages/client/lib/commands/MSET.spec.ts new file mode 100644 index 00000000000..cfb14eceb05 --- /dev/null +++ b/packages/client/lib/commands/MSET.spec.ts @@ -0,0 +1,39 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import MSET from './MSET'; +import { parseArgs } from './generic-transformers'; + +describe('MSET', () => { + describe('transformArguments', () => { + it("['key1', 'value1', 'key2', 'value2']", () => { + assert.deepEqual( + parseArgs(MSET, ['key1', 'value1', 'key2', 'value2']), + ['MSET', 'key1', 'value1', 'key2', 'value2'] + ); + }); + + it("[['key1', 'value1'], ['key2', 'value2']]", () => { + assert.deepEqual( + parseArgs(MSET, [['key1', 'value1'], ['key2', 'value2']]), + ['MSET', 'key1', 'value1', 'key2', 'value2'] + ); + }); + + it("{key1: 'value1'. 
key2: 'value2'}", () => { + assert.deepEqual( + parseArgs(MSET, { key1: 'value1', key2: 'value2' }), + ['MSET', 'key1', 'value1', 'key2', 'value2'] + ); + }); + }); + + testUtils.testAll('mSet', async client => { + assert.equal( + await client.mSet(['{tag}key1', 'value1', '{tag}key2', 'value2']), + 'OK' + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/MSET.ts b/packages/client/lib/commands/MSET.ts new file mode 100644 index 00000000000..ab734bae5c7 --- /dev/null +++ b/packages/client/lib/commands/MSET.ts @@ -0,0 +1,48 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, SimpleStringReply, Command } from '../RESP/types'; + +export type MSetArguments = + Array<[RedisArgument, RedisArgument]> | + Array | + Record; + +export function parseMSetArguments(parser: CommandParser, toSet: MSetArguments) { + if (Array.isArray(toSet)) { + if (toSet.length == 0) { + throw new Error("empty toSet Argument") + } + if (Array.isArray(toSet[0])) { + for (const tuple of (toSet as Array<[RedisArgument, RedisArgument]>)) { + parser.pushKey(tuple[0]); + parser.push(tuple[1]); + } + } else { + const arr = toSet as Array; + for (let i=0; i < arr.length; i += 2) { + parser.pushKey(arr[i]); + parser.push(arr[i+1]); + } + } + } else { + for (const tuple of Object.entries(toSet)) { + parser.pushKey(tuple[0]); + parser.push(tuple[1]); + } + } +} + +export default { + IS_READ_ONLY: true, + /** + * Constructs the MSET command + * + * @param parser - The command parser + * @param toSet - Key-value pairs to set (array of tuples, flat array, or object) + * @see https://redis.io/commands/mset/ + */ + parseCommand(parser: CommandParser, toSet: MSetArguments) { + parser.push('MSET'); + return parseMSetArguments(parser, toSet); + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/client/lib/commands/MSETNX.spec.ts 
b/packages/client/lib/commands/MSETNX.spec.ts new file mode 100644 index 00000000000..0a9f636abc7 --- /dev/null +++ b/packages/client/lib/commands/MSETNX.spec.ts @@ -0,0 +1,39 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import MSETNX from './MSETNX'; +import { parseArgs } from './generic-transformers'; + +describe('MSETNX', () => { + describe('transformArguments', () => { + it("['key1', 'value1', 'key2', 'value2']", () => { + assert.deepEqual( + parseArgs(MSETNX, ['key1', 'value1', 'key2', 'value2']), + ['MSETNX', 'key1', 'value1', 'key2', 'value2'] + ); + }); + + it("[['key1', 'value1'], ['key2', 'value2']]", () => { + assert.deepEqual( + parseArgs(MSETNX, [['key1', 'value1'], ['key2', 'value2']]), + ['MSETNX', 'key1', 'value1', 'key2', 'value2'] + ); + }); + + it("{key1: 'value1'. key2: 'value2'}", () => { + assert.deepEqual( + parseArgs(MSETNX, { key1: 'value1', key2: 'value2' }), + ['MSETNX', 'key1', 'value1', 'key2', 'value2'] + ); + }); + }); + + testUtils.testAll('mSetNX', async client => { + assert.equal( + await client.mSetNX(['{key}1', 'value1', '{key}2', 'value2']), + 1 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/MSETNX.ts b/packages/client/lib/commands/MSETNX.ts new file mode 100644 index 00000000000..9a2186023f6 --- /dev/null +++ b/packages/client/lib/commands/MSETNX.ts @@ -0,0 +1,19 @@ +import { CommandParser } from '../client/parser'; +import { SimpleStringReply, Command } from '../RESP/types'; +import { MSetArguments, parseMSetArguments } from './MSET'; + +export default { + IS_READ_ONLY: true, + /** + * Constructs the MSETNX command + * + * @param parser - The command parser + * @param toSet - Key-value pairs to set if none of the keys exist (array of tuples, flat array, or object) + * @see https://redis.io/commands/msetnx/ + */ + parseCommand(parser: CommandParser, toSet: MSetArguments) { + 
parser.push('MSETNX'); + return parseMSetArguments(parser, toSet); + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/client/lib/commands/OBJECT_ENCODING.spec.ts b/packages/client/lib/commands/OBJECT_ENCODING.spec.ts new file mode 100644 index 00000000000..34f82be9b8d --- /dev/null +++ b/packages/client/lib/commands/OBJECT_ENCODING.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import OBJECT_ENCODING from './OBJECT_ENCODING'; +import { parseArgs } from './generic-transformers'; + +describe('OBJECT ENCODING', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(OBJECT_ENCODING, 'key'), + ['OBJECT', 'ENCODING', 'key'] + ); + }); + + testUtils.testAll('objectEncoding', async client => { + assert.equal( + await client.objectEncoding('key'), + null + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/OBJECT_ENCODING.ts b/packages/client/lib/commands/OBJECT_ENCODING.ts new file mode 100644 index 00000000000..2c0f6b41bbc --- /dev/null +++ b/packages/client/lib/commands/OBJECT_ENCODING.ts @@ -0,0 +1,18 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, BlobStringReply, NullReply, Command } from '../RESP/types'; + +export default { + IS_READ_ONLY: true, + /** + * Constructs the OBJECT ENCODING command + * + * @param parser - The command parser + * @param key - The key to get the internal encoding for + * @see https://redis.io/commands/object-encoding/ + */ + parseCommand(parser: CommandParser, key: RedisArgument) { + parser.push('OBJECT', 'ENCODING'); + parser.pushKey(key); + }, + transformReply: undefined as unknown as () => BlobStringReply | NullReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/OBJECT_FREQ.spec.ts b/packages/client/lib/commands/OBJECT_FREQ.spec.ts 
new file mode 100644 index 00000000000..081501b12e6 --- /dev/null +++ b/packages/client/lib/commands/OBJECT_FREQ.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import OBJECT_FREQ from './OBJECT_FREQ'; +import { parseArgs } from './generic-transformers'; + +describe('OBJECT FREQ', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(OBJECT_FREQ, 'key'), + ['OBJECT', 'FREQ', 'key'] + ); + }); + + testUtils.testAll('client.objectFreq', async client => { + assert.equal( + await client.objectFreq('key'), + null + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/OBJECT_FREQ.ts b/packages/client/lib/commands/OBJECT_FREQ.ts new file mode 100644 index 00000000000..42a310a97c5 --- /dev/null +++ b/packages/client/lib/commands/OBJECT_FREQ.ts @@ -0,0 +1,18 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, NullReply, Command } from '../RESP/types'; + +export default { + IS_READ_ONLY: true, + /** + * Constructs the OBJECT FREQ command + * + * @param parser - The command parser + * @param key - The key to get the access frequency for + * @see https://redis.io/commands/object-freq/ + */ + parseCommand(parser: CommandParser, key: RedisArgument) { + parser.push('OBJECT', 'FREQ'); + parser.pushKey(key); + }, + transformReply: undefined as unknown as () => NumberReply | NullReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/OBJECT_IDLETIME.spec.ts b/packages/client/lib/commands/OBJECT_IDLETIME.spec.ts new file mode 100644 index 00000000000..30d47b8133f --- /dev/null +++ b/packages/client/lib/commands/OBJECT_IDLETIME.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import OBJECT_IDLETIME from './OBJECT_IDLETIME'; +import { parseArgs } from './generic-transformers'; 
+ +describe('OBJECT IDLETIME', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(OBJECT_IDLETIME, 'key'), + ['OBJECT', 'IDLETIME', 'key'] + ); + }); + + testUtils.testAll('client.objectIdleTime', async client => { + assert.equal( + await client.objectIdleTime('key'), + null + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/OBJECT_IDLETIME.ts b/packages/client/lib/commands/OBJECT_IDLETIME.ts new file mode 100644 index 00000000000..2d4afeda65a --- /dev/null +++ b/packages/client/lib/commands/OBJECT_IDLETIME.ts @@ -0,0 +1,18 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, NullReply, Command } from '../RESP/types'; + +export default { + IS_READ_ONLY: true, + /** + * Constructs the OBJECT IDLETIME command + * + * @param parser - The command parser + * @param key - The key to get the idle time for + * @see https://redis.io/commands/object-idletime/ + */ + parseCommand(parser: CommandParser, key: RedisArgument) { + parser.push('OBJECT', 'IDLETIME'); + parser.pushKey(key); + }, + transformReply: undefined as unknown as () => NumberReply | NullReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/OBJECT_REFCOUNT.spec.ts b/packages/client/lib/commands/OBJECT_REFCOUNT.spec.ts new file mode 100644 index 00000000000..8bac08a2e5b --- /dev/null +++ b/packages/client/lib/commands/OBJECT_REFCOUNT.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import OBJECT_REFCOUNT from './OBJECT_REFCOUNT'; +import { parseArgs } from './generic-transformers'; + +describe('OBJECT REFCOUNT', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(OBJECT_REFCOUNT, 'key'), + ['OBJECT', 'REFCOUNT', 'key'] + ); + }); + + testUtils.testAll('client.objectRefCount', async client => { + assert.equal( + await client.objectRefCount('key'), + 
null + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/OBJECT_REFCOUNT.ts b/packages/client/lib/commands/OBJECT_REFCOUNT.ts new file mode 100644 index 00000000000..7948a4941de --- /dev/null +++ b/packages/client/lib/commands/OBJECT_REFCOUNT.ts @@ -0,0 +1,18 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, NullReply, Command } from '../RESP/types'; + +export default { + IS_READ_ONLY: true, + /** + * Constructs the OBJECT REFCOUNT command + * + * @param parser - The command parser + * @param key - The key to get the reference count for + * @see https://redis.io/commands/object-refcount/ + */ + parseCommand(parser: CommandParser, key: RedisArgument) { + parser.push('OBJECT', 'REFCOUNT'); + parser.pushKey(key); + }, + transformReply: undefined as unknown as () => NumberReply | NullReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/PERSIST.spec.ts b/packages/client/lib/commands/PERSIST.spec.ts new file mode 100644 index 00000000000..fff6d7b3a76 --- /dev/null +++ b/packages/client/lib/commands/PERSIST.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import PERSIST from './PERSIST'; +import { parseArgs } from './generic-transformers'; + +describe('PERSIST', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(PERSIST, 'key'), + ['PERSIST', 'key'] + ); + }); + + testUtils.testAll('persist', async client => { + assert.equal( + await client.persist('key'), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/PERSIST.ts b/packages/client/lib/commands/PERSIST.ts new file mode 100644 index 00000000000..3b1f4a7062c --- /dev/null +++ b/packages/client/lib/commands/PERSIST.ts @@ -0,0 +1,17 @@ +import { CommandParser } from '../client/parser'; +import { 
RedisArgument, NumberReply, Command } from '../RESP/types'; + +export default { + /** + * Constructs the PERSIST command + * + * @param parser - The command parser + * @param key - The key to remove the expiration from + * @see https://redis.io/commands/persist/ + */ + parseCommand(parser: CommandParser, key: RedisArgument) { + parser.push('PERSIST'); + parser.pushKey(key); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/PEXPIRE.spec.ts b/packages/client/lib/commands/PEXPIRE.spec.ts new file mode 100644 index 00000000000..368bc9b4907 --- /dev/null +++ b/packages/client/lib/commands/PEXPIRE.spec.ts @@ -0,0 +1,32 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import PEXPIRE from './PEXPIRE'; +import { parseArgs } from './generic-transformers'; + +describe('PEXPIRE', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(PEXPIRE, 'key', 1), + ['PEXPIRE', 'key', '1'] + ); + }); + + it('with set option', () => { + assert.deepEqual( + parseArgs(PEXPIRE, 'key', 1, 'GT'), + ['PEXPIRE', 'key', '1', 'GT'] + ); + }); + }); + + testUtils.testAll('pExpire', async client => { + assert.equal( + await client.pExpire('key', 1), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/PEXPIRE.ts b/packages/client/lib/commands/PEXPIRE.ts new file mode 100644 index 00000000000..f1d96076885 --- /dev/null +++ b/packages/client/lib/commands/PEXPIRE.ts @@ -0,0 +1,30 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; + +export default { + IS_READ_ONLY: true, + /** + * Constructs the PEXPIRE command + * + * @param parser - The command parser + * @param key - The key to set the expiration for + * @param ms - The expiration time in milliseconds + * 
@param mode - Optional mode for the command ('NX', 'XX', 'GT', 'LT') + * @see https://redis.io/commands/pexpire/ + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + ms: number, + mode?: 'NX' | 'XX' | 'GT' | 'LT' + ) { + parser.push('PEXPIRE'); + parser.pushKey(key); + parser.push(ms.toString()); + + if (mode) { + parser.push(mode); + } + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/PEXPIREAT.spec.ts b/packages/client/lib/commands/PEXPIREAT.spec.ts new file mode 100644 index 00000000000..f1053920403 --- /dev/null +++ b/packages/client/lib/commands/PEXPIREAT.spec.ts @@ -0,0 +1,40 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import PEXPIREAT from './PEXPIREAT'; +import { parseArgs } from './generic-transformers'; + +describe('PEXPIREAT', () => { + describe('transformArguments', () => { + it('number', () => { + assert.deepEqual( + parseArgs(PEXPIREAT, 'key', 1), + ['PEXPIREAT', 'key', '1'] + ); + }); + + it('date', () => { + const d = new Date(); + assert.deepEqual( + parseArgs(PEXPIREAT, 'key', d), + ['PEXPIREAT', 'key', d.getTime().toString()] + ); + }); + + it('with set option', () => { + assert.deepEqual( + parseArgs(PEXPIREAT, 'key', 1, 'XX'), + ['PEXPIREAT', 'key', '1', 'XX'] + ); + }); + }); + + testUtils.testAll('pExpireAt', async client => { + assert.equal( + await client.pExpireAt('key', 1), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/PEXPIREAT.ts b/packages/client/lib/commands/PEXPIREAT.ts new file mode 100644 index 00000000000..072cc33bbb4 --- /dev/null +++ b/packages/client/lib/commands/PEXPIREAT.ts @@ -0,0 +1,31 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; +import { transformPXAT } from './generic-transformers'; + +export 
default { + IS_READ_ONLY: true, + /** + * Constructs the PEXPIREAT command + * + * @param parser - The command parser + * @param key - The key to set the expiration for + * @param msTimestamp - The expiration timestamp in milliseconds (Unix timestamp or Date object) + * @param mode - Optional mode for the command ('NX', 'XX', 'GT', 'LT') + * @see https://redis.io/commands/pexpireat/ + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + msTimestamp: number | Date, + mode?: 'NX' | 'XX' | 'GT' | 'LT' + ) { + parser.push('PEXPIREAT'); + parser.pushKey(key); + parser.push(transformPXAT(msTimestamp)); + + if (mode) { + parser.push(mode); + } + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/PEXPIRETIME.spec.ts b/packages/client/lib/commands/PEXPIRETIME.spec.ts new file mode 100644 index 00000000000..dbfc69e80dc --- /dev/null +++ b/packages/client/lib/commands/PEXPIRETIME.spec.ts @@ -0,0 +1,25 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import PEXPIRETIME from './PEXPIRETIME'; +import { parseArgs } from './generic-transformers'; + +describe('PEXPIRETIME', () => { + testUtils.isVersionGreaterThanHook([7]); + + it('transformArguments', () => { + assert.deepEqual( + parseArgs(PEXPIRETIME, 'key'), + ['PEXPIRETIME', 'key'] + ); + }); + + testUtils.testAll('pExpireTime', async client => { + assert.equal( + await client.pExpireTime('key'), + -2 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/PEXPIRETIME.ts b/packages/client/lib/commands/PEXPIRETIME.ts new file mode 100644 index 00000000000..6b3488662c3 --- /dev/null +++ b/packages/client/lib/commands/PEXPIRETIME.ts @@ -0,0 +1,18 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; + +export default { + IS_READ_ONLY: 
true, + /** + * Constructs the PEXPIRETIME command + * + * @param parser - The command parser + * @param key - The key to get the expiration time for in milliseconds + * @see https://redis.io/commands/pexpiretime/ + */ + parseCommand(parser: CommandParser, key: RedisArgument) { + parser.push('PEXPIRETIME'); + parser.pushKey(key); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/PFADD.spec.ts b/packages/client/lib/commands/PFADD.spec.ts new file mode 100644 index 00000000000..55c4311e638 --- /dev/null +++ b/packages/client/lib/commands/PFADD.spec.ts @@ -0,0 +1,32 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import PFADD from './PFADD'; +import { parseArgs } from './generic-transformers'; + +describe('PFADD', () => { + describe('transformArguments', () => { + it('string', () => { + assert.deepEqual( + parseArgs(PFADD, 'key', 'element'), + ['PFADD', 'key', 'element'] + ); + }); + + it('array', () => { + assert.deepEqual( + parseArgs(PFADD, 'key', ['1', '2']), + ['PFADD', 'key', '1', '2'] + ); + }); + }); + + testUtils.testAll('pfAdd', async client => { + assert.equal( + await client.pfAdd('key', '1'), + 1 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/PFADD.ts b/packages/client/lib/commands/PFADD.ts new file mode 100644 index 00000000000..f5d2a280ca0 --- /dev/null +++ b/packages/client/lib/commands/PFADD.ts @@ -0,0 +1,23 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; +import { RedisVariadicArgument } from './generic-transformers'; + +export default { + IS_READ_ONLY: true, + /** + * Constructs the PFADD command + * + * @param parser - The command parser + * @param key - The key of the HyperLogLog + * @param element - Optional elements to add + * @see 
https://redis.io/commands/pfadd/ + */ + parseCommand(parser: CommandParser, key: RedisArgument, element?: RedisVariadicArgument) { + parser.push('PFADD') + parser.pushKey(key); + if (element) { + parser.pushVariadic(element); + } + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/PFCOUNT.spec.ts b/packages/client/lib/commands/PFCOUNT.spec.ts new file mode 100644 index 00000000000..aec2ebecf0b --- /dev/null +++ b/packages/client/lib/commands/PFCOUNT.spec.ts @@ -0,0 +1,32 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import PFCOUNT from './PFCOUNT'; +import { parseArgs } from './generic-transformers'; + +describe('PFCOUNT', () => { + describe('transformArguments', () => { + it('string', () => { + assert.deepEqual( + parseArgs(PFCOUNT, 'key'), + ['PFCOUNT', 'key'] + ); + }); + + it('array', () => { + assert.deepEqual( + parseArgs(PFCOUNT, ['1', '2']), + ['PFCOUNT', '1', '2'] + ); + }); + }); + + testUtils.testAll('pfCount', async client => { + assert.equal( + await client.pfCount('key'), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/PFCOUNT.ts b/packages/client/lib/commands/PFCOUNT.ts new file mode 100644 index 00000000000..1358fed7d67 --- /dev/null +++ b/packages/client/lib/commands/PFCOUNT.ts @@ -0,0 +1,19 @@ +import { CommandParser } from '../client/parser'; +import { NumberReply, Command } from '../RESP/types'; +import { RedisVariadicArgument } from './generic-transformers'; + +export default { + IS_READ_ONLY: true, + /** + * Constructs the PFCOUNT command + * + * @param parser - The command parser + * @param keys - One or more keys of HyperLogLog structures to count + * @see https://redis.io/commands/pfcount/ + */ + parseCommand(parser: CommandParser, keys: RedisVariadicArgument) { + parser.push('PFCOUNT'); + parser.pushKeys(keys); + 
}, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/PFMERGE.spec.ts b/packages/client/lib/commands/PFMERGE.spec.ts new file mode 100644 index 00000000000..a286e932913 --- /dev/null +++ b/packages/client/lib/commands/PFMERGE.spec.ts @@ -0,0 +1,32 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import PFMERGE from './PFMERGE'; +import { parseArgs } from './generic-transformers'; + +describe('PFMERGE', () => { + describe('transformArguments', () => { + it('string', () => { + assert.deepEqual( + parseArgs(PFMERGE, 'destination', 'source'), + ['PFMERGE', 'destination', 'source'] + ); + }); + + it('array', () => { + assert.deepEqual( + parseArgs(PFMERGE, 'destination', ['1', '2']), + ['PFMERGE', 'destination', '1', '2'] + ); + }); + }); + + testUtils.testAll('pfMerge', async client => { + assert.equal( + await client.pfMerge('{tag}destination', '{tag}source'), + 'OK' + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/PFMERGE.ts b/packages/client/lib/commands/PFMERGE.ts new file mode 100644 index 00000000000..834a5dfbf55 --- /dev/null +++ b/packages/client/lib/commands/PFMERGE.ts @@ -0,0 +1,26 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, SimpleStringReply, Command } from '../RESP/types'; +import { RedisVariadicArgument } from './generic-transformers'; + +export default { + /** + * Constructs the PFMERGE command + * + * @param parser - The command parser + * @param destination - The destination key to merge to + * @param sources - One or more source keys to merge from + * @see https://redis.io/commands/pfmerge/ + */ + parseCommand( + parser: CommandParser, + destination: RedisArgument, + sources?: RedisVariadicArgument + ) { + parser.push('PFMERGE'); + parser.pushKey(destination); + if (sources) { + parser.pushKeys(sources); 
+ } + }, + transformReply: undefined as unknown as () => SimpleStringReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/PING.spec.ts b/packages/client/lib/commands/PING.spec.ts new file mode 100644 index 00000000000..56f513685f4 --- /dev/null +++ b/packages/client/lib/commands/PING.spec.ts @@ -0,0 +1,32 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import PING from './PING'; +import { parseArgs } from './generic-transformers'; + +describe('PING', () => { + describe('transformArguments', () => { + it('default', () => { + assert.deepEqual( + parseArgs(PING), + ['PING'] + ); + }); + + it('with message', () => { + assert.deepEqual( + parseArgs(PING, 'message'), + ['PING', 'message'] + ); + }); + }); + + testUtils.testAll('ping', async client => { + assert.equal( + await client.ping(), + 'PONG' + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/PING.ts b/packages/client/lib/commands/PING.ts new file mode 100644 index 00000000000..1e8d21e1584 --- /dev/null +++ b/packages/client/lib/commands/PING.ts @@ -0,0 +1,21 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, SimpleStringReply, BlobStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Constructs the PING command + * + * @param parser - The command parser + * @param message - Optional message to be returned instead of PONG + * @see https://redis.io/commands/ping/ + */ + parseCommand(parser: CommandParser, message?: RedisArgument) { + parser.push('PING'); + if (message) { + parser.push(message); + } + }, + transformReply: undefined as unknown as () => SimpleStringReply | BlobStringReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/PSETEX.spec.ts b/packages/client/lib/commands/PSETEX.spec.ts new file mode 100644 index 
00000000000..8580e2f8e9d --- /dev/null +++ b/packages/client/lib/commands/PSETEX.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import PSETEX from './PSETEX'; +import { parseArgs } from './generic-transformers'; + +describe('PSETEX', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(PSETEX, 'key', 1, 'value'), + ['PSETEX', 'key', '1', 'value'] + ); + }); + + testUtils.testAll('pSetEx', async client => { + assert.equal( + await client.pSetEx('key', 1, 'value'), + 'OK' + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/PSETEX.ts b/packages/client/lib/commands/PSETEX.ts new file mode 100644 index 00000000000..5b6d83bd694 --- /dev/null +++ b/packages/client/lib/commands/PSETEX.ts @@ -0,0 +1,20 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, SimpleStringReply, Command } from '../RESP/types'; + +export default { + /** + * Constructs the PSETEX command + * + * @param parser - The command parser + * @param key - The key to set + * @param ms - The expiration time in milliseconds + * @param value - The value to set + * @see https://redis.io/commands/psetex/ + */ + parseCommand(parser: CommandParser, key: RedisArgument, ms: number, value: RedisArgument) { + parser.push('PSETEX'); + parser.pushKey(key); + parser.push(ms.toString(), value); + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/client/lib/commands/PTTL.spec.ts b/packages/client/lib/commands/PTTL.spec.ts new file mode 100644 index 00000000000..deb04bad97e --- /dev/null +++ b/packages/client/lib/commands/PTTL.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import PTTL from './PTTL'; +import { parseArgs } from './generic-transformers'; + 
+describe('PTTL', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(PTTL, 'key'), + ['PTTL', 'key'] + ); + }); + + testUtils.testAll('pTTL', async client => { + assert.equal( + await client.pTTL('key'), + -2 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/PTTL.ts b/packages/client/lib/commands/PTTL.ts new file mode 100644 index 00000000000..9d408aeee17 --- /dev/null +++ b/packages/client/lib/commands/PTTL.ts @@ -0,0 +1,18 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; + +export default { + IS_READ_ONLY: true, + /** + * Constructs the PTTL command + * + * @param parser - The command parser + * @param key - The key to get the time to live in milliseconds + * @see https://redis.io/commands/pttl/ + */ + parseCommand(parser: CommandParser, key: RedisArgument) { + parser.push('PTTL'); + parser.pushKey(key); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/PUBLISH.spec.ts b/packages/client/lib/commands/PUBLISH.spec.ts new file mode 100644 index 00000000000..930adc8c4d7 --- /dev/null +++ b/packages/client/lib/commands/PUBLISH.spec.ts @@ -0,0 +1,20 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import PUBLISH from './PUBLISH'; +import { parseArgs } from './generic-transformers'; + +describe('PUBLISH', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(PUBLISH, 'channel', 'message'), + ['PUBLISH', 'channel', 'message'] + ); + }); + + testUtils.testWithClient('client.publish', async client => { + assert.equal( + await client.publish('channel', 'message'), + 0 + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/PUBLISH.ts b/packages/client/lib/commands/PUBLISH.ts new file mode 100644 index 
00000000000..197a2b069eb --- /dev/null +++ b/packages/client/lib/commands/PUBLISH.ts @@ -0,0 +1,20 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + IS_FORWARD_COMMAND: true, + /** + * Constructs the PUBLISH command + * + * @param parser - The command parser + * @param channel - The channel to publish to + * @param message - The message to publish + * @see https://redis.io/commands/publish/ + */ + parseCommand(parser: CommandParser, channel: RedisArgument, message: RedisArgument) { + parser.push('PUBLISH', channel, message); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/PUBSUB_CHANNELS.spec.ts b/packages/client/lib/commands/PUBSUB_CHANNELS.spec.ts new file mode 100644 index 00000000000..369e339a497 --- /dev/null +++ b/packages/client/lib/commands/PUBSUB_CHANNELS.spec.ts @@ -0,0 +1,29 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import PUBSUB_CHANNELS from './PUBSUB_CHANNELS'; +import { parseArgs } from './generic-transformers'; + +describe('PUBSUB CHANNELS', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(PUBSUB_CHANNELS), + ['PUBSUB', 'CHANNELS'] + ); + }); + + it('with pattern', () => { + assert.deepEqual( + parseArgs(PUBSUB_CHANNELS, 'patter*'), + ['PUBSUB', 'CHANNELS', 'patter*'] + ); + }); + }); + + testUtils.testWithClient('client.pubSubChannels', async client => { + assert.deepEqual( + await client.pubSubChannels(), + [] + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/PUBSUB_CHANNELS.ts b/packages/client/lib/commands/PUBSUB_CHANNELS.ts new file mode 100644 index 00000000000..c9eb9bf7b4e --- /dev/null +++ b/packages/client/lib/commands/PUBSUB_CHANNELS.ts @@ -0,0 +1,23 @@ +import { 
CommandParser } from '../client/parser'; +import { RedisArgument, ArrayReply, BlobStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Constructs the PUBSUB CHANNELS command + * + * @param parser - The command parser + * @param pattern - Optional pattern to filter channels + * @see https://redis.io/commands/pubsub-channels/ + */ + parseCommand(parser: CommandParser, pattern?: RedisArgument) { + parser.push('PUBSUB', 'CHANNELS'); + + if (pattern) { + parser.push(pattern); + } + }, + transformReply: undefined as unknown as () => ArrayReply +} as const satisfies Command; + diff --git a/packages/client/lib/commands/PUBSUB_NUMPAT.spec.ts b/packages/client/lib/commands/PUBSUB_NUMPAT.spec.ts new file mode 100644 index 00000000000..d75256bb43c --- /dev/null +++ b/packages/client/lib/commands/PUBSUB_NUMPAT.spec.ts @@ -0,0 +1,20 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import PUBSUB_NUMPAT from './PUBSUB_NUMPAT'; +import { parseArgs } from './generic-transformers'; + +describe('PUBSUB NUMPAT', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(PUBSUB_NUMPAT), + ['PUBSUB', 'NUMPAT'] + ); + }); + + testUtils.testWithClient('client.pubSubNumPat', async client => { + assert.equal( + await client.pubSubNumPat(), + 0 + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/PUBSUB_NUMPAT.ts b/packages/client/lib/commands/PUBSUB_NUMPAT.ts new file mode 100644 index 00000000000..4b876db88f1 --- /dev/null +++ b/packages/client/lib/commands/PUBSUB_NUMPAT.ts @@ -0,0 +1,17 @@ +import { CommandParser } from '../client/parser'; +import { NumberReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Constructs the PUBSUB NUMPAT command + * + * @param parser - The command parser + * @see https://redis.io/commands/pubsub-numpat/ + */ + 
parseCommand(parser: CommandParser) { + parser.push('PUBSUB', 'NUMPAT'); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/PUBSUB_NUMSUB.spec.ts b/packages/client/lib/commands/PUBSUB_NUMSUB.spec.ts new file mode 100644 index 00000000000..11339ae2bb5 --- /dev/null +++ b/packages/client/lib/commands/PUBSUB_NUMSUB.spec.ts @@ -0,0 +1,36 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import PUBSUB_NUMSUB from './PUBSUB_NUMSUB'; +import { parseArgs } from './generic-transformers'; + +describe('PUBSUB NUMSUB', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(PUBSUB_NUMSUB), + ['PUBSUB', 'NUMSUB'] + ); + }); + + it('string', () => { + assert.deepEqual( + parseArgs(PUBSUB_NUMSUB, 'channel'), + ['PUBSUB', 'NUMSUB', 'channel'] + ); + }); + + it('array', () => { + assert.deepEqual( + parseArgs(PUBSUB_NUMSUB, ['1', '2']), + ['PUBSUB', 'NUMSUB', '1', '2'] + ); + }); + }); + + testUtils.testWithClient('client.pubSubNumSub', async client => { + assert.deepEqual( + await client.pubSubNumSub(), + Object.create(null) + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/PUBSUB_NUMSUB.ts b/packages/client/lib/commands/PUBSUB_NUMSUB.ts new file mode 100644 index 00000000000..da6647dc553 --- /dev/null +++ b/packages/client/lib/commands/PUBSUB_NUMSUB.ts @@ -0,0 +1,37 @@ +import { CommandParser } from '../client/parser'; +import { ArrayReply, BlobStringReply, NumberReply, UnwrapReply, Command } from '../RESP/types'; +import { RedisVariadicArgument } from './generic-transformers'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Constructs the PUBSUB NUMSUB command + * + * @param parser - The command parser + * @param channels - Optional channel names to get subscription count for + * @see https://redis.io/commands/pubsub-numsub/ + */ + 
parseCommand(parser: CommandParser, channels?: RedisVariadicArgument) { + parser.push('PUBSUB', 'NUMSUB'); + + if (channels) { + parser.pushVariadic(channels); + } + }, + /** + * Transforms the PUBSUB NUMSUB reply into a record of channel name to subscriber count + * + * @param rawReply - The raw reply from Redis + * @returns Record mapping channel names to their subscriber counts + */ + transformReply(rawReply: UnwrapReply>) { + const reply = Object.create(null); + let i = 0; + while (i < rawReply.length) { + reply[rawReply[i++].toString()] = rawReply[i++].toString(); + } + + return reply as Record; + } +} as const satisfies Command; diff --git a/packages/client/lib/commands/PUBSUB_SHARDCHANNELS.spec.ts b/packages/client/lib/commands/PUBSUB_SHARDCHANNELS.spec.ts new file mode 100644 index 00000000000..36597a9cfd8 --- /dev/null +++ b/packages/client/lib/commands/PUBSUB_SHARDCHANNELS.spec.ts @@ -0,0 +1,31 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import PUBSUB_SHARDCHANNELS from './PUBSUB_SHARDCHANNELS'; +import { parseArgs } from './generic-transformers'; + +describe('PUBSUB SHARDCHANNELS', () => { + testUtils.isVersionGreaterThanHook([7]); + + describe('transformArguments', () => { + it('without pattern', () => { + assert.deepEqual( + parseArgs(PUBSUB_SHARDCHANNELS), + ['PUBSUB', 'SHARDCHANNELS'] + ); + }); + + it('with pattern', () => { + assert.deepEqual( + parseArgs(PUBSUB_SHARDCHANNELS, 'patter*'), + ['PUBSUB', 'SHARDCHANNELS', 'patter*'] + ); + }); + }); + + testUtils.testWithClient('client.pubSubShardChannels', async client => { + assert.deepEqual( + await client.pubSubShardChannels(), + [] + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/PUBSUB_SHARDCHANNELS.ts b/packages/client/lib/commands/PUBSUB_SHARDCHANNELS.ts new file mode 100644 index 00000000000..30601de55df --- /dev/null +++ b/packages/client/lib/commands/PUBSUB_SHARDCHANNELS.ts @@ -0,0 +1,22 @@ +import { 
CommandParser } from '../client/parser'; +import { RedisArgument, ArrayReply, BlobStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Constructs the PUBSUB SHARDCHANNELS command + * + * @param parser - The command parser + * @param pattern - Optional pattern to filter shard channels + * @see https://redis.io/commands/pubsub-shardchannels/ + */ + parseCommand(parser: CommandParser, pattern?: RedisArgument) { + parser.push('PUBSUB', 'SHARDCHANNELS'); + + if (pattern) { + parser.push(pattern); + } + }, + transformReply: undefined as unknown as () => ArrayReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/PUBSUB_SHARDNUMSUB.spec.ts b/packages/client/lib/commands/PUBSUB_SHARDNUMSUB.spec.ts new file mode 100644 index 00000000000..e335941897d --- /dev/null +++ b/packages/client/lib/commands/PUBSUB_SHARDNUMSUB.spec.ts @@ -0,0 +1,49 @@ +import { strict as assert } from 'assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import PUBSUB_SHARDNUMSUB from './PUBSUB_SHARDNUMSUB'; +import { parseArgs } from './generic-transformers'; + +describe('PUBSUB SHARDNUMSUB', () => { + testUtils.isVersionGreaterThanHook([7]); + + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(PUBSUB_SHARDNUMSUB), + ['PUBSUB', 'SHARDNUMSUB'] + ); + }); + + it('string', () => { + assert.deepEqual( + parseArgs(PUBSUB_SHARDNUMSUB, 'channel'), + ['PUBSUB', 'SHARDNUMSUB', 'channel'] + ); + }); + + it('array', () => { + assert.deepEqual( + parseArgs(PUBSUB_SHARDNUMSUB, ['1', '2']), + ['PUBSUB', 'SHARDNUMSUB', '1', '2'] + ); + }); + }); + + testUtils.testWithClient('client.pubSubShardNumSub', async client => { + assert.deepEqual( + await client.pubSubShardNumSub(['foo', 'bar']), + Object.create(null, { + foo: { + value: 0, + configurable: true, + enumerable: true + }, + bar: { + value: 0, + configurable: true, + enumerable: true + } + }) + ); + }, 
GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/PUBSUB_SHARDNUMSUB.ts b/packages/client/lib/commands/PUBSUB_SHARDNUMSUB.ts new file mode 100644 index 00000000000..9d54a113d78 --- /dev/null +++ b/packages/client/lib/commands/PUBSUB_SHARDNUMSUB.ts @@ -0,0 +1,37 @@ +import { CommandParser } from '../client/parser'; +import { ArrayReply, BlobStringReply, NumberReply, UnwrapReply, Command } from '../RESP/types'; +import { RedisVariadicArgument } from './generic-transformers'; + +export default { + IS_READ_ONLY: true, + /** + * Constructs the PUBSUB SHARDNUMSUB command + * + * @param parser - The command parser + * @param channels - Optional shard channel names to get subscription count for + * @see https://redis.io/commands/pubsub-shardnumsub/ + */ + parseCommand(parser: CommandParser, channels?: RedisVariadicArgument) { + parser.push('PUBSUB', 'SHARDNUMSUB'); + + if (channels) { + parser.pushVariadic(channels); + } + }, + /** + * Transforms the PUBSUB SHARDNUMSUB reply into a record of shard channel name to subscriber count + * + * @param reply - The raw reply from Redis + * @returns Record mapping shard channel names to their subscriber counts + */ + transformReply(reply: UnwrapReply>) { + const transformedReply: Record = Object.create(null); + + for (let i = 0; i < reply.length; i += 2) { + transformedReply[(reply[i] as BlobStringReply).toString()] = reply[i + 1] as NumberReply; + } + + return transformedReply; + } +} as const satisfies Command; + diff --git a/packages/client/lib/commands/RANDOMKEY.spec.ts b/packages/client/lib/commands/RANDOMKEY.spec.ts new file mode 100644 index 00000000000..f86617a3b75 --- /dev/null +++ b/packages/client/lib/commands/RANDOMKEY.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import RANDOMKEY from './RANDOMKEY'; +import { parseArgs } from './generic-transformers'; + +describe('RANDOMKEY', () => { + it('transformArguments', () => { + 
assert.deepEqual( + parseArgs(RANDOMKEY), + ['RANDOMKEY'] + ); + }); + + testUtils.testAll('randomKey', async client => { + assert.equal( + await client.randomKey(), + null + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/RANDOMKEY.ts b/packages/client/lib/commands/RANDOMKEY.ts new file mode 100644 index 00000000000..263f539113b --- /dev/null +++ b/packages/client/lib/commands/RANDOMKEY.ts @@ -0,0 +1,17 @@ +import { CommandParser } from '../client/parser'; +import { NumberReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Constructs the RANDOMKEY command + * + * @param parser - The command parser + * @see https://redis.io/commands/randomkey/ + */ + parseCommand(parser: CommandParser) { + parser.push('RANDOMKEY'); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/READONLY.spec.ts b/packages/client/lib/commands/READONLY.spec.ts new file mode 100644 index 00000000000..ac303322330 --- /dev/null +++ b/packages/client/lib/commands/READONLY.spec.ts @@ -0,0 +1,12 @@ +import { strict as assert } from 'node:assert'; +import READONLY from './READONLY'; +import { parseArgs } from './generic-transformers'; + +describe('READONLY', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(READONLY), + ['READONLY'] + ); + }); +}); diff --git a/packages/client/lib/commands/READONLY.ts b/packages/client/lib/commands/READONLY.ts new file mode 100644 index 00000000000..16eef975818 --- /dev/null +++ b/packages/client/lib/commands/READONLY.ts @@ -0,0 +1,17 @@ +import { CommandParser } from '../client/parser'; +import { SimpleStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Constructs the READONLY command + * + * @param parser - The command parser + * @see 
https://redis.io/commands/readonly/ + */ + parseCommand(parser: CommandParser) { + parser.push('READONLY'); + }, + transformReply: undefined as unknown as () => SimpleStringReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/READWRITE.spec.ts b/packages/client/lib/commands/READWRITE.spec.ts new file mode 100644 index 00000000000..cc3f99a5d16 --- /dev/null +++ b/packages/client/lib/commands/READWRITE.spec.ts @@ -0,0 +1,12 @@ +import { strict as assert } from 'node:assert'; +import READWRITE from './READWRITE'; +import { parseArgs } from './generic-transformers'; + +describe('READWRITE', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(READWRITE), + ['READWRITE'] + ); + }); +}); diff --git a/packages/client/lib/commands/READWRITE.ts b/packages/client/lib/commands/READWRITE.ts new file mode 100644 index 00000000000..f747366448c --- /dev/null +++ b/packages/client/lib/commands/READWRITE.ts @@ -0,0 +1,17 @@ +import { CommandParser } from '../client/parser'; +import { SimpleStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Constructs the READWRITE command + * + * @param parser - The command parser + * @see https://redis.io/commands/readwrite/ + */ + parseCommand(parser: CommandParser) { + parser.push('READWRITE'); + }, + transformReply: undefined as unknown as () => SimpleStringReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/RENAME.spec.ts b/packages/client/lib/commands/RENAME.spec.ts new file mode 100644 index 00000000000..05dd9417b96 --- /dev/null +++ b/packages/client/lib/commands/RENAME.spec.ts @@ -0,0 +1,25 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import RENAME from './RENAME'; +import { parseArgs } from './generic-transformers'; + +describe('RENAME', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(RENAME, 'source', 
'destination'), + ['RENAME', 'source', 'destination'] + ); + }); + + testUtils.testAll('rename', async client => { + const [, reply] = await Promise.all([ + client.set('{tag}source', 'value'), + client.rename('{tag}source', '{tag}destination') + ]); + + assert.equal(reply, 'OK'); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/RENAME.ts b/packages/client/lib/commands/RENAME.ts new file mode 100644 index 00000000000..0033758d128 --- /dev/null +++ b/packages/client/lib/commands/RENAME.ts @@ -0,0 +1,19 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, SimpleStringReply, Command } from '../RESP/types'; + +export default { + IS_READ_ONLY: true, + /** + * Constructs the RENAME command + * + * @param parser - The command parser + * @param key - The key to rename + * @param newKey - The new key name + * @see https://redis.io/commands/rename/ + */ + parseCommand(parser: CommandParser, key: RedisArgument, newKey: RedisArgument) { + parser.push('RENAME'); + parser.pushKeys([key, newKey]); + }, + transformReply: undefined as unknown as () => SimpleStringReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/RENAMENX.spec.ts b/packages/client/lib/commands/RENAMENX.spec.ts new file mode 100644 index 00000000000..2367b453322 --- /dev/null +++ b/packages/client/lib/commands/RENAMENX.spec.ts @@ -0,0 +1,25 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import RENAMENX from './RENAMENX'; +import { parseArgs } from './generic-transformers'; + +describe('RENAMENX', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(RENAMENX, 'source', 'destination'), + ['RENAMENX', 'source', 'destination'] + ); + }); + + testUtils.testAll('renameNX', async client => { + const [, reply] = await Promise.all([ + client.set('{tag}source', 'value'), + client.renameNX('{tag}source', '{tag}destination') 
+ ]); + + assert.equal(reply, 1); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/RENAMENX.ts b/packages/client/lib/commands/RENAMENX.ts new file mode 100644 index 00000000000..38c12dee727 --- /dev/null +++ b/packages/client/lib/commands/RENAMENX.ts @@ -0,0 +1,19 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; + +export default { + IS_READ_ONLY: true, + /** + * Constructs the RENAMENX command + * + * @param parser - The command parser + * @param key - The key to rename + * @param newKey - The new key name, if it doesn't exist + * @see https://redis.io/commands/renamenx/ + */ + parseCommand(parser: CommandParser, key: RedisArgument, newKey: RedisArgument) { + parser.push('RENAMENX'); + parser.pushKeys([key, newKey]); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/REPLICAOF.spec.ts b/packages/client/lib/commands/REPLICAOF.spec.ts new file mode 100644 index 00000000000..13668639494 --- /dev/null +++ b/packages/client/lib/commands/REPLICAOF.spec.ts @@ -0,0 +1,12 @@ +import { strict as assert } from 'node:assert'; +import REPLICAOF from './REPLICAOF'; +import { parseArgs } from './generic-transformers'; + +describe('REPLICAOF', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(REPLICAOF, 'host', 1), + ['REPLICAOF', 'host', '1'] + ); + }); +}); diff --git a/packages/client/lib/commands/REPLICAOF.ts b/packages/client/lib/commands/REPLICAOF.ts new file mode 100644 index 00000000000..08d4167fff4 --- /dev/null +++ b/packages/client/lib/commands/REPLICAOF.ts @@ -0,0 +1,19 @@ +import { CommandParser } from '../client/parser'; +import { SimpleStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Constructs the REPLICAOF command + * + * @param 
parser - The command parser + * @param host - The host of the master to replicate from + * @param port - The port of the master to replicate from + * @see https://redis.io/commands/replicaof/ + */ + parseCommand(parser: CommandParser, host: string, port: number) { + parser.push('REPLICAOF', host, port.toString()); + }, + transformReply: undefined as unknown as () => SimpleStringReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/RESTORE-ASKING.spec.ts b/packages/client/lib/commands/RESTORE-ASKING.spec.ts new file mode 100644 index 00000000000..1258cf68e2d --- /dev/null +++ b/packages/client/lib/commands/RESTORE-ASKING.spec.ts @@ -0,0 +1,12 @@ +import { strict as assert } from 'node:assert'; +import RESTORE_ASKING from './RESTORE-ASKING'; +import { parseArgs } from './generic-transformers'; + +describe('RESTORE-ASKING', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(RESTORE_ASKING), + ['RESTORE-ASKING'] + ); + }); +}); diff --git a/packages/client/lib/commands/RESTORE-ASKING.ts b/packages/client/lib/commands/RESTORE-ASKING.ts new file mode 100644 index 00000000000..947ee9544d9 --- /dev/null +++ b/packages/client/lib/commands/RESTORE-ASKING.ts @@ -0,0 +1,17 @@ +import { CommandParser } from '../client/parser'; +import { SimpleStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Constructs the RESTORE-ASKING command + * + * @param parser - The command parser + * @see https://redis.io/commands/restore-asking/ + */ + parseCommand(parser: CommandParser) { + parser.push('RESTORE-ASKING'); + }, + transformReply: undefined as unknown as () => SimpleStringReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/RESTORE.spec.ts b/packages/client/lib/commands/RESTORE.spec.ts new file mode 100644 index 00000000000..6083b2eb1a5 --- /dev/null +++ b/packages/client/lib/commands/RESTORE.spec.ts @@ -0,0 +1,85 @@ +import { strict as 
assert } from 'assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import RESTORE from './RESTORE'; +import { RESP_TYPES } from '../RESP/decoder'; +import { parseArgs } from './generic-transformers'; + +describe('RESTORE', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(RESTORE, 'key', 0, 'value'), + ['RESTORE', 'key', '0', 'value'] + ); + }); + + it('with REPLACE', () => { + assert.deepEqual( + parseArgs(RESTORE, 'key', 0, 'value', { + REPLACE: true + }), + ['RESTORE', 'key', '0', 'value', 'REPLACE'] + ); + }); + + it('with ABSTTL', () => { + assert.deepEqual( + parseArgs(RESTORE, 'key', 0, 'value', { + ABSTTL: true + }), + ['RESTORE', 'key', '0', 'value', 'ABSTTL'] + ); + }); + + it('with IDLETIME', () => { + assert.deepEqual( + parseArgs(RESTORE, 'key', 0, 'value', { + IDLETIME: 1 + }), + ['RESTORE', 'key', '0', 'value', 'IDLETIME', '1'] + ); + }); + + it('with FREQ', () => { + assert.deepEqual( + parseArgs(RESTORE, 'key', 0, 'value', { + FREQ: 1 + }), + ['RESTORE', 'key', '0', 'value', 'FREQ', '1'] + ); + }); + + it('with REPLACE, ABSTTL, IDLETIME and FREQ', () => { + assert.deepEqual( + parseArgs(RESTORE, 'key', 0, 'value', { + REPLACE: true, + ABSTTL: true, + IDLETIME: 1, + FREQ: 2 + }), + ['RESTORE', 'key', '0', 'value', 'REPLACE', 'ABSTTL', 'IDLETIME', '1', 'FREQ', '2'] + ); + }); + }); + + testUtils.testWithClient('client.restore', async client => { + const [, dump] = await Promise.all([ + client.set('source', 'value'), + client.dump('source') + ]); + + assert.equal( + await client.restore('destination', 0, dump), + 'OK' + ); + }, { + ...GLOBAL.SERVERS.OPEN, + clientOptions: { + commandOptions: { + typeMapping: { + [RESP_TYPES.BLOB_STRING]: Buffer + } + } + } + }); +}); diff --git a/packages/client/lib/commands/RESTORE.ts b/packages/client/lib/commands/RESTORE.ts new file mode 100644 index 00000000000..5b07a773cc4 --- /dev/null +++ b/packages/client/lib/commands/RESTORE.ts @@ -0,0 +1,59 @@ 
+import { CommandParser } from '../client/parser'; +import { RedisArgument, SimpleStringReply, Command } from '../RESP/types'; + +/** + * Options for the RESTORE command + * + * @property REPLACE - Replace existing key + * @property ABSTTL - Use the TTL value as absolute timestamp + * @property IDLETIME - Set the idle time (seconds) for the key + * @property FREQ - Set the frequency counter for LFU policy + */ +export interface RestoreOptions { + REPLACE?: boolean; + ABSTTL?: boolean; + IDLETIME?: number; + FREQ?: number; +} + +export default { + IS_READ_ONLY: false, + /** + * Constructs the RESTORE command + * + * @param parser - The command parser + * @param key - The key to restore + * @param ttl - Time to live in milliseconds, 0 for no expiry + * @param serializedValue - The serialized value from DUMP command + * @param options - Options for the RESTORE command + * @see https://redis.io/commands/restore/ + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + ttl: number, + serializedValue: RedisArgument, + options?: RestoreOptions + ) { + parser.push('RESTORE'); + parser.pushKey(key); + parser.push(ttl.toString(), serializedValue); + + if (options?.REPLACE) { + parser.push('REPLACE'); + } + + if (options?.ABSTTL) { + parser.push('ABSTTL'); + } + + if (options?.IDLETIME) { + parser.push('IDLETIME', options.IDLETIME.toString()); + } + + if (options?.FREQ) { + parser.push('FREQ', options.FREQ.toString()); + } + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/client/lib/commands/ROLE.spec.ts b/packages/client/lib/commands/ROLE.spec.ts new file mode 100644 index 00000000000..09ce6ed3427 --- /dev/null +++ b/packages/client/lib/commands/ROLE.spec.ts @@ -0,0 +1,70 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ROLE from './ROLE'; +import { parseArgs } from './generic-transformers'; + +describe('ROLE', () => { + 
it('transformArguments', () => { + assert.deepEqual( + parseArgs(ROLE), + ['ROLE'] + ); + }); + + describe('transformReply', () => { + it('master', () => { + assert.deepEqual( + ROLE.transformReply(['master', 3129659, [['127.0.0.1', '9001', '3129242'], ['127.0.0.1', '9002', '3129543']]] as any), + { + role: 'master', + replicationOffest: 3129659, + replicas: [{ + host: '127.0.0.1', + port: 9001, + replicationOffest: 3129242 + }, { + host: '127.0.0.1', + port: 9002, + replicationOffest: 3129543 + }] + } + ); + }); + + it('replica', () => { + assert.deepEqual( + ROLE.transformReply(['slave', '127.0.0.1', 9000, 'connected', 3167038] as any), + { + role: 'slave', + master: { + host: '127.0.0.1', + port: 9000 + }, + state: 'connected', + dataReceived: 3167038 + } + ); + }); + + it('sentinel', () => { + assert.deepEqual( + ROLE.transformReply(['sentinel', ['resque-master', 'html-fragments-master', 'stats-master', 'metadata-master']] as any), + { + role: 'sentinel', + masterNames: ['resque-master', 'html-fragments-master', 'stats-master', 'metadata-master'] + } + ); + }); + }); + + testUtils.testWithClient('client.role', async client => { + assert.deepEqual( + await client.role(), + { + role: 'master', + replicationOffest: 0, + replicas: [] + } + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/ROLE.ts b/packages/client/lib/commands/ROLE.ts new file mode 100644 index 00000000000..749ac4935fa --- /dev/null +++ b/packages/client/lib/commands/ROLE.ts @@ -0,0 +1,95 @@ +import { CommandParser } from '../client/parser'; +import { BlobStringReply, NumberReply, ArrayReply, TuplesReply, UnwrapReply, Command } from '../RESP/types'; + +/** + * Role information returned for a Redis master + */ +type MasterRole = [ + role: BlobStringReply<'master'>, + replicationOffest: NumberReply, + replicas: ArrayReply> +]; + +/** + * Role information returned for a Redis slave + */ +type SlaveRole = [ + role: BlobStringReply<'slave'>, + masterHost: BlobStringReply, + 
masterPort: NumberReply, + state: BlobStringReply<'connect' | 'connecting' | 'sync' | 'connected'>, + dataReceived: NumberReply +]; + +/** + * Role information returned for a Redis sentinel + */ +type SentinelRole = [ + role: BlobStringReply<'sentinel'>, + masterNames: ArrayReply +]; + +/** + * Combined role type for Redis instance role information + */ +type Role = TuplesReply; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Constructs the ROLE command + * + * @param parser - The command parser + * @see https://redis.io/commands/role/ + */ + parseCommand(parser: CommandParser) { + parser.push('ROLE'); + }, + /** + * Transforms the ROLE reply into a structured object + * + * @param reply - The raw reply from Redis + * @returns Structured object representing role information + */ + transformReply(reply: UnwrapReply) { + switch (reply[0] as unknown as UnwrapReply) { + case 'master': { + const [role, replicationOffest, replicas] = reply as MasterRole; + return { + role, + replicationOffest, + replicas: (replicas as unknown as UnwrapReply).map(replica => { + const [host, port, replicationOffest] = replica as unknown as UnwrapReply; + return { + host, + port: Number(port), + replicationOffest: Number(replicationOffest) + }; + }) + }; + } + + case 'slave': { + const [role, masterHost, masterPort, state, dataReceived] = reply as SlaveRole; + return { + role, + master: { + host: masterHost, + port: masterPort + }, + state, + dataReceived, + }; + } + + case 'sentinel': { + const [role, masterNames] = reply as SentinelRole; + return { + role, + masterNames + }; + } + } + } +} as const satisfies Command; diff --git a/packages/client/lib/commands/RPOP.spec.ts b/packages/client/lib/commands/RPOP.spec.ts new file mode 100644 index 00000000000..844965eae1a --- /dev/null +++ b/packages/client/lib/commands/RPOP.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import RPOP 
from './RPOP'; +import { parseArgs } from './generic-transformers'; + +describe('RPOP', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(RPOP, 'key'), + ['RPOP', 'key'] + ); + }); + + testUtils.testAll('rPop', async client => { + assert.equal( + await client.rPop('key'), + null + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/RPOP.ts b/packages/client/lib/commands/RPOP.ts new file mode 100644 index 00000000000..4e284496579 --- /dev/null +++ b/packages/client/lib/commands/RPOP.ts @@ -0,0 +1,17 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, BlobStringReply, NullReply, Command } from '../RESP/types'; + +export default { + /** + * Constructs the RPOP command + * + * @param parser - The command parser + * @param key - The list key to pop from + * @see https://redis.io/commands/rpop/ + */ + parseCommand(parser: CommandParser, key: RedisArgument) { + parser.push('RPOP'); + parser.pushKey(key); + }, + transformReply: undefined as unknown as () => BlobStringReply | NullReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/RPOPLPUSH.spec.ts b/packages/client/lib/commands/RPOPLPUSH.spec.ts new file mode 100644 index 00000000000..728d600bc9d --- /dev/null +++ b/packages/client/lib/commands/RPOPLPUSH.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import RPOPLPUSH from './RPOPLPUSH'; +import { parseArgs } from './generic-transformers'; + +describe('RPOPLPUSH', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(RPOPLPUSH, 'source', 'destination'), + ['RPOPLPUSH', 'source', 'destination'] + ); + }); + + testUtils.testAll('rPopLPush', async client => { + assert.equal( + await client.rPopLPush('{tag}source', '{tag}destination'), + null + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); 
diff --git a/packages/client/lib/commands/RPOPLPUSH.ts b/packages/client/lib/commands/RPOPLPUSH.ts new file mode 100644 index 00000000000..936aeb01c8f --- /dev/null +++ b/packages/client/lib/commands/RPOPLPUSH.ts @@ -0,0 +1,18 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, BlobStringReply, NullReply, Command } from '../RESP/types'; + +export default { + /** + * Constructs the RPOPLPUSH command + * + * @param parser - The command parser + * @param source - The source list key + * @param destination - The destination list key + * @see https://redis.io/commands/rpoplpush/ + */ + parseCommand(parser: CommandParser, source: RedisArgument, destination: RedisArgument) { + parser.push('RPOPLPUSH'); + parser.pushKeys([source, destination]); + }, + transformReply: undefined as unknown as () => BlobStringReply | NullReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/RPOP_COUNT.spec.ts b/packages/client/lib/commands/RPOP_COUNT.spec.ts new file mode 100644 index 00000000000..e055d8655b5 --- /dev/null +++ b/packages/client/lib/commands/RPOP_COUNT.spec.ts @@ -0,0 +1,25 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import RPOP_COUNT from './RPOP_COUNT'; +import { parseArgs } from './generic-transformers'; + +describe('RPOP COUNT', () => { + testUtils.isVersionGreaterThanHook([6, 2]); + + it('transformArguments', () => { + assert.deepEqual( + parseArgs(RPOP_COUNT, 'key', 1), + ['RPOP', 'key', '1'] + ); + }); + + testUtils.testAll('rPopCount', async client => { + assert.equal( + await client.rPopCount('key', 1), + null + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/RPOP_COUNT.ts b/packages/client/lib/commands/RPOP_COUNT.ts new file mode 100644 index 00000000000..2a60335da94 --- /dev/null +++ b/packages/client/lib/commands/RPOP_COUNT.ts @@ -0,0 +1,19 @@ +import { CommandParser } from 
'../client/parser'; +import { RedisArgument, ArrayReply, BlobStringReply, NullReply, Command } from '../RESP/types'; + +export default { + /** + * Constructs the RPOP command with count parameter + * + * @param parser - The command parser + * @param key - The list key to pop from + * @param count - The number of elements to pop + * @see https://redis.io/commands/rpop/ + */ + parseCommand(parser: CommandParser, key: RedisArgument, count: number) { + parser.push('RPOP'); + parser.pushKey(key); + parser.push(count.toString()); + }, + transformReply: undefined as unknown as () => ArrayReply | NullReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/RPUSH.spec.ts b/packages/client/lib/commands/RPUSH.spec.ts new file mode 100644 index 00000000000..559fb7a2746 --- /dev/null +++ b/packages/client/lib/commands/RPUSH.spec.ts @@ -0,0 +1,32 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import RPUSH from './RPUSH'; +import { parseArgs } from './generic-transformers'; + +describe('RPUSH', () => { + describe('transformArguments', () => { + it('string', () => { + assert.deepEqual( + parseArgs(RPUSH, 'key', 'element'), + ['RPUSH', 'key', 'element'] + ); + }); + + it('array', () => { + assert.deepEqual( + parseArgs(RPUSH, 'key', ['1', '2']), + ['RPUSH', 'key', '1', '2'] + ); + }); + }); + + testUtils.testAll('rPush', async client => { + assert.equal( + await client.rPush('key', 'element'), + 1 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/RPUSH.ts b/packages/client/lib/commands/RPUSH.ts new file mode 100644 index 00000000000..452623e7f0d --- /dev/null +++ b/packages/client/lib/commands/RPUSH.ts @@ -0,0 +1,20 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; +import { RedisVariadicArgument } from './generic-transformers'; + +export default { + /** 
+ * Constructs the RPUSH command + * + * @param parser - The command parser + * @param key - The list key to push to + * @param element - One or more elements to push + * @see https://redis.io/commands/rpush/ + */ + parseCommand(parser: CommandParser, key: RedisArgument, element: RedisVariadicArgument) { + parser.push('RPUSH'); + parser.pushKey(key); + parser.pushVariadic(element); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/RPUSHX.spec.ts b/packages/client/lib/commands/RPUSHX.spec.ts new file mode 100644 index 00000000000..b9fb660c5bc --- /dev/null +++ b/packages/client/lib/commands/RPUSHX.spec.ts @@ -0,0 +1,32 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import RPUSHX from './RPUSHX'; +import { parseArgs } from './generic-transformers'; + +describe('RPUSHX', () => { + describe('transformArguments', () => { + it('string', () => { + assert.deepEqual( + parseArgs(RPUSHX, 'key', 'element'), + ['RPUSHX', 'key', 'element'] + ); + }); + + it('array', () => { + assert.deepEqual( + parseArgs(RPUSHX, 'key', ['1', '2']), + ['RPUSHX', 'key', '1', '2'] + ); + }); + }); + + testUtils.testAll('rPushX', async client => { + assert.equal( + await client.rPushX('key', 'element'), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/RPUSHX.ts b/packages/client/lib/commands/RPUSHX.ts new file mode 100644 index 00000000000..a9ec4bd1ef6 --- /dev/null +++ b/packages/client/lib/commands/RPUSHX.ts @@ -0,0 +1,20 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; +import { RedisVariadicArgument } from './generic-transformers'; + +export default { + /** + * Constructs the RPUSHX command + * + * @param parser - The command parser + * @param key - The list key to push to (only if it exists) + 
* @param element - One or more elements to push + * @see https://redis.io/commands/rpushx/ + */ + parseCommand(parser: CommandParser, key: RedisArgument, element: RedisVariadicArgument) { + parser.push('RPUSHX'); + parser.pushKey(key); + parser.pushVariadic(element); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/SADD.spec.ts b/packages/client/lib/commands/SADD.spec.ts new file mode 100644 index 00000000000..179e8602efc --- /dev/null +++ b/packages/client/lib/commands/SADD.spec.ts @@ -0,0 +1,32 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import SADD from './SADD'; +import { parseArgs } from './generic-transformers'; + +describe('SADD', () => { + describe('transformArguments', () => { + it('string', () => { + assert.deepEqual( + parseArgs(SADD, 'key', 'member'), + ['SADD', 'key', 'member'] + ); + }); + + it('array', () => { + assert.deepEqual( + parseArgs(SADD, 'key', ['1', '2']), + ['SADD', 'key', '1', '2'] + ); + }); + }); + + testUtils.testAll('sAdd', async client => { + assert.equal( + await client.sAdd('key', 'member'), + 1 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/SADD.ts b/packages/client/lib/commands/SADD.ts new file mode 100644 index 00000000000..3ee55706b95 --- /dev/null +++ b/packages/client/lib/commands/SADD.ts @@ -0,0 +1,20 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; +import { RedisVariadicArgument } from './generic-transformers'; + +export default { + /** + * Constructs the SADD command + * + * @param parser - The command parser + * @param key - The set key to add members to + * @param members - One or more members to add to the set + * @see https://redis.io/commands/sadd/ + */ + parseCommand(parser: CommandParser, key: RedisArgument, 
members: RedisVariadicArgument) { + parser.push('SADD'); + parser.pushKey(key); + parser.pushVariadic(members); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/SAVE.spec.ts b/packages/client/lib/commands/SAVE.spec.ts new file mode 100644 index 00000000000..5f0074f7492 --- /dev/null +++ b/packages/client/lib/commands/SAVE.spec.ts @@ -0,0 +1,12 @@ +import { strict as assert } from 'node:assert'; +import SAVE from './SAVE'; +import { parseArgs } from './generic-transformers'; + +describe('SAVE', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(SAVE), + ['SAVE'] + ); + }); +}); diff --git a/packages/client/lib/commands/SAVE.ts b/packages/client/lib/commands/SAVE.ts new file mode 100644 index 00000000000..078b14da7a3 --- /dev/null +++ b/packages/client/lib/commands/SAVE.ts @@ -0,0 +1,17 @@ +import { CommandParser } from '../client/parser'; +import { SimpleStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Constructs the SAVE command + * + * @param parser - The command parser + * @see https://redis.io/commands/save/ + */ + parseCommand(parser: CommandParser) { + parser.push('SAVE'); + }, + transformReply: undefined as unknown as () => SimpleStringReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/SCAN.spec.ts b/packages/client/lib/commands/SCAN.spec.ts new file mode 100644 index 00000000000..2a32cbebf4f --- /dev/null +++ b/packages/client/lib/commands/SCAN.spec.ts @@ -0,0 +1,63 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import { parseArgs } from './generic-transformers'; +import SCAN from './SCAN'; + +describe('SCAN', () => { + describe('transformArguments', () => { + it('cursor only', () => { + assert.deepEqual( + parseArgs(SCAN, '0'), + ['SCAN', '0'] + ); + }); + + it('with MATCH', () => { + 
assert.deepEqual( + parseArgs(SCAN, '0', { + MATCH: 'pattern' + }), + ['SCAN', '0', 'MATCH', 'pattern'] + ); + }); + + it('with COUNT', () => { + assert.deepEqual( + parseArgs(SCAN, '0', { + COUNT: 1 + }), + ['SCAN', '0', 'COUNT', '1'] + ); + }); + + it('with TYPE', () => { + assert.deepEqual( + parseArgs(SCAN, '0', { + TYPE: 'stream' + }), + ['SCAN', '0', 'TYPE', 'stream'] + ); + }); + + it('with MATCH & COUNT & TYPE', () => { + assert.deepEqual( + parseArgs(SCAN, '0', { + MATCH: 'pattern', + COUNT: 1, + TYPE: 'stream' + }), + ['SCAN', '0', 'MATCH', 'pattern', 'COUNT', '1', 'TYPE', 'stream'] + ); + }); + }); + + testUtils.testWithClient('client.scan', async client => { + assert.deepEqual( + await client.scan('0'), + { + cursor: '0', + keys: [] + } + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/SCAN.ts b/packages/client/lib/commands/SCAN.ts new file mode 100644 index 00000000000..d3153b786f1 --- /dev/null +++ b/packages/client/lib/commands/SCAN.ts @@ -0,0 +1,103 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, CommandArguments, BlobStringReply, ArrayReply, Command } from '../RESP/types'; + +/** + * Common options for SCAN-type commands + * + * @property MATCH - Pattern to filter returned keys + * @property COUNT - Hint for how many elements to return per iteration + */ +export interface ScanCommonOptions { + MATCH?: string; + COUNT?: number; +} + +/** + * Parses scan arguments for SCAN-type commands + * + * @param parser - The command parser + * @param cursor - The cursor position for iteration + * @param options - Scan options + */ +export function parseScanArguments( + parser: CommandParser, + cursor: RedisArgument, + options?: ScanOptions +) { + parser.push(cursor); + if (options?.MATCH) { + parser.push('MATCH', options.MATCH); + } + + if (options?.COUNT) { + parser.push('COUNT', options.COUNT.toString()); + } +} + +/** + * Pushes scan arguments to the command arguments array + * + * @param args - The 
command arguments array + * @param cursor - The cursor position for iteration + * @param options - Scan options + * @returns The updated command arguments array + */ +export function pushScanArguments( + args: CommandArguments, + cursor: RedisArgument, + options?: ScanOptions +): CommandArguments { + args.push(cursor.toString()); + + if (options?.MATCH) { + args.push('MATCH', options.MATCH); + } + + if (options?.COUNT) { + args.push('COUNT', options.COUNT.toString()); + } + + return args; +} + +/** + * Options for the SCAN command + * + * @property TYPE - Filter by value type + */ +export interface ScanOptions extends ScanCommonOptions { + TYPE?: RedisArgument; +} + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Constructs the SCAN command + * + * @param parser - The command parser + * @param cursor - The cursor position to start scanning from + * @param options - Scan options + * @see https://redis.io/commands/scan/ + */ + parseCommand(parser: CommandParser, cursor: RedisArgument, options?: ScanOptions) { + parser.push('SCAN'); + parseScanArguments(parser, cursor, options); + + if (options?.TYPE) { + parser.push('TYPE', options.TYPE); + } + }, + /** + * Transforms the SCAN reply into a structured object + * + * @param reply - The raw reply containing cursor and keys + * @returns Object with cursor and keys properties + */ + transformReply([cursor, keys]: [BlobStringReply, ArrayReply]) { + return { + cursor, + keys + }; + } +} as const satisfies Command; diff --git a/packages/client/lib/commands/SCARD.spec.ts b/packages/client/lib/commands/SCARD.spec.ts new file mode 100644 index 00000000000..53434583832 --- /dev/null +++ b/packages/client/lib/commands/SCARD.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import { parseArgs } from './generic-transformers'; +import SCARD from './SCARD'; + +describe('SCARD', () => { + it('transformArguments', () => { + 
assert.deepEqual( + parseArgs(SCARD, 'key'), + ['SCARD', 'key'] + ); + }); + + testUtils.testAll('sCard', async client => { + assert.equal( + await client.sCard('key'), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/SCARD.ts b/packages/client/lib/commands/SCARD.ts new file mode 100644 index 00000000000..20a2aefae00 --- /dev/null +++ b/packages/client/lib/commands/SCARD.ts @@ -0,0 +1,19 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; + +export default { + CACHEABLE: true, + IS_READ_ONLY: true, + /** + * Constructs the SCARD command + * + * @param parser - The command parser + * @param key - The set key to get the cardinality of + * @see https://redis.io/commands/scard/ + */ + parseCommand(parser: CommandParser, key: RedisArgument) { + parser.push('SCARD'); + parser.pushKey(key); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/SCRIPT_DEBUG.spec.ts b/packages/client/lib/commands/SCRIPT_DEBUG.spec.ts new file mode 100644 index 00000000000..c98143a3415 --- /dev/null +++ b/packages/client/lib/commands/SCRIPT_DEBUG.spec.ts @@ -0,0 +1,20 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import SCRIPT_DEBUG from './SCRIPT_DEBUG'; +import { parseArgs } from './generic-transformers'; + +describe('SCRIPT DEBUG', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(SCRIPT_DEBUG, 'NO'), + ['SCRIPT', 'DEBUG', 'NO'] + ); + }); + + testUtils.testWithClient('client.scriptDebug', async client => { + assert.equal( + await client.scriptDebug('NO'), + 'OK' + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/SCRIPT_DEBUG.ts b/packages/client/lib/commands/SCRIPT_DEBUG.ts new file mode 100644 index 00000000000..3f09c550449 --- /dev/null +++ 
b/packages/client/lib/commands/SCRIPT_DEBUG.ts @@ -0,0 +1,18 @@ +import { CommandParser } from '../client/parser'; +import { SimpleStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Constructs the SCRIPT DEBUG command + * + * @param parser - The command parser + * @param mode - Debug mode: YES, SYNC, or NO + * @see https://redis.io/commands/script-debug/ + */ + parseCommand(parser: CommandParser, mode: 'YES' | 'SYNC' | 'NO') { + parser.push('SCRIPT', 'DEBUG', mode); + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/client/lib/commands/SCRIPT_EXISTS.spec.ts b/packages/client/lib/commands/SCRIPT_EXISTS.spec.ts new file mode 100644 index 00000000000..cf65156c72d --- /dev/null +++ b/packages/client/lib/commands/SCRIPT_EXISTS.spec.ts @@ -0,0 +1,29 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import SCRIPT_EXISTS from './SCRIPT_EXISTS'; +import { parseArgs } from './generic-transformers'; + +describe('SCRIPT EXISTS', () => { + describe('transformArguments', () => { + it('string', () => { + assert.deepEqual( + parseArgs(SCRIPT_EXISTS, 'sha1'), + ['SCRIPT', 'EXISTS', 'sha1'] + ); + }); + + it('array', () => { + assert.deepEqual( + parseArgs(SCRIPT_EXISTS, ['1', '2']), + ['SCRIPT', 'EXISTS', '1', '2'] + ); + }); + }); + + testUtils.testWithClient('client.scriptExists', async client => { + assert.deepEqual( + await client.scriptExists('sha1'), + [0] + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/SCRIPT_EXISTS.ts b/packages/client/lib/commands/SCRIPT_EXISTS.ts new file mode 100644 index 00000000000..66479654a0d --- /dev/null +++ b/packages/client/lib/commands/SCRIPT_EXISTS.ts @@ -0,0 +1,20 @@ +import { CommandParser } from '../client/parser'; +import { ArrayReply, NumberReply, Command } from '../RESP/types'; +import { 
RedisVariadicArgument } from './generic-transformers'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Constructs the SCRIPT EXISTS command + * + * @param parser - The command parser + * @param sha1 - One or more SHA1 digests of scripts + * @see https://redis.io/commands/script-exists/ + */ + parseCommand(parser: CommandParser, sha1: RedisVariadicArgument) { + parser.push('SCRIPT', 'EXISTS'); + parser.pushVariadic(sha1); + }, + transformReply: undefined as unknown as () => ArrayReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/SCRIPT_FLUSH.spec.ts b/packages/client/lib/commands/SCRIPT_FLUSH.spec.ts new file mode 100644 index 00000000000..c51efd1a36c --- /dev/null +++ b/packages/client/lib/commands/SCRIPT_FLUSH.spec.ts @@ -0,0 +1,29 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import SCRIPT_FLUSH from './SCRIPT_FLUSH'; +import { parseArgs } from './generic-transformers'; + +describe('SCRIPT FLUSH', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(SCRIPT_FLUSH), + ['SCRIPT', 'FLUSH'] + ); + }); + + it('with mode', () => { + assert.deepEqual( + parseArgs(SCRIPT_FLUSH, 'SYNC'), + ['SCRIPT', 'FLUSH', 'SYNC'] + ); + }); + }); + + testUtils.testWithClient('client.scriptFlush', async client => { + assert.equal( + await client.scriptFlush(), + 'OK' + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/SCRIPT_FLUSH.ts b/packages/client/lib/commands/SCRIPT_FLUSH.ts new file mode 100644 index 00000000000..91b61a4e59a --- /dev/null +++ b/packages/client/lib/commands/SCRIPT_FLUSH.ts @@ -0,0 +1,22 @@ +import { CommandParser } from '../client/parser'; +import { SimpleStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Constructs the SCRIPT FLUSH command + * + * @param parser - The command parser + * @param mode - 
Optional flush mode: ASYNC or SYNC + * @see https://redis.io/commands/script-flush/ + */ + parseCommand(parser: CommandParser, mode?: 'ASYNC' | 'SYNC') { + parser.push('SCRIPT', 'FLUSH'); + + if (mode) { + parser.push(mode); + } + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/client/lib/commands/SCRIPT_KILL.spec.ts b/packages/client/lib/commands/SCRIPT_KILL.spec.ts new file mode 100644 index 00000000000..7186efd54cf --- /dev/null +++ b/packages/client/lib/commands/SCRIPT_KILL.spec.ts @@ -0,0 +1,12 @@ +import { strict as assert } from 'node:assert'; +import SCRIPT_KILL from './SCRIPT_KILL'; +import { parseArgs } from './generic-transformers'; + +describe('SCRIPT KILL', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(SCRIPT_KILL), + ['SCRIPT', 'KILL'] + ); + }); +}); diff --git a/packages/client/lib/commands/SCRIPT_KILL.ts b/packages/client/lib/commands/SCRIPT_KILL.ts new file mode 100644 index 00000000000..ee2b2835cc1 --- /dev/null +++ b/packages/client/lib/commands/SCRIPT_KILL.ts @@ -0,0 +1,17 @@ +import { CommandParser } from '../client/parser'; +import { SimpleStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Constructs the SCRIPT KILL command + * + * @param parser - The command parser + * @see https://redis.io/commands/script-kill/ + */ + parseCommand(parser: CommandParser) { + parser.push('SCRIPT', 'KILL'); + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/client/lib/commands/SCRIPT_LOAD.spec.ts b/packages/client/lib/commands/SCRIPT_LOAD.spec.ts new file mode 100644 index 00000000000..b0df9887e11 --- /dev/null +++ b/packages/client/lib/commands/SCRIPT_LOAD.spec.ts @@ -0,0 +1,24 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import SCRIPT_LOAD 
from './SCRIPT_LOAD'; +import { scriptSha1 } from '../lua-script'; +import { parseArgs } from './generic-transformers'; + +describe('SCRIPT LOAD', () => { + const SCRIPT = 'return 1;', + SCRIPT_SHA1 = scriptSha1(SCRIPT); + + it('transformArguments', () => { + assert.deepEqual( + parseArgs(SCRIPT_LOAD, SCRIPT), + ['SCRIPT', 'LOAD', SCRIPT] + ); + }); + + testUtils.testWithClient('client.scriptLoad', async client => { + assert.equal( + await client.scriptLoad(SCRIPT), + SCRIPT_SHA1 + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/SCRIPT_LOAD.ts b/packages/client/lib/commands/SCRIPT_LOAD.ts new file mode 100644 index 00000000000..6e9acb388fc --- /dev/null +++ b/packages/client/lib/commands/SCRIPT_LOAD.ts @@ -0,0 +1,18 @@ +import { CommandParser } from '../client/parser'; +import { BlobStringReply, Command, RedisArgument } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Constructs the SCRIPT LOAD command + * + * @param parser - The command parser + * @param script - The Lua script to load + * @see https://redis.io/commands/script-load/ + */ + parseCommand(parser: CommandParser, script: RedisArgument) { + parser.push('SCRIPT', 'LOAD', script); + }, + transformReply: undefined as unknown as () => BlobStringReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/SDIFF.spec.ts b/packages/client/lib/commands/SDIFF.spec.ts new file mode 100644 index 00000000000..a943a80688d --- /dev/null +++ b/packages/client/lib/commands/SDIFF.spec.ts @@ -0,0 +1,32 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import SDIFF from './SDIFF'; +import { parseArgs } from './generic-transformers'; + +describe('SDIFF', () => { + describe('processCommand', () => { + it('string', () => { + assert.deepEqual( + parseArgs(SDIFF, 'key'), + ['SDIFF', 'key'] + ); + }); + + it('array', () => { + assert.deepEqual( + parseArgs(SDIFF, ['1', '2']), 
+ ['SDIFF', '1', '2'] + ); + }); + }); + + testUtils.testAll('sDiff', async client => { + assert.deepEqual( + await client.sDiff('key'), + [] + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/SDIFF.ts b/packages/client/lib/commands/SDIFF.ts new file mode 100644 index 00000000000..07d700adac6 --- /dev/null +++ b/packages/client/lib/commands/SDIFF.ts @@ -0,0 +1,20 @@ +import { CommandParser } from '../client/parser'; +import { ArrayReply, BlobStringReply, Command } from '../RESP/types'; +import { RedisVariadicArgument } from './generic-transformers'; + +export default { + CACHEABLE: true, + IS_READ_ONLY: true, + /** + * Constructs the SDIFF command + * + * @param parser - The command parser + * @param keys - One or more set keys to compute the difference from + * @see https://redis.io/commands/sdiff/ + */ + parseCommand(parser: CommandParser, keys: RedisVariadicArgument) { + parser.push('SDIFF'); + parser.pushKeys(keys); + }, + transformReply: undefined as unknown as () => ArrayReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/SDIFFSTORE.spec.ts b/packages/client/lib/commands/SDIFFSTORE.spec.ts new file mode 100644 index 00000000000..43213adfbb0 --- /dev/null +++ b/packages/client/lib/commands/SDIFFSTORE.spec.ts @@ -0,0 +1,32 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import SDIFFSTORE from './SDIFFSTORE'; +import { parseArgs } from './generic-transformers'; + +describe('SDIFFSTORE', () => { + describe('transformArguments', () => { + it('string', () => { + assert.deepEqual( + parseArgs(SDIFFSTORE, 'destination', 'key'), + ['SDIFFSTORE', 'destination', 'key'] + ); + }); + + it('array', () => { + assert.deepEqual( + parseArgs(SDIFFSTORE, 'destination', ['1', '2']), + ['SDIFFSTORE', 'destination', '1', '2'] + ); + }); + }); + + testUtils.testAll('sDiffStore', async client => { + assert.equal( + await 
client.sDiffStore('{tag}destination', '{tag}key'), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/SDIFFSTORE.ts b/packages/client/lib/commands/SDIFFSTORE.ts new file mode 100644 index 00000000000..478d015d8c0 --- /dev/null +++ b/packages/client/lib/commands/SDIFFSTORE.ts @@ -0,0 +1,20 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; +import { RedisVariadicArgument } from './generic-transformers'; + +export default { + /** + * Constructs the SDIFFSTORE command + * + * @param parser - The command parser + * @param destination - The destination key to store the result + * @param keys - One or more set keys to compute the difference from + * @see https://redis.io/commands/sdiffstore/ + */ + parseCommand(parser: CommandParser, destination: RedisArgument, keys: RedisVariadicArgument) { + parser.push('SDIFFSTORE'); + parser.pushKey(destination); + parser.pushKeys(keys); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/SET.spec.ts b/packages/client/lib/commands/SET.spec.ts new file mode 100644 index 00000000000..b8aa57fe77b --- /dev/null +++ b/packages/client/lib/commands/SET.spec.ts @@ -0,0 +1,165 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import SET from './SET'; +import { parseArgs } from './generic-transformers'; + +describe('SET', () => { + describe('transformArguments', () => { + describe('value', () => { + it('string', () => { + assert.deepEqual( + parseArgs(SET, 'key', 'value'), + ['SET', 'key', 'value'] + ); + }); + + it('number', () => { + assert.deepEqual( + parseArgs(SET, 'key', 0), + ['SET', 'key', '0'] + ); + }); + }); + + describe('expiration', () => { + it('\'KEEPTTL\'', () => { + assert.deepEqual( + parseArgs(SET, 'key', 'value', { + expiration: 
'KEEPTTL' + }), + ['SET', 'key', 'value', 'KEEPTTL'] + ); + }); + + it('{ type: \'KEEPTTL\' }', () => { + assert.deepEqual( + parseArgs(SET, 'key', 'value', { + expiration: { + type: 'KEEPTTL' + } + }), + ['SET', 'key', 'value', 'KEEPTTL'] + ); + }); + + it('{ type: \'EX\' }', () => { + assert.deepEqual( + parseArgs(SET, 'key', 'value', { + expiration: { + type: 'EX', + value: 0 + } + }), + ['SET', 'key', 'value', 'EX', '0'] + ); + }); + + it('with EX (backwards compatibility)', () => { + assert.deepEqual( + parseArgs(SET, 'key', 'value', { + EX: 0 + }), + ['SET', 'key', 'value', 'EX', '0'] + ); + }); + + it('with PX (backwards compatibility)', () => { + assert.deepEqual( + parseArgs(SET, 'key', 'value', { + PX: 0 + }), + ['SET', 'key', 'value', 'PX', '0'] + ); + }); + + it('with EXAT (backwards compatibility)', () => { + assert.deepEqual( + parseArgs(SET, 'key', 'value', { + EXAT: 0 + }), + ['SET', 'key', 'value', 'EXAT', '0'] + ); + }); + + it('with PXAT (backwards compatibility)', () => { + assert.deepEqual( + parseArgs(SET, 'key', 'value', { + PXAT: 0 + }), + ['SET', 'key', 'value', 'PXAT', '0'] + ); + }); + + it('with KEEPTTL (backwards compatibility)', () => { + assert.deepEqual( + parseArgs(SET, 'key', 'value', { + KEEPTTL: true + }), + ['SET', 'key', 'value', 'KEEPTTL'] + ); + }); + }); + + describe('condition', () => { + it('with condition', () => { + assert.deepEqual( + parseArgs(SET, 'key', 'value', { + condition: 'NX' + }), + ['SET', 'key', 'value', 'NX'] + ); + }); + + it('with NX (backwards compatibility)', () => { + assert.deepEqual( + parseArgs(SET, 'key', 'value', { + NX: true + }), + ['SET', 'key', 'value', 'NX'] + ); + }); + + it('with XX (backwards compatibility)', () => { + assert.deepEqual( + parseArgs(SET, 'key', 'value', { + XX: true + }), + ['SET', 'key', 'value', 'XX'] + ); + }); + }); + + it('with GET', () => { + assert.deepEqual( + parseArgs(SET, 'key', 'value', { + GET: true + }), + ['SET', 'key', 'value', 'GET'] + ); + }); + + it('with 
expiration, condition, GET', () => { + assert.deepEqual( + parseArgs(SET, 'key', 'value', { + expiration: { + type: 'EX', + value: 0 + }, + condition: 'NX', + GET: true + }), + ['SET', 'key', 'value', 'EX', '0', 'NX', 'GET'] + ); + }); + }); + + testUtils.testAll('set', async client => { + assert.equal( + await client.set('key', 'value'), + 'OK' + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/SET.ts b/packages/client/lib/commands/SET.ts new file mode 100644 index 00000000000..d1384255679 --- /dev/null +++ b/packages/client/lib/commands/SET.ts @@ -0,0 +1,96 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, SimpleStringReply, BlobStringReply, NullReply, Command } from '../RESP/types'; + +export interface SetOptions { + expiration?: { + type: 'EX' | 'PX' | 'EXAT' | 'PXAT'; + value: number; + } | { + type: 'KEEPTTL'; + } | 'KEEPTTL'; + /** + * @deprecated Use `expiration` { type: 'EX', value: number } instead + */ + EX?: number; + /** + * @deprecated Use `expiration` { type: 'PX', value: number } instead + */ + PX?: number; + /** + * @deprecated Use `expiration` { type: 'EXAT', value: number } instead + */ + EXAT?: number; + /** + * @deprecated Use `expiration` { type: 'PXAT', value: number } instead + */ + PXAT?: number; + /** + * @deprecated Use `expiration` 'KEEPTTL' instead + */ + KEEPTTL?: boolean; + + condition?: 'NX' | 'XX'; + /** + * @deprecated Use `{ condition: 'NX' }` instead. + */ + NX?: boolean; + /** + * @deprecated Use `{ condition: 'XX' }` instead. 
+ */ + XX?: boolean; + + GET?: boolean; +} + +export default { + /** + * Constructs the SET command + * + * @param parser - The command parser + * @param key - The key to set + * @param value - The value to set + * @param options - Additional options for the SET command + * @see https://redis.io/commands/set/ + */ + parseCommand(parser: CommandParser, key: RedisArgument, value: RedisArgument | number, options?: SetOptions) { + parser.push('SET'); + parser.pushKey(key); + parser.push(typeof value === 'number' ? value.toString() : value); + + if (options?.expiration) { + if (typeof options.expiration === 'string') { + parser.push(options.expiration); + } else if (options.expiration.type === 'KEEPTTL') { + parser.push('KEEPTTL'); + } else { + parser.push( + options.expiration.type, + options.expiration.value.toString() + ); + } + } else if (options?.EX !== undefined) { + parser.push('EX', options.EX.toString()); + } else if (options?.PX !== undefined) { + parser.push('PX', options.PX.toString()); + } else if (options?.EXAT !== undefined) { + parser.push('EXAT', options.EXAT.toString()); + } else if (options?.PXAT !== undefined) { + parser.push('PXAT', options.PXAT.toString()); + } else if (options?.KEEPTTL) { + parser.push('KEEPTTL'); + } + + if (options?.condition) { + parser.push(options.condition); + } else if (options?.NX) { + parser.push('NX'); + } else if (options?.XX) { + parser.push('XX'); + } + + if (options?.GET) { + parser.push('GET'); + } + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> | BlobStringReply | NullReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/SETBIT.spec.ts b/packages/client/lib/commands/SETBIT.spec.ts new file mode 100644 index 00000000000..1eedcc69959 --- /dev/null +++ b/packages/client/lib/commands/SETBIT.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import SETBIT from './SETBIT'; +import { parseArgs 
} from './generic-transformers'; + +describe('SETBIT', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(SETBIT, 'key', 0, 1), + ['SETBIT', 'key', '0', '1'] + ); + }); + + testUtils.testAll('setBit', async client => { + assert.equal( + await client.setBit('key', 0, 1), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/SETBIT.ts b/packages/client/lib/commands/SETBIT.ts new file mode 100644 index 00000000000..b9c29796db9 --- /dev/null +++ b/packages/client/lib/commands/SETBIT.ts @@ -0,0 +1,22 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; +import { BitValue } from './generic-transformers'; + +export default { + IS_READ_ONLY: false, + /** + * Constructs the SETBIT command + * + * @param parser - The command parser + * @param key - The key to set the bit on + * @param offset - The bit offset (zero-based) + * @param value - The bit value (0 or 1) + * @see https://redis.io/commands/setbit/ + */ + parseCommand(parser: CommandParser, key: RedisArgument, offset: number, value: BitValue) { + parser.push('SETBIT'); + parser.pushKey(key); + parser.push(offset.toString(), value.toString()); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/SETEX.spec.ts b/packages/client/lib/commands/SETEX.spec.ts new file mode 100644 index 00000000000..7bc934ccd68 --- /dev/null +++ b/packages/client/lib/commands/SETEX.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import SETEX from './SETEX'; +import { parseArgs } from './generic-transformers'; + +describe('SETEX', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(SETEX, 'key', 1, 'value'), + ['SETEX', 'key', '1', 'value'] + ); + }); + + testUtils.testAll('setEx', async 
client => { + assert.equal( + await client.setEx('key', 1, 'value'), + 'OK' + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/SETEX.ts b/packages/client/lib/commands/SETEX.ts new file mode 100644 index 00000000000..39c7c60f53b --- /dev/null +++ b/packages/client/lib/commands/SETEX.ts @@ -0,0 +1,20 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, SimpleStringReply, Command } from '../RESP/types'; + +export default { + /** + * Constructs the SETEX command + * + * @param parser - The command parser + * @param key - The key to set + * @param seconds - The expiration time in seconds + * @param value - The value to set + * @see https://redis.io/commands/setex/ + */ + parseCommand(parser: CommandParser, key: RedisArgument, seconds: number, value: RedisArgument) { + parser.push('SETEX'); + parser.pushKey(key); + parser.push(seconds.toString(), value); + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/client/lib/commands/SETNX.spec.ts b/packages/client/lib/commands/SETNX.spec.ts new file mode 100644 index 00000000000..81a5af3d411 --- /dev/null +++ b/packages/client/lib/commands/SETNX.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import SETNX from './SETNX'; +import { parseArgs } from './generic-transformers'; + +describe('SETNX', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(SETNX, 'key', 'value'), + ['SETNX', 'key', 'value'] + ); + }); + + testUtils.testAll('setNX', async client => { + assert.equal( + await client.setNX('key', 'value'), + 1 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/SETNX.ts b/packages/client/lib/commands/SETNX.ts new file mode 100644 index 00000000000..b32b6c5ef34 --- 
/dev/null +++ b/packages/client/lib/commands/SETNX.ts @@ -0,0 +1,19 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; + +export default { + /** + * Constructs the SETNX command + * + * @param parser - The command parser + * @param key - The key to set if it doesn't exist + * @param value - The value to set + * @see https://redis.io/commands/setnx/ + */ + parseCommand(parser: CommandParser, key: RedisArgument, value: RedisArgument) { + parser.push('SETNX'); + parser.pushKey(key); + parser.push(value); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/SETRANGE.spec.ts b/packages/client/lib/commands/SETRANGE.spec.ts new file mode 100644 index 00000000000..acdab5bcd3b --- /dev/null +++ b/packages/client/lib/commands/SETRANGE.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import SETRANGE from './SETRANGE'; +import { parseArgs } from './generic-transformers'; + +describe('SETRANGE', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(SETRANGE, 'key', 0, 'value'), + ['SETRANGE', 'key', '0', 'value'] + ); + }); + + testUtils.testAll('setRange', async client => { + assert.equal( + await client.setRange('key', 0, 'value'), + 5 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/SETRANGE.ts b/packages/client/lib/commands/SETRANGE.ts new file mode 100644 index 00000000000..366e6c28a7d --- /dev/null +++ b/packages/client/lib/commands/SETRANGE.ts @@ -0,0 +1,20 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; + +export default { + /** + * Constructs the SETRANGE command + * + * @param parser - The command parser + * @param key - The key to modify + * @param offset - The 
offset at which to start writing + * @param value - The value to write at the offset + * @see https://redis.io/commands/setrange/ + */ + parseCommand(parser: CommandParser, key: RedisArgument, offset: number, value: RedisArgument) { + parser.push('SETRANGE'); + parser.pushKey(key); + parser.push(offset.toString(), value); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/SHUTDOWN.spec.ts b/packages/client/lib/commands/SHUTDOWN.spec.ts new file mode 100644 index 00000000000..9c4ca852ad3 --- /dev/null +++ b/packages/client/lib/commands/SHUTDOWN.spec.ts @@ -0,0 +1,50 @@ +import { strict as assert } from 'node:assert'; +import SHUTDOWN from './SHUTDOWN'; +import { parseArgs } from './generic-transformers'; + +describe('SHUTDOWN', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(SHUTDOWN), + ['SHUTDOWN'] + ); + }); + + it('with mode', () => { + assert.deepEqual( + parseArgs(SHUTDOWN, { + mode: 'NOSAVE' + }), + ['SHUTDOWN', 'NOSAVE'] + ); + }); + + it('with NOW', () => { + assert.deepEqual( + parseArgs(SHUTDOWN, { + NOW: true + }), + ['SHUTDOWN', 'NOW'] + ); + }); + + it('with FORCE', () => { + assert.deepEqual( + parseArgs(SHUTDOWN, { + FORCE: true + }), + ['SHUTDOWN', 'FORCE'] + ); + }); + + it('with ABORT', () => { + assert.deepEqual( + parseArgs(SHUTDOWN, { + ABORT: true + }), + ['SHUTDOWN', 'ABORT'] + ); + }); + }); +}); diff --git a/packages/client/lib/commands/SHUTDOWN.ts b/packages/client/lib/commands/SHUTDOWN.ts new file mode 100644 index 00000000000..6a5416d430c --- /dev/null +++ b/packages/client/lib/commands/SHUTDOWN.ts @@ -0,0 +1,49 @@ +import { CommandParser } from '../client/parser'; +import { SimpleStringReply, Command } from '../RESP/types'; + +/** + * Options for the SHUTDOWN command + * + * @property mode - NOSAVE will not save DB, SAVE will force save DB + * @property NOW - Immediately terminate all clients + * 
@property FORCE - Force shutdown even in case of errors + * @property ABORT - Abort a shutdown in progress + */ +export interface ShutdownOptions { + mode?: 'NOSAVE' | 'SAVE'; + NOW?: boolean; + FORCE?: boolean; + ABORT?: boolean; +} + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: false, + /** + * Constructs the SHUTDOWN command + * + * @param parser - The command parser + * @param options - Options for the shutdown process + * @see https://redis.io/commands/shutdown/ + */ + parseCommand(parser: CommandParser, options?: ShutdownOptions) { + parser.push('SHUTDOWN'); + + if (options?.mode) { + parser.push(options.mode); + } + + if (options?.NOW) { + parser.push('NOW'); + } + + if (options?.FORCE) { + parser.push('FORCE'); + } + + if (options?.ABORT) { + parser.push('ABORT'); + } + }, + transformReply: undefined as unknown as () => void | SimpleStringReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/SINTER.spec.ts b/packages/client/lib/commands/SINTER.spec.ts new file mode 100644 index 00000000000..6ca7b959ca7 --- /dev/null +++ b/packages/client/lib/commands/SINTER.spec.ts @@ -0,0 +1,32 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import SINTER from './SINTER'; +import { parseArgs } from './generic-transformers'; + +describe('SINTER', () => { + describe('processCommand', () => { + it('string', () => { + assert.deepEqual( + parseArgs(SINTER, 'key'), + ['SINTER', 'key'] + ); + }); + + it('array', () => { + assert.deepEqual( + parseArgs(SINTER, ['1', '2']), + ['SINTER', '1', '2'] + ); + }); + }); + + testUtils.testAll('sInter', async client => { + assert.deepEqual( + await client.sInter('key'), + [] + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/SINTER.ts b/packages/client/lib/commands/SINTER.ts new file mode 100644 index 00000000000..a129d71fd7a --- /dev/null +++ 
b/packages/client/lib/commands/SINTER.ts @@ -0,0 +1,20 @@ +import { CommandParser } from '../client/parser'; +import { ArrayReply, BlobStringReply, Command } from '../RESP/types'; +import { RedisVariadicArgument } from './generic-transformers'; + +export default { + CACHEABLE: true, + IS_READ_ONLY: true, + /** + * Constructs the SINTER command + * + * @param parser - The command parser + * @param keys - One or more set keys to compute the intersection from + * @see https://redis.io/commands/sinter/ + */ + parseCommand(parser: CommandParser, keys: RedisVariadicArgument) { + parser.push('SINTER'); + parser.pushKeys(keys); + }, + transformReply: undefined as unknown as () => ArrayReply<BlobStringReply> +} as const satisfies Command; diff --git a/packages/client/lib/commands/SINTERCARD.spec.ts b/packages/client/lib/commands/SINTERCARD.spec.ts new file mode 100644 index 00000000000..51aed13415d --- /dev/null +++ b/packages/client/lib/commands/SINTERCARD.spec.ts @@ -0,0 +1,43 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import SINTERCARD from './SINTERCARD'; +import { parseArgs } from './generic-transformers'; + +describe('SINTERCARD', () => { + testUtils.isVersionGreaterThanHook([7]); + + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(SINTERCARD, ['1', '2']), + ['SINTERCARD', '2', '1', '2'] + ); + }); + + it('with limit (backwards compatibility)', () => { + assert.deepEqual( + parseArgs(SINTERCARD, ['1', '2'], 1), + ['SINTERCARD', '2', '1', '2', 'LIMIT', '1'] + ); + }); + + it('with LIMIT', () => { + assert.deepEqual( + parseArgs(SINTERCARD, ['1', '2'], { + LIMIT: 1 + }), + ['SINTERCARD', '2', '1', '2', 'LIMIT', '1'] + ); + }); + }); + + testUtils.testAll('sInterCard', async client => { + assert.deepEqual( + await client.sInterCard('key'), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git 
a/packages/client/lib/commands/SINTERCARD.ts b/packages/client/lib/commands/SINTERCARD.ts new file mode 100644 index 00000000000..191c0881a8d --- /dev/null +++ b/packages/client/lib/commands/SINTERCARD.ts @@ -0,0 +1,35 @@ +import { CommandParser } from '../client/parser'; +import { NumberReply, Command } from '../RESP/types'; +import { RedisVariadicArgument } from './generic-transformers'; + +/** + * Options for the SINTERCARD command + * + * @property LIMIT - Maximum number of elements to return + */ +export interface SInterCardOptions { + LIMIT?: number; +} + +export default { + IS_READ_ONLY: true, + /** + * Constructs the SINTERCARD command + * + * @param parser - The command parser + * @param keys - One or more set keys to compute the intersection cardinality from + * @param options - Options for the SINTERCARD command or a number for LIMIT (backwards compatibility) + * @see https://redis.io/commands/sintercard/ + */ + parseCommand(parser: CommandParser, keys: RedisVariadicArgument, options?: SInterCardOptions | number) { + parser.push('SINTERCARD'); + parser.pushKeysLength(keys); + + if (typeof options === 'number') { // backwards compatibility + parser.push('LIMIT', options.toString()); + } else if (options?.LIMIT !== undefined) { + parser.push('LIMIT', options.LIMIT.toString()); + } + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/SINTERSTORE.spec.ts b/packages/client/lib/commands/SINTERSTORE.spec.ts new file mode 100644 index 00000000000..83302a5c829 --- /dev/null +++ b/packages/client/lib/commands/SINTERSTORE.spec.ts @@ -0,0 +1,32 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import SINTERSTORE from './SINTERSTORE'; +import { parseArgs } from './generic-transformers'; + +describe('SINTERSTORE', () => { + describe('transformArguments', () => { + it('string', () => { + assert.deepEqual( + parseArgs(SINTERSTORE, 
'destination', 'key'), + ['SINTERSTORE', 'destination', 'key'] + ); + }); + + it('array', () => { + assert.deepEqual( + parseArgs(SINTERSTORE, 'destination', ['1', '2']), + ['SINTERSTORE', 'destination', '1', '2'] + ); + }); + }); + + testUtils.testAll('sInterStore', async client => { + assert.equal( + await client.sInterStore('{tag}destination', '{tag}key'), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/SINTERSTORE.ts b/packages/client/lib/commands/SINTERSTORE.ts new file mode 100644 index 00000000000..377b63fbddc --- /dev/null +++ b/packages/client/lib/commands/SINTERSTORE.ts @@ -0,0 +1,21 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; +import { RedisVariadicArgument } from './generic-transformers'; + +export default { + IS_READ_ONLY: false, + /** + * Constructs the SINTERSTORE command + * + * @param parser - The command parser + * @param destination - The destination key to store the result + * @param keys - One or more set keys to compute the intersection from + * @see https://redis.io/commands/sinterstore/ + */ + parseCommand(parser: CommandParser, destination: RedisArgument, keys: RedisVariadicArgument) { + parser.push('SINTERSTORE'); + parser.pushKey(destination) + parser.pushKeys(keys); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/SISMEMBER.spec.ts b/packages/client/lib/commands/SISMEMBER.spec.ts new file mode 100644 index 00000000000..4796475c52c --- /dev/null +++ b/packages/client/lib/commands/SISMEMBER.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import SISMEMBER from './SISMEMBER'; +import { parseArgs } from './generic-transformers'; + +describe('SISMEMBER', () => { + it('processCommand', () => { + assert.deepEqual( + 
parseArgs(SISMEMBER, 'key', 'member'), + ['SISMEMBER', 'key', 'member'] + ); + }); + + testUtils.testAll('sIsMember', async client => { + assert.equal( + await client.sIsMember('key', 'member'), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/SISMEMBER.ts b/packages/client/lib/commands/SISMEMBER.ts new file mode 100644 index 00000000000..3310d43d97b --- /dev/null +++ b/packages/client/lib/commands/SISMEMBER.ts @@ -0,0 +1,21 @@ +import { CommandParser } from '../client/parser'; +import { NumberReply, Command, RedisArgument } from '../RESP/types'; + +export default { + CACHEABLE: true, + IS_READ_ONLY: true, + /** + * Constructs the SISMEMBER command + * + * @param parser - The command parser + * @param key - The set key to check membership in + * @param member - The member to check for existence + * @see https://redis.io/commands/sismember/ + */ + parseCommand(parser: CommandParser, key: RedisArgument, member: RedisArgument) { + parser.push('SISMEMBER'); + parser.pushKey(key); + parser.push(member); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/SMEMBERS.spec.ts b/packages/client/lib/commands/SMEMBERS.spec.ts new file mode 100644 index 00000000000..6e2582e5abc --- /dev/null +++ b/packages/client/lib/commands/SMEMBERS.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import SMEMBERS from './SMEMBERS'; +import { parseArgs } from './generic-transformers'; + +describe('SMEMBERS', () => { + it('processCommand', () => { + assert.deepEqual( + parseArgs(SMEMBERS, 'key'), + ['SMEMBERS', 'key'] + ); + }); + + testUtils.testAll('sMembers', async client => { + assert.deepEqual( + await client.sMembers('key'), + [] + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git 
a/packages/client/lib/commands/SMEMBERS.ts b/packages/client/lib/commands/SMEMBERS.ts new file mode 100644 index 00000000000..399ffd86147 --- /dev/null +++ b/packages/client/lib/commands/SMEMBERS.ts @@ -0,0 +1,22 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, ArrayReply, BlobStringReply, SetReply, Command } from '../RESP/types'; + +export default { + CACHEABLE: true, + IS_READ_ONLY: true, + /** + * Constructs the SMEMBERS command + * + * @param parser - The command parser + * @param key - The set key to get all members from + * @see https://redis.io/commands/smembers/ + */ + parseCommand(parser: CommandParser, key: RedisArgument) { + parser.push('SMEMBERS'); + parser.pushKey(key); + }, + transformReply: { + 2: undefined as unknown as () => ArrayReply<BlobStringReply>, + 3: undefined as unknown as () => SetReply<BlobStringReply> + } +} as const satisfies Command; diff --git a/packages/client/lib/commands/SMISMEMBER.spec.ts b/packages/client/lib/commands/SMISMEMBER.spec.ts new file mode 100644 index 00000000000..deff6912360 --- /dev/null +++ b/packages/client/lib/commands/SMISMEMBER.spec.ts @@ -0,0 +1,25 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import SMISMEMBER from './SMISMEMBER'; +import { parseArgs } from './generic-transformers'; + +describe('SMISMEMBER', () => { + testUtils.isVersionGreaterThanHook([6, 2]); + + it('processCommand', () => { + assert.deepEqual( + parseArgs(SMISMEMBER, 'key', ['1', '2']), + ['SMISMEMBER', 'key', '1', '2'] + ); + }); + + testUtils.testAll('smIsMember', async client => { + assert.deepEqual( + await client.smIsMember('key', ['1', '2']), + [0, 0] + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/SMISMEMBER.ts b/packages/client/lib/commands/SMISMEMBER.ts new file mode 100644 index 00000000000..b5950dcfd7f --- /dev/null +++ b/packages/client/lib/commands/SMISMEMBER.ts @@ -0,0 +1,21 @@ +import { 
CommandParser } from '../client/parser'; +import { RedisArgument, ArrayReply, NumberReply, Command } from '../RESP/types'; + +export default { + CACHEABLE: true, + IS_READ_ONLY: true, + /** + * Constructs the SMISMEMBER command + * + * @param parser - The command parser + * @param key - The set key to check membership in + * @param members - The members to check for existence + * @see https://redis.io/commands/smismember/ + */ + parseCommand(parser: CommandParser, key: RedisArgument, members: Array<RedisArgument>) { + parser.push('SMISMEMBER'); + parser.pushKey(key); + parser.pushVariadic(members); + }, + transformReply: undefined as unknown as () => ArrayReply<NumberReply> +} as const satisfies Command; diff --git a/packages/client/lib/commands/SMOVE.spec.ts b/packages/client/lib/commands/SMOVE.spec.ts new file mode 100644 index 00000000000..c68a6e41914 --- /dev/null +++ b/packages/client/lib/commands/SMOVE.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import SMOVE from './SMOVE'; +import { parseArgs } from './generic-transformers'; + +describe('SMOVE', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(SMOVE, 'source', 'destination', 'member'), + ['SMOVE', 'source', 'destination', 'member'] + ); + }); + + testUtils.testAll('sMove', async client => { + assert.equal( + await client.sMove('{tag}source', '{tag}destination', 'member'), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/SMOVE.ts b/packages/client/lib/commands/SMOVE.ts new file mode 100644 index 00000000000..d5f150b99f2 --- /dev/null +++ b/packages/client/lib/commands/SMOVE.ts @@ -0,0 +1,21 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; + +export default { + IS_READ_ONLY: false, + /** + * Constructs the SMOVE command + * + * @param 
source - The source set key + * @param destination - The destination set key + * @param member - The member to move + * @see https://redis.io/commands/smove/ + */ + parseCommand(parser: CommandParser, source: RedisArgument, destination: RedisArgument, member: RedisArgument) { + parser.push('SMOVE'); + parser.pushKeys([source, destination]); + parser.push(member); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/SORT.spec.ts b/packages/client/lib/commands/SORT.spec.ts new file mode 100644 index 00000000000..330b321a1b8 --- /dev/null +++ b/packages/client/lib/commands/SORT.spec.ts @@ -0,0 +1,100 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import SORT from './SORT'; +import { parseArgs } from './generic-transformers'; + +describe('SORT', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(SORT, 'key'), + ['SORT', 'key'] + ); + }); + + it('with BY', () => { + assert.deepEqual( + parseArgs(SORT, 'key', { + BY: 'pattern' + }), + ['SORT', 'key', 'BY', 'pattern'] + ); + }); + + it('with LIMIT', () => { + assert.deepEqual( + parseArgs(SORT, 'key', { + LIMIT: { + offset: 0, + count: 1 + } + }), + ['SORT', 'key', 'LIMIT', '0', '1'] + ); + }); + + describe('with GET', () => { + it('string', () => { + assert.deepEqual( + parseArgs(SORT, 'key', { + GET: 'pattern' + }), + ['SORT', 'key', 'GET', 'pattern'] + ); + }); + + it('array', () => { + assert.deepEqual( + parseArgs(SORT, 'key', { + GET: ['1', '2'] + }), + ['SORT', 'key', 'GET', '1', 'GET', '2'] + ); + }); + }); + + it('with DIRECTION', () => { + assert.deepEqual( + parseArgs(SORT, 'key', { + DIRECTION: 'ASC' + }), + ['SORT', 'key', 'ASC'] + ); + }); + + it('with ALPHA', () => { + assert.deepEqual( + parseArgs(SORT, 'key', { + ALPHA: true + }), + ['SORT', 'key', 'ALPHA'] + ); + }); + + it('with BY, LIMIT, GET, DIRECTION, 
ALPHA', () => { + assert.deepEqual( + parseArgs(SORT, 'key', { + BY: 'pattern', + LIMIT: { + offset: 0, + count: 1 + }, + GET: 'pattern', + DIRECTION: 'ASC', + ALPHA: true + }), + ['SORT', 'key', 'BY', 'pattern', 'LIMIT', '0', '1', 'GET', 'pattern', 'ASC', 'ALPHA'] + ); + }); + }); + + testUtils.testAll('sort', async client => { + assert.deepEqual( + await client.sort('key'), + [] + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/SORT.ts b/packages/client/lib/commands/SORT.ts new file mode 100644 index 00000000000..5ec889f3063 --- /dev/null +++ b/packages/client/lib/commands/SORT.ts @@ -0,0 +1,84 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, ArrayReply, BlobStringReply, Command } from '../RESP/types'; + +/** + * Options for the SORT command + * + * @property BY - Pattern for external key to sort by + * @property LIMIT - Offset and count for results pagination + * @property GET - Pattern(s) for retrieving external keys + * @property DIRECTION - Sort direction: ASC (ascending) or DESC (descending) + * @property ALPHA - Sort lexicographically instead of numerically + */ +export interface SortOptions { + BY?: RedisArgument; + LIMIT?: { + offset: number; + count: number; + }; + GET?: RedisArgument | Array; + DIRECTION?: 'ASC' | 'DESC'; + ALPHA?: boolean; +} + +/** + * Parses sort arguments for the SORT command + * + * @param parser - The command parser + * @param key - The key to sort + * @param options - Sort options + */ +export function parseSortArguments( + parser: CommandParser, + key: RedisArgument, + options?: SortOptions +) { + parser.pushKey(key); + + if (options?.BY) { + parser.push('BY', options.BY); + } + + if (options?.LIMIT) { + parser.push( + 'LIMIT', + options.LIMIT.offset.toString(), + options.LIMIT.count.toString() + ); + } + + if (options?.GET) { + if (Array.isArray(options.GET)) { + for (const pattern of options.GET) { + parser.push('GET', 
pattern); + } + } else { + parser.push('GET', options.GET); + } + } + + if (options?.DIRECTION) { + parser.push(options.DIRECTION); + } + + if (options?.ALPHA) { + parser.push('ALPHA'); + } +} + +export default { + IS_READ_ONLY: true, + /** + * Constructs the SORT command + * + * @param parser - The command parser + * @param key - The key to sort (list, set, or sorted set) + * @param options - Sort options + * @see https://redis.io/commands/sort/ + */ + parseCommand(parser: CommandParser, key: RedisArgument, options?: SortOptions) { + parser.push('SORT'); + parseSortArguments(parser, key, options); + }, + transformReply: undefined as unknown as () => ArrayReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/SORT_RO.spec.ts b/packages/client/lib/commands/SORT_RO.spec.ts new file mode 100644 index 00000000000..86f8e507033 --- /dev/null +++ b/packages/client/lib/commands/SORT_RO.spec.ts @@ -0,0 +1,102 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import SORT_RO from './SORT_RO'; +import { parseArgs } from './generic-transformers'; + +describe('SORT_RO', () => { + testUtils.isVersionGreaterThanHook([7]); + + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(SORT_RO, 'key'), + ['SORT_RO', 'key'] + ); + }); + + it('with BY', () => { + assert.deepEqual( + parseArgs(SORT_RO, 'key', { + BY: 'pattern' + }), + ['SORT_RO', 'key', 'BY', 'pattern'] + ); + }); + + it('with LIMIT', () => { + assert.deepEqual( + parseArgs(SORT_RO, 'key', { + LIMIT: { + offset: 0, + count: 1 + } + }), + ['SORT_RO', 'key', 'LIMIT', '0', '1'] + ); + }); + + describe('with GET', () => { + it('string', () => { + assert.deepEqual( + parseArgs(SORT_RO, 'key', { + GET: 'pattern' + }), + ['SORT_RO', 'key', 'GET', 'pattern'] + ); + }); + + it('array', () => { + assert.deepEqual( + parseArgs(SORT_RO, 'key', { + GET: ['1', '2'] + }), + ['SORT_RO', 'key', 'GET', '1', 'GET', '2'] + ); 
+ }); + }); + + it('with DIRECTION', () => { + assert.deepEqual( + parseArgs(SORT_RO, 'key', { + DIRECTION: 'ASC' + }), + ['SORT_RO', 'key', 'ASC'] + ); + }); + + it('with ALPHA', () => { + assert.deepEqual( + parseArgs(SORT_RO, 'key', { + ALPHA: true + }), + ['SORT_RO', 'key', 'ALPHA'] + ); + }); + + it('with BY, LIMIT, GET, DIRECTION, ALPHA', () => { + assert.deepEqual( + parseArgs(SORT_RO, 'key', { + BY: 'pattern', + LIMIT: { + offset: 0, + count: 1 + }, + GET: 'pattern', + DIRECTION: 'ASC', + ALPHA: true, + }), + ['SORT_RO', 'key', 'BY', 'pattern', 'LIMIT', '0', '1', 'GET', 'pattern', 'ASC', 'ALPHA'] + ); + }); + }); + + testUtils.testAll('sortRo', async client => { + assert.deepEqual( + await client.sortRo('key'), + [] + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/SORT_RO.ts b/packages/client/lib/commands/SORT_RO.ts new file mode 100644 index 00000000000..5531f927d52 --- /dev/null +++ b/packages/client/lib/commands/SORT_RO.ts @@ -0,0 +1,17 @@ +import { Command } from '../RESP/types'; +import SORT, { parseSortArguments } from './SORT'; + +export default { + IS_READ_ONLY: true, + /** + * Read-only variant of SORT that sorts the elements in a list, set or sorted set. + * @param args - Same parameters as the SORT command. 
+ */ + parseCommand(...args: Parameters<typeof SORT.parseCommand>) { + const parser = args[0]; + + parser.push('SORT_RO'); + parseSortArguments(...args); + }, + transformReply: SORT.transformReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/SORT_STORE.spec.ts b/packages/client/lib/commands/SORT_STORE.spec.ts new file mode 100644 index 00000000000..a812cec52c5 --- /dev/null +++ b/packages/client/lib/commands/SORT_STORE.spec.ts @@ -0,0 +1,100 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import SORT_STORE from './SORT_STORE'; +import { parseArgs } from './generic-transformers'; + +describe('SORT STORE', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(SORT_STORE, 'source', 'destination'), + ['SORT', 'source', 'STORE', 'destination'] + ); + }); + + it('with BY', () => { + assert.deepEqual( + parseArgs(SORT_STORE, 'source', 'destination', { + BY: 'pattern' + }), + ['SORT', 'source', 'BY', 'pattern', 'STORE', 'destination'] + ); + }); + + it('with LIMIT', () => { + assert.deepEqual( + parseArgs(SORT_STORE, 'source', 'destination', { + LIMIT: { + offset: 0, + count: 1 + } + }), + ['SORT', 'source', 'LIMIT', '0', '1', 'STORE', 'destination'] + ); + }); + + describe('with GET', () => { + it('string', () => { + assert.deepEqual( + parseArgs(SORT_STORE, 'source', 'destination', { + GET: 'pattern' + }), + ['SORT', 'source', 'GET', 'pattern', 'STORE', 'destination'] + ); + }); + + it('array', () => { + assert.deepEqual( + parseArgs(SORT_STORE, 'source', 'destination', { + GET: ['1', '2'] + }), + ['SORT', 'source', 'GET', '1', 'GET', '2', 'STORE', 'destination'] + ); + }); + }); + + it('with DIRECTION', () => { + assert.deepEqual( + parseArgs(SORT_STORE, 'source', 'destination', { + DIRECTION: 'ASC' + }), + ['SORT', 'source', 'ASC', 'STORE', 'destination'] + ); + }); + + it('with ALPHA', () => { + assert.deepEqual( + parseArgs(SORT_STORE, 'source', 'destination', 
{ + ALPHA: true + }), + ['SORT', 'source', 'ALPHA', 'STORE', 'destination'] + ); + }); + + it('with BY, LIMIT, GET, DIRECTION, ALPHA', () => { + assert.deepEqual( + parseArgs(SORT_STORE, 'source', 'destination', { + BY: 'pattern', + LIMIT: { + offset: 0, + count: 1 + }, + GET: 'pattern', + DIRECTION: 'ASC', + ALPHA: true + }), + ['SORT', 'source', 'BY', 'pattern', 'LIMIT', '0', '1', 'GET', 'pattern', 'ASC', 'ALPHA', 'STORE', 'destination'] + ); + }); + }); + + testUtils.testAll('sortStore', async client => { + assert.equal( + await client.sortStore('{tag}source', '{tag}destination'), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/SORT_STORE.ts b/packages/client/lib/commands/SORT_STORE.ts new file mode 100644 index 00000000000..5fd52e076df --- /dev/null +++ b/packages/client/lib/commands/SORT_STORE.ts @@ -0,0 +1,19 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; +import SORT, { SortOptions } from './SORT'; + +export default { + IS_READ_ONLY: false, + /** + * Sorts the elements in a list, set or sorted set and stores the result in a new list. + * @param parser - The Redis command parser. + * @param source - Key of the source list, set or sorted set. + * @param destination - Destination key where the result will be stored. + * @param options - Optional sorting parameters. 
+ */ + parseCommand(parser: CommandParser, source: RedisArgument, destination: RedisArgument, options?: SortOptions) { + SORT.parseCommand(parser, source, options); + parser.push('STORE', destination); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/SPOP.spec.ts b/packages/client/lib/commands/SPOP.spec.ts new file mode 100644 index 00000000000..542e1ba3fcb --- /dev/null +++ b/packages/client/lib/commands/SPOP.spec.ts @@ -0,0 +1,38 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import SPOP from './SPOP'; +import { BasicCommandParser } from '../client/parser'; + +describe('SPOP', () => { + it('transformArguments', () => { + const parser = new BasicCommandParser(); + SPOP.parseCommand(parser, 'key'); + assert.deepEqual( + parser.redisArgs, + ['SPOP', 'key'] + ); + }); + + testUtils.testAll('sPop', async client => { + assert.equal( + await client.sPop('key'), + null + ); + + await client.sAdd('key', 'member'); + + assert.equal( + await client.sPop('key'), + 'member' + ); + + assert.equal( + await client.sPop('key'), + null + ); + + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/SPOP.ts b/packages/client/lib/commands/SPOP.ts new file mode 100644 index 00000000000..8e9450b2b01 --- /dev/null +++ b/packages/client/lib/commands/SPOP.ts @@ -0,0 +1,18 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, BlobStringReply, NullReply, Command } from '../RESP/types'; + +export default { + IS_READ_ONLY: false, + /** + * Constructs the SPOP command to remove and return a random member from a set + * + * @param parser - The command parser + * @param key - The key of the set to pop from + * @see https://redis.io/commands/spop/ + */ + parseCommand(parser: CommandParser, key: RedisArgument) { + parser.push('SPOP'); + parser.pushKey(key); + }, 
+ transformReply: undefined as unknown as () => BlobStringReply | NullReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/SPOP_COUNT.spec.ts b/packages/client/lib/commands/SPOP_COUNT.spec.ts new file mode 100644 index 00000000000..9720101f31f --- /dev/null +++ b/packages/client/lib/commands/SPOP_COUNT.spec.ts @@ -0,0 +1,38 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import SPOP_COUNT from './SPOP_COUNT'; +import { BasicCommandParser } from '../client/parser'; + +describe('SPOP_COUNT', () => { + it('transformArguments', () => { + const parser = new BasicCommandParser(); + SPOP_COUNT.parseCommand(parser, 'key', 1); + assert.deepEqual( + parser.redisArgs, + ['SPOP', 'key', '1'] + ); + }); + + testUtils.testAll('sPopCount', async client => { + + assert.deepEqual( + await client.sPopCount('key', 1), + [] + ); + + await Promise.all([ + client.sAdd('key', 'member'), + client.sAdd('key', 'member2'), + client.sAdd('key', 'member3') + ]) + + assert.deepEqual( + (await client.sPopCount('key', 3)).length, + 3 + ); + + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/SPOP_COUNT.ts b/packages/client/lib/commands/SPOP_COUNT.ts new file mode 100644 index 00000000000..a285e6f5c48 --- /dev/null +++ b/packages/client/lib/commands/SPOP_COUNT.ts @@ -0,0 +1,20 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, Command, ArrayReply } from '../RESP/types'; + +export default { + IS_READ_ONLY: false, + /** + * Constructs the SPOP command to remove and return multiple random members from a set + * + * @param parser - The command parser + * @param key - The key of the set to pop from + * @param count - The number of members to pop + * @see https://redis.io/commands/spop/ + */ + parseCommand(parser: CommandParser, key: RedisArgument, count: number) { + parser.push('SPOP'); + parser.pushKey(key); + 
parser.push(count.toString()); + }, + transformReply: undefined as unknown as () => ArrayReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/SPUBLISH.spec.ts b/packages/client/lib/commands/SPUBLISH.spec.ts new file mode 100644 index 00000000000..5a53bc40b7d --- /dev/null +++ b/packages/client/lib/commands/SPUBLISH.spec.ts @@ -0,0 +1,25 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import SPUBLISH from './SPUBLISH'; +import { parseArgs } from './generic-transformers'; + +describe('SPUBLISH', () => { + testUtils.isVersionGreaterThanHook([7]); + + it('transformArguments', () => { + assert.deepEqual( + parseArgs(SPUBLISH, 'channel', 'message'), + ['SPUBLISH', 'channel', 'message'] + ); + }); + + testUtils.testAll('sPublish', async client => { + assert.equal( + await client.sPublish('channel', 'message'), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/SPUBLISH.ts b/packages/client/lib/commands/SPUBLISH.ts new file mode 100644 index 00000000000..6dd9f37e66b --- /dev/null +++ b/packages/client/lib/commands/SPUBLISH.ts @@ -0,0 +1,20 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; + +export default { + IS_READ_ONLY: true, + /** + * Constructs the SPUBLISH command to post a message to a Sharded Pub/Sub channel + * + * @param parser - The command parser + * @param channel - The channel to publish to + * @param message - The message to publish + * @see https://redis.io/commands/spublish/ + */ + parseCommand(parser: CommandParser, channel: RedisArgument, message: RedisArgument) { + parser.push('SPUBLISH'); + parser.pushKey(channel); + parser.push(message); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/SRANDMEMBER.spec.ts 
b/packages/client/lib/commands/SRANDMEMBER.spec.ts new file mode 100644 index 00000000000..637aac27b29 --- /dev/null +++ b/packages/client/lib/commands/SRANDMEMBER.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import SRANDMEMBER from './SRANDMEMBER'; +import { parseArgs } from './generic-transformers'; + +describe('SRANDMEMBER', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(SRANDMEMBER, 'key'), + ['SRANDMEMBER', 'key'] + ); + }); + + testUtils.testAll('sRandMember', async client => { + assert.equal( + await client.sRandMember('key'), + null + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/SRANDMEMBER.ts b/packages/client/lib/commands/SRANDMEMBER.ts new file mode 100644 index 00000000000..9e04e45b52a --- /dev/null +++ b/packages/client/lib/commands/SRANDMEMBER.ts @@ -0,0 +1,18 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, BlobStringReply, NullReply, Command } from '../RESP/types'; + +export default { + IS_READ_ONLY: true, + /** + * Constructs the SRANDMEMBER command to get a random member from a set + * + * @param parser - The command parser + * @param key - The key of the set to get random member from + * @see https://redis.io/commands/srandmember/ + */ + parseCommand(parser: CommandParser, key: RedisArgument) { + parser.push('SRANDMEMBER') + parser.pushKey(key); + }, + transformReply: undefined as unknown as () => BlobStringReply | NullReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/SRANDMEMBER_COUNT.spec.ts b/packages/client/lib/commands/SRANDMEMBER_COUNT.spec.ts new file mode 100644 index 00000000000..13bb0d52d96 --- /dev/null +++ b/packages/client/lib/commands/SRANDMEMBER_COUNT.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import 
SRANDMEMBER_COUNT from './SRANDMEMBER_COUNT'; +import { parseArgs } from './generic-transformers'; + +describe('SRANDMEMBER COUNT', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(SRANDMEMBER_COUNT, 'key', 1), + ['SRANDMEMBER', 'key', '1'] + ); + }); + + testUtils.testAll('sRandMemberCount', async client => { + assert.deepEqual( + await client.sRandMemberCount('key', 1), + [] + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/SRANDMEMBER_COUNT.ts b/packages/client/lib/commands/SRANDMEMBER_COUNT.ts new file mode 100644 index 00000000000..c7dd434b710 --- /dev/null +++ b/packages/client/lib/commands/SRANDMEMBER_COUNT.ts @@ -0,0 +1,20 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, ArrayReply, BlobStringReply, Command } from '../RESP/types'; +import SRANDMEMBER from './SRANDMEMBER'; + +export default { + IS_READ_ONLY: SRANDMEMBER.IS_READ_ONLY, + /** + * Constructs the SRANDMEMBER command to get multiple random members from a set + * + * @param parser - The command parser + * @param key - The key of the set to get random members from + * @param count - The number of members to return. 
If negative, may return the same member multiple times + * @see https://redis.io/commands/srandmember/ + */ + parseCommand(parser: CommandParser, key: RedisArgument, count: number) { + SRANDMEMBER.parseCommand(parser, key); + parser.push(count.toString()); + }, + transformReply: undefined as unknown as () => ArrayReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/SREM.spec.ts b/packages/client/lib/commands/SREM.spec.ts new file mode 100644 index 00000000000..6def4178fc8 --- /dev/null +++ b/packages/client/lib/commands/SREM.spec.ts @@ -0,0 +1,32 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import SREM from './SREM'; +import { parseArgs } from './generic-transformers'; + +describe('SREM', () => { + describe('transformArguments', () => { + it('string', () => { + assert.deepEqual( + parseArgs(SREM, 'key', 'member'), + ['SREM', 'key', 'member'] + ); + }); + + it('array', () => { + assert.deepEqual( + parseArgs(SREM, 'key', ['1', '2']), + ['SREM', 'key', '1', '2'] + ); + }); + }); + + testUtils.testAll('sRem', async client => { + assert.equal( + await client.sRem('key', 'member'), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/SREM.ts b/packages/client/lib/commands/SREM.ts new file mode 100644 index 00000000000..d97ed7774d8 --- /dev/null +++ b/packages/client/lib/commands/SREM.ts @@ -0,0 +1,22 @@ +import { CommandParser } from '../client/parser'; +import { NumberReply, Command, RedisArgument } from '../RESP/types'; +import { RedisVariadicArgument } from './generic-transformers'; + +export default { + IS_READ_ONLY: false, + /** + * Constructs the SREM command to remove one or more members from a set + * + * @param parser - The command parser + * @param key - The key of the set to remove members from + * @param members - One or more members to remove from the set + * @returns The number of members that 
were removed from the set + * @see https://redis.io/commands/srem/ + */ + parseCommand(parser: CommandParser, key: RedisArgument, members: RedisVariadicArgument) { + parser.push('SREM'); + parser.pushKey(key); + parser.pushVariadic(members); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/SSCAN.spec.ts b/packages/client/lib/commands/SSCAN.spec.ts new file mode 100644 index 00000000000..e5d689c6e98 --- /dev/null +++ b/packages/client/lib/commands/SSCAN.spec.ts @@ -0,0 +1,56 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import SSCAN from './SSCAN'; +import { parseArgs } from './generic-transformers'; + +describe('SSCAN', () => { + describe('transformArguments', () => { + it('cursor only', () => { + assert.deepEqual( + parseArgs(SSCAN, 'key', '0'), + ['SSCAN', 'key', '0'] + ); + }); + + it('with MATCH', () => { + assert.deepEqual( + parseArgs(SSCAN, 'key', '0', { + MATCH: 'pattern' + }), + ['SSCAN', 'key', '0', 'MATCH', 'pattern'] + ); + }); + + it('with COUNT', () => { + assert.deepEqual( + parseArgs(SSCAN, 'key', '0', { + COUNT: 1 + }), + ['SSCAN', 'key', '0', 'COUNT', '1'] + ); + }); + + it('with MATCH & COUNT', () => { + assert.deepEqual( + parseArgs(SSCAN, 'key', '0', { + MATCH: 'pattern', + COUNT: 1 + }), + ['SSCAN', 'key', '0', 'MATCH', 'pattern', 'COUNT', '1'] + ); + }); + }); + + testUtils.testAll('sScan', async client => { + assert.deepEqual( + await client.sScan('key', '0'), + { + cursor: '0', + members: [] + } + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/SSCAN.ts b/packages/client/lib/commands/SSCAN.ts new file mode 100644 index 00000000000..14e2c079ff0 --- /dev/null +++ b/packages/client/lib/commands/SSCAN.ts @@ -0,0 +1,40 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, BlobStringReply, Command }
from '../RESP/types'; +import { ScanCommonOptions, parseScanArguments} from './SCAN'; + +export default { + IS_READ_ONLY: true, + /** + * Constructs the SSCAN command to incrementally iterate over elements in a set + * + * @param parser - The command parser + * @param key - The key of the set to scan + * @param cursor - The cursor position to start scanning from + * @param options - Optional scanning parameters (COUNT and MATCH) + * @returns Iterator containing cursor position and matching members + * @see https://redis.io/commands/sscan/ + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + cursor: RedisArgument, + options?: ScanCommonOptions + ) { + parser.push('SSCAN'); + parser.pushKey(key); + parseScanArguments(parser, cursor, options); + }, + /** + * Transforms the SSCAN reply into a cursor result object + * + * @param cursor - The next cursor position + * @param members - Array of matching set members + * @returns Object containing cursor and members array + */ + transformReply([cursor, members]: [BlobStringReply, Array]) { + return { + cursor, + members + }; + } +} as const satisfies Command; diff --git a/packages/client/lib/commands/STRLEN.spec.ts b/packages/client/lib/commands/STRLEN.spec.ts new file mode 100644 index 00000000000..dbb7a08541b --- /dev/null +++ b/packages/client/lib/commands/STRLEN.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import STRLEN from './STRLEN'; +import { parseArgs } from './generic-transformers'; + +describe('STRLEN', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(STRLEN, 'key'), + ['STRLEN', 'key'] + ); + }); + + testUtils.testAll('strLen', async client => { + assert.equal( + await client.strLen('key'), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/STRLEN.ts b/packages/client/lib/commands/STRLEN.ts new file mode 100644 
index 00000000000..0f0e612422a --- /dev/null +++ b/packages/client/lib/commands/STRLEN.ts @@ -0,0 +1,20 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; + +export default { + CACHEABLE: true, + IS_READ_ONLY: true, + /** + * Constructs the STRLEN command to get the length of a string value + * + * @param parser - The command parser + * @param key - The key holding the string value + * @returns The length of the string value, or 0 when key does not exist + * @see https://redis.io/commands/strlen/ + */ + parseCommand(parser: CommandParser, key: RedisArgument) { + parser.push('STRLEN'); + parser.pushKey(key); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/SUNION.spec.ts b/packages/client/lib/commands/SUNION.spec.ts new file mode 100644 index 00000000000..a4389d4236e --- /dev/null +++ b/packages/client/lib/commands/SUNION.spec.ts @@ -0,0 +1,32 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import SUNION from './SUNION'; +import { parseArgs } from './generic-transformers'; + +describe('SUNION', () => { + describe('transformArguments', () => { + it('string', () => { + assert.deepEqual( + parseArgs(SUNION, 'key'), + ['SUNION', 'key'] + ); + }); + + it('array', () => { + assert.deepEqual( + parseArgs(SUNION, ['1', '2']), + ['SUNION', '1', '2'] + ); + }); + }); + + testUtils.testAll('sUnion', async client => { + assert.deepEqual( + await client.sUnion('key'), + [] + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/SUNION.ts b/packages/client/lib/commands/SUNION.ts new file mode 100644 index 00000000000..7acecd1d12a --- /dev/null +++ b/packages/client/lib/commands/SUNION.ts @@ -0,0 +1,21 @@ +import { CommandParser } from '../client/parser'; +import { ArrayReply, BlobStringReply, 
Command } from '../RESP/types'; +import { RedisVariadicArgument } from './generic-transformers'; + +export default { + CACHEABLE: true, + IS_READ_ONLY: true, + /** + * Constructs the SUNION command to return the members of the set resulting from the union of all the given sets + * + * @param parser - The command parser + * @param keys - One or more set keys to compute the union from + * @returns Array of all elements that are members of at least one of the given sets + * @see https://redis.io/commands/sunion/ + */ + parseCommand(parser: CommandParser, keys: RedisVariadicArgument) { + parser.push('SUNION'); + parser.pushKeys(keys); + }, + transformReply: undefined as unknown as () => ArrayReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/SUNIONSTORE.spec.ts b/packages/client/lib/commands/SUNIONSTORE.spec.ts new file mode 100644 index 00000000000..8f3db2cacd7 --- /dev/null +++ b/packages/client/lib/commands/SUNIONSTORE.spec.ts @@ -0,0 +1,32 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import SUNIONSTORE from './SUNIONSTORE'; +import { parseArgs } from './generic-transformers'; + +describe('SUNIONSTORE', () => { + describe('transformArguments', () => { + it('string', () => { + assert.deepEqual( + parseArgs(SUNIONSTORE, 'destination', 'key'), + ['SUNIONSTORE', 'destination', 'key'] + ); + }); + + it('array', () => { + assert.deepEqual( + parseArgs(SUNIONSTORE, 'destination', ['1', '2']), + ['SUNIONSTORE', 'destination', '1', '2'] + ); + }); + }); + + testUtils.testAll('sUnionStore', async client => { + assert.equal( + await client.sUnionStore('{tag}destination', '{tag}key'), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/SUNIONSTORE.ts b/packages/client/lib/commands/SUNIONSTORE.ts new file mode 100644 index 00000000000..0a877c9cb8d --- /dev/null +++ b/packages/client/lib/commands/SUNIONSTORE.ts @@ 
-0,0 +1,22 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; +import { RedisVariadicArgument } from './generic-transformers'; + +export default { + IS_READ_ONLY: false, + /** + * Constructs the SUNIONSTORE command to store the union of multiple sets into a destination set + * + * @param parser - The command parser + * @param destination - The destination key to store the resulting set + * @param keys - One or more source set keys to compute the union from + * @returns The number of elements in the resulting set + * @see https://redis.io/commands/sunionstore/ + */ + parseCommand(parser: CommandParser, destination: RedisArgument, keys: RedisVariadicArgument) { + parser.push('SUNIONSTORE'); + parser.pushKey(destination); + parser.pushKeys(keys); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/SWAPDB.spec.ts b/packages/client/lib/commands/SWAPDB.spec.ts new file mode 100644 index 00000000000..a3b53b27218 --- /dev/null +++ b/packages/client/lib/commands/SWAPDB.spec.ts @@ -0,0 +1,20 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import SWAPDB from './SWAPDB'; +import { parseArgs } from './generic-transformers'; + +describe('SWAPDB', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(SWAPDB, 0, 1), + ['SWAPDB', '0', '1'] + ); + }); + + testUtils.testWithClient('client.swapDb', async client => { + assert.equal( + await client.swapDb(0, 1), + 'OK' + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/SWAPDB.ts b/packages/client/lib/commands/SWAPDB.ts new file mode 100644 index 00000000000..66b19409a2b --- /dev/null +++ b/packages/client/lib/commands/SWAPDB.ts @@ -0,0 +1,18 @@ +import { CommandParser } from '../client/parser'; +import { SimpleStringReply, Command } from '../RESP/types'; + +export default { + 
NOT_KEYED_COMMAND: true, + IS_READ_ONLY: false, + /** + * Swaps the data of two Redis databases. + * @param parser - The Redis command parser. + * @param index1 - First database index. + * @param index2 - Second database index. + */ + parseCommand(parser: CommandParser, index1: number, index2: number) { + parser.push('SWAPDB', index1.toString(), index2.toString()); + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; + diff --git a/packages/client/lib/commands/TIME.spec.ts b/packages/client/lib/commands/TIME.spec.ts new file mode 100644 index 00000000000..4ee704f0dd0 --- /dev/null +++ b/packages/client/lib/commands/TIME.spec.ts @@ -0,0 +1,20 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import TIME from './TIME'; +import { parseArgs } from './generic-transformers'; + +describe('TIME', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(TIME), + ['TIME'] + ); + }); + + testUtils.testWithClient('client.time', async client => { + const reply = await client.time(); + assert.ok(Array.isArray(reply)); + assert.equal(typeof reply[0], 'string'); + assert.equal(typeof reply[1], 'string'); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/TIME.ts b/packages/client/lib/commands/TIME.ts new file mode 100644 index 00000000000..dc248d82069 --- /dev/null +++ b/packages/client/lib/commands/TIME.ts @@ -0,0 +1,21 @@ +import { CommandParser } from '../client/parser'; +import { BlobStringReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Constructs the TIME command to return the server's current time + * + * @param parser - The command parser + * @returns Array containing the Unix timestamp in seconds and microseconds + * @see https://redis.io/commands/time/ + */ + parseCommand(parser: CommandParser) { + parser.push('TIME'); + }, + transformReply: undefined as 
unknown as () => [ + unixTimestamp: BlobStringReply<`${number}`>, + microseconds: BlobStringReply<`${number}`> + ] +} as const satisfies Command; diff --git a/packages/client/lib/commands/TOUCH.spec.ts b/packages/client/lib/commands/TOUCH.spec.ts new file mode 100644 index 00000000000..69a3498346b --- /dev/null +++ b/packages/client/lib/commands/TOUCH.spec.ts @@ -0,0 +1,32 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import TOUCH from './TOUCH'; +import { parseArgs } from './generic-transformers'; + +describe('TOUCH', () => { + describe('transformArguments', () => { + it('string', () => { + assert.deepEqual( + parseArgs(TOUCH, 'key'), + ['TOUCH', 'key'] + ); + }); + + it('array', () => { + assert.deepEqual( + parseArgs(TOUCH, ['1', '2']), + ['TOUCH', '1', '2'] + ); + }); + }); + + testUtils.testAll('touch', async client => { + assert.equal( + await client.touch('key'), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/TOUCH.ts b/packages/client/lib/commands/TOUCH.ts new file mode 100644 index 00000000000..953a696111c --- /dev/null +++ b/packages/client/lib/commands/TOUCH.ts @@ -0,0 +1,20 @@ +import { CommandParser } from '../client/parser'; +import { NumberReply, Command } from '../RESP/types'; +import { RedisVariadicArgument } from './generic-transformers'; + +export default { + IS_READ_ONLY: false, + /** + * Constructs the TOUCH command to alter the last access time of keys + * + * @param parser - The command parser + * @param key - One or more keys to touch + * @returns The number of keys that were touched + * @see https://redis.io/commands/touch/ + */ + parseCommand(parser: CommandParser, key: RedisVariadicArgument) { + parser.push('TOUCH'); + parser.pushKeys(key); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/TTL.spec.ts 
b/packages/client/lib/commands/TTL.spec.ts new file mode 100644 index 00000000000..4d36053c02e --- /dev/null +++ b/packages/client/lib/commands/TTL.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import TTL from './TTL'; +import { parseArgs } from './generic-transformers'; + +describe('TTL', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(TTL, 'key'), + ['TTL', 'key'] + ); + }); + + testUtils.testAll('ttl', async client => { + assert.equal( + await client.ttl('key'), + -2 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/TTL.ts b/packages/client/lib/commands/TTL.ts new file mode 100644 index 00000000000..c3340eda32e --- /dev/null +++ b/packages/client/lib/commands/TTL.ts @@ -0,0 +1,19 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; + +export default { + IS_READ_ONLY: true, + /** + * Constructs the TTL command to get the remaining time to live of a key + * + * @param parser - The command parser + * @param key - Key to check + * @returns Time to live in seconds, -2 if key does not exist, -1 if has no timeout + * @see https://redis.io/commands/ttl/ + */ + parseCommand(parser: CommandParser, key: RedisArgument) { + parser.push('TTL'); + parser.pushKey(key); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/TYPE.spec.ts b/packages/client/lib/commands/TYPE.spec.ts new file mode 100644 index 00000000000..ae7392cdce9 --- /dev/null +++ b/packages/client/lib/commands/TYPE.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import TYPE from './TYPE'; +import { parseArgs } from './generic-transformers'; + +describe('TYPE', () => { + it('processCommand', () => { 
+ assert.deepEqual( + parseArgs(TYPE, 'key'), + ['TYPE', 'key'] + ); + }); + + testUtils.testAll('type', async client => { + assert.equal( + await client.type('key'), + 'none' + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/TYPE.ts b/packages/client/lib/commands/TYPE.ts new file mode 100644 index 00000000000..740aa08e94a --- /dev/null +++ b/packages/client/lib/commands/TYPE.ts @@ -0,0 +1,20 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, SimpleStringReply, Command } from '../RESP/types'; + +export default { + CACHEABLE: true, + IS_READ_ONLY: true, + /** + * Constructs the TYPE command to determine the data type stored at key + * + * @param parser - The command parser + * @param key - Key to check + * @returns String reply: "none", "string", "list", "set", "zset", "hash", "stream" + * @see https://redis.io/commands/type/ + */ + parseCommand(parser: CommandParser, key: RedisArgument) { + parser.push('TYPE'); + parser.pushKey(key); + }, + transformReply: undefined as unknown as () => SimpleStringReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/UNLINK.spec.ts b/packages/client/lib/commands/UNLINK.spec.ts new file mode 100644 index 00000000000..2c32bee8e33 --- /dev/null +++ b/packages/client/lib/commands/UNLINK.spec.ts @@ -0,0 +1,32 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import UNLINK from './UNLINK'; +import { parseArgs } from './generic-transformers'; + +describe('UNLINK', () => { + describe('transformArguments', () => { + it('string', () => { + assert.deepEqual( + parseArgs(UNLINK, 'key'), + ['UNLINK', 'key'] + ); + }); + + it('array', () => { + assert.deepEqual( + parseArgs(UNLINK, ['1', '2']), + ['UNLINK', '1', '2'] + ); + }); + }); + + testUtils.testAll('unlink', async client => { + assert.equal( + await client.unlink('key'), + 0 + ); + }, { + client: 
GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/UNLINK.ts b/packages/client/lib/commands/UNLINK.ts new file mode 100644 index 00000000000..4aa9cc315ab --- /dev/null +++ b/packages/client/lib/commands/UNLINK.ts @@ -0,0 +1,20 @@ +import { CommandParser } from '../client/parser'; +import { NumberReply, Command } from '../RESP/types'; +import { RedisVariadicArgument } from './generic-transformers'; + +export default { + IS_READ_ONLY: false, + /** + * Constructs the UNLINK command to asynchronously delete one or more keys + * + * @param parser - The command parser + * @param keys - One or more keys to unlink + * @returns The number of keys that were unlinked + * @see https://redis.io/commands/unlink/ + */ + parseCommand(parser: CommandParser, keys: RedisVariadicArgument) { + parser.push('UNLINK'); + parser.pushKeys(keys); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/VADD.spec.ts b/packages/client/lib/commands/VADD.spec.ts new file mode 100644 index 00000000000..e064beab498 --- /dev/null +++ b/packages/client/lib/commands/VADD.spec.ts @@ -0,0 +1,121 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import VADD from './VADD'; +import { BasicCommandParser } from '../client/parser'; + +describe('VADD', () => { + describe('parseCommand', () => { + it('basic usage', () => { + const parser = new BasicCommandParser(); + VADD.parseCommand(parser, 'key', [1.0, 2.0, 3.0], 'element'); + assert.deepEqual( + parser.redisArgs, + ['VADD', 'key', 'VALUES', '3', '1', '2', '3', 'element'] + ); + }); + + it('with REDUCE option', () => { + const parser = new BasicCommandParser(); + VADD.parseCommand(parser, 'key', [1.0, 2], 'element', { REDUCE: 50 }); + assert.deepEqual( + parser.redisArgs, + ['VADD', 'key', 'REDUCE', '50', 'VALUES', '2', '1', '2', 'element'] + ); + }); + + it('with 
quantization options', () => { + let parser = new BasicCommandParser(); + VADD.parseCommand(parser, 'key', [1.0, 2.0], 'element', { QUANT: 'Q8' }); + assert.deepEqual( + parser.redisArgs, + ['VADD', 'key', 'VALUES', '2', '1', '2', 'element', 'Q8'] + ); + + parser = new BasicCommandParser(); + VADD.parseCommand(parser, 'key', [1.0, 2.0], 'element', { QUANT: 'BIN' }); + assert.deepEqual( + parser.redisArgs, + ['VADD', 'key', 'VALUES', '2', '1', '2', 'element', 'BIN'] + ); + + parser = new BasicCommandParser(); + VADD.parseCommand(parser, 'key', [1.0, 2.0], 'element', { QUANT: 'NOQUANT' }); + assert.deepEqual( + parser.redisArgs, + ['VADD', 'key', 'VALUES', '2', '1', '2', 'element', 'NOQUANT'] + ); + }); + + it('with all options', () => { + const parser = new BasicCommandParser(); + VADD.parseCommand(parser, 'key', [1.0, 2.0], 'element', { + REDUCE: 50, + CAS: true, + QUANT: 'Q8', + EF: 200, + SETATTR: { name: 'test', value: 42 }, + M: 16 + }); + assert.deepEqual( + parser.redisArgs, + [ + 'VADD', 'key', 'REDUCE', '50', 'VALUES', '2', '1', '2', 'element', + 'CAS', 'Q8', 'EF', '200', 'SETATTR', '{"name":"test","value":42}', 'M', '16' + ] + ); + }); + }); + + testUtils.testAll('vAdd', async client => { + assert.equal( + await client.vAdd('key', [1.0, 2.0, 3.0], 'element'), + true + ); + + // same element should not be added again + assert.equal( + await client.vAdd('key', [1, 2 , 3], 'element'), + false + ); + + }, { + client: { ...GLOBAL.SERVERS.OPEN, minimumDockerVersion: [8, 0] }, + cluster: { ...GLOBAL.CLUSTERS.OPEN, minimumDockerVersion: [8, 0] }, + }); + + testUtils.testWithClient('vAdd with RESP3', async client => { + // Test basic functionality with RESP3 + assert.equal( + await client.vAdd('resp3-key', [1.5, 2.5, 3.5], 'resp3-element'), + true + ); + + // same element should not be added again + assert.equal( + await client.vAdd('resp3-key', [1, 2 , 3], 'resp3-element'), + false + ); + + // Test with options to ensure complex parameters work with RESP3 + 
assert.equal( + await client.vAdd('resp3-key', [4.0, 5.0, 6.0], 'resp3-element2', { + QUANT: 'Q8', + CAS: true, + SETATTR: { type: 'test', value: 123 } + }), + true + ); + + // Verify the vector set was created correctly + assert.equal( + await client.vCard('resp3-key'), + 2 + ); + }, { + ...GLOBAL.SERVERS.OPEN, + clientOptions: { + RESP: 3 + }, + minimumDockerVersion: [8, 0] + }); +}); diff --git a/packages/client/lib/commands/VADD.ts b/packages/client/lib/commands/VADD.ts new file mode 100644 index 00000000000..0406bd58d03 --- /dev/null +++ b/packages/client/lib/commands/VADD.ts @@ -0,0 +1,65 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, Command } from '../RESP/types'; +import { transformBooleanReply, transformDoubleArgument } from './generic-transformers'; + +export interface VAddOptions { + REDUCE?: number; + CAS?: boolean; + QUANT?: 'NOQUANT' | 'BIN' | 'Q8', + EF?: number; + SETATTR?: Record; + M?: number; +} + +export default { + /** + * Add a new element into the vector set specified by key + * + * @param parser - The command parser + * @param key - The name of the key that will hold the vector set data + * @param vector - The vector data as array of numbers + * @param element - The name of the element being added to the vector set + * @param options - Optional parameters for vector addition + * @see https://redis.io/commands/vadd/ + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + vector: Array, + element: RedisArgument, + options?: VAddOptions + ) { + parser.push('VADD'); + parser.pushKey(key); + + if (options?.REDUCE !== undefined) { + parser.push('REDUCE', options.REDUCE.toString()); + } + + parser.push('VALUES', vector.length.toString()); + for (const value of vector) { + parser.push(transformDoubleArgument(value)); + } + + parser.push(element); + + if (options?.CAS) { + parser.push('CAS'); + } + + options?.QUANT && parser.push(options.QUANT); + + if (options?.EF !== undefined) { + parser.push('EF', 
options.EF.toString()); + } + + if (options?.SETATTR) { + parser.push('SETATTR', JSON.stringify(options.SETATTR)); + } + + if (options?.M !== undefined) { + parser.push('M', options.M.toString()); + } + }, + transformReply: transformBooleanReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/VCARD.spec.ts b/packages/client/lib/commands/VCARD.spec.ts new file mode 100644 index 00000000000..feb9040fcb7 --- /dev/null +++ b/packages/client/lib/commands/VCARD.spec.ts @@ -0,0 +1,60 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import VCARD from './VCARD'; +import { BasicCommandParser } from '../client/parser'; + +describe('VCARD', () => { + it('parseCommand', () => { + const parser = new BasicCommandParser(); + VCARD.parseCommand(parser, 'key') + assert.deepEqual( + parser.redisArgs, + ['VCARD', 'key'] + ); + }); + + testUtils.testAll('vCard', async client => { + await client.vAdd('key', [1.0, 2.0, 3.0], 'element1'); + await client.vAdd('key', [4.0, 5.0, 6.0], 'element2'); + + assert.equal( + await client.vCard('key'), + 2 + ); + + assert.equal(await client.vCard('unknown'), 0); + }, { + client: { ...GLOBAL.SERVERS.OPEN, minimumDockerVersion: [8, 0] }, + cluster: { ...GLOBAL.CLUSTERS.OPEN, minimumDockerVersion: [8, 0] } + }); + + testUtils.testWithClient('vCard with RESP3', async client => { + // Test empty vector set + assert.equal( + await client.vCard('resp3-empty-key'), + 0 + ); + + // Add elements and test cardinality + await client.vAdd('resp3-key', [1.0, 2.0], 'elem1'); + assert.equal( + await client.vCard('resp3-key'), + 1 + ); + + await client.vAdd('resp3-key', [3.0, 4.0], 'elem2'); + await client.vAdd('resp3-key', [5.0, 6.0], 'elem3'); + assert.equal( + await client.vCard('resp3-key'), + 3 + ); + + assert.equal(await client.vCard('unknown'), 0); + }, { + ...GLOBAL.SERVERS.OPEN, + clientOptions: { + RESP: 3 + }, + minimumDockerVersion: [8, 0] + }); +}); diff --git 
a/packages/client/lib/commands/VCARD.ts b/packages/client/lib/commands/VCARD.ts new file mode 100644 index 00000000000..575abf9b710 --- /dev/null +++ b/packages/client/lib/commands/VCARD.ts @@ -0,0 +1,18 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; + +export default { + IS_READ_ONLY: true, + /** + * Retrieve the number of elements in a vector set + * + * @param parser - The command parser + * @param key - The key of the vector set + * @see https://redis.io/commands/vcard/ + */ + parseCommand(parser: CommandParser, key: RedisArgument) { + parser.push('VCARD'); + parser.pushKey(key); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/VDIM.spec.ts b/packages/client/lib/commands/VDIM.spec.ts new file mode 100644 index 00000000000..db3f5f3bd8f --- /dev/null +++ b/packages/client/lib/commands/VDIM.spec.ts @@ -0,0 +1,43 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import VDIM from './VDIM'; +import { BasicCommandParser } from '../client/parser'; + +describe('VDIM', () => { + it('parseCommand', () => { + const parser = new BasicCommandParser(); + VDIM.parseCommand(parser, 'key'); + assert.deepEqual( + parser.redisArgs, + ['VDIM', 'key'] + ); + }); + + testUtils.testAll('vDim', async client => { + await client.vAdd('key', [1.0, 2.0, 3.0], 'element'); + + assert.equal( + await client.vDim('key'), + 3 + ); + }, { + client: { ...GLOBAL.SERVERS.OPEN, minimumDockerVersion: [8, 0] }, + cluster: { ...GLOBAL.CLUSTERS.OPEN, minimumDockerVersion: [8, 0] } + }); + + testUtils.testWithClient('vDim with RESP3', async client => { + await client.vAdd('resp3-5d', [1.0, 2.0, 3.0, 4.0, 5.0], 'elem5d'); + + assert.equal( + await client.vDim('resp3-5d'), + 5 + ); + + }, { + ...GLOBAL.SERVERS.OPEN, + clientOptions: { + RESP: 3 + }, + minimumDockerVersion: [8, 0] + }); +}); 
diff --git a/packages/client/lib/commands/VDIM.ts b/packages/client/lib/commands/VDIM.ts new file mode 100644 index 00000000000..f7933e77eac --- /dev/null +++ b/packages/client/lib/commands/VDIM.ts @@ -0,0 +1,18 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; + +export default { + IS_READ_ONLY: true, + /** + * Retrieve the dimension of the vectors in a vector set + * + * @param parser - The command parser + * @param key - The key of the vector set + * @see https://redis.io/commands/vdim/ + */ + parseCommand(parser: CommandParser, key: RedisArgument) { + parser.push('VDIM'); + parser.pushKey(key); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/VEMB.spec.ts b/packages/client/lib/commands/VEMB.spec.ts new file mode 100644 index 00000000000..ed9515ebddf --- /dev/null +++ b/packages/client/lib/commands/VEMB.spec.ts @@ -0,0 +1,42 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import VEMB from './VEMB'; +import { BasicCommandParser } from '../client/parser'; + +describe('VEMB', () => { + it('parseCommand', () => { + const parser = new BasicCommandParser(); + VEMB.parseCommand(parser, 'key', 'element'); + assert.deepEqual( + parser.redisArgs, + ['VEMB', 'key', 'element'] + ); + }); + + testUtils.testAll('vEmb', async client => { + await client.vAdd('key', [1.0, 2.0, 3.0], 'element'); + + const result = await client.vEmb('key', 'element'); + assert.ok(Array.isArray(result)); + assert.equal(result.length, 3); + assert.equal(typeof result[0], 'number'); + }, { + client: { ...GLOBAL.SERVERS.OPEN, minimumDockerVersion: [8, 0] }, + cluster: { ...GLOBAL.CLUSTERS.OPEN, minimumDockerVersion: [8, 0] } + }); + + testUtils.testWithClient('vEmb with RESP3', async client => { + await client.vAdd('resp3-key', [1.5, 2.5, 3.5, 4.5], 'resp3-element'); + + const result = 
await client.vEmb('resp3-key', 'resp3-element'); + assert.ok(Array.isArray(result)); + assert.equal(result.length, 4); + assert.equal(typeof result[0], 'number'); + }, { + ...GLOBAL.SERVERS.OPEN, + clientOptions: { + RESP: 3 + }, + minimumDockerVersion: [8, 0] + }); +}); diff --git a/packages/client/lib/commands/VEMB.ts b/packages/client/lib/commands/VEMB.ts new file mode 100644 index 00000000000..d534c27d65d --- /dev/null +++ b/packages/client/lib/commands/VEMB.ts @@ -0,0 +1,21 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, Command } from '../RESP/types'; +import { transformDoubleArrayReply } from './generic-transformers'; + +export default { + IS_READ_ONLY: true, + /** + * Retrieve the approximate vector associated with a vector set element + * + * @param parser - The command parser + * @param key - The key of the vector set + * @param element - The name of the element to retrieve the vector for + * @see https://redis.io/commands/vemb/ + */ + parseCommand(parser: CommandParser, key: RedisArgument, element: RedisArgument) { + parser.push('VEMB'); + parser.pushKey(key); + parser.push(element); + }, + transformReply: transformDoubleArrayReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/VEMB_RAW.spec.ts b/packages/client/lib/commands/VEMB_RAW.spec.ts new file mode 100644 index 00000000000..33d3af8540d --- /dev/null +++ b/packages/client/lib/commands/VEMB_RAW.spec.ts @@ -0,0 +1,68 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import VEMB_RAW from './VEMB_RAW'; +import { BasicCommandParser } from '../client/parser'; + +describe('VEMB_RAW', () => { + it('parseCommand', () => { + const parser = new BasicCommandParser(); + VEMB_RAW.parseCommand(parser, 'key', 'element'); + assert.deepEqual( + parser.redisArgs, + ['VEMB', 'key', 'element', 'RAW'] + ); + }); + + testUtils.testAll('vEmbRaw', async client => { + await client.vAdd('key1', [1.0, 2.0, 3.0], 
'element'); + const result1 = await client.vEmbRaw('key1', 'element'); + assert.equal(result1.quantization, 'int8'); + assert.ok(result1.quantizationRange !== undefined); + + await client.vAdd('key2', [1.0, 2.0, 3.0], 'element', { QUANT: 'Q8' }); + const result2 = await client.vEmbRaw('key2', 'element'); + assert.equal(result2.quantization, 'int8'); + assert.ok(result2.quantizationRange !== undefined); + + await client.vAdd('key3', [1.0, 2.0, 3.0], 'element', { QUANT: 'NOQUANT' }); + const result3 = await client.vEmbRaw('key3', 'element'); + assert.equal(result3.quantization, 'f32'); + assert.equal(result3.quantizationRange, undefined); + + await client.vAdd('key4', [1.0, 2.0, 3.0], 'element', { QUANT: 'BIN' }); + const result4 = await client.vEmbRaw('key4', 'element'); + assert.equal(result4.quantization, 'bin'); + assert.equal(result4.quantizationRange, undefined); + }, { + client: { ...GLOBAL.SERVERS.OPEN, minimumDockerVersion: [8, 0] }, + cluster: { ...GLOBAL.CLUSTERS.OPEN, minimumDockerVersion: [8, 0] } + }); + + testUtils.testWithClient('vEmbRaw with RESP3', async client => { + await client.vAdd('key1', [1.0, 2.0, 3.0], 'element'); + const result1 = await client.vEmbRaw('key1', 'element'); + assert.equal(result1.quantization, 'int8'); + assert.ok(result1.quantizationRange !== undefined); + + await client.vAdd('key2', [1.0, 2.0, 3.0], 'element', { QUANT: 'Q8' }); + const result2 = await client.vEmbRaw('key2', 'element'); + assert.equal(result2.quantization, 'int8'); + assert.ok(result2.quantizationRange !== undefined); + + await client.vAdd('key3', [1.0, 2.0, 3.0], 'element', { QUANT: 'NOQUANT' }); + const result3 = await client.vEmbRaw('key3', 'element'); + assert.equal(result3.quantization, 'f32'); + assert.equal(result3.quantizationRange, undefined); + + await client.vAdd('key4', [1.0, 2.0, 3.0], 'element', { QUANT: 'BIN' }); + const result4 = await client.vEmbRaw('key4', 'element'); + assert.equal(result4.quantization, 'bin'); + 
assert.equal(result4.quantizationRange, undefined); + }, { + ...GLOBAL.SERVERS.OPEN, + clientOptions: { + RESP: 3 + }, + minimumDockerVersion: [8, 0] + }); +}); diff --git a/packages/client/lib/commands/VEMB_RAW.ts b/packages/client/lib/commands/VEMB_RAW.ts new file mode 100644 index 00000000000..b6881d321c9 --- /dev/null +++ b/packages/client/lib/commands/VEMB_RAW.ts @@ -0,0 +1,57 @@ +import { CommandParser } from '../client/parser'; +import { + RedisArgument, + Command, + BlobStringReply, + SimpleStringReply, + DoubleReply +} from '../RESP/types'; +import { transformDoubleReply } from './generic-transformers'; +import VEMB from './VEMB'; + +type RawVembReply = { + quantization: SimpleStringReply; + raw: BlobStringReply; + l2Norm: DoubleReply; + quantizationRange?: DoubleReply; +}; + +const transformRawVembReply = { + 2: (reply: any[]): RawVembReply => { + return { + quantization: reply[0], + raw: reply[1], + l2Norm: transformDoubleReply[2](reply[2]), + ...(reply[3] !== undefined && { quantizationRange: transformDoubleReply[2](reply[3]) }) + }; + }, + 3: (reply: any[]): RawVembReply => { + return { + quantization: reply[0], + raw: reply[1], + l2Norm: reply[2], + quantizationRange: reply[3] + }; + }, +}; + +export default { + IS_READ_ONLY: true, + /** + * Retrieve the RAW approximate vector associated with a vector set element + * + * @param parser - The command parser + * @param key - The key of the vector set + * @param element - The name of the element to retrieve the vector for + * @see https://redis.io/commands/vemb/ + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + element: RedisArgument + ) { + VEMB.parseCommand(parser, key, element); + parser.push('RAW'); + }, + transformReply: transformRawVembReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/VGETATTR.spec.ts b/packages/client/lib/commands/VGETATTR.spec.ts new file mode 100644 index 00000000000..d904146c670 --- /dev/null +++ 
b/packages/client/lib/commands/VGETATTR.spec.ts @@ -0,0 +1,77 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import VGETATTR from './VGETATTR'; +import { BasicCommandParser } from '../client/parser'; + +describe('VGETATTR', () => { + it('parseCommand', () => { + const parser = new BasicCommandParser(); + VGETATTR.parseCommand(parser, 'key', 'element'); + assert.deepEqual( + parser.redisArgs, + ['VGETATTR', 'key', 'element'] + ); + }); + + testUtils.testAll('vGetAttr', async client => { + await client.vAdd('key', [1.0, 2.0, 3.0], 'element'); + + const nullResult = await client.vGetAttr('key', 'element'); + assert.equal(nullResult, null); + + await client.vSetAttr('key', 'element', { name: 'test' }); + + const result = await client.vGetAttr('key', 'element'); + + assert.ok(result !== null); + assert.equal(typeof result, 'object') + + assert.deepEqual(result, { + name: 'test' + }) + + + }, { + client: { ...GLOBAL.SERVERS.OPEN, minimumDockerVersion: [8, 0] }, + cluster: { ...GLOBAL.CLUSTERS.OPEN, minimumDockerVersion: [8, 0] } + }); + + testUtils.testWithClient('vGetAttr with RESP3', async client => { + await client.vAdd('resp3-key', [1.0, 2.0], 'resp3-element'); + + // Test null case (no attributes set) + const nullResult = await client.vGetAttr('resp3-key', 'resp3-element'); + + assert.equal(nullResult, null); + + // Set complex attributes and retrieve them + const complexAttrs = { + name: 'test-item', + category: 'electronics', + price: 99.99, + inStock: true, + tags: ['new', 'featured'] + }; + await client.vSetAttr('resp3-key', 'resp3-element', complexAttrs); + + const result = await client.vGetAttr('resp3-key', 'resp3-element'); + + assert.ok(result !== null); + assert.equal(typeof result, 'object') + + assert.deepEqual(result, { + name: 'test-item', + category: 'electronics', + price: 99.99, + inStock: true, + tags: ['new', 'featured'] + }) + + }, { + ...GLOBAL.SERVERS.OPEN, + clientOptions: { + RESP: 3 + 
}, + minimumDockerVersion: [8, 0] + }); +}); diff --git a/packages/client/lib/commands/VGETATTR.ts b/packages/client/lib/commands/VGETATTR.ts new file mode 100644 index 00000000000..05ec8706fb1 --- /dev/null +++ b/packages/client/lib/commands/VGETATTR.ts @@ -0,0 +1,21 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, Command } from '../RESP/types'; +import { transformRedisJsonNullReply } from './generic-transformers'; + +export default { + IS_READ_ONLY: true, + /** + * Retrieve the attributes of a vector set element + * + * @param parser - The command parser + * @param key - The key of the vector set + * @param element - The name of the element to retrieve attributes for + * @see https://redis.io/commands/vgetattr/ + */ + parseCommand(parser: CommandParser, key: RedisArgument, element: RedisArgument) { + parser.push('VGETATTR'); + parser.pushKey(key); + parser.push(element); + }, + transformReply: transformRedisJsonNullReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/VINFO.spec.ts b/packages/client/lib/commands/VINFO.spec.ts new file mode 100644 index 00000000000..074598644ff --- /dev/null +++ b/packages/client/lib/commands/VINFO.spec.ts @@ -0,0 +1,58 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import VINFO from './VINFO'; +import { BasicCommandParser } from '../client/parser'; + +describe('VINFO', () => { + it('parseCommand', () => { + const parser = new BasicCommandParser(); + VINFO.parseCommand(parser, 'key'); + assert.deepEqual( + parser.redisArgs, + ['VINFO', 'key'] + ); + }); + + testUtils.testAll('vInfo', async client => { + await client.vAdd('key', [1.0, 2.0, 3.0], 'element'); + + const result = await client.vInfo('key'); + assert.ok(typeof result === 'object' && result !== null); + + assert.equal(result['vector-dim'], 3); + assert.equal(result['size'], 1); + assert.ok('quant-type' in result); + assert.ok('hnsw-m' in result); + 
assert.ok('projection-input-dim' in result); + assert.ok('max-level' in result); + assert.ok('attributes-count' in result); + assert.ok('vset-uid' in result); + assert.ok('hnsw-max-node-uid' in result); + }, { + client: { ...GLOBAL.SERVERS.OPEN, minimumDockerVersion: [8, 0] }, + cluster: { ...GLOBAL.CLUSTERS.OPEN, minimumDockerVersion: [8, 0] } + }); + + testUtils.testWithClient('vInfo with RESP3', async client => { + await client.vAdd('resp3-key', [1.0, 2.0, 3.0], 'resp3-element'); + + const result = await client.vInfo('resp3-key'); + assert.ok(typeof result === 'object' && result !== null); + + assert.equal(result['vector-dim'], 3); + assert.equal(result['size'], 1); + assert.ok('quant-type' in result); + assert.ok('hnsw-m' in result); + assert.ok('projection-input-dim' in result); + assert.ok('max-level' in result); + assert.ok('attributes-count' in result); + assert.ok('vset-uid' in result); + assert.ok('hnsw-max-node-uid' in result); + }, { + ...GLOBAL.SERVERS.OPEN, + clientOptions: { + RESP: 3 + }, + minimumDockerVersion: [8, 0] + }); +}); diff --git a/packages/client/lib/commands/VINFO.ts b/packages/client/lib/commands/VINFO.ts new file mode 100644 index 00000000000..4e0d68d7cb0 --- /dev/null +++ b/packages/client/lib/commands/VINFO.ts @@ -0,0 +1,38 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, Command, UnwrapReply, Resp2Reply, TuplesToMapReply, SimpleStringReply, NumberReply } from '../RESP/types'; + +export type VInfoReplyMap = TuplesToMapReply<[ + [SimpleStringReply<'quant-type'>, SimpleStringReply], + [SimpleStringReply<'vector-dim'>, NumberReply], + [SimpleStringReply<'size'>, NumberReply], + [SimpleStringReply<'max-level'>, NumberReply], + [SimpleStringReply<'vset-uid'>, NumberReply], + [SimpleStringReply<'hnsw-max-node-uid'>, NumberReply], +]>; + +export default { + IS_READ_ONLY: true, + /** + * Retrieve metadata and internal details about a vector set, including size, dimensions, quantization type, and graph structure 
+ * + * @param parser - The command parser + * @param key - The key of the vector set + * @see https://redis.io/commands/vinfo/ + */ + parseCommand(parser: CommandParser, key: RedisArgument) { + parser.push('VINFO'); + parser.pushKey(key); + }, + transformReply: { + 2: (reply: UnwrapReply>): VInfoReplyMap => { + const ret = Object.create(null); + + for (let i = 0; i < reply.length; i += 2) { + ret[reply[i].toString()] = reply[i + 1]; + } + + return ret as unknown as VInfoReplyMap; + }, + 3: undefined as unknown as () => VInfoReplyMap + } +} as const satisfies Command; diff --git a/packages/client/lib/commands/VLINKS.spec.ts b/packages/client/lib/commands/VLINKS.spec.ts new file mode 100644 index 00000000000..e788f9f9a98 --- /dev/null +++ b/packages/client/lib/commands/VLINKS.spec.ts @@ -0,0 +1,42 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import VLINKS from './VLINKS'; +import { BasicCommandParser } from '../client/parser'; + +describe('VLINKS', () => { + it('parseCommand', () => { + const parser = new BasicCommandParser(); + VLINKS.parseCommand(parser, 'key', 'element'); + assert.deepEqual( + parser.redisArgs, + ['VLINKS', 'key', 'element'] + ); + }); + + testUtils.testAll('vLinks', async client => { + await client.vAdd('key', [1.0, 2.0, 3.0], 'element1'); + await client.vAdd('key', [1.1, 2.1, 3.1], 'element2'); + + const result = await client.vLinks('key', 'element1'); + assert.ok(Array.isArray(result)); + assert.ok(result.length) + }, { + client: { ...GLOBAL.SERVERS.OPEN, minimumDockerVersion: [8, 0] }, + cluster: { ...GLOBAL.CLUSTERS.OPEN, minimumDockerVersion: [8, 0] } + }); + + testUtils.testWithClient('vLinks with RESP3', async client => { + await client.vAdd('resp3-key', [1.0, 2.0, 3.0], 'element1'); + await client.vAdd('resp3-key', [1.1, 2.1, 3.1], 'element2'); + + const result = await client.vLinks('resp3-key', 'element1'); + assert.ok(Array.isArray(result)); + assert.ok(result.length) + }, { + 
...GLOBAL.SERVERS.OPEN, + clientOptions: { + RESP: 3 + }, + minimumDockerVersion: [8, 0] + }); +}); diff --git a/packages/client/lib/commands/VLINKS.ts b/packages/client/lib/commands/VLINKS.ts new file mode 100644 index 00000000000..9e97fc7de9b --- /dev/null +++ b/packages/client/lib/commands/VLINKS.ts @@ -0,0 +1,20 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, ArrayReply, BlobStringReply, Command } from '../RESP/types'; + +export default { + IS_READ_ONLY: true, + /** + * Retrieve the neighbors of a specified element in a vector set; the connections for each layer of the HNSW graph + * + * @param parser - The command parser + * @param key - The key of the vector set + * @param element - The name of the element to retrieve neighbors for + * @see https://redis.io/commands/vlinks/ + */ + parseCommand(parser: CommandParser, key: RedisArgument, element: RedisArgument) { + parser.push('VLINKS'); + parser.pushKey(key); + parser.push(element); + }, + transformReply: undefined as unknown as () => ArrayReply> +} as const satisfies Command; diff --git a/packages/client/lib/commands/VLINKS_WITHSCORES.spec.ts b/packages/client/lib/commands/VLINKS_WITHSCORES.spec.ts new file mode 100644 index 00000000000..db96bd1a8af --- /dev/null +++ b/packages/client/lib/commands/VLINKS_WITHSCORES.spec.ts @@ -0,0 +1,75 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import VLINKS_WITHSCORES from './VLINKS_WITHSCORES'; +import { BasicCommandParser } from '../client/parser'; + +describe('VLINKS WITHSCORES', () => { + it('parseCommand', () => { + const parser = new BasicCommandParser(); + VLINKS_WITHSCORES.parseCommand(parser, 'key', 'element'); + assert.deepEqual(parser.redisArgs, [ + 'VLINKS', + 'key', + 'element', + 'WITHSCORES' + ]); + }); + + testUtils.testAll( + 'vLinksWithScores', + async client => { + // Create a vector set with multiple elements to build HNSW graph layers + await client.vAdd('key', 
[1.0, 2.0, 3.0], 'element1'); + await client.vAdd('key', [1.1, 2.1, 3.1], 'element2'); + await client.vAdd('key', [1.2, 2.2, 3.2], 'element3'); + await client.vAdd('key', [2.0, 3.0, 4.0], 'element4'); + + const result = await client.vLinksWithScores('key', 'element1'); + + assert.ok(Array.isArray(result)); + + for (const layer of result) { + assert.equal( + typeof layer, + 'object' + ); + } + + assert.ok(result.length >= 1, 'Should have at least layer 0'); + }, + { + client: { ...GLOBAL.SERVERS.OPEN, minimumDockerVersion: [8, 0] }, + cluster: { ...GLOBAL.CLUSTERS.OPEN, minimumDockerVersion: [8, 0] } + } + ); + + testUtils.testWithClient( + 'vLinksWithScores with RESP3', + async client => { + await client.vAdd('resp3-key', [1.0, 2.0, 3.0], 'element1'); + await client.vAdd('resp3-key', [1.1, 2.1, 3.1], 'element2'); + await client.vAdd('resp3-key', [1.2, 2.2, 3.2], 'element3'); + await client.vAdd('resp3-key', [2.0, 3.0, 4.0], 'element4'); + + const result = await client.vLinksWithScores('resp3-key', 'element1'); + + assert.ok(Array.isArray(result)); + + for (const layer of result) { + assert.equal( + typeof layer, + 'object' + ); + } + + assert.ok(result.length >= 1, 'Should have at least layer 0'); + }, + { + ...GLOBAL.SERVERS.OPEN, + clientOptions: { + RESP: 3 + }, + minimumDockerVersion: [8, 0] + } + ); +}); diff --git a/packages/client/lib/commands/VLINKS_WITHSCORES.ts b/packages/client/lib/commands/VLINKS_WITHSCORES.ts new file mode 100644 index 00000000000..10ebe160fcd --- /dev/null +++ b/packages/client/lib/commands/VLINKS_WITHSCORES.ts @@ -0,0 +1,42 @@ +import { BlobStringReply, Command, DoubleReply, MapReply } from '../RESP/types'; +import { transformDoubleReply } from './generic-transformers'; +import VLINKS from './VLINKS'; + + +function transformVLinksWithScoresReply(reply: any): Array> { + const layers: Array> = []; + + for (const layer of reply) { + const obj: Record = Object.create(null); + + // Each layer contains alternating element names and scores 
+ for (let i = 0; i < layer.length; i += 2) { + const element = layer[i]; + const score = transformDoubleReply[2](layer[i + 1]); + obj[element.toString()] = score; + } + + layers.push(obj); + } + + return layers; +} + +export default { + IS_READ_ONLY: VLINKS.IS_READ_ONLY, + /** + * Get the connections for each layer of the HNSW graph with similarity scores + * @param args - Same parameters as the VLINKS command + * @see https://redis.io/commands/vlinks/ + */ + parseCommand(...args: Parameters) { + const parser = args[0]; + + VLINKS.parseCommand(...args); + parser.push('WITHSCORES'); + }, + transformReply: { + 2: transformVLinksWithScoresReply, + 3: undefined as unknown as () => Array> + } +} as const satisfies Command; diff --git a/packages/client/lib/commands/VRANDMEMBER.spec.ts b/packages/client/lib/commands/VRANDMEMBER.spec.ts new file mode 100644 index 00000000000..28c020e3563 --- /dev/null +++ b/packages/client/lib/commands/VRANDMEMBER.spec.ts @@ -0,0 +1,201 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import VRANDMEMBER from './VRANDMEMBER'; +import { BasicCommandParser } from '../client/parser'; + +describe('VRANDMEMBER', () => { + describe('parseCommand', () => { + it('without count', () => { + const parser = new BasicCommandParser(); + VRANDMEMBER.parseCommand(parser, 'key'); + assert.deepEqual( + parser.redisArgs, + ['VRANDMEMBER', 'key'] + ); + }); + + it('with count', () => { + const parser = new BasicCommandParser(); + VRANDMEMBER.parseCommand(parser, 'key', 2); + assert.deepEqual( + parser.redisArgs, + ['VRANDMEMBER', 'key', '2'] + ); + }); + }); + + describe('RESP2 tests', () => { + testUtils.testAll('vRandMember without count - returns single element as string', async client => { + await client.vAdd('key', [1.0, 2.0, 3.0], 'element1'); + await client.vAdd('key', [4.0, 5.0, 6.0], 'element2'); + await client.vAdd('key', [7.0, 8.0, 9.0], 'element3'); + + const result = await 
client.vRandMember('key'); + assert.equal(typeof result, 'string'); + assert.ok(['element1', 'element2', 'element3'].includes(result as string)); + }, { + client: { ...GLOBAL.SERVERS.OPEN, minimumDockerVersion: [8, 0] }, + cluster: { ...GLOBAL.CLUSTERS.OPEN, minimumDockerVersion: [8, 0] } + }); + + testUtils.testAll('vRandMember with positive count - returns distinct elements', async client => { + await client.vAdd('key', [1.0, 2.0, 3.0], 'element1'); + await client.vAdd('key', [4.0, 5.0, 6.0], 'element2'); + await client.vAdd('key', [7.0, 8.0, 9.0], 'element3'); + + const result = await client.vRandMember('key', 2); + assert.ok(Array.isArray(result)); + assert.equal(result.length, 2); + + }, { + client: { ...GLOBAL.SERVERS.OPEN, minimumDockerVersion: [8, 0] }, + cluster: { ...GLOBAL.CLUSTERS.OPEN, minimumDockerVersion: [8, 0] } + }); + + testUtils.testAll('vRandMember with negative count - allows duplicates', async client => { + await client.vAdd('key', [1.0, 2.0, 3.0], 'element1'); + await client.vAdd('key', [4.0, 5.0, 6.0], 'element2'); + + const result = await client.vRandMember('key', -5); + assert.ok(Array.isArray(result)); + assert.equal(result.length, 5); + + // All elements should be from our set (duplicates allowed) + result.forEach(element => { + assert.ok(['element1', 'element2'].includes(element)); + }); + }, { + client: { ...GLOBAL.SERVERS.OPEN, minimumDockerVersion: [8, 0] }, + cluster: { ...GLOBAL.CLUSTERS.OPEN, minimumDockerVersion: [8, 0] } + }); + + testUtils.testAll('vRandMember count exceeds set size - returns entire set', async client => { + await client.vAdd('key', [1.0, 2.0, 3.0], 'element1'); + await client.vAdd('key', [4.0, 5.0, 6.0], 'element2'); + + const result = await client.vRandMember('key', 10); + assert.ok(Array.isArray(result)); + assert.equal(result.length, 2); // Only 2 elements exist + + // Should contain both elements + assert.ok(result.includes('element1')); + assert.ok(result.includes('element2')); + }, { + client: { 
...GLOBAL.SERVERS.OPEN, minimumDockerVersion: [8, 0] }, + cluster: { ...GLOBAL.CLUSTERS.OPEN, minimumDockerVersion: [8, 0] } + }); + + testUtils.testAll('vRandMember on non-existent key', async client => { + // Without count - should return null + const resultNoCount = await client.vRandMember('nonexistent'); + assert.equal(resultNoCount, null); + + // With count - should return empty array + const resultWithCount = await client.vRandMember('nonexistent', 5); + assert.ok(Array.isArray(resultWithCount)); + assert.equal(resultWithCount.length, 0); + }, { + client: { ...GLOBAL.SERVERS.OPEN, minimumDockerVersion: [8, 0] }, + cluster: { ...GLOBAL.CLUSTERS.OPEN, minimumDockerVersion: [8, 0] } + }); + }); + + describe('RESP3 tests', () => { + testUtils.testWithClient('vRandMember without count - returns single element as string', async client => { + await client.vAdd('resp3-key', [1.0, 2.0, 3.0], 'element1'); + await client.vAdd('resp3-key', [4.0, 5.0, 6.0], 'element2'); + await client.vAdd('resp3-key', [7.0, 8.0, 9.0], 'element3'); + + const result = await client.vRandMember('resp3-key'); + assert.equal(typeof result, 'string'); + assert.ok(['element1', 'element2', 'element3'].includes(result as string)); + }, { + ...GLOBAL.SERVERS.OPEN, + clientOptions: { + RESP: 3 + }, + minimumDockerVersion: [8, 0] + }); + + testUtils.testWithClient('vRandMember with positive count - returns distinct elements', async client => { + await client.vAdd('resp3-key', [1.0, 2.0, 3.0], 'element1'); + await client.vAdd('resp3-key', [4.0, 5.0, 6.0], 'element2'); + await client.vAdd('resp3-key', [7.0, 8.0, 9.0], 'element3'); + + const result = await client.vRandMember('resp3-key', 2); + assert.ok(Array.isArray(result)); + assert.equal(result.length, 2); + + // Should be distinct elements (no duplicates) + const uniqueElements = new Set(result); + assert.equal(uniqueElements.size, 2); + + // All elements should be from our set + result.forEach(element => { + assert.ok(['element1', 'element2', 
'element3'].includes(element)); + }); + }, { + ...GLOBAL.SERVERS.OPEN, + clientOptions: { + RESP: 3 + }, + minimumDockerVersion: [8, 0] + }); + + testUtils.testWithClient('vRandMember with negative count - allows duplicates', async client => { + await client.vAdd('resp3-key', [1.0, 2.0, 3.0], 'element1'); + await client.vAdd('resp3-key', [4.0, 5.0, 6.0], 'element2'); + + const result = await client.vRandMember('resp3-key', -5); + assert.ok(Array.isArray(result)); + assert.equal(result.length, 5); + + // All elements should be from our set (duplicates allowed) + result.forEach(element => { + assert.ok(['element1', 'element2'].includes(element)); + }); + }, { + ...GLOBAL.SERVERS.OPEN, + clientOptions: { + RESP: 3 + }, + minimumDockerVersion: [8, 0] + }); + + testUtils.testWithClient('vRandMember count exceeds set size - returns entire set', async client => { + await client.vAdd('resp3-key', [1.0, 2.0, 3.0], 'element1'); + await client.vAdd('resp3-key', [4.0, 5.0, 6.0], 'element2'); + + const result = await client.vRandMember('resp3-key', 10); + assert.ok(Array.isArray(result)); + assert.equal(result.length, 2); // Only 2 elements exist + + // Should contain both elements + assert.ok(result.includes('element1')); + assert.ok(result.includes('element2')); + }, { + ...GLOBAL.SERVERS.OPEN, + clientOptions: { + RESP: 3 + }, + minimumDockerVersion: [8, 0] + }); + + testUtils.testWithClient('vRandMember on non-existent key', async client => { + // Without count - should return null + const resultNoCount = await client.vRandMember('resp3-nonexistent'); + assert.equal(resultNoCount, null); + + // With count - should return empty array + const resultWithCount = await client.vRandMember('resp3-nonexistent', 5); + assert.ok(Array.isArray(resultWithCount)); + assert.equal(resultWithCount.length, 0); + }, { + ...GLOBAL.SERVERS.OPEN, + clientOptions: { + RESP: 3 + }, + minimumDockerVersion: [8, 0] + }); + }); +}); diff --git a/packages/client/lib/commands/VRANDMEMBER.ts 
b/packages/client/lib/commands/VRANDMEMBER.ts new file mode 100644 index 00000000000..299af33b9fa --- /dev/null +++ b/packages/client/lib/commands/VRANDMEMBER.ts @@ -0,0 +1,23 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, BlobStringReply, ArrayReply, Command, NullReply } from '../RESP/types'; + +export default { + IS_READ_ONLY: true, + /** + * Retrieve random elements of a vector set + * + * @param parser - The command parser + * @param key - The key of the vector set + * @param count - Optional number of elements to return + * @see https://redis.io/commands/vrandmember/ + */ + parseCommand(parser: CommandParser, key: RedisArgument, count?: number) { + parser.push('VRANDMEMBER'); + parser.pushKey(key); + + if (count !== undefined) { + parser.push(count.toString()); + } + }, + transformReply: undefined as unknown as () => BlobStringReply | ArrayReply | NullReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/VREM.spec.ts b/packages/client/lib/commands/VREM.spec.ts new file mode 100644 index 00000000000..9e558c991c1 --- /dev/null +++ b/packages/client/lib/commands/VREM.spec.ts @@ -0,0 +1,63 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import VREM from './VREM'; +import { BasicCommandParser } from '../client/parser'; + +describe('VREM', () => { + const parser = new BasicCommandParser(); + VREM.parseCommand(parser, 'key', 'element'); + it('parseCommand', () => { + assert.deepEqual( + parser.redisArgs, + ['VREM', 'key', 'element'] + ); + }); + + testUtils.testAll('vRem', async client => { + await client.vAdd('key', [1.0, 2.0, 3.0], 'element'); + + assert.equal( + await client.vRem('key', 'element'), + true + ); + + assert.equal( + await client.vRem('key', 'element'), + false + ); + + assert.equal( + await client.vCard('key'), + 0 + ); + }, { + client: { ...GLOBAL.SERVERS.OPEN, minimumDockerVersion: [8, 0] }, + cluster: { ...GLOBAL.CLUSTERS.OPEN, 
minimumDockerVersion: [8, 0] } + }); + + testUtils.testWithClient('vRem with RESP3', async client => { + await client.vAdd('resp3-key', [1.0, 2.0, 3.0], 'resp3-element'); + + assert.equal( + await client.vRem('resp3-key', 'resp3-element'), + true + ); + + assert.equal( + await client.vRem('resp3-key', 'resp3-element'), + false + ); + + + assert.equal( + await client.vCard('resp3-key'), + 0 + ); + }, { + ...GLOBAL.SERVERS.OPEN, + clientOptions: { + RESP: 3 + }, + minimumDockerVersion: [8, 0] + }); +}); diff --git a/packages/client/lib/commands/VREM.ts b/packages/client/lib/commands/VREM.ts new file mode 100644 index 00000000000..7eb22b2e2ec --- /dev/null +++ b/packages/client/lib/commands/VREM.ts @@ -0,0 +1,20 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, Command } from '../RESP/types'; +import { transformBooleanReply } from './generic-transformers'; + +export default { + /** + * Remove an element from a vector set + * + * @param parser - The command parser + * @param key - The key of the vector set + * @param element - The name of the element to remove from the vector set + * @see https://redis.io/commands/vrem/ + */ + parseCommand(parser: CommandParser, key: RedisArgument, element: RedisArgument) { + parser.push('VREM'); + parser.pushKey(key); + parser.push(element); + }, + transformReply: transformBooleanReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/VSETATTR.spec.ts b/packages/client/lib/commands/VSETATTR.spec.ts new file mode 100644 index 00000000000..303006d4081 --- /dev/null +++ b/packages/client/lib/commands/VSETATTR.spec.ts @@ -0,0 +1,58 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import VSETATTR from './VSETATTR'; +import { BasicCommandParser } from '../client/parser'; + +describe('VSETATTR', () => { + describe('parseCommand', () => { + it('with object', () => { + const parser = new BasicCommandParser(); + 
VSETATTR.parseCommand(parser, 'key', 'element', { name: 'test', value: 42 }); + assert.deepEqual( + parser.redisArgs, + ['VSETATTR', 'key', 'element', '{"name":"test","value":42}'] + ); + }); + + it('with string', () => { + const parser = new BasicCommandParser(); + VSETATTR.parseCommand(parser, 'key', 'element', '{"name":"test"}'); + assert.deepEqual( + parser.redisArgs, + ['VSETATTR', 'key', 'element', '{"name":"test"}'] + ); + }); + }); + + testUtils.testAll('vSetAttr', async client => { + await client.vAdd('key', [1.0, 2.0, 3.0], 'element'); + + assert.equal( + await client.vSetAttr('key', 'element', { name: 'test', value: 42 }), + true + ); + }, { + client: { ...GLOBAL.SERVERS.OPEN, minimumDockerVersion: [8, 0] }, + cluster: { ...GLOBAL.CLUSTERS.OPEN, minimumDockerVersion: [8, 0] } + }); + + testUtils.testWithClient('vSetAttr with RESP3 - returns boolean', async client => { + await client.vAdd('resp3-key', [1.0, 2.0, 3.0], 'resp3-element'); + + const result = await client.vSetAttr('resp3-key', 'resp3-element', { + name: 'test-item', + category: 'electronics', + price: 99.99 + }); + + // RESP3 returns boolean instead of number + assert.equal(typeof result, 'boolean'); + assert.equal(result, true); + }, { + ...GLOBAL.SERVERS.OPEN, + clientOptions: { + RESP: 3 + }, + minimumDockerVersion: [8, 0] + }); +}); diff --git a/packages/client/lib/commands/VSETATTR.ts b/packages/client/lib/commands/VSETATTR.ts new file mode 100644 index 00000000000..084b8f8008e --- /dev/null +++ b/packages/client/lib/commands/VSETATTR.ts @@ -0,0 +1,32 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, Command } from '../RESP/types'; +import { transformBooleanReply } from './generic-transformers'; + +export default { + /** + * Set or replace attributes on a vector set element + * + * @param parser - The command parser + * @param key - The key of the vector set + * @param element - The name of the element to set attributes for + * @param attributes - The 
attributes to set (as JSON string or object) + * @see https://redis.io/commands/vsetattr/ + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + element: RedisArgument, + attributes: RedisArgument | Record<string, unknown> + ) { + parser.push('VSETATTR'); + parser.pushKey(key); + parser.push(element); + + if (typeof attributes === 'object' && attributes !== null) { + parser.push(JSON.stringify(attributes)); + } else { + parser.push(attributes); + } + }, + transformReply: transformBooleanReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/VSIM.spec.ts b/packages/client/lib/commands/VSIM.spec.ts new file mode 100644 index 00000000000..dbfad76fd59 --- /dev/null +++ b/packages/client/lib/commands/VSIM.spec.ts @@ -0,0 +1,107 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import VSIM from './VSIM'; +import { BasicCommandParser } from '../client/parser'; + +describe('VSIM', () => { + describe('parseCommand', () => { + it('with vector', () => { + const parser = new BasicCommandParser(); + VSIM.parseCommand(parser, 'key', [1.0, 2.0, 3.0]); + assert.deepEqual( + parser.redisArgs, + ['VSIM', 'key', 'VALUES', '3', '1', '2', '3'] + ); + }); + + it('with element', () => { + const parser = new BasicCommandParser(); + VSIM.parseCommand(parser, 'key', 'element'); + assert.deepEqual( + parser.redisArgs, + ['VSIM', 'key', 'ELE', 'element'] + ); + }); + + it('with options', () => { + const parser = new BasicCommandParser(); + VSIM.parseCommand(parser, 'key', 'element', { + COUNT: 5, + EF: 100, + FILTER: '.price > 20', + 'FILTER-EF': 50, + TRUTH: true, + NOTHREAD: true, + EPSILON: 0.1 + }); + assert.deepEqual( + parser.redisArgs, + [ + 'VSIM', 'key', 'ELE', 'element', 'COUNT', '5', + 'EPSILON', '0.1', 'EF', '100', 'FILTER', '.price > 20', + 'FILTER-EF', '50', 'TRUTH', 'NOTHREAD', + ] + ); + }); + }); + + testUtils.testAll('vSim', async client => { + await client.vAdd('key', [1.0, 2.0, 3.0], 'element1'); 
+ await client.vAdd('key', [1.1, 2.1, 3.1], 'element2'); + + const result = await client.vSim('key', 'element1'); + assert.ok(Array.isArray(result)); + assert.ok(result.includes('element1')); + }, { + client: { ...GLOBAL.SERVERS.OPEN, minimumDockerVersion: [8, 0] }, + cluster: { ...GLOBAL.CLUSTERS.OPEN, minimumDockerVersion: [8, 0] } + }); + + + testUtils.testAll('vSim with options', async client => { + await client.vAdd('key', [1.0, 2.0, 3.0], 'element1'); + await client.vAdd('key', [1.1, 2.1, 3.1], 'element2'); + + const result = await client.vSim('key', 'element1', { + EPSILON: 0.1, + COUNT: 1, + EF: 100, + FILTER: '.year == 8', + 'FILTER-EF': 50, + TRUTH: true, + NOTHREAD: true + }); + + assert.ok(Array.isArray(result)); + }, { + client: { ...GLOBAL.SERVERS.OPEN, minimumDockerVersion: [8, 0] }, + cluster: { ...GLOBAL.CLUSTERS.OPEN, minimumDockerVersion: [8, 0] } + }); + + testUtils.testWithClient('vSim with RESP3', async client => { + await client.vAdd('resp3-key', [1.0, 2.0, 3.0], 'element1'); + await client.vAdd('resp3-key', [1.1, 2.1, 3.1], 'element2'); + await client.vAdd('resp3-key', [2.0, 3.0, 4.0], 'element3'); + + // Test similarity search with vector + const resultWithVector = await client.vSim('resp3-key', [1.05, 2.05, 3.05]); + assert.ok(Array.isArray(resultWithVector)); + assert.ok(resultWithVector.length > 0); + + // Test similarity search with element + const resultWithElement = await client.vSim('resp3-key', 'element1'); + assert.ok(Array.isArray(resultWithElement)); + assert.ok(resultWithElement.includes('element1')); + + // Test with options + const resultWithOptions = await client.vSim('resp3-key', 'element1', { COUNT: 2 }); + assert.ok(Array.isArray(resultWithOptions)); + assert.ok(resultWithOptions.length <= 2); + }, { + ...GLOBAL.SERVERS.OPEN, + clientOptions: { + RESP: 3 + }, + minimumDockerVersion: [8, 0] + }); +}); diff --git a/packages/client/lib/commands/VSIM.ts b/packages/client/lib/commands/VSIM.ts new file mode 100644 index 
00000000000..7c94cd7c79d --- /dev/null +++ b/packages/client/lib/commands/VSIM.ts @@ -0,0 +1,73 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, ArrayReply, BlobStringReply, Command } from '../RESP/types'; +import { transformDoubleArgument } from './generic-transformers'; + +export interface VSimOptions { + COUNT?: number; + EPSILON?: number; + EF?: number; + FILTER?: string; + 'FILTER-EF'?: number; + TRUTH?: boolean; + NOTHREAD?: boolean; +} + +export default { + IS_READ_ONLY: true, + /** + * Retrieve elements similar to a given vector or element with optional filtering + * + * @param parser - The command parser + * @param key - The key of the vector set + * @param query - The query vector (array of numbers) or element name (string) + * @param options - Optional parameters for similarity search + * @see https://redis.io/commands/vsim/ + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + query: RedisArgument | Array<number>, + options?: VSimOptions + ) { + parser.push('VSIM'); + parser.pushKey(key); + + if (Array.isArray(query)) { + parser.push('VALUES', query.length.toString()); + for (const value of query) { + parser.push(transformDoubleArgument(value)); + } + } else { + parser.push('ELE', query); + } + + if (options?.COUNT !== undefined) { + parser.push('COUNT', options.COUNT.toString()); + } + + if (options?.EPSILON !== undefined) { + parser.push('EPSILON', options.EPSILON.toString()); + } + + if (options?.EF !== undefined) { + parser.push('EF', options.EF.toString()); + } + + if (options?.FILTER) { + parser.push('FILTER', options.FILTER); + } + + if (options?.['FILTER-EF'] !== undefined) { + parser.push('FILTER-EF', options['FILTER-EF'].toString()); + } + + if (options?.TRUTH) { + parser.push('TRUTH'); + } + + if (options?.NOTHREAD) { + parser.push('NOTHREAD'); + } + }, + transformReply: undefined as unknown as () => ArrayReply<BlobStringReply> +} as const satisfies Command; diff --git 
a/packages/client/lib/commands/VSIM_WITHSCORES.spec.ts b/packages/client/lib/commands/VSIM_WITHSCORES.spec.ts new file mode 100644 index 00000000000..ff9bc41376f --- /dev/null +++ b/packages/client/lib/commands/VSIM_WITHSCORES.spec.ts @@ -0,0 +1,62 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import VSIM_WITHSCORES from './VSIM_WITHSCORES'; +import { BasicCommandParser } from '../client/parser'; + +describe('VSIM WITHSCORES', () => { + it('parseCommand', () => { + const parser = new BasicCommandParser(); + VSIM_WITHSCORES.parseCommand(parser, 'key', 'element') + assert.deepEqual(parser.redisArgs, [ + 'VSIM', + 'key', + 'ELE', + 'element', + 'WITHSCORES' + ]); + }); + + testUtils.testAll( + 'vSimWithScores', + async client => { + await client.vAdd('key', [1.0, 2.0, 3.0], 'element1'); + await client.vAdd('key', [1.1, 2.1, 3.1], 'element2'); + + const result = await client.vSimWithScores('key', 'element1'); + + assert.ok(typeof result === 'object'); + assert.ok('element1' in result); + assert.ok('element2' in result); + assert.equal(typeof result['element1'], 'number'); + assert.equal(typeof result['element2'], 'number'); + }, + { + client: { ...GLOBAL.SERVERS.OPEN, minimumDockerVersion: [8, 0] }, + cluster: { ...GLOBAL.CLUSTERS.OPEN, minimumDockerVersion: [8, 0] } + } + ); + + testUtils.testWithClient( + 'vSimWithScores with RESP3 - returns Map with scores', + async client => { + await client.vAdd('resp3-key', [1.0, 2.0, 3.0], 'element1'); + await client.vAdd('resp3-key', [1.1, 2.1, 3.1], 'element2'); + await client.vAdd('resp3-key', [2.0, 3.0, 4.0], 'element3'); + + const result = await client.vSimWithScores('resp3-key', 'element1'); + + assert.ok(typeof result === 'object'); + assert.ok('element1' in result); + assert.ok('element2' in result); + assert.equal(typeof result['element1'], 'number'); + assert.equal(typeof result['element2'], 'number'); + }, + { + ...GLOBAL.SERVERS.OPEN, + clientOptions: { + 
RESP: 3 + }, + minimumDockerVersion: [8, 0] + } + ); +}); diff --git a/packages/client/lib/commands/VSIM_WITHSCORES.ts b/packages/client/lib/commands/VSIM_WITHSCORES.ts new file mode 100644 index 00000000000..fda05be6642 --- /dev/null +++ b/packages/client/lib/commands/VSIM_WITHSCORES.ts @@ -0,0 +1,36 @@ +import { + ArrayReply, + BlobStringReply, + Command, + DoubleReply, + MapReply, + UnwrapReply +} from '../RESP/types'; +import { transformDoubleReply } from './generic-transformers'; +import VSIM from './VSIM'; + +export default { + IS_READ_ONLY: VSIM.IS_READ_ONLY, + /** + * Retrieve elements similar to a given vector or element with similarity scores + * @param args - Same parameters as the VSIM command + * @see https://redis.io/commands/vsim/ + */ + parseCommand(...args: Parameters) { + const parser = args[0]; + + VSIM.parseCommand(...args); + parser.push('WITHSCORES'); + }, + transformReply: { + 2: (reply: ArrayReply) => { + const inferred = reply as unknown as UnwrapReply; + const members: Record = {}; + for (let i = 0; i < inferred.length; i += 2) { + members[inferred[i].toString()] = transformDoubleReply[2](inferred[i + 1]); + } + return members; + }, + 3: undefined as unknown as () => MapReply + } +} as const satisfies Command; diff --git a/packages/client/lib/commands/WAIT.spec.ts b/packages/client/lib/commands/WAIT.spec.ts new file mode 100644 index 00000000000..d2778e7967b --- /dev/null +++ b/packages/client/lib/commands/WAIT.spec.ts @@ -0,0 +1,20 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import WAIT from './WAIT'; +import { parseArgs } from './generic-transformers'; + +describe('WAIT', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(WAIT, 0, 1), + ['WAIT', '0', '1'] + ); + }); + + testUtils.testWithClient('client.wait', async client => { + assert.equal( + await client.wait(0, 1), + 0 + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git 
a/packages/client/lib/commands/WAIT.ts b/packages/client/lib/commands/WAIT.ts new file mode 100644 index 00000000000..7ccebbc4ec9 --- /dev/null +++ b/packages/client/lib/commands/WAIT.ts @@ -0,0 +1,20 @@ +import { CommandParser } from '../client/parser'; +import { NumberReply, Command } from '../RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Constructs the WAIT command to synchronize with replicas + * + * @param parser - The command parser + * @param numberOfReplicas - Number of replicas that must acknowledge the write + * @param timeout - Maximum time to wait in milliseconds + * @returns The number of replicas that acknowledged the write + * @see https://redis.io/commands/wait/ + */ + parseCommand(parser: CommandParser, numberOfReplicas: number, timeout: number) { + parser.push('WAIT', numberOfReplicas.toString(), timeout.toString()); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/XACK.spec.ts b/packages/client/lib/commands/XACK.spec.ts new file mode 100644 index 00000000000..4ad60b256d0 --- /dev/null +++ b/packages/client/lib/commands/XACK.spec.ts @@ -0,0 +1,32 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import XACK from './XACK'; +import { parseArgs } from './generic-transformers'; + +describe('XACK', () => { + describe('transformArguments', () => { + it('string', () => { + assert.deepEqual( + parseArgs(XACK, 'key', 'group', '0-0'), + ['XACK', 'key', 'group', '0-0'] + ); + }); + + it('array', () => { + assert.deepEqual( + parseArgs(XACK, 'key', 'group', ['0-0', '1-0']), + ['XACK', 'key', 'group', '0-0', '1-0'] + ); + }); + }); + + testUtils.testAll('xAck', async client => { + assert.equal( + await client.xAck('key', 'group', '0-0'), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git 
a/packages/client/lib/commands/XACK.ts b/packages/client/lib/commands/XACK.ts new file mode 100644 index 00000000000..26e1c962baa --- /dev/null +++ b/packages/client/lib/commands/XACK.ts @@ -0,0 +1,25 @@ +import { CommandParser } from '../client/parser'; +import { NumberReply, Command, RedisArgument } from '../RESP/types'; +import { RedisVariadicArgument } from './generic-transformers'; + +export default { + IS_READ_ONLY: false, + /** + * Constructs the XACK command to acknowledge the processing of stream messages in a consumer group + * + * @param parser - The command parser + * @param key - The stream key + * @param group - The consumer group name + * @param id - One or more message IDs to acknowledge + * @returns The number of messages successfully acknowledged + * @see https://redis.io/commands/xack/ + */ + parseCommand(parser: CommandParser, key: RedisArgument, group: RedisArgument, id: RedisVariadicArgument) { + parser.push('XACK'); + parser.pushKey(key); + parser.push(group) + parser.pushVariadic(id); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; + \ No newline at end of file diff --git a/packages/client/lib/commands/XACKDEL.spec.ts b/packages/client/lib/commands/XACKDEL.spec.ts new file mode 100644 index 00000000000..9d7bad15a24 --- /dev/null +++ b/packages/client/lib/commands/XACKDEL.spec.ts @@ -0,0 +1,196 @@ +import { strict as assert } from "node:assert"; +import XACKDEL from "./XACKDEL"; +import { parseArgs } from "./generic-transformers"; +import testUtils, { GLOBAL } from "../test-utils"; +import { + STREAM_DELETION_POLICY, + STREAM_DELETION_REPLY_CODES, +} from "./common-stream.types"; + +describe("XACKDEL", () => { + describe("transformArguments", () => { + it("string - without policy", () => { + assert.deepEqual(parseArgs(XACKDEL, "key", "group", "0-0"), [ + "XACKDEL", + "key", + "group", + "IDS", + "1", + "0-0", + ]); + }); + + it("string - with policy", () => { + assert.deepEqual( + parseArgs( + 
XACKDEL, + "key", + "group", + "0-0", + STREAM_DELETION_POLICY.KEEPREF + ), + ["XACKDEL", "key", "group", "KEEPREF", "IDS", "1", "0-0"] + ); + }); + + it("array - without policy", () => { + assert.deepEqual(parseArgs(XACKDEL, "key", "group", ["0-0", "1-0"]), [ + "XACKDEL", + "key", + "group", + "IDS", + "2", + "0-0", + "1-0", + ]); + }); + + it("array - with policy", () => { + assert.deepEqual( + parseArgs( + XACKDEL, + "key", + "group", + ["0-0", "1-0"], + STREAM_DELETION_POLICY.DELREF + ), + ["XACKDEL", "key", "group", "DELREF", "IDS", "2", "0-0", "1-0"] + ); + }); + }); + + testUtils.testAll( + `XACKDEL non-existing key - without policy`, + async (client) => { + const reply = await client.xAckDel("{tag}stream-key", "testgroup", "0-0"); + assert.deepEqual(reply, [STREAM_DELETION_REPLY_CODES.NOT_FOUND]); + }, + { + client: { ...GLOBAL.SERVERS.OPEN, minimumDockerVersion: [8, 2] }, + cluster: { ...GLOBAL.CLUSTERS.OPEN, minimumDockerVersion: [8, 2] }, + } + ); + + testUtils.testAll( + `XACKDEL existing key - without policy`, + async (client) => { + const streamKey = "{tag}stream-key"; + const groupName = "testgroup"; + + // create consumer group, stream and message + await client.xGroupCreate(streamKey, groupName, "0", { MKSTREAM: true }); + const messageId = await client.xAdd(streamKey, "*", { field: "value" }); + + // read message + await client.xReadGroup(groupName, "testconsumer", { + key: streamKey, + id: ">", + }); + + const reply = await client.xAckDel(streamKey, groupName, messageId); + assert.deepEqual(reply, [STREAM_DELETION_REPLY_CODES.DELETED]); + }, + { + client: { ...GLOBAL.SERVERS.OPEN, minimumDockerVersion: [8, 2] }, + cluster: { ...GLOBAL.CLUSTERS.OPEN, minimumDockerVersion: [8, 2] }, + } + ); + + testUtils.testAll( + `XACKDEL existing key - with policy`, + async (client) => { + const streamKey = "{tag}stream-key"; + const groupName = "testgroup"; + + // create consumer group, stream and message + await client.xGroupCreate(streamKey, groupName, "0", 
{ MKSTREAM: true }); + const messageId = await client.xAdd(streamKey, "*", { field: "value" }); + + // read message + await client.xReadGroup(groupName, "testconsumer", { + key: streamKey, + id: ">", + }); + + const reply = await client.xAckDel( + streamKey, + groupName, + messageId, + STREAM_DELETION_POLICY.DELREF + ); + assert.deepEqual(reply, [STREAM_DELETION_REPLY_CODES.DELETED]); + }, + { + client: { ...GLOBAL.SERVERS.OPEN, minimumDockerVersion: [8, 2] }, + cluster: { ...GLOBAL.CLUSTERS.OPEN, minimumDockerVersion: [8, 2] }, + } + ); + + testUtils.testAll( + `XACKDEL acknowledge policy - with consumer group`, + async (client) => { + const streamKey = "{tag}stream-key"; + const groupName = "testgroup"; + + // create consumer groups, stream and message + await client.xGroupCreate(streamKey, groupName, "0", { MKSTREAM: true }); + await client.xGroupCreate(streamKey, "some-other-group", "0"); + const messageId = await client.xAdd(streamKey, "*", { field: "value" }); + + // read message + await client.xReadGroup(groupName, "testconsumer", { + key: streamKey, + id: ">", + }); + + const reply = await client.xAckDel( + streamKey, + groupName, + messageId, + STREAM_DELETION_POLICY.ACKED + ); + assert.deepEqual(reply, [STREAM_DELETION_REPLY_CODES.DANGLING_REFS]); + }, + { + client: { ...GLOBAL.SERVERS.OPEN, minimumDockerVersion: [8, 2] }, + cluster: { ...GLOBAL.CLUSTERS.OPEN, minimumDockerVersion: [8, 2] }, + } + ); + + testUtils.testAll( + `XACKDEL multiple keys`, + async (client) => { + const streamKey = "{tag}stream-key"; + const groupName = "testgroup"; + + // create consumer groups, stream and add messages + await client.xGroupCreate(streamKey, groupName, "0", { MKSTREAM: true }); + const messageIds = await Promise.all([ + client.xAdd(streamKey, "*", { field: "value1" }), + client.xAdd(streamKey, "*", { field: "value2" }), + ]); + + // read messages + await client.xReadGroup(groupName, "testconsumer", { + key: streamKey, + id: ">", + }); + + const reply = await 
client.xAckDel( + streamKey, + groupName, + [...messageIds, "0-0"], + STREAM_DELETION_POLICY.DELREF + ); + assert.deepEqual(reply, [ + STREAM_DELETION_REPLY_CODES.DELETED, + STREAM_DELETION_REPLY_CODES.DELETED, + STREAM_DELETION_REPLY_CODES.NOT_FOUND, + ]); + }, + { + client: { ...GLOBAL.SERVERS.OPEN, minimumDockerVersion: [8, 2] }, + cluster: { ...GLOBAL.CLUSTERS.OPEN, minimumDockerVersion: [8, 2] }, + } + ); +}); diff --git a/packages/client/lib/commands/XACKDEL.ts b/packages/client/lib/commands/XACKDEL.ts new file mode 100644 index 00000000000..6e209879e49 --- /dev/null +++ b/packages/client/lib/commands/XACKDEL.ts @@ -0,0 +1,45 @@ +import { CommandParser } from "../client/parser"; +import { RedisArgument, ArrayReply, Command } from "../RESP/types"; +import { + StreamDeletionReplyCode, + StreamDeletionPolicy, +} from "./common-stream.types"; +import { RedisVariadicArgument } from "./generic-transformers"; + +/** + * Acknowledges and deletes one or multiple messages for a stream consumer group + */ +export default { + IS_READ_ONLY: false, + /** + * Constructs the XACKDEL command to acknowledge and delete one or multiple messages for a stream consumer group + * + * @param parser - The command parser + * @param key - The stream key + * @param group - The consumer group name + * @param id - One or more message IDs to acknowledge and delete + * @param policy - Policy to apply when deleting entries (optional, defaults to KEEPREF) + * @returns Array of integers: -1 (not found), 1 (acknowledged and deleted), 2 (acknowledged with dangling refs) + * @see https://redis.io/commands/xackdel/ + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + group: RedisArgument, + id: RedisVariadicArgument, + policy?: StreamDeletionPolicy + ) { + parser.push("XACKDEL"); + parser.pushKey(key); + parser.push(group); + + if (policy) { + parser.push(policy); + } + + parser.push("IDS"); + parser.pushVariadicWithLength(id); + }, + transformReply: + undefined as unknown as () => 
ArrayReply, +} as const satisfies Command; diff --git a/packages/client/lib/commands/XADD.spec.ts b/packages/client/lib/commands/XADD.spec.ts new file mode 100644 index 00000000000..a41e8682751 --- /dev/null +++ b/packages/client/lib/commands/XADD.spec.ts @@ -0,0 +1,174 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import XADD from './XADD'; +import { parseArgs } from './generic-transformers'; +import { STREAM_DELETION_POLICY } from './common-stream.types'; + +describe('XADD', () => { + describe('transformArguments', () => { + it('single field', () => { + assert.deepEqual( + parseArgs(XADD, 'key', '*', { + field: 'value' + }), + ['XADD', 'key', '*', 'field', 'value'] + ); + }); + + it('multiple fields', () => { + assert.deepEqual( + parseArgs(XADD, 'key', '*', { + '1': 'I', + '2': 'II' + }), + ['XADD', 'key', '*', '1', 'I', '2', 'II'] + ); + }); + + it('with TRIM', () => { + assert.deepEqual( + parseArgs(XADD, 'key', '*', { + field: 'value' + }, { + TRIM: { + threshold: 1000 + } + }), + ['XADD', 'key', '1000', '*', 'field', 'value'] + ); + }); + + it('with TRIM.strategy', () => { + assert.deepEqual( + parseArgs(XADD, 'key', '*', { + field: 'value' + }, { + TRIM: { + strategy: 'MAXLEN', + threshold: 1000 + } + }), + ['XADD', 'key', 'MAXLEN', '1000', '*', 'field', 'value'] + ); + }); + + it('with TRIM.strategyModifier', () => { + assert.deepEqual( + parseArgs(XADD, 'key', '*', { + field: 'value' + }, { + TRIM: { + strategyModifier: '=', + threshold: 1000 + } + }), + ['XADD', 'key', '=', '1000', '*', 'field', 'value'] + ); + }); + + it('with TRIM.limit', () => { + assert.deepEqual( + parseArgs(XADD, 'key', '*', { + field: 'value' + }, { + TRIM: { + threshold: 1000, + limit: 1 + } + }), + ['XADD', 'key', '1000', 'LIMIT', '1', '*', 'field', 'value'] + ); + }); + + it('with TRIM.policy', () => { + assert.deepEqual( + parseArgs(XADD, 'key', '*', { + field: 'value' + }, { + TRIM: { + threshold: 1000, + policy: 
STREAM_DELETION_POLICY.DELREF + } + }), + ['XADD', 'key', '1000', 'DELREF', '*', 'field', 'value'] + ); + }); + + it('with all TRIM options', () => { + assert.deepEqual( + parseArgs(XADD, 'key', '*', { + field: 'value' + }, { + TRIM: { + strategy: 'MAXLEN', + strategyModifier: '~', + threshold: 1000, + limit: 100, + policy: STREAM_DELETION_POLICY.ACKED + } + }), + ['XADD', 'key', 'MAXLEN', '~', '1000', 'LIMIT', '100', 'ACKED', '*', 'field', 'value'] + ); + }); + }); + + testUtils.testAll('xAdd', async client => { + assert.equal( + typeof await client.xAdd('key', '*', { + field: 'value' + }), + 'string' + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); + + testUtils.testAll( + 'xAdd with TRIM policy', + async (client) => { + assert.equal( + typeof await client.xAdd('{tag}key', '*', + { field: 'value' }, + { + TRIM: { + strategy: 'MAXLEN', + threshold: 1000, + policy: STREAM_DELETION_POLICY.KEEPREF + } + } + ), + 'string' + ); + }, + { + client: { ...GLOBAL.SERVERS.OPEN, minimumDockerVersion: [8, 2] }, + cluster: { ...GLOBAL.CLUSTERS.OPEN, minimumDockerVersion: [8, 2] }, + } + ); + + testUtils.testAll( + 'xAdd with all TRIM options', + async (client) => { + assert.equal( + typeof await client.xAdd('{tag}key2', '*', + { field: 'value' }, + { + TRIM: { + strategy: 'MAXLEN', + strategyModifier: '~', + threshold: 1000, + limit: 10, + policy: STREAM_DELETION_POLICY.DELREF + } + } + ), + 'string' + ); + }, + { + client: { ...GLOBAL.SERVERS.OPEN, minimumDockerVersion: [8, 2] }, + cluster: { ...GLOBAL.CLUSTERS.OPEN, minimumDockerVersion: [8, 2] }, + } + ); +}); diff --git a/packages/client/lib/commands/XADD.ts b/packages/client/lib/commands/XADD.ts new file mode 100644 index 00000000000..f2509a9fa7b --- /dev/null +++ b/packages/client/lib/commands/XADD.ts @@ -0,0 +1,95 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, BlobStringReply, Command } from '../RESP/types'; +import { StreamDeletionPolicy } from 
'./common-stream.types'; +import { Tail } from './generic-transformers'; + +/** + * Options for the XADD command + * + * @property TRIM - Optional trimming configuration + * @property TRIM.strategy - Trim strategy: MAXLEN (by length) or MINID (by ID) + * @property TRIM.strategyModifier - Exact ('=') or approximate ('~') trimming + * @property TRIM.threshold - Maximum stream length or minimum ID to retain + * @property TRIM.limit - Maximum number of entries to trim in one call + * @property TRIM.policy - Policy to apply when trimming entries (optional, defaults to KEEPREF) + */ +export interface XAddOptions { + TRIM?: { + strategy?: 'MAXLEN' | 'MINID'; + strategyModifier?: '=' | '~'; + threshold: number; + limit?: number; + /** added in 8.2 */ + policy?: StreamDeletionPolicy; + }; +} + +/** + * Parses arguments for the XADD command + * + * @param optional - Optional command modifier + * @param parser - The command parser + * @param key - The stream key + * @param id - Message ID (* for auto-generation) + * @param message - Key-value pairs representing the message fields + * @param options - Additional options for stream trimming + */ +export function parseXAddArguments( + optional: RedisArgument | undefined, + parser: CommandParser, + key: RedisArgument, + id: RedisArgument, + message: Record, + options?: XAddOptions +) { + parser.push('XADD'); + parser.pushKey(key); + if (optional) { + parser.push(optional); + } + + if (options?.TRIM) { + if (options.TRIM.strategy) { + parser.push(options.TRIM.strategy); + } + + if (options.TRIM.strategyModifier) { + parser.push(options.TRIM.strategyModifier); + } + + parser.push(options.TRIM.threshold.toString()); + + if (options.TRIM.limit) { + parser.push('LIMIT', options.TRIM.limit.toString()); + } + + if (options.TRIM.policy) { + parser.push(options.TRIM.policy); + } + } + + parser.push(id); + + for (const [key, value] of Object.entries(message)) { + parser.push(key, value); + } +} + +export default { + IS_READ_ONLY: false, + 
/** + * Constructs the XADD command to append a new entry to a stream + * + * @param parser - The command parser + * @param key - The stream key + * @param id - Message ID (* for auto-generation) + * @param message - Key-value pairs representing the message fields + * @param options - Additional options for stream trimming + * @returns The ID of the added entry + * @see https://redis.io/commands/xadd/ + */ + parseCommand(...args: Tail>) { + return parseXAddArguments(undefined, ...args); + }, + transformReply: undefined as unknown as () => BlobStringReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/XADD_NOMKSTREAM.spec.ts b/packages/client/lib/commands/XADD_NOMKSTREAM.spec.ts new file mode 100644 index 00000000000..a957d0f06c1 --- /dev/null +++ b/packages/client/lib/commands/XADD_NOMKSTREAM.spec.ts @@ -0,0 +1,162 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import XADD_NOMKSTREAM from './XADD_NOMKSTREAM'; +import { parseArgs } from './generic-transformers'; +import { STREAM_DELETION_POLICY } from './common-stream.types'; + +describe('XADD NOMKSTREAM', () => { + testUtils.isVersionGreaterThanHook([6, 2]); + + describe('transformArguments', () => { + it('single field', () => { + assert.deepEqual( + parseArgs(XADD_NOMKSTREAM, 'key', '*', { + field: 'value' + }), + ['XADD', 'key', 'NOMKSTREAM', '*', 'field', 'value'] + ); + }); + + it('multiple fields', () => { + assert.deepEqual( + parseArgs(XADD_NOMKSTREAM, 'key', '*', { + '1': 'I', + '2': 'II' + }), + ['XADD', 'key', 'NOMKSTREAM', '*', '1', 'I', '2', 'II'] + ); + }); + + it('with TRIM', () => { + assert.deepEqual( + parseArgs(XADD_NOMKSTREAM, 'key', '*', { + field: 'value' + }, { + TRIM: { + threshold: 1000 + } + }), + ['XADD', 'key', 'NOMKSTREAM', '1000', '*', 'field', 'value'] + ); + }); + + it('with TRIM.strategy', () => { + assert.deepEqual( + parseArgs(XADD_NOMKSTREAM, 'key', '*', { + field: 'value' + }, { + TRIM: { + 
strategy: 'MAXLEN', + threshold: 1000 + } + }), + ['XADD', 'key', 'NOMKSTREAM', 'MAXLEN', '1000', '*', 'field', 'value'] + ); + }); + + it('with TRIM.strategyModifier', () => { + assert.deepEqual( + parseArgs(XADD_NOMKSTREAM, 'key', '*', { + field: 'value' + }, { + TRIM: { + strategyModifier: '=', + threshold: 1000 + } + }), + ['XADD', 'key', 'NOMKSTREAM', '=', '1000', '*', 'field', 'value'] + ); + }); + + it('with TRIM.limit', () => { + assert.deepEqual( + parseArgs(XADD_NOMKSTREAM, 'key', '*', { + field: 'value' + }, { + TRIM: { + threshold: 1000, + limit: 1 + } + }), + ['XADD', 'key', 'NOMKSTREAM', '1000', 'LIMIT', '1', '*', 'field', 'value'] + ); + }); + + it('with TRIM.policy', () => { + assert.deepEqual( + parseArgs(XADD_NOMKSTREAM, 'key', '*', { + field: 'value' + }, { + TRIM: { + threshold: 1000, + policy: STREAM_DELETION_POLICY.DELREF + } + }), + ['XADD', 'key', 'NOMKSTREAM', '1000', 'DELREF', '*', 'field', 'value'] + ); + }); + + it('with all TRIM options', () => { + assert.deepEqual( + parseArgs(XADD_NOMKSTREAM, 'key', '*', { + field: 'value' + }, { + TRIM: { + strategy: 'MAXLEN', + strategyModifier: '~', + threshold: 1000, + limit: 100, + policy: STREAM_DELETION_POLICY.ACKED + } + }), + ['XADD', 'key', 'NOMKSTREAM', 'MAXLEN', '~', '1000', 'LIMIT', '100', 'ACKED', '*', 'field', 'value'] + ); + }); + }); + + testUtils.testAll( + 'xAddNoMkStream - null when stream does not exist', + async (client) => { + assert.equal( + await client.xAddNoMkStream('{tag}nonexistent-stream', '*', { + field: 'value' + }), + null + ); + }, + { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN, + } + ); + + testUtils.testAll( + 'xAddNoMkStream - with all TRIM options', + async (client) => { + const streamKey = '{tag}stream'; + + // Create stream and add some messages + await client.xAdd(streamKey, '*', { field: 'value1' }); + + // Use NOMKSTREAM with all TRIM options + const messageId = await client.xAddNoMkStream(streamKey, '*', + { field: 'value2' }, + { + TRIM: 
{ + strategyModifier: '~', + limit: 1, + strategy: 'MAXLEN', + threshold: 2, + policy: STREAM_DELETION_POLICY.DELREF + } + } + ); + + assert.equal(typeof messageId, 'string'); + }, + { + client: { ...GLOBAL.SERVERS.OPEN, minimumDockerVersion: [8, 2] }, + cluster: { ...GLOBAL.CLUSTERS.OPEN, minimumDockerVersion: [8, 2] }, + } + ); +}); diff --git a/packages/client/lib/commands/XADD_NOMKSTREAM.ts b/packages/client/lib/commands/XADD_NOMKSTREAM.ts new file mode 100644 index 00000000000..8b1861a065b --- /dev/null +++ b/packages/client/lib/commands/XADD_NOMKSTREAM.ts @@ -0,0 +1,21 @@ +import { BlobStringReply, NullReply, Command } from '../RESP/types'; +import { Tail } from './generic-transformers'; +import { parseXAddArguments } from './XADD'; + +/** + * Command for adding entries to an existing stream without creating it if it doesn't exist + */ +export default { + IS_READ_ONLY: false, + /** + * Constructs the XADD command with NOMKSTREAM option to append a new entry to an existing stream + * + * @param args - Arguments tuple containing parser, key, id, message, and options + * @returns The ID of the added entry, or null if the stream doesn't exist + * @see https://redis.io/commands/xadd/ + */ + parseCommand(...args: Tail>) { + return parseXAddArguments('NOMKSTREAM', ...args); + }, + transformReply: undefined as unknown as () => BlobStringReply | NullReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/XAUTOCLAIM.spec.ts b/packages/client/lib/commands/XAUTOCLAIM.spec.ts new file mode 100644 index 00000000000..58b09a63e78 --- /dev/null +++ b/packages/client/lib/commands/XAUTOCLAIM.spec.ts @@ -0,0 +1,69 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import XAUTOCLAIM from './XAUTOCLAIM'; +import { parseArgs } from './generic-transformers'; + +describe('XAUTOCLAIM', () => { + testUtils.isVersionGreaterThanHook([6, 2]); + + describe('transformArguments', () => { + it('simple', () => { + 
assert.deepEqual( + parseArgs(XAUTOCLAIM, 'key', 'group', 'consumer', 1, '0-0'), + ['XAUTOCLAIM', 'key', 'group', 'consumer', '1', '0-0'] + ); + }); + + it('with COUNT', () => { + assert.deepEqual( + parseArgs(XAUTOCLAIM, 'key', 'group', 'consumer', 1, '0-0', { + COUNT: 1 + }), + ['XAUTOCLAIM', 'key', 'group', 'consumer', '1', '0-0', 'COUNT', '1'] + ); + }); + }); + + testUtils.testAll('xAutoClaim', async client => { + const message = Object.create(null, { + field: { + value: 'value', + enumerable: true + } + }); + + const [, id1, id2, , , reply] = await Promise.all([ + client.xGroupCreate('key', 'group', '$', { + MKSTREAM: true + }), + client.xAdd('key', '*', message), + client.xAdd('key', '*', message), + client.xReadGroup('group', 'consumer', { + key: 'key', + id: '>' + }), + client.xTrim('key', 'MAXLEN', 1), + client.xAutoClaim('key', 'group', 'consumer', 0, '0-0') + ]); + + assert.deepEqual(reply, { + nextId: '0-0', + ...(testUtils.isVersionGreaterThan([7, 0]) ? { + messages: [{ + id: id2, + message + }], + deletedMessages: [id1] + } : { + messages: [null, { + id: id2, + message + }], + deletedMessages: undefined + }) + }); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/XAUTOCLAIM.ts b/packages/client/lib/commands/XAUTOCLAIM.ts new file mode 100644 index 00000000000..bd6f7b05346 --- /dev/null +++ b/packages/client/lib/commands/XAUTOCLAIM.ts @@ -0,0 +1,74 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, TuplesReply, BlobStringReply, ArrayReply, NullReply, UnwrapReply, Command, TypeMapping } from '../RESP/types'; +import { StreamMessageRawReply, transformStreamMessageNullReply } from './generic-transformers'; + +/** + * Options for the XAUTOCLAIM command + * + * @property COUNT - Limit the number of messages to claim + */ +export interface XAutoClaimOptions { + COUNT?: number; +} + +/** + * Raw reply structure for XAUTOCLAIM command + * + * @property nextId 
- The ID to use for the next XAUTOCLAIM call + * @property messages - Array of claimed messages or null entries + * @property deletedMessages - Array of message IDs that no longer exist + */ +export type XAutoClaimRawReply = TuplesReply<[ + nextId: BlobStringReply, + messages: ArrayReply, + deletedMessages: ArrayReply +]>; + +export default { + IS_READ_ONLY: false, + /** + * Constructs the XAUTOCLAIM command to automatically claim pending messages in a consumer group + * + * @param parser - The command parser + * @param key - The stream key + * @param group - The consumer group name + * @param consumer - The consumer name that will claim the messages + * @param minIdleTime - Minimum idle time in milliseconds for a message to be claimed + * @param start - Message ID to start scanning from + * @param options - Additional options for the claim operation + * @returns Object containing nextId, claimed messages, and list of deleted message IDs + * @see https://redis.io/commands/xautoclaim/ + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + group: RedisArgument, + consumer: RedisArgument, + minIdleTime: number, + start: RedisArgument, + options?: XAutoClaimOptions + ) { + parser.push('XAUTOCLAIM'); + parser.pushKey(key); + parser.push(group, consumer, minIdleTime.toString(), start); + + if (options?.COUNT) { + parser.push('COUNT', options.COUNT.toString()); + } + }, + /** + * Transforms the raw XAUTOCLAIM reply into a structured object + * + * @param reply - Raw reply from Redis + * @param preserve - Preserve options (unused) + * @param typeMapping - Type mapping for message fields + * @returns Structured object containing nextId, messages, and deletedMessages + */ + transformReply(reply: UnwrapReply, preserve?: any, typeMapping?: TypeMapping) { + return { + nextId: reply[0], + messages: (reply[1] as unknown as UnwrapReply).map(transformStreamMessageNullReply.bind(undefined, typeMapping)), + deletedMessages: reply[2] + }; + } +} as const satisfies 
Command; diff --git a/packages/client/lib/commands/XAUTOCLAIM_JUSTID.spec.ts b/packages/client/lib/commands/XAUTOCLAIM_JUSTID.spec.ts new file mode 100644 index 00000000000..78911657086 --- /dev/null +++ b/packages/client/lib/commands/XAUTOCLAIM_JUSTID.spec.ts @@ -0,0 +1,38 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import XAUTOCLAIM_JUSTID from './XAUTOCLAIM_JUSTID'; +import { parseArgs } from './generic-transformers'; + +describe('XAUTOCLAIM JUSTID', () => { + testUtils.isVersionGreaterThanHook([6, 2]); + + it('transformArguments', () => { + assert.deepEqual( + parseArgs(XAUTOCLAIM_JUSTID, 'key', 'group', 'consumer', 1, '0-0'), + ['XAUTOCLAIM', 'key', 'group', 'consumer', '1', '0-0', 'JUSTID'] + ); + }); + + testUtils.testWithClient('client.xAutoClaimJustId', async client => { + const [, , id, , reply] = await Promise.all([ + client.xGroupCreate('key', 'group', '$', { + MKSTREAM: true + }), + client.xGroupCreateConsumer('key', 'group', 'consumer'), + client.xAdd('key', '*', { + field: 'value' + }), + client.xReadGroup('group', 'consumer', { + key: 'key', + id: '>' + }), + client.xAutoClaimJustId('key', 'group', 'consumer', 0, '0-0') + ]); + + assert.deepEqual(reply, { + nextId: '0-0', + messages: [id], + deletedMessages: testUtils.isVersionGreaterThan([7, 0]) ? 
[] : undefined + }); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/XAUTOCLAIM_JUSTID.ts b/packages/client/lib/commands/XAUTOCLAIM_JUSTID.ts new file mode 100644 index 00000000000..efa299c6f8f --- /dev/null +++ b/packages/client/lib/commands/XAUTOCLAIM_JUSTID.ts @@ -0,0 +1,44 @@ +import { TuplesReply, BlobStringReply, ArrayReply, UnwrapReply, Command } from '../RESP/types'; +import XAUTOCLAIM from './XAUTOCLAIM'; + +/** + * Raw reply structure for XAUTOCLAIM JUSTID command + * + * @property nextId - The ID to use for the next XAUTOCLAIM call + * @property messages - Array of message IDs that were claimed + * @property deletedMessages - Array of message IDs that no longer exist + */ +type XAutoClaimJustIdRawReply = TuplesReply<[ + nextId: BlobStringReply, + messages: ArrayReply, + deletedMessages: ArrayReply +]>; + +export default { + IS_READ_ONLY: XAUTOCLAIM.IS_READ_ONLY, + /** + * Constructs the XAUTOCLAIM command with JUSTID option to get only message IDs + * + * @param args - Same parameters as XAUTOCLAIM command + * @returns Object containing nextId and arrays of claimed and deleted message IDs + * @see https://redis.io/commands/xautoclaim/ + */ + parseCommand(...args: Parameters) { + const parser = args[0]; + XAUTOCLAIM.parseCommand(...args); + parser.push('JUSTID'); + }, + /** + * Transforms the raw XAUTOCLAIM JUSTID reply into a structured object + * + * @param reply - Raw reply from Redis + * @returns Structured object containing nextId, message IDs, and deleted message IDs + */ + transformReply(reply: UnwrapReply) { + return { + nextId: reply[0], + messages: reply[1], + deletedMessages: reply[2] + }; + } +} as const satisfies Command; diff --git a/packages/client/lib/commands/XCLAIM.spec.ts b/packages/client/lib/commands/XCLAIM.spec.ts new file mode 100644 index 00000000000..90768509225 --- /dev/null +++ b/packages/client/lib/commands/XCLAIM.spec.ts @@ -0,0 +1,126 @@ +import { strict as assert } from 'node:assert'; +import 
testUtils, { GLOBAL } from '../test-utils'; +import XCLAIM from './XCLAIM'; +import { parseArgs } from './generic-transformers'; + +describe('XCLAIM', () => { + describe('transformArguments', () => { + it('single id (string)', () => { + assert.deepEqual( + parseArgs(XCLAIM, 'key', 'group', 'consumer', 1, '0-0'), + ['XCLAIM', 'key', 'group', 'consumer', '1', '0-0'] + ); + }); + + it('multiple ids (array)', () => { + assert.deepEqual( + parseArgs(XCLAIM, 'key', 'group', 'consumer', 1, ['0-0', '1-0']), + ['XCLAIM', 'key', 'group', 'consumer', '1', '0-0', '1-0'] + ); + }); + + it('with IDLE', () => { + assert.deepEqual( + parseArgs(XCLAIM, 'key', 'group', 'consumer', 1, '0-0', { + IDLE: 1 + }), + ['XCLAIM', 'key', 'group', 'consumer', '1', '0-0', 'IDLE', '1'] + ); + }); + + describe('with TIME', () => { + it('number', () => { + assert.deepEqual( + parseArgs(XCLAIM, 'key', 'group', 'consumer', 1, '0-0', { + TIME: 1 + }), + ['XCLAIM', 'key', 'group', 'consumer', '1', '0-0', 'TIME', '1'] + ); + }); + + it('Date', () => { + const d = new Date(); + assert.deepEqual( + parseArgs(XCLAIM, 'key', 'group', 'consumer', 1, '0-0', { + TIME: d + }), + ['XCLAIM', 'key', 'group', 'consumer', '1', '0-0', 'TIME', d.getTime().toString()] + ); + }); + }); + + it('with RETRYCOUNT', () => { + assert.deepEqual( + parseArgs(XCLAIM, 'key', 'group', 'consumer', 1, '0-0', { + RETRYCOUNT: 1 + }), + ['XCLAIM', 'key', 'group', 'consumer', '1', '0-0', 'RETRYCOUNT', '1'] + ); + }); + + it('with FORCE', () => { + assert.deepEqual( + parseArgs(XCLAIM, 'key', 'group', 'consumer', 1, '0-0', { + FORCE: true + }), + ['XCLAIM', 'key', 'group', 'consumer', '1', '0-0', 'FORCE'] + ); + }); + + it('with LASTID', () => { + assert.deepEqual( + parseArgs(XCLAIM, 'key', 'group', 'consumer', 1, '0-0', { + LASTID: '0-0' + }), + ['XCLAIM', 'key', 'group', 'consumer', '1', '0-0', 'LASTID', '0-0'] + ); + }); + + it('with IDLE, TIME, RETRYCOUNT, FORCE, LASTID', () => { + assert.deepEqual( + parseArgs(XCLAIM, 'key', 
'group', 'consumer', 1, '0-0', { + IDLE: 1, + TIME: 1, + RETRYCOUNT: 1, + FORCE: true, + LASTID: '0-0' + }), + ['XCLAIM', 'key', 'group', 'consumer', '1', '0-0', 'IDLE', '1', 'TIME', '1', 'RETRYCOUNT', '1', 'FORCE', 'LASTID', '0-0'] + ); + }); + }); + + testUtils.testAll('xClaim', async client => { + const message = Object.create(null, { + field: { + value: 'value', + enumerable: true + } + }); + + const [, , , , , reply] = await Promise.all([ + client.xGroupCreate('key', 'group', '$', { + MKSTREAM: true + }), + client.xAdd('key', '1-0', message), + client.xAdd('key', '2-0', message), + client.xReadGroup('group', 'consumer', { + key: 'key', + id: '>' + }), + client.xTrim('key', 'MAXLEN', 1), + client.xClaim('key', 'group', 'consumer', 0, ['1-0', '2-0']) + ]); + + assert.deepEqual(reply, [ + ...(testUtils.isVersionGreaterThan([7, 0]) ? [] : [null]), + { + id: '2-0', + message + } + ]); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/XCLAIM.ts b/packages/client/lib/commands/XCLAIM.ts new file mode 100644 index 00000000000..2bc771288ac --- /dev/null +++ b/packages/client/lib/commands/XCLAIM.ts @@ -0,0 +1,89 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, ArrayReply, NullReply, UnwrapReply, Command, TypeMapping } from '../RESP/types'; +import { RedisVariadicArgument, StreamMessageRawReply, transformStreamMessageNullReply } from './generic-transformers'; + +/** + * Options for the XCLAIM command + * + * @property IDLE - Set the idle time (in milliseconds) for the claimed messages + * @property TIME - Set the last delivery time (Unix timestamp or Date) + * @property RETRYCOUNT - Set the retry counter for the claimed messages + * @property FORCE - Create the pending message entry even if the message doesn't exist + * @property LASTID - Update the consumer group last ID + */ +export interface XClaimOptions { + IDLE?: number; + TIME?: number | Date; + RETRYCOUNT?: 
number; + FORCE?: boolean; + LASTID?: RedisArgument; +} + +export default { + IS_READ_ONLY: false, + /** + * Constructs the XCLAIM command to claim pending messages in a consumer group + * + * @param parser - The command parser + * @param key - The stream key + * @param group - The consumer group name + * @param consumer - The consumer name that will claim the messages + * @param minIdleTime - Minimum idle time in milliseconds for a message to be claimed + * @param id - One or more message IDs to claim + * @param options - Additional options for the claim operation + * @returns Array of claimed messages + * @see https://redis.io/commands/xclaim/ + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + group: RedisArgument, + consumer: RedisArgument, + minIdleTime: number, + id: RedisVariadicArgument, + options?: XClaimOptions + ) { + parser.push('XCLAIM'); + parser.pushKey(key); + parser.push(group, consumer, minIdleTime.toString()); + parser.pushVariadic(id); + + if (options?.IDLE !== undefined) { + parser.push('IDLE', options.IDLE.toString()); + } + + if (options?.TIME !== undefined) { + parser.push( + 'TIME', + (options.TIME instanceof Date ? 
options.TIME.getTime() : options.TIME).toString() + ); + } + + if (options?.RETRYCOUNT !== undefined) { + parser.push('RETRYCOUNT', options.RETRYCOUNT.toString()); + } + + if (options?.FORCE) { + parser.push('FORCE'); + } + + if (options?.LASTID !== undefined) { + parser.push('LASTID', options.LASTID); + } + }, + /** + * Transforms the raw XCLAIM reply into an array of messages + * + * @param reply - Raw reply from Redis + * @param preserve - Preserve options (unused) + * @param typeMapping - Type mapping for message fields + * @returns Array of claimed messages with their fields + */ + transformReply( + reply: UnwrapReply>, + preserve?: any, + typeMapping?: TypeMapping + ) { + return reply.map(transformStreamMessageNullReply.bind(undefined, typeMapping)); + } +} as const satisfies Command; diff --git a/packages/client/lib/commands/XCLAIM_JUSTID.spec.ts b/packages/client/lib/commands/XCLAIM_JUSTID.spec.ts new file mode 100644 index 00000000000..d7bf9fdc70c --- /dev/null +++ b/packages/client/lib/commands/XCLAIM_JUSTID.spec.ts @@ -0,0 +1,25 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import XCLAIM_JUSTID from './XCLAIM_JUSTID'; +import { parseArgs } from './generic-transformers'; + +describe('XCLAIM JUSTID', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(XCLAIM_JUSTID, 'key', 'group', 'consumer', 1, '0-0'), + ['XCLAIM', 'key', 'group', 'consumer', '1', '0-0', 'JUSTID'] + ); + }); + + // TODO: test with messages + testUtils.testWithClient('client.xClaimJustId', async client => { + const [, reply] = await Promise.all([ + client.xGroupCreate('key', 'group', '$', { + MKSTREAM: true + }), + client.xClaimJustId('key', 'group', 'consumer', 1, '0-0') + ]); + + assert.deepEqual(reply, []); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/XCLAIM_JUSTID.ts b/packages/client/lib/commands/XCLAIM_JUSTID.ts new file mode 100644 index 00000000000..56e1d576158 --- 
/dev/null +++ b/packages/client/lib/commands/XCLAIM_JUSTID.ts @@ -0,0 +1,27 @@ +import { ArrayReply, BlobStringReply, Command } from '../RESP/types'; +import XCLAIM from './XCLAIM'; + +/** + * Command variant for XCLAIM that returns only message IDs + */ +export default { + IS_READ_ONLY: XCLAIM.IS_READ_ONLY, + /** + * Constructs the XCLAIM command with JUSTID option to get only message IDs + * + * @param args - Same parameters as XCLAIM command + * @returns Array of successfully claimed message IDs + * @see https://redis.io/commands/xclaim/ + */ + parseCommand(...args: Parameters) { + const parser = args[0]; + XCLAIM.parseCommand(...args); + parser.push('JUSTID'); + }, + /** + * Transforms the XCLAIM JUSTID reply into an array of message IDs + * + * @returns Array of claimed message IDs + */ + transformReply: undefined as unknown as () => ArrayReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/XDEL.spec.ts b/packages/client/lib/commands/XDEL.spec.ts new file mode 100644 index 00000000000..510168bb765 --- /dev/null +++ b/packages/client/lib/commands/XDEL.spec.ts @@ -0,0 +1,32 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import XDEL from './XDEL'; +import { parseArgs } from './generic-transformers'; + +describe('XDEL', () => { + describe('transformArguments', () => { + it('string', () => { + assert.deepEqual( + parseArgs(XDEL, 'key', '0-0'), + ['XDEL', 'key', '0-0'] + ); + }); + + it('array', () => { + assert.deepEqual( + parseArgs(XDEL, 'key', ['0-0', '1-0']), + ['XDEL', 'key', '0-0', '1-0'] + ); + }); + }); + + testUtils.testAll('xDel', async client => { + assert.equal( + await client.xDel('key', '0-0'), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/XDEL.ts b/packages/client/lib/commands/XDEL.ts new file mode 100644 index 00000000000..db8df7d4fd2 --- /dev/null +++ 
b/packages/client/lib/commands/XDEL.ts @@ -0,0 +1,25 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; +import { RedisVariadicArgument } from './generic-transformers'; + +/** + * Command for removing messages from a stream + */ +export default { + IS_READ_ONLY: false, + /** + * Constructs the XDEL command to remove one or more messages from a stream + * + * @param parser - The command parser + * @param key - The stream key + * @param id - One or more message IDs to delete + * @returns The number of messages actually deleted + * @see https://redis.io/commands/xdel/ + */ + parseCommand(parser: CommandParser, key: RedisArgument, id: RedisVariadicArgument) { + parser.push('XDEL'); + parser.pushKey(key); + parser.pushVariadic(id); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/XDELEX.spec.ts b/packages/client/lib/commands/XDELEX.spec.ts new file mode 100644 index 00000000000..8c421503256 --- /dev/null +++ b/packages/client/lib/commands/XDELEX.spec.ts @@ -0,0 +1,156 @@ +import { strict as assert } from "node:assert"; +import XDELEX from "./XDELEX"; +import { parseArgs } from "./generic-transformers"; +import testUtils, { GLOBAL } from "../test-utils"; +import { + STREAM_DELETION_POLICY, + STREAM_DELETION_REPLY_CODES, +} from "./common-stream.types"; + +describe("XDELEX", () => { + describe("transformArguments", () => { + it("string - without policy", () => { + assert.deepEqual(parseArgs(XDELEX, "key", "0-0"), [ + "XDELEX", + "key", + "IDS", + "1", + "0-0", + ]); + }); + + it("string - with policy", () => { + assert.deepEqual( + parseArgs(XDELEX, "key", "0-0", STREAM_DELETION_POLICY.KEEPREF), + ["XDELEX", "key", "KEEPREF", "IDS", "1", "0-0"] + ); + }); + + it("array - without policy", () => { + assert.deepEqual(parseArgs(XDELEX, "key", ["0-0", "1-0"]), [ + "XDELEX", + "key", + "IDS", + "2", + "0-0", + "1-0", + 
]); + }); + + it("array - with policy", () => { + assert.deepEqual( + parseArgs(XDELEX, "key", ["0-0", "1-0"], STREAM_DELETION_POLICY.DELREF), + ["XDELEX", "key", "DELREF", "IDS", "2", "0-0", "1-0"] + ); + }); + }); + + testUtils.testAll( + `XDELEX non-existing key - without policy`, + async (client) => { + const reply = await client.xDelEx("{tag}stream-key", "0-0"); + assert.deepEqual(reply, [STREAM_DELETION_REPLY_CODES.NOT_FOUND]); + }, + { + client: { ...GLOBAL.SERVERS.OPEN, minimumDockerVersion: [8, 2] }, + cluster: { ...GLOBAL.CLUSTERS.OPEN, minimumDockerVersion: [8, 2] }, + } + ); + + testUtils.testAll( + `XDELEX existing key - without policy`, + async (client) => { + const streamKey = "{tag}stream-key"; + const messageId = await client.xAdd(streamKey, "*", { + field: "value", + }); + + const reply = await client.xDelEx( + streamKey, + messageId, + ); + assert.deepEqual(reply, [STREAM_DELETION_REPLY_CODES.DELETED]); + }, + { + client: { ...GLOBAL.SERVERS.OPEN, minimumDockerVersion: [8, 2] }, + cluster: { ...GLOBAL.CLUSTERS.OPEN, minimumDockerVersion: [8, 2] }, + } + ); + + testUtils.testAll( + `XDELEX existing key - with policy`, + async (client) => { + const streamKey = "{tag}stream-key"; + const messageId = await client.xAdd(streamKey, "*", { + field: "value", + }); + + const reply = await client.xDelEx( + streamKey, + messageId, + STREAM_DELETION_POLICY.DELREF + ); + assert.deepEqual(reply, [STREAM_DELETION_REPLY_CODES.DELETED]); + }, + { + client: { ...GLOBAL.SERVERS.OPEN, minimumDockerVersion: [8, 2] }, + cluster: { ...GLOBAL.CLUSTERS.OPEN, minimumDockerVersion: [8, 2] }, + } + ); + + testUtils.testAll( + `XDELEX acknowledge policy - with consumer group`, + async (client) => { + const streamKey = "{tag}stream-key"; + + // Add a message to the stream + const messageId = await client.xAdd(streamKey, "*", { + field: "value", + }); + + // Create consumer group + await client.xGroupCreate(streamKey, "testgroup", "0"); + + const reply = await client.xDelEx( + 
streamKey, + messageId, + STREAM_DELETION_POLICY.ACKED + ); + assert.deepEqual(reply, [STREAM_DELETION_REPLY_CODES.DANGLING_REFS]); + }, + { + client: { ...GLOBAL.SERVERS.OPEN, minimumDockerVersion: [8, 2] }, + cluster: { ...GLOBAL.CLUSTERS.OPEN, minimumDockerVersion: [8, 2] }, + } + ); + + testUtils.testAll( + `XDELEX multiple keys`, + async (client) => { + const streamKey = "{tag}stream-key"; + const messageIds = await Promise.all([ + client.xAdd(streamKey, "*", { + field: "value1", + }), + client.xAdd(streamKey, "*", { + field: "value2", + }), + ]); + + const reply = await client.xDelEx( + streamKey, + [...messageIds, "0-0"], + STREAM_DELETION_POLICY.DELREF + ); + assert.deepEqual(reply, [ + STREAM_DELETION_REPLY_CODES.DELETED, + STREAM_DELETION_REPLY_CODES.DELETED, + STREAM_DELETION_REPLY_CODES.NOT_FOUND, + ]); + }, + { + client: { ...GLOBAL.SERVERS.OPEN, minimumDockerVersion: [8, 2] }, + cluster: { ...GLOBAL.CLUSTERS.OPEN, minimumDockerVersion: [8, 2] }, + } + ); +}); diff --git a/packages/client/lib/commands/XDELEX.ts b/packages/client/lib/commands/XDELEX.ts new file mode 100644 index 00000000000..021dd0a9e13 --- /dev/null +++ b/packages/client/lib/commands/XDELEX.ts @@ -0,0 +1,42 @@ +import { CommandParser } from "../client/parser"; +import { RedisArgument, ArrayReply, Command } from "../RESP/types"; +import { + StreamDeletionPolicy, + StreamDeletionReplyCode, +} from "./common-stream.types"; +import { RedisVariadicArgument } from "./generic-transformers"; + +/** + * Deletes one or multiple entries from the stream + */ +export default { + IS_READ_ONLY: false, + /** + * Constructs the XDELEX command to delete one or multiple entries from the stream + * + * @param parser - The command parser + * @param key - The stream key + * @param id - One or more message IDs to delete + * @param policy - Policy to apply when deleting entries (optional, defaults to KEEPREF) + * @returns Array of integers: -1 (not found), 1 (deleted), 2 (dangling refs) + * @see 
https://redis.io/commands/xdelex/ + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + id: RedisVariadicArgument, + policy?: StreamDeletionPolicy + ) { + parser.push("XDELEX"); + parser.pushKey(key); + + if (policy) { + parser.push(policy); + } + + parser.push("IDS"); + parser.pushVariadicWithLength(id); + }, + transformReply: + undefined as unknown as () => ArrayReply, +} as const satisfies Command; diff --git a/packages/client/lib/commands/XGROUP_CREATE.spec.ts b/packages/client/lib/commands/XGROUP_CREATE.spec.ts new file mode 100644 index 00000000000..7c9d6298c6b --- /dev/null +++ b/packages/client/lib/commands/XGROUP_CREATE.spec.ts @@ -0,0 +1,45 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import XGROUP_CREATE from './XGROUP_CREATE'; +import { parseArgs } from './generic-transformers'; + +describe('XGROUP CREATE', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(XGROUP_CREATE, 'key', 'group', '$'), + ['XGROUP', 'CREATE', 'key', 'group', '$'] + ); + }); + + it('with MKSTREAM', () => { + assert.deepEqual( + parseArgs(XGROUP_CREATE, 'key', 'group', '$', { + MKSTREAM: true + }), + ['XGROUP', 'CREATE', 'key', 'group', '$', 'MKSTREAM'] + ); + }); + + it('with ENTRIESREAD', () => { + assert.deepEqual( + parseArgs(XGROUP_CREATE, 'key', 'group', '$', { + ENTRIESREAD: 1 + }), + ['XGROUP', 'CREATE', 'key', 'group', '$', 'ENTRIESREAD', '1'] + ); + }); + }); + + testUtils.testAll('xGroupCreate', async client => { + assert.equal( + await client.xGroupCreate('key', 'group', '$', { + MKSTREAM: true + }), + 'OK' + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/XGROUP_CREATE.ts b/packages/client/lib/commands/XGROUP_CREATE.ts new file mode 100644 index 00000000000..db6df04fa0f --- /dev/null +++ b/packages/client/lib/commands/XGROUP_CREATE.ts @@ -0,0 +1,52 @@ +import { 
CommandParser } from '../client/parser'; +import { RedisArgument, SimpleStringReply, Command } from '../RESP/types'; + +/** + * Options for creating a consumer group + * + * @property MKSTREAM - Create the stream if it doesn't exist + * @property ENTRIESREAD - Set the number of entries that were read in this consumer group (Redis 7.0+) + */ +export interface XGroupCreateOptions { + MKSTREAM?: boolean; + /** + * added in 7.0 + */ + ENTRIESREAD?: number; +} + +export default { + IS_READ_ONLY: false, + /** + * Constructs the XGROUP CREATE command to create a consumer group for a stream + * + * @param parser - The command parser + * @param key - The stream key + * @param group - Name of the consumer group + * @param id - ID of the last delivered item in the stream ('$' for last item, '0' for all items) + * @param options - Additional options for group creation + * @returns 'OK' if successful + * @see https://redis.io/commands/xgroup-create/ + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + group: RedisArgument, + id: RedisArgument, + options?: XGroupCreateOptions + ) { + parser.push('XGROUP', 'CREATE'); + parser.pushKey(key); + parser.push(group, id); + + if (options?.MKSTREAM) { + parser.push('MKSTREAM'); + } + + if (options?.ENTRIESREAD) { + parser.push('ENTRIESREAD', options.ENTRIESREAD.toString()); + } + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; + diff --git a/packages/client/lib/commands/XGROUP_CREATECONSUMER.spec.ts b/packages/client/lib/commands/XGROUP_CREATECONSUMER.spec.ts new file mode 100644 index 00000000000..eb749073d35 --- /dev/null +++ b/packages/client/lib/commands/XGROUP_CREATECONSUMER.spec.ts @@ -0,0 +1,29 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import XGROUP_CREATECONSUMER from './XGROUP_CREATECONSUMER'; +import { parseArgs } from './generic-transformers'; + +describe('XGROUP CREATECONSUMER', () => { + 
testUtils.isVersionGreaterThanHook([6, 2]); + + it('transformArguments', () => { + assert.deepEqual( + parseArgs(XGROUP_CREATECONSUMER, 'key', 'group', 'consumer'), + ['XGROUP', 'CREATECONSUMER', 'key', 'group', 'consumer'] + ); + }); + + testUtils.testAll('xGroupCreateConsumer', async client => { + const [, reply] = await Promise.all([ + client.xGroupCreate('key', 'group', '$', { + MKSTREAM: true + }), + client.xGroupCreateConsumer('key', 'group', 'consumer') + ]); + + assert.equal(reply, 1); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/XGROUP_CREATECONSUMER.ts b/packages/client/lib/commands/XGROUP_CREATECONSUMER.ts new file mode 100644 index 00000000000..0b730c7f96b --- /dev/null +++ b/packages/client/lib/commands/XGROUP_CREATECONSUMER.ts @@ -0,0 +1,30 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, Command, NumberReply } from '../RESP/types'; + +/** + * Command for creating a new consumer in a consumer group + */ +export default { + IS_READ_ONLY: false, + /** + * Constructs the XGROUP CREATECONSUMER command to create a new consumer in a consumer group + * + * @param parser - The command parser + * @param key - The stream key + * @param group - Name of the consumer group + * @param consumer - Name of the consumer to create + * @returns 1 if the consumer was created, 0 if it already existed + * @see https://redis.io/commands/xgroup-createconsumer/ + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + group: RedisArgument, + consumer: RedisArgument + ) { + parser.push('XGROUP', 'CREATECONSUMER'); + parser.pushKey(key); + parser.push(group, consumer); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/XGROUP_DELCONSUMER.spec.ts b/packages/client/lib/commands/XGROUP_DELCONSUMER.spec.ts new file mode 100644 index 00000000000..fabef789d78 --- /dev/null +++ 
b/packages/client/lib/commands/XGROUP_DELCONSUMER.spec.ts @@ -0,0 +1,27 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import XGROUP_DELCONSUMER from './XGROUP_DELCONSUMER'; +import { parseArgs } from './generic-transformers'; + +describe('XGROUP DELCONSUMER', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(XGROUP_DELCONSUMER, 'key', 'group', 'consumer'), + ['XGROUP', 'DELCONSUMER', 'key', 'group', 'consumer'] + ); + }); + + testUtils.testAll('xGroupDelConsumer', async client => { + const [, reply] = await Promise.all([ + client.xGroupCreate('key', 'group', '$', { + MKSTREAM: true + }), + client.xGroupDelConsumer('key', 'group', 'consumer') + ]); + + assert.equal(reply, 0); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/XGROUP_DELCONSUMER.ts b/packages/client/lib/commands/XGROUP_DELCONSUMER.ts new file mode 100644 index 00000000000..5feffe74042 --- /dev/null +++ b/packages/client/lib/commands/XGROUP_DELCONSUMER.ts @@ -0,0 +1,30 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; + +/** + * Command for removing a consumer from a consumer group + */ +export default { + IS_READ_ONLY: false, + /** + * Constructs the XGROUP DELCONSUMER command to remove a consumer from a consumer group + * + * @param parser - The command parser + * @param key - The stream key + * @param group - Name of the consumer group + * @param consumer - Name of the consumer to remove + * @returns The number of pending messages owned by the deleted consumer + * @see https://redis.io/commands/xgroup-delconsumer/ + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + group: RedisArgument, + consumer: RedisArgument + ) { + parser.push('XGROUP', 'DELCONSUMER'); + parser.pushKey(key); + parser.push(group, consumer); + }, + transformReply: undefined as unknown 
as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/XGROUP_DESTROY.spec.ts b/packages/client/lib/commands/XGROUP_DESTROY.spec.ts new file mode 100644 index 00000000000..8277c66d3f6 --- /dev/null +++ b/packages/client/lib/commands/XGROUP_DESTROY.spec.ts @@ -0,0 +1,27 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import XGROUP_DESTROY from './XGROUP_DESTROY'; +import { parseArgs } from './generic-transformers'; + +describe('XGROUP DESTROY', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(XGROUP_DESTROY, 'key', 'group'), + ['XGROUP', 'DESTROY', 'key', 'group'] + ); + }); + + testUtils.testAll('xGroupDestroy', async client => { + const [, reply] = await Promise.all([ + client.xGroupCreate('key', 'group', '$', { + MKSTREAM: true + }), + client.xGroupDestroy('key', 'group') + ]); + + assert.equal(reply, 1); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/XGROUP_DESTROY.ts b/packages/client/lib/commands/XGROUP_DESTROY.ts new file mode 100644 index 00000000000..ed454abbb2b --- /dev/null +++ b/packages/client/lib/commands/XGROUP_DESTROY.ts @@ -0,0 +1,24 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; + +/** + * Command for removing a consumer group + */ +export default { + IS_READ_ONLY: false, + /** + * Constructs the XGROUP DESTROY command to remove a consumer group + * + * @param parser - The command parser + * @param key - The stream key + * @param group - Name of the consumer group to destroy + * @returns 1 if the group was destroyed, 0 if it did not exist + * @see https://redis.io/commands/xgroup-destroy/ + */ + parseCommand(parser: CommandParser, key: RedisArgument, group: RedisArgument) { + parser.push('XGROUP', 'DESTROY'); + parser.pushKey(key); + parser.push(group); + }, + transformReply: 
undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/XGROUP_SETID.spec.ts b/packages/client/lib/commands/XGROUP_SETID.spec.ts new file mode 100644 index 00000000000..6ea0dd79c37 --- /dev/null +++ b/packages/client/lib/commands/XGROUP_SETID.spec.ts @@ -0,0 +1,27 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import XGROUP_SETID from './XGROUP_SETID'; +import { parseArgs } from './generic-transformers'; + +describe('XGROUP SETID', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(XGROUP_SETID, 'key', 'group', '0'), + ['XGROUP', 'SETID', 'key', 'group', '0'] + ); + }); + + testUtils.testAll('xGroupSetId', async client => { + const [, reply] = await Promise.all([ + client.xGroupCreate('key', 'group', '$', { + MKSTREAM: true + }), + client.xGroupSetId('key', 'group', '0') + ]); + + assert.equal(reply, 'OK'); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/XGROUP_SETID.ts b/packages/client/lib/commands/XGROUP_SETID.ts new file mode 100644 index 00000000000..4f3076b6032 --- /dev/null +++ b/packages/client/lib/commands/XGROUP_SETID.ts @@ -0,0 +1,43 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, SimpleStringReply, Command } from '../RESP/types'; + +/** + * Options for setting a consumer group's ID position + * + * @property ENTRIESREAD - Set the number of entries that were read in this consumer group (Redis 7.0+) + */ +export interface XGroupSetIdOptions { + /** added in 7.0 */ + ENTRIESREAD?: number; +} + +export default { + IS_READ_ONLY: false, + /** + * Constructs the XGROUP SETID command to set the last delivered ID for a consumer group + * + * @param parser - The command parser + * @param key - The stream key + * @param group - Name of the consumer group + * @param id - ID to set as last delivered message ('$' for last 
item, '0' for all items) + * @param options - Additional options for setting the group ID + * @returns 'OK' if successful + * @see https://redis.io/commands/xgroup-setid/ + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + group: RedisArgument, + id: RedisArgument, + options?: XGroupSetIdOptions + ) { + parser.push('XGROUP', 'SETID'); + parser.pushKey(key); + parser.push(group, id); + + if (options?.ENTRIESREAD) { + parser.push('ENTRIESREAD', options.ENTRIESREAD.toString()); + } + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/client/lib/commands/XINFO_CONSUMERS.spec.ts b/packages/client/lib/commands/XINFO_CONSUMERS.spec.ts new file mode 100644 index 00000000000..b1f245dbf18 --- /dev/null +++ b/packages/client/lib/commands/XINFO_CONSUMERS.spec.ts @@ -0,0 +1,39 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import XINFO_CONSUMERS from './XINFO_CONSUMERS'; +import { parseArgs } from './generic-transformers'; + +describe('XINFO CONSUMERS', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(XINFO_CONSUMERS, 'key', 'group'), + ['XINFO', 'CONSUMERS', 'key', 'group'] + ); + }); + + testUtils.testAll('xInfoConsumers', async client => { + const [, , reply] = await Promise.all([ + client.xGroupCreate('key', 'group', '$', { + MKSTREAM: true + }), + // using `XREADGROUP` and not `XGROUP CREATECONSUMER` because the latter was introduced in Redis 6.2 + client.xReadGroup('group', 'consumer', { + key: 'key', + id: '0-0' + }), + client.xInfoConsumers('key', 'group') + ]); + + for (const consumer of reply) { + assert.equal(typeof consumer.name, 'string'); + assert.equal(typeof consumer.pending, 'number'); + assert.equal(typeof consumer.idle, 'number'); + if (testUtils.isVersionGreaterThan([7, 2])) { + assert.equal(typeof consumer.inactive, 'number'); + } + } + }, { + client: GLOBAL.SERVERS.OPEN, + 
cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/XINFO_CONSUMERS.ts b/packages/client/lib/commands/XINFO_CONSUMERS.ts new file mode 100644 index 00000000000..49267f13980 --- /dev/null +++ b/packages/client/lib/commands/XINFO_CONSUMERS.ts @@ -0,0 +1,56 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, ArrayReply, TuplesToMapReply, BlobStringReply, NumberReply, UnwrapReply, Resp2Reply, Command } from '../RESP/types'; + +/** + * Reply structure for XINFO CONSUMERS command + * + * @property name - Name of the consumer + * @property pending - Number of pending messages for this consumer + * @property idle - Idle time in milliseconds + * @property inactive - Time in milliseconds since last interaction (Redis 7.2+) + */ +export type XInfoConsumersReply = ArrayReply, BlobStringReply], + [BlobStringReply<'pending'>, NumberReply], + [BlobStringReply<'idle'>, NumberReply], + /** added in 7.2 */ + [BlobStringReply<'inactive'>, NumberReply] +]>>; + +export default { + IS_READ_ONLY: true, + /** + * Constructs the XINFO CONSUMERS command to list the consumers in a consumer group + * + * @param parser - The command parser + * @param key - The stream key + * @param group - Name of the consumer group + * @returns Array of consumer information objects + * @see https://redis.io/commands/xinfo-consumers/ + */ + parseCommand(parser: CommandParser, key: RedisArgument, group: RedisArgument) { + parser.push('XINFO', 'CONSUMERS'); + parser.pushKey(key); + parser.push(group); + }, + transformReply: { + /** + * Transforms RESP2 reply into a structured consumer information array + * + * @param reply - Raw RESP2 reply from Redis + * @returns Array of consumer information objects + */ + 2: (reply: UnwrapReply>) => { + return reply.map(consumer => { + const unwrapped = consumer as unknown as UnwrapReply; + return { + name: unwrapped[1], + pending: unwrapped[3], + idle: unwrapped[5], + inactive: unwrapped[7] + }; + }); + }, + 3: 
undefined as unknown as () => XInfoConsumersReply + } +} as const satisfies Command; diff --git a/packages/client/lib/commands/XINFO_GROUPS.spec.ts b/packages/client/lib/commands/XINFO_GROUPS.spec.ts new file mode 100644 index 00000000000..a1196f4957a --- /dev/null +++ b/packages/client/lib/commands/XINFO_GROUPS.spec.ts @@ -0,0 +1,37 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import XINFO_GROUPS from './XINFO_GROUPS'; +import { parseArgs } from './generic-transformers'; + +describe('XINFO GROUPS', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(XINFO_GROUPS, 'key'), + ['XINFO', 'GROUPS', 'key'] + ); + }); + + testUtils.testAll('xInfoGroups', async client => { + const [, reply] = await Promise.all([ + client.xGroupCreate('key', 'group', '$', { + MKSTREAM: true + }), + client.xInfoGroups('key') + ]); + + assert.deepEqual( + reply, + [{ + name: 'group', + consumers: 0, + pending: 0, + 'last-delivered-id': '0-0', + 'entries-read': testUtils.isVersionGreaterThan([7, 0]) ? null : undefined, + lag: testUtils.isVersionGreaterThan([7, 0]) ? 
0 : undefined + }] + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/XINFO_GROUPS.ts b/packages/client/lib/commands/XINFO_GROUPS.ts new file mode 100644 index 00000000000..1d8142bfaef --- /dev/null +++ b/packages/client/lib/commands/XINFO_GROUPS.ts @@ -0,0 +1,60 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, ArrayReply, TuplesToMapReply, BlobStringReply, NumberReply, NullReply, UnwrapReply, Resp2Reply, Command } from '../RESP/types'; + +/** + * Reply structure for XINFO GROUPS command containing information about consumer groups + */ +export type XInfoGroupsReply = ArrayReply, BlobStringReply], + [BlobStringReply<'consumers'>, NumberReply], + [BlobStringReply<'pending'>, NumberReply], + [BlobStringReply<'last-delivered-id'>, NumberReply], + /** added in 7.0 */ + [BlobStringReply<'entries-read'>, NumberReply | NullReply], + /** added in 7.0 */ + [BlobStringReply<'lag'>, NumberReply], +]>>; + +export default { + IS_READ_ONLY: true, + /** + * Constructs the XINFO GROUPS command to list the consumer groups of a stream + * + * @param parser - The command parser + * @param key - The stream key + * @returns Array of consumer group information objects + * @see https://redis.io/commands/xinfo-groups/ + */ + parseCommand(parser: CommandParser, key: RedisArgument) { + parser.push('XINFO', 'GROUPS'); + parser.pushKey(key); + }, + transformReply: { + /** + * Transforms RESP2 reply into a structured consumer group information array + * + * @param reply - Raw RESP2 reply from Redis + * @returns Array of consumer group information objects containing: + * name - Name of the consumer group + * consumers - Number of consumers in the group + * pending - Number of pending messages for the group + * last-delivered-id - ID of the last delivered message + * entries-read - Number of entries read in the group (Redis 7.0+) + * lag - Number of entries not read by the group (Redis 
7.0+) + */ + 2: (reply: UnwrapReply>) => { + return reply.map(group => { + const unwrapped = group as unknown as UnwrapReply; + return { + name: unwrapped[1], + consumers: unwrapped[3], + pending: unwrapped[5], + 'last-delivered-id': unwrapped[7], + 'entries-read': unwrapped[9], + lag: unwrapped[11] + }; + }); + }, + 3: undefined as unknown as () => XInfoGroupsReply + } +} as const satisfies Command; diff --git a/packages/client/lib/commands/XINFO_STREAM.spec.ts b/packages/client/lib/commands/XINFO_STREAM.spec.ts new file mode 100644 index 00000000000..7e1829f3059 --- /dev/null +++ b/packages/client/lib/commands/XINFO_STREAM.spec.ts @@ -0,0 +1,40 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import XINFO_STREAM from './XINFO_STREAM'; +import { parseArgs } from './generic-transformers'; + +describe('XINFO STREAM', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(XINFO_STREAM, 'key'), + ['XINFO', 'STREAM', 'key'] + ); + }); + + testUtils.testAll('xInfoStream', async client => { + const [, reply] = await Promise.all([ + client.xGroupCreate('key', 'group', '$', { + MKSTREAM: true + }), + client.xInfoStream('key') + ]); + + assert.deepEqual(reply, { + length: 0, + 'radix-tree-keys': 0, + 'radix-tree-nodes': 1, + 'last-generated-id': '0-0', + ...testUtils.isVersionGreaterThan([7, 0]) && { + 'max-deleted-entry-id': '0-0', + 'entries-added': 0, + 'recorded-first-entry-id': '0-0', + }, + groups: 1, + 'first-entry': null, + 'last-entry': null + }); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/XINFO_STREAM.ts b/packages/client/lib/commands/XINFO_STREAM.ts new file mode 100644 index 00000000000..546dd70cab7 --- /dev/null +++ b/packages/client/lib/commands/XINFO_STREAM.ts @@ -0,0 +1,114 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, TuplesToMapReply, BlobStringReply, NumberReply, 
NullReply, TuplesReply, ArrayReply, UnwrapReply, Command } from '../RESP/types'; +import { isNullReply, transformTuplesReply } from './generic-transformers'; + +/** + * Reply structure for XINFO STREAM command containing detailed information about a stream + * + * @property length - Number of entries in the stream + * @property radix-tree-keys - Number of radix tree keys + * @property radix-tree-nodes - Number of radix tree nodes + * @property last-generated-id - Last generated message ID + * @property max-deleted-entry-id - Highest message ID deleted (Redis 7.0+) + * @property entries-added - Total number of entries added (Redis 7.0+) + * @property recorded-first-entry-id - ID of the first recorded entry (Redis 7.0+) + * @property groups - Number of consumer groups + * @property first-entry - First entry in the stream + * @property last-entry - Last entry in the stream + */ +export type XInfoStreamReply = TuplesToMapReply<[ + [BlobStringReply<'length'>, NumberReply], + [BlobStringReply<'radix-tree-keys'>, NumberReply], + [BlobStringReply<'radix-tree-nodes'>, NumberReply], + [BlobStringReply<'last-generated-id'>, BlobStringReply], + /** added in 7.0 */ + [BlobStringReply<'max-deleted-entry-id'>, BlobStringReply], + /** added in 7.0 */ + [BlobStringReply<'entries-added'>, NumberReply], + /** added in 7.0 */ + [BlobStringReply<'recorded-first-entry-id'>, BlobStringReply], + [BlobStringReply<'groups'>, NumberReply], + [BlobStringReply<'first-entry'>, ReturnType], + [BlobStringReply<'last-entry'>, ReturnType] +]>; + +export default { + IS_READ_ONLY: true, + /** + * Constructs the XINFO STREAM command to get detailed information about a stream + * + * @param parser - The command parser + * @param key - The stream key + * @returns Detailed information about the stream including its length, structure, and entries + * @see https://redis.io/commands/xinfo-stream/ + */ + parseCommand(parser: CommandParser, key: RedisArgument) { + parser.push('XINFO', 'STREAM'); + 
parser.pushKey(key); + }, + transformReply: { + // TODO: is there a "type safe" way to do it? + 2(reply: any) { + const parsedReply: Partial = {}; + + for (let i = 0; i < reply.length; i += 2) { + switch (reply[i]) { + case 'first-entry': + case 'last-entry': + parsedReply[reply[i] as ('first-entry' | 'last-entry')] = transformEntry(reply[i + 1]) as any; + break; + + default: + parsedReply[reply[i] as keyof typeof parsedReply] = reply[i + 1]; + break; + } + } + + return parsedReply as XInfoStreamReply['DEFAULT']; + }, + 3(reply: any) { + if (reply instanceof Map) { + reply.set( + 'first-entry', + transformEntry(reply.get('first-entry')) + ); + reply.set( + 'last-entry', + transformEntry(reply.get('last-entry')) + ); + } else if (reply instanceof Array) { + reply[17] = transformEntry(reply[17]); + reply[19] = transformEntry(reply[19]); + } else { + reply['first-entry'] = transformEntry(reply['first-entry']); + reply['last-entry'] = transformEntry(reply['last-entry']); + } + + return reply as XInfoStreamReply; + } + } +} as const satisfies Command; + +/** + * Raw entry structure from Redis stream + */ +type RawEntry = TuplesReply<[ + id: BlobStringReply, + message: ArrayReply +]> | NullReply; + +/** + * Transforms a raw stream entry into a structured object + * + * @param entry - Raw entry from Redis + * @returns Structured object with id and message, or null if entry is null + */ +function transformEntry(entry: RawEntry) { + if (isNullReply(entry)) return entry; + + const [id, message] = entry as unknown as UnwrapReply; + return { + id, + message: transformTuplesReply(message) + }; +} diff --git a/packages/client/lib/commands/XLEN.spec.ts b/packages/client/lib/commands/XLEN.spec.ts new file mode 100644 index 00000000000..3e22b9aebfa --- /dev/null +++ b/packages/client/lib/commands/XLEN.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import XLEN from './XLEN'; +import { parseArgs } from 
'./generic-transformers'; + +describe('XLEN', () => { + it('processCommand', () => { + assert.deepEqual( + parseArgs(XLEN, 'key'), + ['XLEN', 'key'] + ); + }); + + testUtils.testAll('xLen', async client => { + assert.equal( + await client.xLen('key'), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/XLEN.ts b/packages/client/lib/commands/XLEN.ts new file mode 100644 index 00000000000..f7718371cf2 --- /dev/null +++ b/packages/client/lib/commands/XLEN.ts @@ -0,0 +1,23 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; + +/** + * Command for getting the length of a stream + */ +export default { + CACHEABLE: true, + IS_READ_ONLY: true, + /** + * Constructs the XLEN command to get the number of entries in a stream + * + * @param parser - The command parser + * @param key - The stream key + * @returns The number of entries inside the stream + * @see https://redis.io/commands/xlen/ + */ + parseCommand(parser: CommandParser, key: RedisArgument) { + parser.push('XLEN'); + parser.pushKey(key); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/XPENDING.spec.ts b/packages/client/lib/commands/XPENDING.spec.ts new file mode 100644 index 00000000000..55cb957fc62 --- /dev/null +++ b/packages/client/lib/commands/XPENDING.spec.ts @@ -0,0 +1,61 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import XPENDING from './XPENDING'; +import { parseArgs } from './generic-transformers'; + +describe('XPENDING', () => { + describe('transformArguments', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(XPENDING, 'key', 'group'), + ['XPENDING', 'key', 'group'] + ); + }); + }); + + describe('client.xPending', () => { + testUtils.testWithClient('simple', async client => { + 
const [, reply] = await Promise.all([ + client.xGroupCreate('key', 'group', '$', { + MKSTREAM: true + }), + client.xPending('key', 'group') + ]); + + assert.deepEqual(reply, { + pending: 0, + firstId: null, + lastId: null, + consumers: null + }); + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithClient('with consumers', async client => { + const [, , id, , reply] = await Promise.all([ + client.xGroupCreate('key', 'group', '$', { + MKSTREAM: true + }), + client.xGroupCreateConsumer('key', 'group', 'consumer'), + client.xAdd('key', '*', { field: 'value' }), + client.xReadGroup('group', 'consumer', { + key: 'key', + id: '>' + }), + client.xPending('key', 'group') + ]); + + assert.deepEqual(reply, { + pending: 1, + firstId: id, + lastId: id, + consumers: [{ + name: 'consumer', + deliveriesCounter: 1 + }] + }); + }, { + ...GLOBAL.SERVERS.OPEN, + minimumDockerVersion: [6, 2] + }); + }); +}); diff --git a/packages/client/lib/commands/XPENDING.ts b/packages/client/lib/commands/XPENDING.ts new file mode 100644 index 00000000000..cff9ef2f51b --- /dev/null +++ b/packages/client/lib/commands/XPENDING.ts @@ -0,0 +1,60 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, BlobStringReply, NullReply, ArrayReply, TuplesReply, NumberReply, UnwrapReply, Command } from '../RESP/types'; + +/** + * Raw reply structure for XPENDING command + * + * @property pending - Number of pending messages in the group + * @property firstId - ID of the first pending message + * @property lastId - ID of the last pending message + * @property consumers - Array of consumer info with delivery counts + */ +type XPendingRawReply = TuplesReply<[ + pending: NumberReply, + firstId: BlobStringReply | NullReply, + lastId: BlobStringReply | NullReply, + consumers: ArrayReply> | NullReply +]>; + +export default { + CACHEABLE: true, + IS_READ_ONLY: true, + /** + * Constructs the XPENDING command to inspect pending messages of a consumer group + * + * @param parser - The command parser + * 
@param key - The stream key + * @param group - Name of the consumer group + * @returns Summary of pending messages including total count, ID range, and per-consumer stats + * @see https://redis.io/commands/xpending/ + */ + parseCommand(parser: CommandParser, key: RedisArgument, group: RedisArgument) { + parser.push('XPENDING'); + parser.pushKey(key); + parser.push(group); + }, + /** + * Transforms the raw XPENDING reply into a structured object + * + * @param reply - Raw reply from Redis + * @returns Object containing pending count, ID range, and consumer statistics + */ + transformReply(reply: UnwrapReply) { + const consumers = reply[3] as unknown as UnwrapReply; + return { + pending: reply[0], + firstId: reply[1], + lastId: reply[2], + consumers: consumers === null ? null : consumers.map(consumer => { + const [name, deliveriesCounter] = consumer as unknown as UnwrapReply; + return { + name, + deliveriesCounter: Number(deliveriesCounter) + }; + }) + } + } +} as const satisfies Command; diff --git a/packages/client/lib/commands/XPENDING_RANGE.spec.ts b/packages/client/lib/commands/XPENDING_RANGE.spec.ts new file mode 100644 index 00000000000..33cd836f2a9 --- /dev/null +++ b/packages/client/lib/commands/XPENDING_RANGE.spec.ts @@ -0,0 +1,67 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import XPENDING_RANGE from './XPENDING_RANGE'; +import { parseArgs } from './generic-transformers'; + +describe('XPENDING RANGE', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(XPENDING_RANGE, 'key', 'group', '-', '+', 1), + ['XPENDING', 'key', 'group', '-', '+', '1'] + ); + }); + + it('with IDLE', () => { + assert.deepEqual( + parseArgs(XPENDING_RANGE, 'key', 'group', '-', '+', 1, { + IDLE: 1, + }), + ['XPENDING', 'key', 'group', 'IDLE', '1', '-', '+', '1'] + ); + }); + + it('with consumer', () => { + assert.deepEqual( + parseArgs(XPENDING_RANGE, 'key', 'group', '-', 
'+', 1, { + consumer: 'consumer' + }), + ['XPENDING', 'key', 'group', '-', '+', '1', 'consumer'] + ); + }); + + it('with IDLE, consumer', () => { + assert.deepEqual( + parseArgs(XPENDING_RANGE, 'key', 'group', '-', '+', 1, { + IDLE: 1, + consumer: 'consumer' + }), + ['XPENDING', 'key', 'group', 'IDLE', '1', '-', '+', '1', 'consumer'] + ); + }); + }); + + testUtils.testAll('xPendingRange', async client => { + const [, id, , reply] = await Promise.all([ + client.xGroupCreate('key', 'group', '$', { + MKSTREAM: true + }), + client.xAdd('key', '*', { field: 'value' }), + client.xReadGroup('group', 'consumer', { + key: 'key', + id: '>' + }), + client.xPendingRange('key', 'group', '-', '+', 1) + ]); + + assert.ok(Array.isArray(reply)); + assert.equal(reply.length, 1); + assert.equal(reply[0].id, id); + assert.equal(reply[0].consumer, 'consumer'); + assert.equal(typeof reply[0].millisecondsSinceLastDelivery, 'number'); + assert.equal(reply[0].deliveriesCounter, 1); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/XPENDING_RANGE.ts b/packages/client/lib/commands/XPENDING_RANGE.ts new file mode 100644 index 00000000000..e136061fe9e --- /dev/null +++ b/packages/client/lib/commands/XPENDING_RANGE.ts @@ -0,0 +1,86 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, ArrayReply, TuplesReply, BlobStringReply, NumberReply, UnwrapReply, Command } from '../RESP/types'; + +/** + * Options for the XPENDING RANGE command + * + * @property IDLE - Filter by message idle time in milliseconds + * @property consumer - Filter by specific consumer name + */ +export interface XPendingRangeOptions { + IDLE?: number; + consumer?: RedisArgument; +} + +/** + * Raw reply structure for XPENDING RANGE command + * + * @property id - Message ID + * @property consumer - Name of the consumer that holds the message + * @property millisecondsSinceLastDelivery - Time since last delivery attempt + * @property 
deliveriesCounter - Number of times this message was delivered + */ +type XPendingRangeRawReply = ArrayReply>; + +export default { + CACHEABLE: true, + IS_READ_ONLY: true, + /** + * Constructs the XPENDING command with range parameters to get detailed information about pending messages + * + * @param parser - The command parser + * @param key - The stream key + * @param group - Name of the consumer group + * @param start - Start of ID range (use '-' for minimum ID) + * @param end - End of ID range (use '+' for maximum ID) + * @param count - Maximum number of messages to return + * @param options - Additional filtering options + * @returns Array of pending message details + * @see https://redis.io/commands/xpending/ + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + group: RedisArgument, + start: RedisArgument, + end: RedisArgument, + count: number, + options?: XPendingRangeOptions + ) { + parser.push('XPENDING'); + parser.pushKey(key); + parser.push(group); + + if (options?.IDLE !== undefined) { + parser.push('IDLE', options.IDLE.toString()); + } + + parser.push(start, end, count.toString()); + + if (options?.consumer) { + parser.push(options.consumer); + } + }, + /** + * Transforms the raw XPENDING RANGE reply into a structured array of message details + * + * @param reply - Raw reply from Redis + * @returns Array of objects containing message ID, consumer, idle time, and delivery count + */ + transformReply(reply: UnwrapReply) { + return reply.map(pending => { + const unwrapped = pending as unknown as UnwrapReply; + return { + id: unwrapped[0], + consumer: unwrapped[1], + millisecondsSinceLastDelivery: unwrapped[2], + deliveriesCounter: unwrapped[3] + }; + }); + } +} as const satisfies Command; diff --git a/packages/client/lib/commands/XRANGE.spec.ts b/packages/client/lib/commands/XRANGE.spec.ts new file mode 100644 index 00000000000..b111a97aff1 --- /dev/null +++ b/packages/client/lib/commands/XRANGE.spec.ts @@ -0,0 +1,46 @@ +import { strict 
as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import XRANGE from './XRANGE'; +import { parseArgs } from './generic-transformers'; + +describe('XRANGE', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(XRANGE, 'key', '-', '+'), + ['XRANGE', 'key', '-', '+'] + ); + }); + + it('with COUNT', () => { + assert.deepEqual( + parseArgs(XRANGE, 'key', '-', '+', { + COUNT: 1 + }), + ['XRANGE', 'key', '-', '+', 'COUNT', '1'] + ); + }); + }); + + testUtils.testAll('xRange', async client => { + const message = Object.create(null, { + field: { + value: 'value', + enumerable: true + } + }); + + const [id, reply] = await Promise.all([ + client.xAdd('key', '*', message), + client.xRange('key', '-', '+') + ]); + + assert.deepEqual(reply, [{ + id, + message + }]); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/XRANGE.ts b/packages/client/lib/commands/XRANGE.ts new file mode 100644 index 00000000000..4b83a66e5e6 --- /dev/null +++ b/packages/client/lib/commands/XRANGE.ts @@ -0,0 +1,68 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, ArrayReply, UnwrapReply, Command, TypeMapping } from '../RESP/types'; +import { StreamMessageRawReply, transformStreamMessageReply } from './generic-transformers'; + +/** + * Options for the XRANGE command + * + * @property COUNT - Limit the number of entries returned + */ +export interface XRangeOptions { + COUNT?: number; +} + +/** + * Helper function to build XRANGE command arguments + * + * @param start - Start of ID range (use '-' for minimum ID) + * @param end - End of ID range (use '+' for maximum ID) + * @param options - Additional options for the range query + * @returns Array of arguments for the XRANGE command + */ +export function xRangeArguments( + start: RedisArgument, + end: RedisArgument, + options?: XRangeOptions +) { + const args = [start, 
end]; + + if (options?.COUNT) { + args.push('COUNT', options.COUNT.toString()); + } + + return args; +} + +export default { + CACHEABLE: true, + IS_READ_ONLY: true, + /** + * Constructs the XRANGE command to read stream entries in a specific range + * + * @param parser - The command parser + * @param key - The stream key + * @param args - Arguments tuple containing start ID, end ID, and options + * @returns Array of messages in the specified range + * @see https://redis.io/commands/xrange/ + */ + parseCommand(parser: CommandParser, key: RedisArgument, ...args: Parameters) { + parser.push('XRANGE'); + parser.pushKey(key); + parser.pushVariadic(xRangeArguments(args[0], args[1], args[2])); + }, + /** + * Transforms the raw XRANGE reply into structured message objects + * + * @param reply - Raw reply from Redis + * @param preserve - Preserve options (unused) + * @param typeMapping - Type mapping for message fields + * @returns Array of structured message objects + */ + transformReply( + reply: UnwrapReply>, + preserve?: any, + typeMapping?: TypeMapping + ) { + return reply.map(transformStreamMessageReply.bind(undefined, typeMapping)); + } +} as const satisfies Command; diff --git a/packages/client/lib/commands/XREAD.spec.ts b/packages/client/lib/commands/XREAD.spec.ts new file mode 100644 index 00000000000..0edcfe43117 --- /dev/null +++ b/packages/client/lib/commands/XREAD.spec.ts @@ -0,0 +1,167 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL, parseFirstKey } from '../test-utils'; +import XREAD from './XREAD'; +import { parseArgs } from './generic-transformers'; + +describe('XREAD', () => { + describe('FIRST_KEY_INDEX', () => { + it('single stream', () => { + assert.equal( + parseFirstKey(XREAD, { + key: 'key', + id: '' + }), + 'key' + ); + }); + + it('multiple streams', () => { + assert.equal( + parseFirstKey(XREAD, [{ + key: '1', + id: '' + }, { + key: '2', + id: '' + }]), + '1' + ); + }); + }); + + describe('transformArguments', () => 
{ + it('single stream', () => { + assert.deepEqual( + parseArgs(XREAD, { + key: 'key', + id: '0-0' + }), + ['XREAD', 'STREAMS', 'key', '0-0'] + ); + }); + + it('multiple streams', () => { + assert.deepEqual( + parseArgs(XREAD, [{ + key: '1', + id: '0-0' + }, { + key: '2', + id: '0-0' + }]), + ['XREAD', 'STREAMS', '1', '2', '0-0', '0-0'] + ); + }); + + it('with COUNT', () => { + assert.deepEqual( + parseArgs(XREAD, { + key: 'key', + id: '0-0' + }, { + COUNT: 1 + }), + ['XREAD', 'COUNT', '1', 'STREAMS', 'key', '0-0'] + ); + }); + + it('with BLOCK', () => { + assert.deepEqual( + parseArgs(XREAD, { + key: 'key', + id: '0-0' + }, { + BLOCK: 0 + }), + ['XREAD', 'BLOCK', '0', 'STREAMS', 'key', '0-0'] + ); + }); + + it('with COUNT, BLOCK', () => { + assert.deepEqual( + parseArgs(XREAD, { + key: 'key', + id: '0-0' + }, { + COUNT: 1, + BLOCK: 0 + }), + ['XREAD', 'COUNT', '1', 'BLOCK', '0', 'STREAMS', 'key', '0-0'] + ); + }); + }); + + testUtils.testAll('client.xRead', async client => { + const message = { field: 'value' }, + [id, reply] = await Promise.all([ + client.xAdd('key', '*', message), + client.xRead({ + key: 'key', + id: '0-0' + }), + ]) + + // FUTURE resp3 compatible + const obj = Object.assign(Object.create(null), { + 'key': [{ + id: id, + message: Object.create(null, { + field: { + value: 'value', + configurable: true, + enumerable: true + } + }) + }] + }); + + // v4 compatible + const expected = [{ + name: 'key', + messages: [{ + id: id, + message: Object.assign(Object.create(null), { + field: 'value' + }) + }] + }]; + + assert.deepStrictEqual(reply, expected); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); + + testUtils.testWithClient('client.xRead should throw with resp3 and unstableResp3: false', async client => { + assert.throws( + () => client.xRead({ + key: 'key', + id: '0-0' + }), + { + message: 'Some RESP3 results for Redis Query Engine responses may change. 
Refer to the readme for guidance' + } + ); + }, { + ...GLOBAL.SERVERS.OPEN, + clientOptions: { + RESP: 3 + } + }); + + testUtils.testWithClient('client.xRead should not throw with resp3 and unstableResp3: true', async client => { + assert.doesNotThrow( + () => client.xRead({ + key: 'key', + id: '0-0' + }) + ); + }, { + ...GLOBAL.SERVERS.OPEN, + clientOptions: { + RESP: 3, + unstableResp3: true + } + }); + +}); diff --git a/packages/client/lib/commands/XREAD.ts b/packages/client/lib/commands/XREAD.ts new file mode 100644 index 00000000000..110443ad3a5 --- /dev/null +++ b/packages/client/lib/commands/XREAD.ts @@ -0,0 +1,83 @@ +import { CommandParser } from '../client/parser'; +import { Command, RedisArgument, ReplyUnion } from '../RESP/types'; +import { transformStreamsMessagesReplyResp2 } from './generic-transformers'; + +/** + * Structure representing a stream to read from + * + * @property key - The stream key + * @property id - The message ID to start reading from + */ +export interface XReadStream { + key: RedisArgument; + id: RedisArgument; +} + +export type XReadStreams = Array | XReadStream; + +/** + * Helper function to push stream keys and IDs to the command parser + * + * @param parser - The command parser + * @param streams - Single stream or array of streams to read from + */ +export function pushXReadStreams(parser: CommandParser, streams: XReadStreams) { + parser.push('STREAMS'); + + if (Array.isArray(streams)) { + for (let i = 0; i < streams.length; i++) { + parser.pushKey(streams[i].key); + } + for (let i = 0; i < streams.length; i++) { + parser.push(streams[i].id); + } + } else { + parser.pushKey(streams.key); + parser.push(streams.id); + } +} + +/** + * Options for the XREAD command + * + * @property COUNT - Limit the number of entries returned per stream + * @property BLOCK - Milliseconds to block waiting for new entries (0 for indefinite) + */ +export interface XReadOptions { + COUNT?: number; + BLOCK?: number; +} + +export default { + 
IS_READ_ONLY: true, + /** + * Constructs the XREAD command to read messages from one or more streams + * + * @param parser - The command parser + * @param streams - Single stream or array of streams to read from + * @param options - Additional options for reading streams + * @returns Array of stream entries, each containing the stream name and its messages + * @see https://redis.io/commands/xread/ + */ + parseCommand(parser: CommandParser, streams: XReadStreams, options?: XReadOptions) { + parser.push('XREAD'); + + if (options?.COUNT) { + parser.push('COUNT', options.COUNT.toString()); + } + + if (options?.BLOCK !== undefined) { + parser.push('BLOCK', options.BLOCK.toString()); + } + + pushXReadStreams(parser, streams); + }, + /** + * Transform functions for different RESP versions + */ + transformReply: { + 2: transformStreamsMessagesReplyResp2, + 3: undefined as unknown as () => ReplyUnion + }, + unstableResp3: true +} as const satisfies Command; diff --git a/packages/client/lib/commands/XREADGROUP.spec.ts b/packages/client/lib/commands/XREADGROUP.spec.ts new file mode 100644 index 00000000000..acc7cc2dea9 --- /dev/null +++ b/packages/client/lib/commands/XREADGROUP.spec.ts @@ -0,0 +1,190 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL, parseFirstKey } from '../test-utils'; +import XREADGROUP from './XREADGROUP'; +import { parseArgs } from './generic-transformers'; + +describe('XREADGROUP', () => { + describe('FIRST_KEY_INDEX', () => { + it('single stream', () => { + assert.equal( + parseFirstKey(XREADGROUP, '', '', { key: 'key', id: '' }), + 'key' + ); + }); + + it('multiple streams', () => { + assert.equal( + parseFirstKey(XREADGROUP, '', '', [{ key: '1', id: '' }, { key: '2', id: '' }]), + '1' + ); + }); + }); + + describe('transformArguments', () => { + it('single stream', () => { + assert.deepEqual( + parseArgs(XREADGROUP, 'group', 'consumer', { + key: 'key', + id: '0-0' + }), + ['XREADGROUP', 'GROUP', 'group', 'consumer', 
'STREAMS', 'key', '0-0'] + ); + }); + + it('multiple streams', () => { + assert.deepEqual( + parseArgs(XREADGROUP, 'group', 'consumer', [{ + key: '1', + id: '0-0' + }, { + key: '2', + id: '0-0' + }]), + ['XREADGROUP', 'GROUP', 'group', 'consumer', 'STREAMS', '1', '2', '0-0', '0-0'] + ); + }); + + it('with COUNT', () => { + assert.deepEqual( + parseArgs(XREADGROUP, 'group', 'consumer', { + key: 'key', + id: '0-0' + }, { + COUNT: 1 + }), + ['XREADGROUP', 'GROUP', 'group', 'consumer', 'COUNT', '1', 'STREAMS', 'key', '0-0'] + ); + }); + + it('with BLOCK', () => { + assert.deepEqual( + parseArgs(XREADGROUP, 'group', 'consumer', { + key: 'key', + id: '0-0' + }, { + BLOCK: 0 + }), + ['XREADGROUP', 'GROUP', 'group', 'consumer', 'BLOCK', '0', 'STREAMS', 'key', '0-0'] + ); + }); + + it('with NOACK', () => { + assert.deepEqual( + parseArgs(XREADGROUP, 'group', 'consumer', { + key: 'key', + id: '0-0' + }, { + NOACK: true + }), + ['XREADGROUP', 'GROUP', 'group', 'consumer', 'NOACK', 'STREAMS', 'key', '0-0'] + ); + }); + + it('with COUNT, BLOCK, NOACK', () => { + assert.deepEqual( + parseArgs(XREADGROUP, 'group', 'consumer', { + key: 'key', + id: '0-0' + }, { + COUNT: 1, + BLOCK: 0, + NOACK: true + }), + ['XREADGROUP', 'GROUP', 'group', 'consumer', 'COUNT', '1', 'BLOCK', '0', 'NOACK', 'STREAMS', 'key', '0-0'] + ); + }); + }); + + testUtils.testAll('xReadGroup - null', async client => { + const [, readGroupReply] = await Promise.all([ + client.xGroupCreate('key', 'group', '$', { + MKSTREAM: true + }), + client.xReadGroup('group', 'consumer', { + key: 'key', + id: '>' + }) + ]); + + assert.equal(readGroupReply, null); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); + + testUtils.testAll('xReadGroup - with a message', async client => { + const [, id, readGroupReply] = await Promise.all([ + client.xGroupCreate('key', 'group', '$', { + MKSTREAM: true + }), + client.xAdd('key', '*', { field: 'value' }), + client.xReadGroup('group', 'consumer', { + key: 
'key', + id: '>' + }) + ]); + + + // FUTURE resp3 compatible + const obj = Object.assign(Object.create(null), { + 'key': [{ + id: id, + message: Object.create(null, { + field: { + value: 'value', + configurable: true, + enumerable: true + } + }) + }] + }); + + // v4 compatible + const expected = [{ + name: 'key', + messages: [{ + id: id, + message: Object.assign(Object.create(null), { + field: 'value' + }) + }] + }]; + + assert.deepStrictEqual(readGroupReply, expected); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); + + testUtils.testWithClient('client.xReadGroup should throw with resp3 and unstableResp3: false', async client => { + assert.throws( + () => client.xReadGroup('group', 'consumer', { + key: 'key', + id: '>' + }), + { + message: 'Some RESP3 results for Redis Query Engine responses may change. Refer to the readme for guidance' + } + ); + }, { + ...GLOBAL.SERVERS.OPEN, + clientOptions: { + RESP: 3 + } + }); + + testUtils.testWithClient('client.xReadGroup should not throw with resp3 and unstableResp3: true', async client => { + assert.doesNotThrow( + () => client.xReadGroup('group', 'consumer', { + key: 'key', + id: '>' + }) + ); + }, { + ...GLOBAL.SERVERS.OPEN, + clientOptions: { + RESP: 3, + unstableResp3: true + } + }); +}); diff --git a/packages/client/lib/commands/XREADGROUP.ts b/packages/client/lib/commands/XREADGROUP.ts new file mode 100644 index 00000000000..b274aab95fe --- /dev/null +++ b/packages/client/lib/commands/XREADGROUP.ts @@ -0,0 +1,63 @@ +import { CommandParser } from '../client/parser'; +import { Command, RedisArgument, ReplyUnion } from '../RESP/types'; +import { XReadStreams, pushXReadStreams } from './XREAD'; +import { transformStreamsMessagesReplyResp2 } from './generic-transformers'; + +/** + * Options for the XREADGROUP command + * + * @property COUNT - Limit the number of entries returned per stream + * @property BLOCK - Milliseconds to block waiting for new entries (0 for indefinite) + * @property 
NOACK - Skip adding the message to the PEL (Pending Entries List) + */ +export interface XReadGroupOptions { + COUNT?: number; + BLOCK?: number; + NOACK?: boolean; +} + +export default { + IS_READ_ONLY: true, + /** + * Constructs the XREADGROUP command to read messages from streams as a consumer group member + * + * @param parser - The command parser + * @param group - Name of the consumer group + * @param consumer - Name of the consumer in the group + * @param streams - Single stream or array of streams to read from + * @param options - Additional options for reading streams + * @returns Array of stream entries, each containing the stream name and its messages + * @see https://redis.io/commands/xreadgroup/ + */ + parseCommand( + parser: CommandParser, + group: RedisArgument, + consumer: RedisArgument, + streams: XReadStreams, + options?: XReadGroupOptions + ) { + parser.push('XREADGROUP', 'GROUP', group, consumer); + + if (options?.COUNT !== undefined) { + parser.push('COUNT', options.COUNT.toString()); + } + + if (options?.BLOCK !== undefined) { + parser.push('BLOCK', options.BLOCK.toString()); + } + + if (options?.NOACK) { + parser.push('NOACK'); + } + + pushXReadStreams(parser, streams); + }, + /** + * Transform functions for different RESP versions + */ + transformReply: { + 2: transformStreamsMessagesReplyResp2, + 3: undefined as unknown as () => ReplyUnion + }, + unstableResp3: true, +} as const satisfies Command; diff --git a/packages/client/lib/commands/XREVRANGE.spec.ts b/packages/client/lib/commands/XREVRANGE.spec.ts new file mode 100644 index 00000000000..9872dc5e9e0 --- /dev/null +++ b/packages/client/lib/commands/XREVRANGE.spec.ts @@ -0,0 +1,46 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import XREVRANGE from './XREVRANGE'; +import { parseArgs } from './generic-transformers'; + +describe('XREVRANGE', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( 
+ parseArgs(XREVRANGE, 'key', '-', '+'), + ['XREVRANGE', 'key', '-', '+'] + ); + }); + + it('with COUNT', () => { + assert.deepEqual( + parseArgs(XREVRANGE, 'key', '-', '+', { + COUNT: 1 + }), + ['XREVRANGE', 'key', '-', '+', 'COUNT', '1'] + ); + }); + }); + + testUtils.testAll('xRevRange', async client => { + const message = Object.create(null, { + field: { + value: 'value', + enumerable: true + } + }); + + const [id, reply] = await Promise.all([ + client.xAdd('key', '*', message), + client.xRange('key', '-', '+') + ]); + + assert.deepEqual(reply, [{ + id, + message + }]); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/XREVRANGE.ts b/packages/client/lib/commands/XREVRANGE.ts new file mode 100644 index 00000000000..452c2ab3807 --- /dev/null +++ b/packages/client/lib/commands/XREVRANGE.ts @@ -0,0 +1,35 @@ +import { CommandParser } from '../client/parser'; +import { Command, RedisArgument } from '../RESP/types'; +import XRANGE, { xRangeArguments } from './XRANGE'; + +/** + * Options for the XREVRANGE command + * + * @property COUNT - Limit the number of entries returned + */ +export interface XRevRangeOptions { + COUNT?: number; +} + +/** + * Command for reading stream entries in reverse order + */ +export default { + CACHEABLE: XRANGE.CACHEABLE, + IS_READ_ONLY: XRANGE.IS_READ_ONLY, + /** + * Constructs the XREVRANGE command to read stream entries in reverse order + * + * @param parser - The command parser + * @param key - The stream key + * @param args - Arguments tuple containing start ID, end ID, and options + * @returns Array of messages in the specified range in reverse order + * @see https://redis.io/commands/xrevrange/ + */ + parseCommand(parser: CommandParser, key: RedisArgument, ...args: Parameters) { + parser.push('XREVRANGE'); + parser.pushKey(key); + parser.pushVariadic(xRangeArguments(args[0], args[1], args[2])); + }, + transformReply: XRANGE.transformReply +} as const satisfies 
Command; diff --git a/packages/client/lib/commands/XSETID.spec.ts b/packages/client/lib/commands/XSETID.spec.ts new file mode 100644 index 00000000000..b3609695345 --- /dev/null +++ b/packages/client/lib/commands/XSETID.spec.ts @@ -0,0 +1,47 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import XSETID from './XSETID'; +import { parseArgs } from './generic-transformers'; + +describe('XSETID', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(XSETID, 'key', '0-0'), + ['XSETID', 'key', '0-0'] + ); + }); + + it('with ENTRIESADDED', () => { + assert.deepEqual( + parseArgs(XSETID, 'key', '0-0', { + ENTRIESADDED: 1 + }), + ['XSETID', 'key', '0-0', 'ENTRIESADDED', '1'] + ); + }); + + it('with MAXDELETEDID', () => { + assert.deepEqual( + parseArgs(XSETID, 'key', '0-0', { + MAXDELETEDID: '1-1' + }), + ['XSETID', 'key', '0-0', 'MAXDELETEDID', '1-1'] + ); + }); + }); + + testUtils.testAll('xSetId', async client => { + const id = await client.xAdd('key', '*', { + field: 'value' + }); + + assert.equal( + await client.xSetId('key', id), + 'OK' + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/XSETID.ts b/packages/client/lib/commands/XSETID.ts new file mode 100644 index 00000000000..c76ac0b23a4 --- /dev/null +++ b/packages/client/lib/commands/XSETID.ts @@ -0,0 +1,32 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, SimpleStringReply, Command } from '../RESP/types'; +export interface XSetIdOptions { + /** added in 7.0 */ + ENTRIESADDED?: number; + /** added in 7.0 */ + MAXDELETEDID?: RedisArgument; +} + +export default { + IS_READ_ONLY: false, + parseCommand( + parser: CommandParser, + key: RedisArgument, + lastId: RedisArgument, + options?: XSetIdOptions + ) { + parser.push('XSETID'); + parser.pushKey(key); + parser.push(lastId); + + if (options?.ENTRIESADDED) { + 
parser.push('ENTRIESADDED', options.ENTRIESADDED.toString()); + } + + if (options?.MAXDELETEDID) { + parser.push('MAXDELETEDID', options.MAXDELETEDID); + } + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; + diff --git a/packages/client/lib/commands/XTRIM.spec.ts b/packages/client/lib/commands/XTRIM.spec.ts new file mode 100644 index 00000000000..38254d565e9 --- /dev/null +++ b/packages/client/lib/commands/XTRIM.spec.ts @@ -0,0 +1,157 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import XTRIM from './XTRIM'; +import { parseArgs } from './generic-transformers'; +import { STREAM_DELETION_POLICY } from './common-stream.types'; + +describe('XTRIM', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(XTRIM, 'key', 'MAXLEN', 1), + ['XTRIM', 'key', 'MAXLEN', '1'] + ); + }); + + it('simple - MINID', () => { + assert.deepEqual( + parseArgs(XTRIM, 'key', 'MINID', 123), + ['XTRIM', 'key', 'MINID', '123'] + ); + + assert.deepEqual( + parseArgs(XTRIM, 'key', 'MINID', '0-0'), + ['XTRIM', 'key', 'MINID', '0-0'] + ); + }); + + it('with strategyModifier', () => { + assert.deepEqual( + parseArgs(XTRIM, 'key', 'MAXLEN', 1, { + strategyModifier: '=' + }), + ['XTRIM', 'key', 'MAXLEN', '=', '1'] + ); + }); + + it('with LIMIT', () => { + assert.deepEqual( + parseArgs(XTRIM, 'key', 'MAXLEN', 1, { + LIMIT: 1 + }), + ['XTRIM', 'key', 'MAXLEN', '1', 'LIMIT', '1'] + ); + }); + + it('with strategyModifier, LIMIT', () => { + assert.deepEqual( + parseArgs(XTRIM, 'key', 'MAXLEN', 1, { + strategyModifier: '=', + LIMIT: 1 + }), + ['XTRIM', 'key', 'MAXLEN', '=', '1', 'LIMIT', '1'] + ); + }); + + it('with policy', () => { + assert.deepEqual( + parseArgs(XTRIM, 'key', 'MAXLEN', 1, { + policy: STREAM_DELETION_POLICY.DELREF + }), + ['XTRIM', 'key', 'MAXLEN', '1', 'DELREF'] + ); + }); + + it('with all options', () => { + assert.deepEqual( 
+ parseArgs(XTRIM, 'key', 'MAXLEN', 1, { + strategyModifier: '~', + LIMIT: 100, + policy: STREAM_DELETION_POLICY.ACKED + }), + ['XTRIM', 'key', 'MAXLEN', '~', '1', 'LIMIT', '100', 'ACKED'] + ); + }); + }); + + testUtils.testAll('xTrim with MAXLEN', async client => { + assert.equal( + typeof await client.xTrim('key', 'MAXLEN', 1), + 'number' + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN, + }); + + testUtils.testAll('xTrim with MINID', async client => { + assert.equal( + typeof await client.xTrim('key', 'MINID', 1), + 'number' + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN, + }); + + testUtils.testAll('xTrim with string MINID', async client => { + assert.equal( + typeof await client.xTrim('key', 'MINID', '0-0'), + 'number' + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN, + }); + + testUtils.testAll( + 'xTrim with LIMIT', + async (client) => { + assert.equal( + typeof await client.xTrim('{tag}key', 'MAXLEN', 1000, { + strategyModifier: '~', + LIMIT: 10 + }), + 'number' + ); + }, + { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN, + } + ); + + testUtils.testAll( + 'xTrim with policy', + async (client) => { + assert.equal( + typeof await client.xTrim('{tag}key', 'MAXLEN', 0, { + policy: STREAM_DELETION_POLICY.DELREF + }), + 'number' + ); + }, + { + client: { ...GLOBAL.SERVERS.OPEN, minimumDockerVersion: [8, 2] }, + cluster: { ...GLOBAL.CLUSTERS.OPEN, minimumDockerVersion: [8, 2] }, + } + ); + + testUtils.testAll( + 'xTrim with all options', + async (client) => { + assert.equal( + typeof await client.xTrim('{tag}key', 'MINID', 0, { + strategyModifier: '~', + LIMIT: 10, + policy: STREAM_DELETION_POLICY.KEEPREF + }), + 'number' + ); + }, + { + client: { ...GLOBAL.SERVERS.OPEN, minimumDockerVersion: [8, 2] }, + cluster: { ...GLOBAL.CLUSTERS.OPEN, minimumDockerVersion: [8, 2] }, + } + ); +}); diff --git a/packages/client/lib/commands/XTRIM.ts 
b/packages/client/lib/commands/XTRIM.ts new file mode 100644 index 00000000000..8d40824d791 --- /dev/null +++ b/packages/client/lib/commands/XTRIM.ts @@ -0,0 +1,62 @@ +import { CommandParser } from '../client/parser'; +import { NumberReply, Command, RedisArgument } from '../RESP/types'; +import { StreamDeletionPolicy } from './common-stream.types'; + +/** + * Options for the XTRIM command + * + * @property strategyModifier - Exact ('=') or approximate ('~') trimming + * @property LIMIT - Maximum number of entries to trim in one call (Redis 6.2+) + * @property policy - Policy to apply when deleting entries (optional, defaults to KEEPREF) + */ +export interface XTrimOptions { + strategyModifier?: '=' | '~'; + /** added in 6.2 */ + LIMIT?: number; + /** added in 8.2 */ + policy?: StreamDeletionPolicy; +} + +/** + * Command for trimming a stream to a specified length or minimum ID + */ +export default { + IS_READ_ONLY: false, + /** + * Constructs the XTRIM command to trim a stream by length or minimum ID + * + * @param parser - The command parser + * @param key - The stream key + * @param strategy - Trim by maximum length (MAXLEN) or minimum ID (MINID) + * @param threshold - Maximum length or minimum ID threshold + * @param options - Additional options for trimming + * @returns Number of entries removed from the stream + * @see https://redis.io/commands/xtrim/ + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + strategy: 'MAXLEN' | 'MINID', + threshold: number | string, + options?: XTrimOptions + ) { + parser.push('XTRIM') + parser.pushKey(key); + parser.push(strategy); + + if (options?.strategyModifier) { + parser.push(options.strategyModifier); + } + + parser.push(threshold.toString()); + + if (options?.LIMIT) { + parser.push('LIMIT', options.LIMIT.toString()); + } + + if (options?.policy) { + parser.push(options.policy); + } + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git 
a/packages/client/lib/commands/ZADD.spec.ts b/packages/client/lib/commands/ZADD.spec.ts new file mode 100644 index 00000000000..0e770693e3a --- /dev/null +++ b/packages/client/lib/commands/ZADD.spec.ts @@ -0,0 +1,146 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ZADD from './ZADD'; +import { parseArgs } from './generic-transformers'; + +describe('ZADD', () => { + describe('transformArguments', () => { + it('single member', () => { + assert.deepEqual( + parseArgs(ZADD, 'key', { + value: '1', + score: 1 + }), + ['ZADD', 'key', '1', '1'] + ); + }); + + it('multiple members', () => { + assert.deepEqual( + parseArgs(ZADD, 'key', [{ + value: '1', + score: 1 + }, { + value: '2', + score: 2 + }]), + ['ZADD', 'key', '1', '1', '2', '2'] + ); + }); + + describe('with condition', () => { + it('condition property', () => { + assert.deepEqual( + parseArgs(ZADD, 'key', { + value: '1', + score: 1 + }, { + condition: 'NX' + }), + ['ZADD', 'key', 'NX', '1', '1'] + ); + }); + + it('with NX (backwards compatibility)', () => { + assert.deepEqual( + parseArgs(ZADD, 'key', { + value: '1', + score: 1 + }, { + NX: true + }), + ['ZADD', 'key', 'NX', '1', '1'] + ); + }); + + it('with XX (backwards compatibility)', () => { + assert.deepEqual( + parseArgs(ZADD, 'key', { + value: '1', + score: 1 + }, { + XX: true + }), + ['ZADD', 'key', 'XX', '1', '1'] + ); + }); + }); + + describe('with comparison', () => { + it('with LT', () => { + assert.deepEqual( + parseArgs(ZADD, 'key', { + value: '1', + score: 1 + }, { + comparison: 'LT' + }), + ['ZADD', 'key', 'LT', '1', '1'] + ); + }); + + it('with LT (backwards compatibility)', () => { + assert.deepEqual( + parseArgs(ZADD, 'key', { + value: '1', + score: 1 + }, { + LT: true + }), + ['ZADD', 'key', 'LT', '1', '1'] + ); + }); + + it('with GT (backwards compatibility)', () => { + assert.deepEqual( + parseArgs(ZADD, 'key', { + value: '1', + score: 1 + }, { + GT: true + }), + ['ZADD', 'key', 
'GT', '1', '1'] + ); + }); + }); + + it('with CH', () => { + assert.deepEqual( + parseArgs(ZADD, 'key', { + value: '1', + score: 1 + }, { + CH: true + }), + ['ZADD', 'key', 'CH', '1', '1'] + ); + }); + + it('with condition, comparison, CH', () => { + assert.deepEqual( + parseArgs(ZADD, 'key', { + value: '1', + score: 1 + }, { + condition: 'XX', + comparison: 'LT', + CH: true + }), + ['ZADD', 'key', 'XX', 'LT', 'CH', '1', '1'] + ); + }); + }); + + testUtils.testAll('zAdd', async client => { + assert.equal( + await client.zAdd('key', { + value: 'a', + score: 1 + }), + 1 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/ZADD.ts b/packages/client/lib/commands/ZADD.ts new file mode 100644 index 00000000000..d53835d44d1 --- /dev/null +++ b/packages/client/lib/commands/ZADD.ts @@ -0,0 +1,110 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, Command } from '../RESP/types'; +import { SortedSetMember, transformDoubleArgument, transformDoubleReply } from './generic-transformers'; + +/** + * Options for the ZADD command + */ +export interface ZAddOptions { + condition?: 'NX' | 'XX'; + /** + * @deprecated Use `{ condition: 'NX' }` instead. + */ + NX?: boolean; + /** + * @deprecated Use `{ condition: 'XX' }` instead. + */ + XX?: boolean; + comparison?: 'LT' | 'GT'; + /** + * @deprecated Use `{ comparison: 'LT' }` instead. + */ + LT?: boolean; + /** + * @deprecated Use `{ comparison: 'GT' }` instead. 
+ */ + GT?: boolean; + CH?: boolean; +} + +/** + * Command for adding members to a sorted set + */ +export default { + /** + * Constructs the ZADD command to add one or more members to a sorted set + * + * @param parser - The command parser + * @param key - The sorted set key + * @param members - One or more members to add with their scores + * @param options - Additional options for adding members + * @returns Number of new members added (or changed members if CH is set) + * @see https://redis.io/commands/zadd/ + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + members: SortedSetMember | Array, + options?: ZAddOptions + ) { + parser.push('ZADD'); + parser.pushKey(key); + + if (options?.condition) { + parser.push(options.condition); + } else if (options?.NX) { + parser.push('NX'); + } else if (options?.XX) { + parser.push('XX'); + } + + if (options?.comparison) { + parser.push(options.comparison); + } else if (options?.LT) { + parser.push('LT'); + } else if (options?.GT) { + parser.push('GT'); + } + + if (options?.CH) { + parser.push('CH'); + } + + pushMembers(parser, members); + }, + transformReply: transformDoubleReply +} as const satisfies Command; + +/** + * Helper function to push sorted set members to the command + * + * @param parser - The command parser + * @param members - One or more members with their scores + */ +export function pushMembers( + parser: CommandParser, + members: SortedSetMember | Array) { + if (Array.isArray(members)) { + for (const member of members) { + pushMember(parser, member); + } + } else { + pushMember(parser, members); + } +} + +/** + * Helper function to push a single sorted set member to the command + * + * @param parser - The command parser + * @param member - Member with its score + */ +function pushMember( + parser: CommandParser, + member: SortedSetMember +) { + parser.push( + transformDoubleArgument(member.score), + member.value + ); +} diff --git a/packages/client/lib/commands/ZADD_INCR.spec.ts 
b/packages/client/lib/commands/ZADD_INCR.spec.ts new file mode 100644 index 00000000000..df9ac87f449 --- /dev/null +++ b/packages/client/lib/commands/ZADD_INCR.spec.ts @@ -0,0 +1,94 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ZADD_INCR from './ZADD_INCR'; +import { parseArgs } from './generic-transformers'; + +describe('ZADD INCR', () => { + describe('transformArguments', () => { + it('single member', () => { + assert.deepEqual( + parseArgs(ZADD_INCR, 'key', { + value: '1', + score: 1 + }), + ['ZADD', 'key', 'INCR', '1', '1'] + ); + }); + + it('multiple members', () => { + assert.deepEqual( + parseArgs(ZADD_INCR, 'key', [{ + value: '1', + score: 1 + }, { + value: '2', + score: 2 + }]), + ['ZADD', 'key', 'INCR', '1', '1', '2', '2'] + ); + }); + + it('with condition', () => { + assert.deepEqual( + parseArgs(ZADD_INCR, 'key', { + value: '1', + score: 1 + }, { + condition: 'NX' + }), + ['ZADD', 'key', 'NX', 'INCR', '1', '1'] + ); + }); + + it('with comparison', () => { + assert.deepEqual( + parseArgs(ZADD_INCR, 'key', { + value: '1', + score: 1 + }, { + comparison: 'LT' + }), + ['ZADD', 'key', 'LT', 'INCR', '1', '1'] + ); + }); + + it('with CH', () => { + assert.deepEqual( + parseArgs(ZADD_INCR, 'key', { + value: '1', + score: 1 + }, { + CH: true + }), + ['ZADD', 'key', 'CH', 'INCR', '1', '1'] + ); + }); + + it('with condition, comparison, CH', () => { + assert.deepEqual( + parseArgs(ZADD_INCR, 'key', { + value: '1', + score: 1 + }, { + condition: 'XX', + comparison: 'LT', + CH: true + }), + ['ZADD', 'key', 'XX', 'LT', 'CH', 'INCR', '1', '1'] + ); + }); + }); + + testUtils.testAll('zAddIncr', async client => { + assert.equal( + await client.zAddIncr('key', { + value: 'a', + score: 1 + }), + 1 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/ZADD_INCR.ts b/packages/client/lib/commands/ZADD_INCR.ts new file mode 100644 index 
00000000000..73e40efe5c8 --- /dev/null +++ b/packages/client/lib/commands/ZADD_INCR.ts @@ -0,0 +1,59 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, Command } from '../RESP/types'; +import { pushMembers } from './ZADD'; +import { SortedSetMember, transformNullableDoubleReply } from './generic-transformers'; + +/** + * Options for the ZADD INCR command + * + * @property condition - Add condition: NX (only if not exists) or XX (only if exists) + * @property comparison - Score comparison: LT (less than) or GT (greater than) + * @property CH - Return the number of changed elements instead of added elements + */ +export interface ZAddOptions { + condition?: 'NX' | 'XX'; + comparison?: 'LT' | 'GT'; + CH?: boolean; +} + +/** + * Command for incrementing the score of a member in a sorted set + */ +export default { + /** + * Constructs the ZADD command with INCR option to increment the score of a member + * + * @param parser - The command parser + * @param key - The sorted set key + * @param members - Member(s) whose score to increment + * @param options - Additional options for the increment operation + * @returns The new score of the member after increment (null if member does not exist with XX option) + * @see https://redis.io/commands/zadd/ + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + members: SortedSetMember | Array, + options?: ZAddOptions + ) { + parser.push('ZADD'); + parser.pushKey(key); + + if (options?.condition) { + parser.push(options.condition); + } + + if (options?.comparison) { + parser.push(options.comparison); + } + + if (options?.CH) { + parser.push('CH'); + } + + parser.push('INCR'); + + pushMembers(parser, members); + }, + transformReply: transformNullableDoubleReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/ZCARD.spec.ts b/packages/client/lib/commands/ZCARD.spec.ts new file mode 100644 index 00000000000..44adec0833a --- /dev/null +++ 
b/packages/client/lib/commands/ZCARD.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ZCARD from './ZCARD'; +import { parseArgs } from './generic-transformers'; + +describe('ZCARD', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(ZCARD, 'key'), + ['ZCARD', 'key'] + ); + }); + + testUtils.testAll('zCard', async client => { + assert.equal( + await client.zCard('key'), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/ZCARD.ts b/packages/client/lib/commands/ZCARD.ts new file mode 100644 index 00000000000..d2e0f8df5e2 --- /dev/null +++ b/packages/client/lib/commands/ZCARD.ts @@ -0,0 +1,23 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; + +/** + * Command for getting the number of members in a sorted set + */ +export default { + CACHEABLE: true, + IS_READ_ONLY: true, + /** + * Constructs the ZCARD command to get the cardinality (number of members) of a sorted set + * + * @param parser - The command parser + * @param key - The sorted set key + * @returns Number of members in the sorted set + * @see https://redis.io/commands/zcard/ + */ + parseCommand(parser: CommandParser, key: RedisArgument) { + parser.push('ZCARD'); + parser.pushKey(key); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/ZCOUNT.spec.ts b/packages/client/lib/commands/ZCOUNT.spec.ts new file mode 100644 index 00000000000..5d279d7a4ca --- /dev/null +++ b/packages/client/lib/commands/ZCOUNT.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ZCOUNT from './ZCOUNT'; +import { parseArgs } from './generic-transformers'; + +describe('ZCOUNT', () => { + it('transformArguments', 
() => { + assert.deepEqual( + parseArgs(ZCOUNT, 'key', 0, 1), + ['ZCOUNT', 'key', '0', '1'] + ); + }); + + testUtils.testAll('zCount', async client => { + assert.equal( + await client.zCount('key', 0, 1), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.SERVERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/ZCOUNT.ts b/packages/client/lib/commands/ZCOUNT.ts new file mode 100644 index 00000000000..0ac473eb710 --- /dev/null +++ b/packages/client/lib/commands/ZCOUNT.ts @@ -0,0 +1,29 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; +import { transformStringDoubleArgument } from './generic-transformers'; + +export default { + CACHEABLE: true, + IS_READ_ONLY: true, + /** + * Returns the number of elements in the sorted set with a score between min and max. + * @param parser - The Redis command parser. + * @param key - Key of the sorted set. + * @param min - Minimum score to count from (inclusive). + * @param max - Maximum score to count to (inclusive). 
+ */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + min: number | RedisArgument, + max: number | RedisArgument + ) { + parser.push('ZCOUNT'); + parser.pushKey(key); + parser.push( + transformStringDoubleArgument(min), + transformStringDoubleArgument(max) + ); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/ZDIFF.spec.ts b/packages/client/lib/commands/ZDIFF.spec.ts new file mode 100644 index 00000000000..4914df3e978 --- /dev/null +++ b/packages/client/lib/commands/ZDIFF.spec.ts @@ -0,0 +1,34 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ZDIFF from './ZDIFF'; +import { parseArgs } from './generic-transformers'; + +describe('ZDIFF', () => { + testUtils.isVersionGreaterThanHook([6, 2]); + + describe('transformArguments', () => { + it('string', () => { + assert.deepEqual( + parseArgs(ZDIFF, 'key'), + ['ZDIFF', '1', 'key'] + ); + }); + + it('array', () => { + assert.deepEqual( + parseArgs(ZDIFF, ['1', '2']), + ['ZDIFF', '2', '1', '2'] + ); + }); + }); + + testUtils.testAll('zDiff', async client => { + assert.deepEqual( + await client.zDiff('key'), + [] + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/ZDIFF.ts b/packages/client/lib/commands/ZDIFF.ts new file mode 100644 index 00000000000..f52492c2bca --- /dev/null +++ b/packages/client/lib/commands/ZDIFF.ts @@ -0,0 +1,17 @@ +import { CommandParser } from '../client/parser'; +import { ArrayReply, BlobStringReply, Command } from '../RESP/types'; +import { RedisVariadicArgument } from './generic-transformers'; + +export default { + IS_READ_ONLY: true, + /** + * Returns the difference between the first sorted set and all the successive sorted sets. + * @param parser - The Redis command parser. + * @param keys - Keys of the sorted sets. 
+ */ + parseCommand(parser: CommandParser, keys: RedisVariadicArgument) { + parser.push('ZDIFF'); + parser.pushKeysLength(keys); + }, + transformReply: undefined as unknown as () => ArrayReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/ZDIFFSTORE.spec.ts b/packages/client/lib/commands/ZDIFFSTORE.spec.ts new file mode 100644 index 00000000000..7f380cfc532 --- /dev/null +++ b/packages/client/lib/commands/ZDIFFSTORE.spec.ts @@ -0,0 +1,34 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ZDIFFSTORE from './ZDIFFSTORE'; +import { parseArgs } from './generic-transformers'; + +describe('ZDIFFSTORE', () => { + testUtils.isVersionGreaterThanHook([6, 2]); + + describe('transformArguments', () => { + it('string', () => { + assert.deepEqual( + parseArgs(ZDIFFSTORE, 'destination', 'key'), + ['ZDIFFSTORE', 'destination', '1', 'key'] + ); + }); + + it('array', () => { + assert.deepEqual( + parseArgs(ZDIFFSTORE, 'destination', ['1', '2']), + ['ZDIFFSTORE', 'destination', '2', '1', '2'] + ); + }); + }); + + testUtils.testAll('zDiffStore', async client => { + assert.equal( + await client.zDiffStore('{tag}destination', '{tag}key'), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/ZDIFFSTORE.ts b/packages/client/lib/commands/ZDIFFSTORE.ts new file mode 100644 index 00000000000..87407421fb0 --- /dev/null +++ b/packages/client/lib/commands/ZDIFFSTORE.ts @@ -0,0 +1,19 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; +import { RedisVariadicArgument } from './generic-transformers'; + +export default { + IS_READ_ONLY: true, + /** + * Computes the difference between the first and all successive sorted sets and stores it in a new key. + * @param parser - The Redis command parser. 
+ * @param destination - Destination key where the result will be stored. + * @param inputKeys - Keys of the sorted sets to find the difference between. + */ + parseCommand(parser: CommandParser, destination: RedisArgument, inputKeys: RedisVariadicArgument) { + parser.push('ZDIFFSTORE'); + parser.pushKey(destination); + parser.pushKeysLength(inputKeys); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/ZDIFF_WITHSCORES.spec.ts b/packages/client/lib/commands/ZDIFF_WITHSCORES.spec.ts new file mode 100644 index 00000000000..bea639f223e --- /dev/null +++ b/packages/client/lib/commands/ZDIFF_WITHSCORES.spec.ts @@ -0,0 +1,34 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ZDIFF_WITHSCORES from './ZDIFF_WITHSCORES'; +import { parseArgs } from './generic-transformers'; + +describe('ZDIFF WITHSCORES', () => { + testUtils.isVersionGreaterThanHook([6, 2]); + + describe('transformArguments', () => { + it('string', () => { + assert.deepEqual( + parseArgs(ZDIFF_WITHSCORES, 'key'), + ['ZDIFF', '1', 'key', 'WITHSCORES'] + ); + }); + + it('array', () => { + assert.deepEqual( + parseArgs(ZDIFF_WITHSCORES, ['1', '2']), + ['ZDIFF', '2', '1', '2', 'WITHSCORES'] + ); + }); + }); + + testUtils.testAll('zDiffWithScores', async client => { + assert.deepEqual( + await client.zDiffWithScores('key'), + [] + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/ZDIFF_WITHSCORES.ts b/packages/client/lib/commands/ZDIFF_WITHSCORES.ts new file mode 100644 index 00000000000..6cb661b652a --- /dev/null +++ b/packages/client/lib/commands/ZDIFF_WITHSCORES.ts @@ -0,0 +1,19 @@ +import { CommandParser } from '../client/parser'; +import { Command } from '../RESP/types'; +import { RedisVariadicArgument, transformSortedSetReply } from './generic-transformers'; +import ZDIFF from 
'./ZDIFF'; + + +export default { + IS_READ_ONLY: ZDIFF.IS_READ_ONLY, + /** + * Returns the difference between the first sorted set and all successive sorted sets with their scores. + * @param parser - The Redis command parser. + * @param keys - Keys of the sorted sets. + */ + parseCommand(parser: CommandParser, keys: RedisVariadicArgument) { + ZDIFF.parseCommand(parser, keys); + parser.push('WITHSCORES'); + }, + transformReply: transformSortedSetReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/ZINCRBY.spec.ts b/packages/client/lib/commands/ZINCRBY.spec.ts new file mode 100644 index 00000000000..8f6c5141252 --- /dev/null +++ b/packages/client/lib/commands/ZINCRBY.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ZINCRBY from './ZINCRBY'; +import { parseArgs } from './generic-transformers'; + +describe('ZINCRBY', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(ZINCRBY, 'key', 1, 'member'), + ['ZINCRBY', 'key', '1', 'member'] + ); + }); + + testUtils.testAll('zIncrBy', async client => { + assert.equal( + await client.zIncrBy('destination', 1, 'member'), + 1 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/ZINCRBY.ts b/packages/client/lib/commands/ZINCRBY.ts new file mode 100644 index 00000000000..30692fffb59 --- /dev/null +++ b/packages/client/lib/commands/ZINCRBY.ts @@ -0,0 +1,24 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, Command } from '../RESP/types'; +import { transformDoubleArgument, transformDoubleReply } from './generic-transformers'; + +export default { + /** + * Increments the score of a member in a sorted set by the specified increment. + * @param parser - The Redis command parser. + * @param key - Key of the sorted set. + * @param increment - Value to increment the score by. 
+ * @param member - Member whose score should be incremented. + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + increment: number, + member: RedisArgument + ) { + parser.push('ZINCRBY'); + parser.pushKey(key); + parser.push(transformDoubleArgument(increment), member); + }, + transformReply: transformDoubleReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/ZINTER.spec.ts b/packages/client/lib/commands/ZINTER.spec.ts new file mode 100644 index 00000000000..73df0935de9 --- /dev/null +++ b/packages/client/lib/commands/ZINTER.spec.ts @@ -0,0 +1,66 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ZINTER from './ZINTER'; +import { parseArgs } from './generic-transformers'; + +describe('ZINTER', () => { + testUtils.isVersionGreaterThanHook([6, 2]); + + describe('transformArguments', () => { + it('key (string)', () => { + assert.deepEqual( + parseArgs(ZINTER, 'key'), + ['ZINTER', '1', 'key'] + ); + }); + + it('keys (Array)', () => { + assert.deepEqual( + parseArgs(ZINTER, ['1', '2']), + ['ZINTER', '2', '1', '2'] + ); + }); + + it('key & weight', () => { + assert.deepEqual( + parseArgs(ZINTER, { + key: 'key', + weight: 1 + }), + ['ZINTER', '1', 'key', 'WEIGHTS', '1'] + ); + }); + + it('keys & weights', () => { + assert.deepEqual( + parseArgs(ZINTER, [{ + key: 'a', + weight: 1 + }, { + key: 'b', + weight: 2 + }]), + ['ZINTER', '2', 'a', 'b', 'WEIGHTS', '1', '2'] + ); + }); + + it('with AGGREGATE', () => { + assert.deepEqual( + parseArgs(ZINTER, 'key', { + AGGREGATE: 'SUM' + }), + ['ZINTER', '1', 'key', 'AGGREGATE', 'SUM'] + ); + }); + }); + + testUtils.testAll('zInter', async client => { + assert.deepEqual( + await client.zInter('key'), + [] + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/ZINTER.ts b/packages/client/lib/commands/ZINTER.ts new file mode 100644 index 
00000000000..30d6716293e --- /dev/null +++ b/packages/client/lib/commands/ZINTER.ts @@ -0,0 +1,43 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, ArrayReply, BlobStringReply, Command } from '../RESP/types'; +import { ZKeys, parseZKeysArguments } from './generic-transformers'; + +export type ZInterKeyAndWeight = { + key: RedisArgument; + weight: number; +}; + +export type ZInterKeys = T | [T, ...Array]; + +export type ZInterKeysType = ZInterKeys | ZInterKeys; + +export interface ZInterOptions { + AGGREGATE?: 'SUM' | 'MIN' | 'MAX'; +} + +export function parseZInterArguments( + parser: CommandParser, + keys: ZKeys, + options?: ZInterOptions +) { + parseZKeysArguments(parser, keys); + + if (options?.AGGREGATE) { + parser.push('AGGREGATE', options.AGGREGATE); + } +} + +export default { + IS_READ_ONLY: true, + /** + * Intersects multiple sorted sets and returns the result as a new sorted set. + * @param parser - The Redis command parser. + * @param keys - Keys of the sorted sets to intersect. + * @param options - Optional parameters for the intersection operation. 
+ */ + parseCommand(parser: CommandParser, keys: ZInterKeysType, options?: ZInterOptions) { + parser.push('ZINTER'); + parseZInterArguments(parser, keys, options); + }, + transformReply: undefined as unknown as () => ArrayReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/ZINTERCARD.spec.ts b/packages/client/lib/commands/ZINTERCARD.spec.ts new file mode 100644 index 00000000000..5204872a2d0 --- /dev/null +++ b/packages/client/lib/commands/ZINTERCARD.spec.ts @@ -0,0 +1,45 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ZINTERCARD from './ZINTERCARD'; +import { parseArgs } from './generic-transformers'; + +describe('ZINTERCARD', () => { + testUtils.isVersionGreaterThanHook([7]); + + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(ZINTERCARD, ['1', '2']), + ['ZINTERCARD', '2', '1', '2'] + ); + }); + + describe('with LIMIT', () => { + it('plain number (backwards compatibility)', () => { + assert.deepEqual( + parseArgs(ZINTERCARD, ['1', '2'], 1), + ['ZINTERCARD', '2', '1', '2', 'LIMIT', '1'] + ); + }); + + it('{ LIMIT: number }', () => { + assert.deepEqual( + parseArgs(ZINTERCARD, ['1', '2'], { + LIMIT: 1 + }), + ['ZINTERCARD', '2', '1', '2', 'LIMIT', '1'] + ); + }); + }); + }); + + testUtils.testAll('zInterCard', async client => { + assert.deepEqual( + await client.zInterCard('key'), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/ZINTERCARD.ts b/packages/client/lib/commands/ZINTERCARD.ts new file mode 100644 index 00000000000..7673b0f0a69 --- /dev/null +++ b/packages/client/lib/commands/ZINTERCARD.ts @@ -0,0 +1,33 @@ +import { CommandParser } from '../client/parser'; +import { NumberReply, Command } from '../RESP/types'; +import { RedisVariadicArgument } from './generic-transformers'; + +export interface ZInterCardOptions { + LIMIT?: number; +} + 
+export default { + IS_READ_ONLY: true, + /** + * Returns the cardinality of the intersection of multiple sorted sets. + * @param parser - The Redis command parser. + * @param keys - Keys of the sorted sets to intersect. + * @param options - Limit option or options object with limit. + */ + parseCommand( + parser: CommandParser, + keys: RedisVariadicArgument, + options?: ZInterCardOptions['LIMIT'] | ZInterCardOptions + ) { + parser.push('ZINTERCARD'); + parser.pushKeysLength(keys); + + // backwards compatibility + if (typeof options === 'number') { + parser.push('LIMIT', options.toString()); + } else if (options?.LIMIT) { + parser.push('LIMIT', options.LIMIT.toString()); + } + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/ZINTERSTORE.spec.ts b/packages/client/lib/commands/ZINTERSTORE.spec.ts new file mode 100644 index 00000000000..c6b448ab908 --- /dev/null +++ b/packages/client/lib/commands/ZINTERSTORE.spec.ts @@ -0,0 +1,64 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ZINTERSTORE from './ZINTERSTORE'; +import { parseArgs } from './generic-transformers'; + +describe('ZINTERSTORE', () => { + describe('transformArguments', () => { + it('key (string)', () => { + assert.deepEqual( + parseArgs(ZINTERSTORE, 'destination', 'source'), + ['ZINTERSTORE', 'destination', '1', 'source'] + ); + }); + + it('keys (Array)', () => { + assert.deepEqual( + parseArgs(ZINTERSTORE, 'destination', ['1', '2']), + ['ZINTERSTORE', 'destination', '2', '1', '2'] + ); + }); + + it('key & weight', () => { + assert.deepEqual( + parseArgs(ZINTERSTORE, 'destination', { + key: 'source', + weight: 1 + }), + ['ZINTERSTORE', 'destination', '1', 'source', 'WEIGHTS', '1'] + ); + }); + + it('keys & weights', () => { + assert.deepEqual( + parseArgs(ZINTERSTORE, 'destination', [{ + key: 'a', + weight: 1 + }, { + key: 'b', + weight: 2 + }]), + 
['ZINTERSTORE', 'destination', '2', 'a', 'b', 'WEIGHTS', '1', '2'] + ); + }); + + it('with AGGREGATE', () => { + assert.deepEqual( + parseArgs(ZINTERSTORE, 'destination', 'source', { + AGGREGATE: 'SUM' + }), + ['ZINTERSTORE', 'destination', '1', 'source', 'AGGREGATE', 'SUM'] + ); + }); + }); + + testUtils.testAll('zInterStore', async client => { + assert.equal( + await client.zInterStore('{tag}destination', '{tag}key'), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/ZINTERSTORE.ts b/packages/client/lib/commands/ZINTERSTORE.ts new file mode 100644 index 00000000000..1405b70287b --- /dev/null +++ b/packages/client/lib/commands/ZINTERSTORE.ts @@ -0,0 +1,27 @@ + +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; +import { ZKeys } from './generic-transformers'; +import { parseZInterArguments, ZInterOptions } from './ZINTER'; + +export default { + IS_READ_ONLY: false, + /** + * Stores the result of intersection of multiple sorted sets in a new sorted set. + * @param parser - The Redis command parser. + * @param destination - Destination key where the result will be stored. + * @param keys - Keys of the sorted sets to intersect. + * @param options - Optional parameters for the intersection operation. 
+ */ + parseCommand( + parser: CommandParser, + destination: RedisArgument, + keys: ZKeys, + options?: ZInterOptions + ) { + parser.push('ZINTERSTORE'); + parser.pushKey(destination); + parseZInterArguments(parser, keys, options); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/ZINTER_WITHSCORES.spec.ts b/packages/client/lib/commands/ZINTER_WITHSCORES.spec.ts new file mode 100644 index 00000000000..234b250b143 --- /dev/null +++ b/packages/client/lib/commands/ZINTER_WITHSCORES.spec.ts @@ -0,0 +1,66 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ZINTER_WITHSCORES from './ZINTER_WITHSCORES'; +import { parseArgs } from './generic-transformers'; + +describe('ZINTER WITHSCORES', () => { + testUtils.isVersionGreaterThanHook([6, 2]); + + describe('transformArguments', () => { + it('key (string)', () => { + assert.deepEqual( + parseArgs(ZINTER_WITHSCORES, 'key'), + ['ZINTER', '1', 'key', 'WITHSCORES'] + ); + }); + + it('keys (Array)', () => { + assert.deepEqual( + parseArgs(ZINTER_WITHSCORES, ['1', '2']), + ['ZINTER', '2', '1', '2', 'WITHSCORES'] + ); + }); + + it('key & weight', () => { + assert.deepEqual( + parseArgs(ZINTER_WITHSCORES, { + key: 'key', + weight: 1 + }), + ['ZINTER', '1', 'key', 'WEIGHTS', '1', 'WITHSCORES'] + ); + }); + + it('keys & weights', () => { + assert.deepEqual( + parseArgs(ZINTER_WITHSCORES, [{ + key: 'a', + weight: 1 + }, { + key: 'b', + weight: 2 + }]), + ['ZINTER', '2', 'a', 'b', 'WEIGHTS', '1', '2', 'WITHSCORES'] + ); + }); + + it('with AGGREGATE', () => { + assert.deepEqual( + parseArgs(ZINTER_WITHSCORES, 'key', { + AGGREGATE: 'SUM' + }), + ['ZINTER', '1', 'key', 'AGGREGATE', 'SUM', 'WITHSCORES'] + ); + }); + }); + + testUtils.testAll('zInterWithScores', async client => { + assert.deepEqual( + await client.zInterWithScores('key'), + [] + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: 
GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/ZINTER_WITHSCORES.ts b/packages/client/lib/commands/ZINTER_WITHSCORES.ts new file mode 100644 index 00000000000..40ba3ce4287 --- /dev/null +++ b/packages/client/lib/commands/ZINTER_WITHSCORES.ts @@ -0,0 +1,17 @@ +import { Command } from '../RESP/types'; +import { transformSortedSetReply } from './generic-transformers'; +import ZINTER from './ZINTER'; + + +export default { + IS_READ_ONLY: ZINTER.IS_READ_ONLY, + /** + * Intersects multiple sorted sets and returns the result with scores. + * @param args - Same parameters as ZINTER command. + */ + parseCommand(...args: Parameters) { + ZINTER.parseCommand(...args); + args[0].push('WITHSCORES'); + }, + transformReply: transformSortedSetReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/ZLEXCOUNT.spec.ts b/packages/client/lib/commands/ZLEXCOUNT.spec.ts new file mode 100644 index 00000000000..78c7411affd --- /dev/null +++ b/packages/client/lib/commands/ZLEXCOUNT.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ZLEXCOUNT from './ZLEXCOUNT'; +import { parseArgs } from './generic-transformers'; + +describe('ZLEXCOUNT', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(ZLEXCOUNT, 'key', '[a', '[b'), + ['ZLEXCOUNT', 'key', '[a', '[b'] + ); + }); + + testUtils.testAll('zLexCount', async client => { + assert.equal( + await client.zLexCount('key', '[a', '[b'), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/ZLEXCOUNT.ts b/packages/client/lib/commands/ZLEXCOUNT.ts new file mode 100644 index 00000000000..97bed9f6014 --- /dev/null +++ b/packages/client/lib/commands/ZLEXCOUNT.ts @@ -0,0 +1,26 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; + +export default { + 
CACHEABLE: true, + IS_READ_ONLY: true, + /** + * Returns the number of elements in the sorted set between the lexicographical range specified by min and max. + * @param parser - The Redis command parser. + * @param key - Key of the sorted set. + * @param min - Minimum lexicographical value (inclusive). + * @param max - Maximum lexicographical value (inclusive). + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + min: RedisArgument, + max: RedisArgument + ) { + parser.push('ZLEXCOUNT'); + parser.pushKey(key); + parser.push(min); + parser.push(max); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/ZMPOP.spec.ts b/packages/client/lib/commands/ZMPOP.spec.ts new file mode 100644 index 00000000000..c15a53b7313 --- /dev/null +++ b/packages/client/lib/commands/ZMPOP.spec.ts @@ -0,0 +1,56 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ZMPOP from './ZMPOP'; +import { parseArgs } from './generic-transformers'; + +describe('ZMPOP', () => { + testUtils.isVersionGreaterThanHook([7]); + + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(ZMPOP, 'key', 'MIN'), + ['ZMPOP', '1', 'key', 'MIN'] + ); + }); + + it('with count', () => { + assert.deepEqual( + parseArgs(ZMPOP, 'key', 'MIN', { + COUNT: 2 + }), + ['ZMPOP', '1', 'key', 'MIN', 'COUNT', '2'] + ); + }); + }); + + testUtils.testAll('zmPop - null', async client => { + assert.equal( + await client.zmPop('key', 'MIN'), + null + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); + + testUtils.testAll('zmPop - with members', async client => { + const members = [{ + value: '1', + score: 1 + }]; + + const [, reply] = await Promise.all([ + client.zAdd('key', members), + client.zmPop('key', 'MIN') + ]); + + assert.deepEqual(reply, { + key: 'key', + members + }); + }, { + client: GLOBAL.SERVERS.OPEN, 
+ cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/ZMPOP.ts b/packages/client/lib/commands/ZMPOP.ts new file mode 100644 index 00000000000..fe766ddd13a --- /dev/null +++ b/packages/client/lib/commands/ZMPOP.ts @@ -0,0 +1,72 @@ +import { CommandParser } from '../client/parser'; +import { NullReply, TuplesReply, BlobStringReply, DoubleReply, ArrayReply, UnwrapReply, Resp2Reply, Command, TypeMapping } from '../RESP/types'; +import { RedisVariadicArgument, SortedSetSide, transformSortedSetReply, transformDoubleReply, Tail } from './generic-transformers'; + +export interface ZMPopOptions { + COUNT?: number; +} + +export type ZMPopRawReply = NullReply | TuplesReply<[ + key: BlobStringReply, + members: ArrayReply> +]>; + +export function parseZMPopArguments( + parser: CommandParser, + keys: RedisVariadicArgument, + side: SortedSetSide, + options?: ZMPopOptions +) { + parser.pushKeysLength(keys); + + parser.push(side); + + if (options?.COUNT) { + parser.push('COUNT', options.COUNT.toString()); + } +} + +export type ZMPopArguments = Tail>; + +export default { + IS_READ_ONLY: false, + /** + * Removes and returns up to count members with the highest/lowest scores from the first non-empty sorted set. + * @param parser - The Redis command parser. + * @param keys - Keys of the sorted sets to pop from. + * @param side - Side to pop from (MIN or MAX). + * @param options - Optional parameters including COUNT. + */ + parseCommand( + parser: CommandParser, + keys: RedisVariadicArgument, + side: SortedSetSide, + options?: ZMPopOptions + ) { + parser.push('ZMPOP'); + parseZMPopArguments(parser, keys, side, options) + }, + transformReply: { + 2(reply: UnwrapReply>, preserve?: any, typeMapping?: TypeMapping) { + return reply === null ? 
null : { + key: reply[0], + members: (reply[1] as unknown as UnwrapReply).map(member => { + const [value, score] = member as unknown as UnwrapReply; + return { + value, + score: transformDoubleReply[2](score, preserve, typeMapping) + }; + }) + }; + }, + 3(reply: UnwrapReply) { + return reply === null ? null : { + key: reply[0], + members: transformSortedSetReply[3](reply[1]) + }; + } + } +} as const satisfies Command; diff --git a/packages/client/lib/commands/ZMSCORE.spec.ts b/packages/client/lib/commands/ZMSCORE.spec.ts new file mode 100644 index 00000000000..6c6d2946e00 --- /dev/null +++ b/packages/client/lib/commands/ZMSCORE.spec.ts @@ -0,0 +1,34 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ZMSCORE from './ZMSCORE'; +import { parseArgs } from './generic-transformers'; + +describe('ZMSCORE', () => { + testUtils.isVersionGreaterThanHook([6, 2]); + + describe('transformArguments', () => { + it('string', () => { + assert.deepEqual( + parseArgs(ZMSCORE, 'key', 'member'), + ['ZMSCORE', 'key', 'member'] + ); + }); + + it('array', () => { + assert.deepEqual( + parseArgs(ZMSCORE, 'key', ['1', '2']), + ['ZMSCORE', 'key', '1', '2'] + ); + }); + }); + + testUtils.testAll('zmScore', async client => { + assert.deepEqual( + await client.zmScore('key', 'member'), + [null] + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/ZMSCORE.ts b/packages/client/lib/commands/ZMSCORE.ts new file mode 100644 index 00000000000..0275e8d98db --- /dev/null +++ b/packages/client/lib/commands/ZMSCORE.ts @@ -0,0 +1,25 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, ArrayReply, NullReply, BlobStringReply, DoubleReply, UnwrapReply, Command, TypeMapping } from '../RESP/types'; +import { createTransformNullableDoubleReplyResp2Func, RedisVariadicArgument } from './generic-transformers'; + +export default { + CACHEABLE: true, + 
IS_READ_ONLY: true, + /** + * Returns the scores associated with the specified members in the sorted set stored at key. + * @param parser - The Redis command parser. + * @param key - Key of the sorted set. + * @param member - One or more members to get scores for. + */ + parseCommand(parser: CommandParser, key: RedisArgument, member: RedisVariadicArgument) { + parser.push('ZMSCORE'); + parser.pushKey(key); + parser.pushVariadic(member); + }, + transformReply: { + 2: (reply: UnwrapReply>, preserve?: any, typeMapping?: TypeMapping) => { + return reply.map(createTransformNullableDoubleReplyResp2Func(preserve, typeMapping)); + }, + 3: undefined as unknown as () => ArrayReply + } +} as const satisfies Command; diff --git a/packages/client/lib/commands/ZPOPMAX.spec.ts b/packages/client/lib/commands/ZPOPMAX.spec.ts new file mode 100644 index 00000000000..1796647df86 --- /dev/null +++ b/packages/client/lib/commands/ZPOPMAX.spec.ts @@ -0,0 +1,40 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ZPOPMAX from './ZPOPMAX'; +import { parseArgs } from './generic-transformers'; + +describe('ZPOPMAX', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(ZPOPMAX, 'key'), + ['ZPOPMAX', 'key'] + ); + }); + + testUtils.testAll('zPopMax - null', async client => { + assert.equal( + await client.zPopMax('key'), + null + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.SERVERS.OPEN + }); + + testUtils.testAll('zPopMax - with member', async client => { + const member = { + value: 'value', + score: 1 + }; + + const [, reply] = await Promise.all([ + client.zAdd('key', member), + client.zPopMax('key') + ]); + + assert.deepEqual(reply, member); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.SERVERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/ZPOPMAX.ts b/packages/client/lib/commands/ZPOPMAX.ts new file mode 100644 index 00000000000..fd7b7cf9f94 --- /dev/null +++ 
b/packages/client/lib/commands/ZPOPMAX.ts @@ -0,0 +1,34 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, TuplesReply, BlobStringReply, DoubleReply, UnwrapReply, Command, TypeMapping } from '../RESP/types'; +import { transformDoubleReply } from './generic-transformers'; + +export default { + IS_READ_ONLY: false, + /** + * Removes and returns the member with the highest score in the sorted set. + * @param parser - The Redis command parser. + * @param key - Key of the sorted set. + */ + parseCommand(parser: CommandParser, key: RedisArgument) { + parser.push('ZPOPMAX'); + parser.pushKey(key); + }, + transformReply: { + 2: (reply: UnwrapReply>, preserve?: any, typeMapping?: TypeMapping) => { + if (reply.length === 0) return null; + + return { + value: reply[0], + score: transformDoubleReply[2](reply[1], preserve, typeMapping), + }; + }, + 3: (reply: UnwrapReply>) => { + if (reply.length === 0) return null; + + return { + value: reply[0], + score: reply[1] + }; + } + } +} as const satisfies Command; diff --git a/packages/client/lib/commands/ZPOPMAX_COUNT.spec.ts b/packages/client/lib/commands/ZPOPMAX_COUNT.spec.ts new file mode 100644 index 00000000000..dd9d85dbd36 --- /dev/null +++ b/packages/client/lib/commands/ZPOPMAX_COUNT.spec.ts @@ -0,0 +1,33 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ZPOPMAX_COUNT from './ZPOPMAX_COUNT'; +import { parseArgs } from './generic-transformers'; + +describe('ZPOPMAX COUNT', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(ZPOPMAX_COUNT, 'key', 1), + ['ZPOPMAX', 'key', '1'] + ); + }); + + testUtils.testAll('zPopMaxCount', async client => { + const members = [{ + value: '1', + score: 1 + }, { + value: '2', + score: 2 + }]; + + const [ , reply] = await Promise.all([ + client.zAdd('key', members), + client.zPopMaxCount('key', members.length) + ]); + + assert.deepEqual(reply, members.reverse()); + }, { + client: 
GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.SERVERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/ZPOPMAX_COUNT.ts b/packages/client/lib/commands/ZPOPMAX_COUNT.ts new file mode 100644 index 00000000000..50f347acf3e --- /dev/null +++ b/packages/client/lib/commands/ZPOPMAX_COUNT.ts @@ -0,0 +1,19 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, Command } from '../RESP/types'; +import { transformSortedSetReply } from './generic-transformers'; + +export default { + IS_READ_ONLY: false, + /** + * Removes and returns up to count members with the highest scores in the sorted set. + * @param parser - The Redis command parser. + * @param key - Key of the sorted set. + * @param count - Number of members to pop. + */ + parseCommand(parser: CommandParser, key: RedisArgument, count: number) { + parser.push('ZPOPMAX'); + parser.pushKey(key); + parser.push(count.toString()); + }, + transformReply: transformSortedSetReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/ZPOPMIN.spec.ts b/packages/client/lib/commands/ZPOPMIN.spec.ts new file mode 100644 index 00000000000..653a4e70a92 --- /dev/null +++ b/packages/client/lib/commands/ZPOPMIN.spec.ts @@ -0,0 +1,40 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ZPOPMIN from './ZPOPMIN'; +import { parseArgs } from './generic-transformers'; + +describe('ZPOPMIN', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(ZPOPMIN, 'key'), + ['ZPOPMIN', 'key'] + ); + }); + + testUtils.testAll('zPopMin - null', async client => { + assert.equal( + await client.zPopMin('key'), + null + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.SERVERS.OPEN + }); + + testUtils.testAll('zPopMax - with member', async client => { + const member = { + value: 'value', + score: 1 + }; + + const [, reply] = await Promise.all([ + client.zAdd('key', member), + client.zPopMin('key') + ]); + + 
assert.deepEqual(reply, member); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.SERVERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/ZPOPMIN.ts b/packages/client/lib/commands/ZPOPMIN.ts new file mode 100644 index 00000000000..2de4977da7f --- /dev/null +++ b/packages/client/lib/commands/ZPOPMIN.ts @@ -0,0 +1,17 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, Command } from '../RESP/types'; +import ZPOPMAX from './ZPOPMAX'; + +export default { + IS_READ_ONLY: false, + /** + * Removes and returns the member with the lowest score in the sorted set. + * @param parser - The Redis command parser. + * @param key - Key of the sorted set. + */ + parseCommand(parser: CommandParser, key: RedisArgument) { + parser.push('ZPOPMIN'); + parser.pushKey(key); + }, + transformReply: ZPOPMAX.transformReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/ZPOPMIN_COUNT.spec.ts b/packages/client/lib/commands/ZPOPMIN_COUNT.spec.ts new file mode 100644 index 00000000000..126a3cc1e9a --- /dev/null +++ b/packages/client/lib/commands/ZPOPMIN_COUNT.spec.ts @@ -0,0 +1,33 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ZPOPMIN_COUNT from './ZPOPMIN_COUNT'; +import { parseArgs } from './generic-transformers'; + +describe('ZPOPMIN COUNT', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(ZPOPMIN_COUNT, 'key', 1), + ['ZPOPMIN', 'key', '1'] + ); + }); + + testUtils.testAll('zPopMinCount', async client => { + const members = [{ + value: '1', + score: 1 + }, { + value: '2', + score: 2 + }]; + + const [ , reply] = await Promise.all([ + client.zAdd('key', members), + client.zPopMinCount('key', members.length) + ]); + + assert.deepEqual(reply, members); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.SERVERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/ZPOPMIN_COUNT.ts b/packages/client/lib/commands/ZPOPMIN_COUNT.ts new 
file mode 100644 index 00000000000..24e084b2aef --- /dev/null +++ b/packages/client/lib/commands/ZPOPMIN_COUNT.ts @@ -0,0 +1,19 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, Command } from '../RESP/types'; +import { transformSortedSetReply } from './generic-transformers'; + +export default { + IS_READ_ONLY: false, + /** + * Removes and returns up to count members with the lowest scores in the sorted set. + * @param parser - The Redis command parser. + * @param key - Key of the sorted set. + * @param count - Number of members to pop. + */ + parseCommand(parser: CommandParser, key: RedisArgument, count: number) { + parser.push('ZPOPMIN'); + parser.pushKey(key); + parser.push(count.toString()); + }, + transformReply: transformSortedSetReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/ZRANDMEMBER.spec.ts b/packages/client/lib/commands/ZRANDMEMBER.spec.ts new file mode 100644 index 00000000000..a25ea79f8e1 --- /dev/null +++ b/packages/client/lib/commands/ZRANDMEMBER.spec.ts @@ -0,0 +1,25 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ZRANDMEMBER from './ZRANDMEMBER'; +import { parseArgs } from './generic-transformers'; + +describe('ZRANDMEMBER', () => { + testUtils.isVersionGreaterThanHook([6, 2]); + + it('transformArguments', () => { + assert.deepEqual( + parseArgs(ZRANDMEMBER, 'key'), + ['ZRANDMEMBER', 'key'] + ); + }); + + testUtils.testAll('zRandMember', async client => { + assert.equal( + await client.zRandMember('key'), + null + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/ZRANDMEMBER.ts b/packages/client/lib/commands/ZRANDMEMBER.ts new file mode 100644 index 00000000000..ed0a529da5e --- /dev/null +++ b/packages/client/lib/commands/ZRANDMEMBER.ts @@ -0,0 +1,16 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, BlobStringReply, 
NullReply, Command } from '../RESP/types'; + +export default { + IS_READ_ONLY: true, + /** + * Returns a random member from a sorted set. + * @param parser - The Redis command parser. + * @param key - Key of the sorted set. + */ + parseCommand(parser: CommandParser, key: RedisArgument) { + parser.push('ZRANDMEMBER'); + parser.pushKey(key); + }, + transformReply: undefined as unknown as () => BlobStringReply | NullReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/ZRANDMEMBER_COUNT.spec.ts b/packages/client/lib/commands/ZRANDMEMBER_COUNT.spec.ts new file mode 100644 index 00000000000..eee0d454975 --- /dev/null +++ b/packages/client/lib/commands/ZRANDMEMBER_COUNT.spec.ts @@ -0,0 +1,25 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ZRANDMEMBER_COUNT from './ZRANDMEMBER_COUNT'; +import { parseArgs } from './generic-transformers'; + +describe('ZRANDMEMBER COUNT', () => { + testUtils.isVersionGreaterThanHook([6, 2, 5]); + + it('transformArguments', () => { + assert.deepEqual( + parseArgs(ZRANDMEMBER_COUNT, 'key', 1), + ['ZRANDMEMBER', 'key', '1'] + ); + }); + + testUtils.testAll('zRandMemberCount', async client => { + assert.deepEqual( + await client.zRandMemberCount('key', 1), + [] + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/ZRANDMEMBER_COUNT.ts b/packages/client/lib/commands/ZRANDMEMBER_COUNT.ts new file mode 100644 index 00000000000..f201f9c236a --- /dev/null +++ b/packages/client/lib/commands/ZRANDMEMBER_COUNT.ts @@ -0,0 +1,18 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, ArrayReply, BlobStringReply, Command } from '../RESP/types'; +import ZRANDMEMBER from './ZRANDMEMBER'; + +export default { + IS_READ_ONLY: ZRANDMEMBER.IS_READ_ONLY, + /** + * Returns one or more random members from a sorted set. + * @param parser - The Redis command parser. 
+ * @param key - Key of the sorted set. + * @param count - Number of members to return. + */ + parseCommand(parser: CommandParser, key: RedisArgument, count: number) { + ZRANDMEMBER.parseCommand(parser, key); + parser.push(count.toString()); + }, + transformReply: undefined as unknown as () => ArrayReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/ZRANDMEMBER_COUNT_WITHSCORES.spec.ts b/packages/client/lib/commands/ZRANDMEMBER_COUNT_WITHSCORES.spec.ts new file mode 100644 index 00000000000..3be3b92aeef --- /dev/null +++ b/packages/client/lib/commands/ZRANDMEMBER_COUNT_WITHSCORES.spec.ts @@ -0,0 +1,25 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ZRANDMEMBER_COUNT_WITHSCORES from './ZRANDMEMBER_COUNT_WITHSCORES'; +import { parseArgs } from './generic-transformers'; + +describe('ZRANDMEMBER COUNT WITHSCORES', () => { + testUtils.isVersionGreaterThanHook([6, 2, 5]); + + it('transformArguments', () => { + assert.deepEqual( + parseArgs(ZRANDMEMBER_COUNT_WITHSCORES, 'key', 1), + ['ZRANDMEMBER', 'key', '1', 'WITHSCORES'] + ); + }); + + testUtils.testAll('zRandMemberCountWithScores', async client => { + assert.deepEqual( + await client.zRandMemberCountWithScores('key', 1), + [] + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/ZRANDMEMBER_COUNT_WITHSCORES.ts b/packages/client/lib/commands/ZRANDMEMBER_COUNT_WITHSCORES.ts new file mode 100644 index 00000000000..3792bce794f --- /dev/null +++ b/packages/client/lib/commands/ZRANDMEMBER_COUNT_WITHSCORES.ts @@ -0,0 +1,19 @@ +import { CommandParser } from '../client/parser'; +import { Command, RedisArgument } from '../RESP/types'; +import { transformSortedSetReply } from './generic-transformers'; +import ZRANDMEMBER_COUNT from './ZRANDMEMBER_COUNT'; + +export default { + IS_READ_ONLY: ZRANDMEMBER_COUNT.IS_READ_ONLY, + /** + * Returns one or more random members 
with their scores from a sorted set. + * @param parser - The Redis command parser. + * @param key - Key of the sorted set. + * @param count - Number of members to return. + */ + parseCommand(parser: CommandParser, key: RedisArgument, count: number) { + ZRANDMEMBER_COUNT.parseCommand(parser, key, count); + parser.push('WITHSCORES'); + }, + transformReply: transformSortedSetReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/ZRANGE.spec.ts b/packages/client/lib/commands/ZRANGE.spec.ts new file mode 100644 index 00000000000..a780e4ef613 --- /dev/null +++ b/packages/client/lib/commands/ZRANGE.spec.ts @@ -0,0 +1,78 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ZRANGE from './ZRANGE'; +import { parseArgs } from './generic-transformers'; + +describe('ZRANGE', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(ZRANGE, 'src', 0, 1), + ['ZRANGE', 'src', '0', '1'] + ); + }); + + it('with BYSCORE', () => { + assert.deepEqual( + parseArgs(ZRANGE, 'src', 0, 1, { + BY: 'SCORE' + }), + ['ZRANGE', 'src', '0', '1', 'BYSCORE'] + ); + }); + + it('with BYLEX', () => { + assert.deepEqual( + parseArgs(ZRANGE, 'src', 0, 1, { + BY: 'LEX' + }), + ['ZRANGE', 'src', '0', '1', 'BYLEX'] + ); + }); + + it('with REV', () => { + assert.deepEqual( + parseArgs(ZRANGE, 'src', 0, 1, { + REV: true + }), + ['ZRANGE', 'src', '0', '1', 'REV'] + ); + }); + + it('with LIMIT', () => { + assert.deepEqual( + parseArgs(ZRANGE, 'src', 0, 1, { + LIMIT: { + offset: 0, + count: 1 + } + }), + ['ZRANGE', 'src', '0', '1', 'LIMIT', '0', '1'] + ); + }); + + it('with BY & REV & LIMIT', () => { + assert.deepEqual( + parseArgs(ZRANGE, 'src', 0, 1, { + BY: 'SCORE', + REV: true, + LIMIT: { + offset: 0, + count: 1 + } + }), + ['ZRANGE', 'src', '0', '1', 'BYSCORE', 'REV', 'LIMIT', '0', '1'] + ); + }); + }); + + testUtils.testAll('zRange', async client => { + assert.deepEqual( + await 
client.zRange('src', 0, 1), + [] + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/ZRANGE.ts b/packages/client/lib/commands/ZRANGE.ts new file mode 100644 index 00000000000..43801289bde --- /dev/null +++ b/packages/client/lib/commands/ZRANGE.ts @@ -0,0 +1,72 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, ArrayReply, BlobStringReply, Command } from '../RESP/types'; +import { transformStringDoubleArgument } from './generic-transformers'; + +export interface ZRangeOptions { + BY?: 'SCORE' | 'LEX'; + REV?: boolean; + LIMIT?: { + offset: number; + count: number; + }; +} + +export function zRangeArgument( + min: RedisArgument | number, + max: RedisArgument | number, + options?: ZRangeOptions +) { + const args = [ + transformStringDoubleArgument(min), + transformStringDoubleArgument(max) + ] + + switch (options?.BY) { + case 'SCORE': + args.push('BYSCORE'); + break; + + case 'LEX': + args.push('BYLEX'); + break; + } + + if (options?.REV) { + args.push('REV'); + } + + if (options?.LIMIT) { + args.push( + 'LIMIT', + options.LIMIT.offset.toString(), + options.LIMIT.count.toString() + ); + } + + return args; +} + +export default { + CACHEABLE: true, + IS_READ_ONLY: true, + /** + * Returns the specified range of elements in the sorted set. + * @param parser - The Redis command parser. + * @param key - Key of the sorted set. + * @param min - Minimum index, score or lexicographical value. + * @param max - Maximum index, score or lexicographical value. + * @param options - Optional parameters for range retrieval (BY, REV, LIMIT). 
+ */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + min: RedisArgument | number, + max: RedisArgument | number, + options?: ZRangeOptions + ) { + parser.push('ZRANGE'); + parser.pushKey(key); + parser.pushVariadic(zRangeArgument(min, max, options)) + }, + transformReply: undefined as unknown as () => ArrayReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/ZRANGEBYLEX.spec.ts b/packages/client/lib/commands/ZRANGEBYLEX.spec.ts new file mode 100644 index 00000000000..942e184661a --- /dev/null +++ b/packages/client/lib/commands/ZRANGEBYLEX.spec.ts @@ -0,0 +1,37 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ZRANGEBYLEX from './ZRANGEBYLEX'; +import { parseArgs } from './generic-transformers'; + +describe('ZRANGEBYLEX', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(ZRANGEBYLEX, 'src', '-', '+'), + ['ZRANGEBYLEX', 'src', '-', '+'] + ); + }); + + it('with LIMIT', () => { + assert.deepEqual( + parseArgs(ZRANGEBYLEX, 'src', '-', '+', { + LIMIT: { + offset: 0, + count: 1 + } + }), + ['ZRANGEBYLEX', 'src', '-', '+', 'LIMIT', '0', '1'] + ); + }); + }); + + testUtils.testAll('zRangeByLex', async client => { + assert.deepEqual( + await client.zRangeByLex('src', '-', '+'), + [] + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/ZRANGEBYLEX.ts b/packages/client/lib/commands/ZRANGEBYLEX.ts new file mode 100644 index 00000000000..e069fa55b4b --- /dev/null +++ b/packages/client/lib/commands/ZRANGEBYLEX.ts @@ -0,0 +1,42 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, ArrayReply, BlobStringReply, Command } from '../RESP/types'; +import { transformStringDoubleArgument } from './generic-transformers'; + +export interface ZRangeByLexOptions { + LIMIT?: { + offset: number; + count: number; + }; +} + +export default { 
+ CACHEABLE: true, + IS_READ_ONLY: true, + /** + * Returns all the elements in the sorted set at key with a lexicographical value between min and max. + * @param parser - The Redis command parser. + * @param key - Key of the sorted set. + * @param min - Minimum lexicographical value. + * @param max - Maximum lexicographical value. + * @param options - Optional parameters including LIMIT. + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + min: RedisArgument, + max: RedisArgument, + options?: ZRangeByLexOptions + ) { + parser.push('ZRANGEBYLEX'); + parser.pushKey(key); + parser.push( + transformStringDoubleArgument(min), + transformStringDoubleArgument(max) + ); + + if (options?.LIMIT) { + parser.push('LIMIT', options.LIMIT.offset.toString(), options.LIMIT.count.toString()); + } + }, + transformReply: undefined as unknown as () => ArrayReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/ZRANGEBYSCORE.spec.ts b/packages/client/lib/commands/ZRANGEBYSCORE.spec.ts new file mode 100644 index 00000000000..364882f21a9 --- /dev/null +++ b/packages/client/lib/commands/ZRANGEBYSCORE.spec.ts @@ -0,0 +1,37 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ZRANGEBYSCORE from './ZRANGEBYSCORE'; +import { parseArgs } from './generic-transformers'; + +describe('ZRANGEBYSCORE', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(ZRANGEBYSCORE, 'src', 0, 1), + ['ZRANGEBYSCORE', 'src', '0', '1'] + ); + }); + + it('with LIMIT', () => { + assert.deepEqual( + parseArgs(ZRANGEBYSCORE, 'src', 0, 1, { + LIMIT: { + offset: 0, + count: 1 + } + }), + ['ZRANGEBYSCORE', 'src', '0', '1', 'LIMIT', '0', '1'] + ); + }); + }); + + testUtils.testAll('zRangeByScore', async client => { + assert.deepEqual( + await client.zRangeByScore('src', 0, 1), + [] + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git 
a/packages/client/lib/commands/ZRANGEBYSCORE.ts b/packages/client/lib/commands/ZRANGEBYSCORE.ts new file mode 100644 index 00000000000..80bc8bc2b6c --- /dev/null +++ b/packages/client/lib/commands/ZRANGEBYSCORE.ts @@ -0,0 +1,44 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, ArrayReply, BlobStringReply, Command } from '../RESP/types'; +import { transformStringDoubleArgument } from './generic-transformers'; + +export interface ZRangeByScoreOptions { + LIMIT?: { + offset: number; + count: number; + }; +} + +export declare function transformReply(): Array; + +export default { + CACHEABLE: true, + IS_READ_ONLY: true, + /** + * Returns all the elements in the sorted set with a score between min and max. + * @param parser - The Redis command parser. + * @param key - Key of the sorted set. + * @param min - Minimum score. + * @param max - Maximum score. + * @param options - Optional parameters including LIMIT. + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + min: string | number, + max: string | number, + options?: ZRangeByScoreOptions + ) { + parser.push('ZRANGEBYSCORE'); + parser.pushKey(key); + parser.push( + transformStringDoubleArgument(min), + transformStringDoubleArgument(max) + ); + + if (options?.LIMIT) { + parser.push('LIMIT', options.LIMIT.offset.toString(), options.LIMIT.count.toString()); + } + }, + transformReply: undefined as unknown as () => ArrayReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/ZRANGEBYSCORE_WITHSCORES.spec.ts b/packages/client/lib/commands/ZRANGEBYSCORE_WITHSCORES.spec.ts new file mode 100644 index 00000000000..191eaa4e34f --- /dev/null +++ b/packages/client/lib/commands/ZRANGEBYSCORE_WITHSCORES.spec.ts @@ -0,0 +1,37 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ZRANGEBYSCORE_WITHSCORES from './ZRANGEBYSCORE_WITHSCORES'; +import { parseArgs } from './generic-transformers'; + 
+describe('ZRANGEBYSCORE WITHSCORES', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(ZRANGEBYSCORE_WITHSCORES, 'src', 0, 1), + ['ZRANGEBYSCORE', 'src', '0', '1', 'WITHSCORES'] + ); + }); + + it('with LIMIT', () => { + assert.deepEqual( + parseArgs(ZRANGEBYSCORE_WITHSCORES, 'src', 0, 1, { + LIMIT: { + offset: 0, + count: 1 + } + }), + ['ZRANGEBYSCORE', 'src', '0', '1', 'LIMIT', '0', '1', 'WITHSCORES'] + ); + }); + }); + + testUtils.testAll('zRangeByScoreWithScores', async client => { + assert.deepEqual( + await client.zRangeByScoreWithScores('src', 0, 1), + [] + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/ZRANGEBYSCORE_WITHSCORES.ts b/packages/client/lib/commands/ZRANGEBYSCORE_WITHSCORES.ts new file mode 100644 index 00000000000..9cea5bd7b83 --- /dev/null +++ b/packages/client/lib/commands/ZRANGEBYSCORE_WITHSCORES.ts @@ -0,0 +1,19 @@ +import { Command } from '../RESP/types'; +import { transformSortedSetReply } from './generic-transformers'; +import ZRANGEBYSCORE from './ZRANGEBYSCORE'; + +export default { + CACHEABLE: ZRANGEBYSCORE.CACHEABLE, + IS_READ_ONLY: ZRANGEBYSCORE.IS_READ_ONLY, + /** + * Returns all the elements in the sorted set with a score between min and max, with their scores. + * @param args - Same parameters as the ZRANGEBYSCORE command. 
+ */ + parseCommand(...args: Parameters) { + const parser = args[0]; + + ZRANGEBYSCORE.parseCommand(...args); + parser.push('WITHSCORES'); + }, + transformReply: transformSortedSetReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/ZRANGESTORE.spec.ts b/packages/client/lib/commands/ZRANGESTORE.spec.ts new file mode 100644 index 00000000000..c9708efd6fd --- /dev/null +++ b/packages/client/lib/commands/ZRANGESTORE.spec.ts @@ -0,0 +1,82 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ZRANGESTORE from './ZRANGESTORE'; +import { parseArgs } from './generic-transformers'; + +describe('ZRANGESTORE', () => { + testUtils.isVersionGreaterThanHook([6, 2]); + + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(ZRANGESTORE, 'destination', 'source', 0, 1), + ['ZRANGESTORE', 'destination', 'source', '0', '1'] + ); + }); + + it('with BYSCORE', () => { + assert.deepEqual( + parseArgs(ZRANGESTORE, 'destination', 'source', 0, 1, { + BY: 'SCORE' + }), + ['ZRANGESTORE', 'destination', 'source', '0', '1', 'BYSCORE'] + ); + }); + + it('with BYLEX', () => { + assert.deepEqual( + parseArgs(ZRANGESTORE, 'destination', 'source', 0, 1, { + BY: 'LEX' + }), + ['ZRANGESTORE', 'destination', 'source', '0', '1', 'BYLEX'] + ); + }); + + it('with REV', () => { + assert.deepEqual( + parseArgs(ZRANGESTORE, 'destination', 'source', 0, 1, { + REV: true + }), + ['ZRANGESTORE', 'destination', 'source', '0', '1', 'REV'] + ); + }); + + it('with LIMIT', () => { + assert.deepEqual( + parseArgs(ZRANGESTORE, 'destination', 'source', 0, 1, { + LIMIT: { + offset: 0, + count: 1 + } + }), + ['ZRANGESTORE', 'destination', 'source', '0', '1', 'LIMIT', '0', '1'] + ); + }); + + it('with BY & REV & LIMIT', () => { + assert.deepEqual( + parseArgs(ZRANGESTORE, 'destination', 'source', 0, 1, { + BY: 'SCORE', + REV: true, + LIMIT: { + offset: 0, + count: 1 + } + }), + ['ZRANGESTORE', 
'destination', 'source', '0', '1', 'BYSCORE', 'REV', 'LIMIT', '0', '1'] + ); + }); + }); + + testUtils.testWithClient('client.zRangeStore', async client => { + const [, reply] = await Promise.all([ + client.zAdd('{tag}source', { + score: 1, + value: '1' + }), + client.zRangeStore('{tag}destination', '{tag}source', 0, 1) + ]); + + assert.equal(reply, 1); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/ZRANGESTORE.ts b/packages/client/lib/commands/ZRANGESTORE.ts new file mode 100644 index 00000000000..bd3e260e32c --- /dev/null +++ b/packages/client/lib/commands/ZRANGESTORE.ts @@ -0,0 +1,60 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; +import { transformStringDoubleArgument } from './generic-transformers'; + +export interface ZRangeStoreOptions { + BY?: 'SCORE' | 'LEX'; + REV?: true; + LIMIT?: { + offset: number; + count: number; + }; +} + +export default { + IS_READ_ONLY: false, + /** + * Stores the result of a range operation on a sorted set into a new sorted set. + * @param parser - The Redis command parser. + * @param destination - Destination key where the result will be stored. + * @param source - Key of the source sorted set. + * @param min - Minimum index, score or lexicographical value. + * @param max - Maximum index, score or lexicographical value. + * @param options - Optional parameters for the range operation (BY, REV, LIMIT). 
+ */ + parseCommand( + parser: CommandParser, + destination: RedisArgument, + source: RedisArgument, + min: RedisArgument | number, + max: RedisArgument | number, + options?: ZRangeStoreOptions + ) { + parser.push('ZRANGESTORE'); + parser.pushKey(destination); + parser.pushKey(source); + parser.push( + transformStringDoubleArgument(min), + transformStringDoubleArgument(max) + ); + + switch (options?.BY) { + case 'SCORE': + parser.push('BYSCORE'); + break; + + case 'LEX': + parser.push('BYLEX'); + break; + } + + if (options?.REV) { + parser.push('REV'); + } + + if (options?.LIMIT) { + parser.push('LIMIT', options.LIMIT.offset.toString(), options.LIMIT.count.toString()); + } + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/ZRANGE_WITHSCORES.spec.ts b/packages/client/lib/commands/ZRANGE_WITHSCORES.spec.ts new file mode 100644 index 00000000000..e3009a6eadb --- /dev/null +++ b/packages/client/lib/commands/ZRANGE_WITHSCORES.spec.ts @@ -0,0 +1,76 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ZRANGE_WITHSCORES from './ZRANGE_WITHSCORES'; +import { parseArgs } from './generic-transformers'; + +describe('ZRANGE WITHSCORES', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(ZRANGE_WITHSCORES, 'src', 0, 1), + ['ZRANGE', 'src', '0', '1', 'WITHSCORES'] + ); + }); + + it('with BY', () => { + assert.deepEqual( + parseArgs(ZRANGE_WITHSCORES, 'src', 0, 1, { + BY: 'SCORE' + }), + ['ZRANGE', 'src', '0', '1', 'BYSCORE', 'WITHSCORES'] + ); + }); + + it('with REV', () => { + assert.deepEqual( + parseArgs(ZRANGE_WITHSCORES, 'src', 0, 1, { + REV: true + }), + ['ZRANGE', 'src', '0', '1', 'REV', 'WITHSCORES'] + ); + }); + + it('with LIMIT', () => { + assert.deepEqual( + parseArgs(ZRANGE_WITHSCORES, 'src', 0, 1, { + LIMIT: { + offset: 0, + count: 1 + } + }), + ['ZRANGE', 'src', '0', 
'1', 'LIMIT', '0', '1', 'WITHSCORES'] + ); + }); + + it('with BY & REV & LIMIT', () => { + assert.deepEqual( + parseArgs(ZRANGE_WITHSCORES, 'src', 0, 1, { + BY: 'SCORE', + REV: true, + LIMIT: { + offset: 0, + count: 1 + } + }), + ['ZRANGE', 'src', '0', '1', 'BYSCORE', 'REV', 'LIMIT', '0', '1', 'WITHSCORES'] + ); + }); + }); + + testUtils.testAll('zRangeWithScores', async client => { + const members = [{ + value: '1', + score: 1 + }]; + + const [, reply] = await Promise.all([ + client.zAdd('key', members), + client.zRangeWithScores('key', 0, 1) + ]); + + assert.deepEqual(reply, members); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/ZRANGE_WITHSCORES.ts b/packages/client/lib/commands/ZRANGE_WITHSCORES.ts new file mode 100644 index 00000000000..e85af4be08e --- /dev/null +++ b/packages/client/lib/commands/ZRANGE_WITHSCORES.ts @@ -0,0 +1,20 @@ +import { Command } from '../RESP/types'; +import { transformSortedSetReply } from './generic-transformers'; +import ZRANGE from './ZRANGE'; + +export default { + CACHEABLE: ZRANGE.CACHEABLE, + IS_READ_ONLY: ZRANGE.IS_READ_ONLY, + /** + * Returns the specified range of elements in the sorted set with their scores. + * @param args - Same parameters as the ZRANGE command. 
+ */ + parseCommand(...args: Parameters) { + const parser = args[0]; + + ZRANGE.parseCommand(...args); + parser.push('WITHSCORES'); + }, + transformReply: transformSortedSetReply +} as const satisfies Command; + diff --git a/packages/client/lib/commands/ZRANK.spec.ts b/packages/client/lib/commands/ZRANK.spec.ts new file mode 100644 index 00000000000..480f75f66e1 --- /dev/null +++ b/packages/client/lib/commands/ZRANK.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ZRANK from './ZRANK'; +import { parseArgs } from './generic-transformers'; + +describe('ZRANK', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(ZRANK, 'key', 'member'), + ['ZRANK', 'key', 'member'] + ); + }); + + testUtils.testAll('zRank', async client => { + assert.equal( + await client.zRank('key', 'member'), + null + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/ZRANK.ts b/packages/client/lib/commands/ZRANK.ts new file mode 100644 index 00000000000..73329aa2a59 --- /dev/null +++ b/packages/client/lib/commands/ZRANK.ts @@ -0,0 +1,19 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, NullReply, Command } from '../RESP/types'; + +export default { + CACHEABLE: true, + IS_READ_ONLY: true, + /** + * Returns the rank of a member in the sorted set, with scores ordered from low to high. + * @param parser - The Redis command parser. + * @param key - Key of the sorted set. + * @param member - Member to get the rank for. 
+ */ + parseCommand(parser: CommandParser, key: RedisArgument, member: RedisArgument) { + parser.push('ZRANK'); + parser.pushKey(key); + parser.push(member); + }, + transformReply: undefined as unknown as () => NumberReply | NullReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/ZRANK_WITHSCORE.spec.ts b/packages/client/lib/commands/ZRANK_WITHSCORE.spec.ts new file mode 100644 index 00000000000..9fa7cb1f6fd --- /dev/null +++ b/packages/client/lib/commands/ZRANK_WITHSCORE.spec.ts @@ -0,0 +1,47 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ZRANK_WITHSCORE from './ZRANK_WITHSCORE'; +import { parseArgs } from './generic-transformers'; + +describe('ZRANK WITHSCORE', () => { + testUtils.isVersionGreaterThanHook([7, 2]); + + it('transformArguments', () => { + assert.deepEqual( + parseArgs(ZRANK_WITHSCORE, 'key', 'member'), + ['ZRANK', 'key', 'member', 'WITHSCORE'] + ); + }); + + testUtils.testAll('zRankWithScore - null', async client => { + assert.equal( + await client.zRankWithScore('key', 'member'), + null + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); + + testUtils.testAll('zRankWithScore - with member', async client => { + const member = { + value: '1', + score: 1 + } + + const [, reply] = await Promise.all([ + client.zAdd('key', member), + client.zRankWithScore('key', member.value) + ]) + assert.deepEqual( + reply, + { + rank: 0, + score: 1 + } + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/ZRANK_WITHSCORE.ts b/packages/client/lib/commands/ZRANK_WITHSCORE.ts new file mode 100644 index 00000000000..6f6537f4087 --- /dev/null +++ b/packages/client/lib/commands/ZRANK_WITHSCORE.ts @@ -0,0 +1,35 @@ +import { NullReply, TuplesReply, NumberReply, BlobStringReply, DoubleReply, UnwrapReply, Command } from '../RESP/types'; +import ZRANK from './ZRANK'; + +export default { 
+ CACHEABLE: ZRANK.CACHEABLE, + IS_READ_ONLY: ZRANK.IS_READ_ONLY, + /** + * Returns the rank of a member in the sorted set with its score. + * @param args - Same parameters as the ZRANK command. + */ + parseCommand(...args: Parameters) { + const parser = args[0]; + + ZRANK.parseCommand(...args); + parser.push('WITHSCORE'); + }, + transformReply: { + 2: (reply: UnwrapReply>) => { + if (reply === null) return null; + + return { + rank: reply[0], + score: Number(reply[1]) + }; + }, + 3: (reply: UnwrapReply>) => { + if (reply === null) return null; + + return { + rank: reply[0], + score: reply[1] + }; + } + } +} as const satisfies Command; diff --git a/packages/client/lib/commands/ZREM.spec.ts b/packages/client/lib/commands/ZREM.spec.ts new file mode 100644 index 00000000000..ac65b3d0139 --- /dev/null +++ b/packages/client/lib/commands/ZREM.spec.ts @@ -0,0 +1,32 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ZREM from './ZREM'; +import { parseArgs } from './generic-transformers'; + +describe('ZREM', () => { + describe('transformArguments', () => { + it('string', () => { + assert.deepEqual( + parseArgs(ZREM, 'key', 'member'), + ['ZREM', 'key', 'member'] + ); + }); + + it('array', () => { + assert.deepEqual( + parseArgs(ZREM, 'key', ['1', '2']), + ['ZREM', 'key', '1', '2'] + ); + }); + }); + + testUtils.testAll('zRem', async client => { + assert.equal( + await client.zRem('key', 'member'), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/ZREM.ts b/packages/client/lib/commands/ZREM.ts new file mode 100644 index 00000000000..960b47a36fd --- /dev/null +++ b/packages/client/lib/commands/ZREM.ts @@ -0,0 +1,23 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; +import { RedisVariadicArgument } from './generic-transformers'; + +export default { + IS_READ_ONLY: 
false, + /** + * Removes the specified members from the sorted set. + * @param parser - The Redis command parser. + * @param key - Key of the sorted set. + * @param member - One or more members to remove. + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + member: RedisVariadicArgument + ) { + parser.push('ZREM'); + parser.pushKey(key); + parser.pushVariadic(member); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/ZREMRANGEBYLEX.spec.ts b/packages/client/lib/commands/ZREMRANGEBYLEX.spec.ts new file mode 100644 index 00000000000..b141b7679ee --- /dev/null +++ b/packages/client/lib/commands/ZREMRANGEBYLEX.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ZREMRANGEBYLEX from './ZREMRANGEBYLEX'; +import { parseArgs } from './generic-transformers'; + +describe('ZREMRANGEBYLEX', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(ZREMRANGEBYLEX, 'key', '[a', '[b'), + ['ZREMRANGEBYLEX', 'key', '[a', '[b'] + ); + }); + + testUtils.testAll('zRemRangeByLex', async client => { + assert.equal( + await client.zRemRangeByLex('key', '[a', '[b'), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/ZREMRANGEBYLEX.ts b/packages/client/lib/commands/ZREMRANGEBYLEX.ts new file mode 100644 index 00000000000..434dcc6aac0 --- /dev/null +++ b/packages/client/lib/commands/ZREMRANGEBYLEX.ts @@ -0,0 +1,28 @@ +import { CommandParser } from '../client/parser'; +import { NumberReply, Command, RedisArgument } from '../RESP/types'; +import { transformStringDoubleArgument } from './generic-transformers'; + +export default { + IS_READ_ONLY: false, + /** + * Removes all elements in the sorted set with lexicographical values between min and max. + * @param parser - The Redis command parser. 
+ * @param key - Key of the sorted set. + * @param min - Minimum lexicographical value. + * @param max - Maximum lexicographical value. + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + min: RedisArgument | number, + max: RedisArgument | number + ) { + parser.push('ZREMRANGEBYLEX'); + parser.pushKey(key); + parser.push( + transformStringDoubleArgument(min), + transformStringDoubleArgument(max) + ); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/ZREMRANGEBYRANK.spec.ts b/packages/client/lib/commands/ZREMRANGEBYRANK.spec.ts new file mode 100644 index 00000000000..19f54466c20 --- /dev/null +++ b/packages/client/lib/commands/ZREMRANGEBYRANK.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ZREMRANGEBYRANK from './ZREMRANGEBYRANK'; +import { parseArgs } from './generic-transformers'; + +describe('ZREMRANGEBYRANK', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(ZREMRANGEBYRANK, 'key', 0, 1), + ['ZREMRANGEBYRANK', 'key', '0', '1'] + ); + }); + + testUtils.testAll('zRemRangeByRank', async client => { + assert.equal( + await client.zRemRangeByRank('key', 0, 1), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/ZREMRANGEBYRANK.ts b/packages/client/lib/commands/ZREMRANGEBYRANK.ts new file mode 100644 index 00000000000..90ab6b3aefe --- /dev/null +++ b/packages/client/lib/commands/ZREMRANGEBYRANK.ts @@ -0,0 +1,27 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; + +export default { + IS_READ_ONLY: false, + /** + * Removes all elements in the sorted set with rank between start and stop. + * @param parser - The Redis command parser. + * @param key - Key of the sorted set. 
+ * @param start - Minimum rank (starting from 0). + * @param stop - Maximum rank. + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + start: number, + stop: number + ) { + parser.push('ZREMRANGEBYRANK'); + parser.pushKey(key); + parser.push( + start.toString(), + stop.toString() + ); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/ZREMRANGEBYSCORE.spec.ts b/packages/client/lib/commands/ZREMRANGEBYSCORE.spec.ts new file mode 100644 index 00000000000..856692ef8f5 --- /dev/null +++ b/packages/client/lib/commands/ZREMRANGEBYSCORE.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import { parseArgs } from './generic-transformers'; +import ZREMRANGEBYSCORE from './ZREMRANGEBYSCORE'; + +describe('ZREMRANGEBYSCORE', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(ZREMRANGEBYSCORE, 'key', 0, 1), + ['ZREMRANGEBYSCORE', 'key', '0', '1'] + ); + }); + + testUtils.testAll('zRemRangeByScore', async client => { + assert.equal( + await client.zRemRangeByScore('key', 0, 1), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/ZREMRANGEBYSCORE.ts b/packages/client/lib/commands/ZREMRANGEBYSCORE.ts new file mode 100644 index 00000000000..e78c57ea656 --- /dev/null +++ b/packages/client/lib/commands/ZREMRANGEBYSCORE.ts @@ -0,0 +1,28 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command } from '../RESP/types'; +import { transformStringDoubleArgument } from './generic-transformers'; + +export default { + IS_READ_ONLY: false, + /** + * Removes all elements in the sorted set with scores between min and max. + * @param parser - The Redis command parser. + * @param key - Key of the sorted set. + * @param min - Minimum score. + * @param max - Maximum score. 
+ */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + min: RedisArgument | number, + max: RedisArgument | number, + ) { + parser.push('ZREMRANGEBYSCORE'); + parser.pushKey(key); + parser.push( + transformStringDoubleArgument(min), + transformStringDoubleArgument(max) + ); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/ZREVRANK.spec.ts b/packages/client/lib/commands/ZREVRANK.spec.ts new file mode 100644 index 00000000000..c89f528eb1c --- /dev/null +++ b/packages/client/lib/commands/ZREVRANK.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import { parseArgs } from './generic-transformers'; +import ZREVRANK from './ZREVRANK'; + +describe('ZREVRANK', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(ZREVRANK, 'key', 'member'), + ['ZREVRANK', 'key', 'member'] + ); + }); + + testUtils.testAll('zRevRank', async client => { + assert.equal( + await client.zRevRank('key', 'member'), + null + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/ZREVRANK.ts b/packages/client/lib/commands/ZREVRANK.ts new file mode 100644 index 00000000000..f2f79e570cc --- /dev/null +++ b/packages/client/lib/commands/ZREVRANK.ts @@ -0,0 +1,19 @@ +import { CommandParser } from '../client/parser'; +import { NumberReply, NullReply, Command, RedisArgument } from '../RESP/types'; + +export default { + CACHEABLE: true, + IS_READ_ONLY: true, + /** + * Returns the rank of a member in the sorted set, with scores ordered from high to low. + * @param parser - The Redis command parser. + * @param key - Key of the sorted set. + * @param member - Member to get the rank for. 
+ */ + parseCommand(parser: CommandParser, key: RedisArgument, member: RedisArgument) { + parser.push('ZREVRANK'); + parser.pushKey(key); + parser.push(member); + }, + transformReply: undefined as unknown as () => NumberReply | NullReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/ZSCAN.spec.ts b/packages/client/lib/commands/ZSCAN.spec.ts new file mode 100644 index 00000000000..f8064aea41e --- /dev/null +++ b/packages/client/lib/commands/ZSCAN.spec.ts @@ -0,0 +1,53 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import { parseArgs } from './generic-transformers'; +import ZSCAN from './ZSCAN'; + +describe('ZSCAN', () => { + describe('transformArguments', () => { + it('cusror only', () => { + assert.deepEqual( + parseArgs(ZSCAN, 'key', '0'), + ['ZSCAN', 'key', '0'] + ); + }); + + it('with MATCH', () => { + assert.deepEqual( + parseArgs(ZSCAN, 'key', '0', { + MATCH: 'pattern' + }), + ['ZSCAN', 'key', '0', 'MATCH', 'pattern'] + ); + }); + + it('with COUNT', () => { + assert.deepEqual( + parseArgs(ZSCAN, 'key', '0', { + COUNT: 1 + }), + ['ZSCAN', 'key', '0', 'COUNT', '1'] + ); + }); + + it('with MATCH & COUNT', () => { + assert.deepEqual( + parseArgs(ZSCAN, 'key', '0', { + MATCH: 'pattern', + COUNT: 1 + }), + ['ZSCAN', 'key', '0', 'MATCH', 'pattern', 'COUNT', '1'] + ); + }); + }); + + testUtils.testWithClient('zScan', async client => { + assert.deepEqual( + await client.zScan('key', '0'), + { + cursor: '0', + members: [] + } + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/client/lib/commands/ZSCAN.ts b/packages/client/lib/commands/ZSCAN.ts new file mode 100644 index 00000000000..2790db5e023 --- /dev/null +++ b/packages/client/lib/commands/ZSCAN.ts @@ -0,0 +1,36 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, ArrayReply, BlobStringReply, Command } from '../RESP/types'; +import { ScanCommonOptions, parseScanArguments } from './SCAN'; +import { 
transformSortedSetReply } from './generic-transformers'; + +export interface HScanEntry { + field: BlobStringReply; + value: BlobStringReply; +} + +export default { + IS_READ_ONLY: true, + /** + * Incrementally iterates over a sorted set. + * @param parser - The Redis command parser. + * @param key - Key of the sorted set. + * @param cursor - Cursor position to start the scan from. + * @param options - Optional scan parameters (COUNT, MATCH, TYPE). + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + cursor: RedisArgument, + options?: ScanCommonOptions + ) { + parser.push('ZSCAN'); + parser.pushKey(key); + parseScanArguments(parser, cursor, options); + }, + transformReply([cursor, rawMembers]: [BlobStringReply, ArrayReply]) { + return { + cursor, + members: transformSortedSetReply[2](rawMembers) + }; + } +} as const satisfies Command; diff --git a/packages/client/lib/commands/ZSCORE.spec.ts b/packages/client/lib/commands/ZSCORE.spec.ts new file mode 100644 index 00000000000..4229ab7aac0 --- /dev/null +++ b/packages/client/lib/commands/ZSCORE.spec.ts @@ -0,0 +1,23 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ZSCORE from './ZSCORE'; +import { parseArgs } from './generic-transformers'; + +describe('ZSCORE', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(ZSCORE, 'key', 'member'), + ['ZSCORE', 'key', 'member'] + ); + }); + + testUtils.testAll('zScore', async client => { + assert.equal( + await client.zScore('key', 'member'), + null + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/ZSCORE.ts b/packages/client/lib/commands/ZSCORE.ts new file mode 100644 index 00000000000..8b44154f44d --- /dev/null +++ b/packages/client/lib/commands/ZSCORE.ts @@ -0,0 +1,21 @@ + +import { CommandParser } from '../client/parser'; +import { RedisArgument, Command } from '../RESP/types'; +import { 
transformNullableDoubleReply } from './generic-transformers'; + +export default { + CACHEABLE: true, + IS_READ_ONLY: true, + /** + * Returns the score of a member in a sorted set. + * @param parser - The Redis command parser. + * @param key - Key of the sorted set. + * @param member - Member to get the score for. + */ + parseCommand(parser: CommandParser, key: RedisArgument, member: RedisArgument) { + parser.push('ZSCORE'); + parser.pushKey(key); + parser.push(member); + }, + transformReply: transformNullableDoubleReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/ZUNION.spec.ts b/packages/client/lib/commands/ZUNION.spec.ts new file mode 100644 index 00000000000..b4dbb4de603 --- /dev/null +++ b/packages/client/lib/commands/ZUNION.spec.ts @@ -0,0 +1,66 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ZUNION from './ZUNION'; +import { parseArgs } from './generic-transformers'; + +describe('ZUNION', () => { + testUtils.isVersionGreaterThanHook([6, 2]); + + describe('transformArguments', () => { + it('key (string)', () => { + assert.deepEqual( + parseArgs(ZUNION, 'key'), + ['ZUNION', '1', 'key'] + ); + }); + + it('keys (Array)', () => { + assert.deepEqual( + parseArgs(ZUNION, ['1', '2']), + ['ZUNION', '2', '1', '2'] + ); + }); + + it('key & weight', () => { + assert.deepEqual( + parseArgs(ZUNION, { + key: 'key', + weight: 1 + }), + ['ZUNION', '1', 'key', 'WEIGHTS', '1'] + ); + }); + + it('keys & weights', () => { + assert.deepEqual( + parseArgs(ZUNION, [{ + key: 'a', + weight: 1 + }, { + key: 'b', + weight: 2 + }]), + ['ZUNION', '2', 'a', 'b', 'WEIGHTS', '1', '2'] + ); + }); + + it('with AGGREGATE', () => { + assert.deepEqual( + parseArgs(ZUNION, 'key', { + AGGREGATE: 'SUM' + }), + ['ZUNION', '1', 'key', 'AGGREGATE', 'SUM'] + ); + }); + }); + + testUtils.testAll('zUnion', async client => { + assert.deepEqual( + await client.zUnion('key'), + [] + ); + }, { + client: 
GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/ZUNION.ts b/packages/client/lib/commands/ZUNION.ts new file mode 100644 index 00000000000..6497d0d8e8b --- /dev/null +++ b/packages/client/lib/commands/ZUNION.ts @@ -0,0 +1,26 @@ +import { CommandParser } from '../client/parser'; +import { ArrayReply, BlobStringReply, Command } from '../RESP/types'; +import { ZKeys, parseZKeysArguments } from './generic-transformers'; + +export interface ZUnionOptions { + AGGREGATE?: 'SUM' | 'MIN' | 'MAX'; +} + +export default { + IS_READ_ONLY: true, + /** + * Returns the union of multiple sorted sets. + * @param parser - The Redis command parser. + * @param keys - Keys of the sorted sets to combine. + * @param options - Optional parameters for the union operation. + */ + parseCommand(parser: CommandParser, keys: ZKeys, options?: ZUnionOptions) { + parser.push('ZUNION'); + parseZKeysArguments(parser, keys); + + if (options?.AGGREGATE) { + parser.push('AGGREGATE', options.AGGREGATE); + } + }, + transformReply: undefined as unknown as () => ArrayReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/ZUNIONSTORE.spec.ts b/packages/client/lib/commands/ZUNIONSTORE.spec.ts new file mode 100644 index 00000000000..a369a649311 --- /dev/null +++ b/packages/client/lib/commands/ZUNIONSTORE.spec.ts @@ -0,0 +1,64 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ZUNIONSTORE from './ZUNIONSTORE'; +import { parseArgs } from './generic-transformers'; + +describe('ZUNIONSTORE', () => { + describe('transformArguments', () => { + it('key (string)', () => { + assert.deepEqual( + parseArgs(ZUNIONSTORE, 'destination', 'source'), + ['ZUNIONSTORE', 'destination', '1', 'source'] + ); + }); + + it('keys (Array)', () => { + assert.deepEqual( + parseArgs(ZUNIONSTORE, 'destination', ['1', '2']), + ['ZUNIONSTORE', 'destination', '2', '1', '2'] + ); + }); + + it('key & 
weight', () => { + assert.deepEqual( + parseArgs(ZUNIONSTORE, 'destination', { + key: 'source', + weight: 1 + }), + ['ZUNIONSTORE', 'destination', '1', 'source', 'WEIGHTS', '1'] + ); + }); + + it('keys & weights', () => { + assert.deepEqual( + parseArgs(ZUNIONSTORE, 'destination', [{ + key: 'a', + weight: 1 + }, { + key: 'b', + weight: 2 + }]), + ['ZUNIONSTORE', 'destination', '2', 'a', 'b', 'WEIGHTS', '1', '2'] + ); + }); + + it('with AGGREGATE', () => { + assert.deepEqual( + parseArgs(ZUNIONSTORE, 'destination', 'source', { + AGGREGATE: 'SUM' + }), + ['ZUNIONSTORE', 'destination', '1', 'source', 'AGGREGATE', 'SUM'] + ); + }); + }); + + testUtils.testAll('zUnionStore', async client => { + assert.equal( + await client.zUnionStore('{tag}destination', '{tag}key'), + 0 + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/ZUNIONSTORE.ts b/packages/client/lib/commands/ZUNIONSTORE.ts new file mode 100644 index 00000000000..9de766e8b06 --- /dev/null +++ b/packages/client/lib/commands/ZUNIONSTORE.ts @@ -0,0 +1,33 @@ +import { CommandParser } from '../client/parser'; +import { RedisArgument, NumberReply, Command, } from '../RESP/types'; +import { ZKeys, parseZKeysArguments } from './generic-transformers'; + +export interface ZUnionOptions { + AGGREGATE?: 'SUM' | 'MIN' | 'MAX'; +} + +export default { + IS_READ_ONLY: false, + /** + * Stores the union of multiple sorted sets in a new sorted set. + * @param parser - The Redis command parser. + * @param destination - Destination key where the result will be stored. + * @param keys - Keys of the sorted sets to combine. + * @param options - Optional parameters for the union operation. 
+ */ + parseCommand( + parser: CommandParser, + destination: RedisArgument, + keys: ZKeys, + options?: ZUnionOptions + ): any { + parser.push('ZUNIONSTORE'); + parser.pushKey(destination); + parseZKeysArguments(parser, keys); + + if (options?.AGGREGATE) { + parser.push('AGGREGATE', options.AGGREGATE); + } + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/ZUNION_WITHSCORES.spec.ts b/packages/client/lib/commands/ZUNION_WITHSCORES.spec.ts new file mode 100644 index 00000000000..dee735fc99f --- /dev/null +++ b/packages/client/lib/commands/ZUNION_WITHSCORES.spec.ts @@ -0,0 +1,66 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ZUNION_WITHSCORES from './ZUNION_WITHSCORES'; +import { parseArgs } from './generic-transformers'; + +describe('ZUNION WITHSCORES', () => { + testUtils.isVersionGreaterThanHook([6, 2]); + + describe('transformArguments', () => { + it('key (string)', () => { + assert.deepEqual( + parseArgs(ZUNION_WITHSCORES, 'key'), + ['ZUNION', '1', 'key', 'WITHSCORES'] + ); + }); + + it('keys (Array)', () => { + assert.deepEqual( + parseArgs(ZUNION_WITHSCORES, ['1', '2']), + ['ZUNION', '2', '1', '2', 'WITHSCORES'] + ); + }); + + it('key & weight', () => { + assert.deepEqual( + parseArgs(ZUNION_WITHSCORES, { + key: 'key', + weight: 1 + }), + ['ZUNION', '1', 'key', 'WEIGHTS', '1', 'WITHSCORES'] + ); + }); + + it('keys & weights', () => { + assert.deepEqual( + parseArgs(ZUNION_WITHSCORES, [{ + key: 'a', + weight: 1 + }, { + key: 'b', + weight: 2 + }]), + ['ZUNION', '2', 'a', 'b', 'WEIGHTS', '1', '2', 'WITHSCORES'] + ); + }); + + it('with AGGREGATE', () => { + assert.deepEqual( + parseArgs(ZUNION_WITHSCORES, 'key', { + AGGREGATE: 'SUM' + }), + ['ZUNION', '1', 'key', 'AGGREGATE', 'SUM', 'WITHSCORES'] + ); + }); + }); + + testUtils.testAll('zUnionWithScores', async client => { + assert.deepEqual( + await 
client.zUnionWithScores('key'), + [] + ); + }, { + client: GLOBAL.SERVERS.OPEN, + cluster: GLOBAL.CLUSTERS.OPEN + }); +}); diff --git a/packages/client/lib/commands/ZUNION_WITHSCORES.ts b/packages/client/lib/commands/ZUNION_WITHSCORES.ts new file mode 100644 index 00000000000..af93a4eb1c0 --- /dev/null +++ b/packages/client/lib/commands/ZUNION_WITHSCORES.ts @@ -0,0 +1,19 @@ +import { Command } from '../RESP/types'; +import { transformSortedSetReply } from './generic-transformers'; +import ZUNION from './ZUNION'; + + +export default { + IS_READ_ONLY: ZUNION.IS_READ_ONLY, + /** + * Returns the union of multiple sorted sets with their scores. + * @param args - Same parameters as the ZUNION command. + */ + parseCommand(...args: Parameters) { + const parser = args[0]; + + ZUNION.parseCommand(...args); + parser.push('WITHSCORES'); + }, + transformReply: transformSortedSetReply +} as const satisfies Command; diff --git a/packages/client/lib/commands/common-stream.types.ts b/packages/client/lib/commands/common-stream.types.ts new file mode 100644 index 00000000000..60955b6e3c3 --- /dev/null +++ b/packages/client/lib/commands/common-stream.types.ts @@ -0,0 +1,28 @@ +/** Common stream deletion policies + * + * Added in Redis 8.2 + */ +export const STREAM_DELETION_POLICY = { + /** Preserve references (default) */ + KEEPREF: "KEEPREF", + /** Delete all references */ + DELREF: "DELREF", + /** Only acknowledged entries */ + ACKED: "ACKED", +} as const; + +export type StreamDeletionPolicy = + (typeof STREAM_DELETION_POLICY)[keyof typeof STREAM_DELETION_POLICY]; + +/** Common reply codes for stream deletion operations */ +export const STREAM_DELETION_REPLY_CODES = { + /** ID not found */ + NOT_FOUND: -1, + /** Entry deleted */ + DELETED: 1, + /** Dangling references */ + DANGLING_REFS: 2, +} as const; + +export type StreamDeletionReplyCode = + (typeof STREAM_DELETION_REPLY_CODES)[keyof typeof STREAM_DELETION_REPLY_CODES]; diff --git 
a/packages/client/lib/commands/generic-transformers.spec.ts b/packages/client/lib/commands/generic-transformers.spec.ts new file mode 100644 index 00000000000..5f990d4e34d --- /dev/null +++ b/packages/client/lib/commands/generic-transformers.spec.ts @@ -0,0 +1,685 @@ +// import { strict as assert } from 'node:assert'; +// import { +// transformBooleanReply, +// transformBooleanArrayReply, +// pushScanArguments, +// transformNumberInfinityReply, +// transformNumberInfinityNullReply, +// transformNumberInfinityArgument, +// transformStringNumberInfinityArgument, +// transformTuplesReply, +// transformStreamMessagesReply, +// transformStreamsMessagesReply, +// transformSortedSetWithScoresReply, +// pushGeoCountArgument, +// pushGeoSearchArguments, +// GeoReplyWith, +// transformGeoMembersWithReply, +// transformEXAT, +// transformPXAT, +// pushEvalArguments, +// pushVariadicArguments, +// pushVariadicNumberArguments, +// pushVariadicArgument, +// pushOptionalVariadicArgument, +// transformCommandReply, +// CommandFlags, +// CommandCategories, +// pushSlotRangesArguments +// } from './generic-transformers'; + +// describe('Generic Transformers', () => { +// describe('transformBooleanReply', () => { +// it('0', () => { +// assert.equal( +// transformBooleanReply(0), +// false +// ); +// }); + +// it('1', () => { +// assert.equal( +// transformBooleanReply(1), +// true +// ); +// }); +// }); + +// describe('transformBooleanArrayReply', () => { +// it('empty array', () => { +// assert.deepEqual( +// transformBooleanArrayReply([]), +// [] +// ); +// }); + +// it('0, 1', () => { +// assert.deepEqual( +// transformBooleanArrayReply([0, 1]), +// [false, true] +// ); +// }); +// }); + +// describe('pushScanArguments', () => { +// it('cusror only', () => { +// assert.deepEqual( +// pushScanArguments([], 0), +// ['0'] +// ); +// }); + +// it('with MATCH', () => { +// assert.deepEqual( +// pushScanArguments([], 0, { +// MATCH: 'pattern' +// }), +// ['0', 'MATCH', 'pattern'] +// 
); +// }); + +// it('with COUNT', () => { +// assert.deepEqual( +// pushScanArguments([], 0, { +// COUNT: 1 +// }), +// ['0', 'COUNT', '1'] +// ); +// }); + +// it('with MATCH & COUNT', () => { +// assert.deepEqual( +// pushScanArguments([], 0, { +// MATCH: 'pattern', +// COUNT: 1 +// }), +// ['0', 'MATCH', 'pattern', 'COUNT', '1'] +// ); +// }); +// }); + +// describe('transformNumberInfinityReply', () => { +// it('0.5', () => { +// assert.equal( +// transformNumberInfinityReply('0.5'), +// 0.5 +// ); +// }); + +// it('+inf', () => { +// assert.equal( +// transformNumberInfinityReply('+inf'), +// Infinity +// ); +// }); + +// it('-inf', () => { +// assert.equal( +// transformNumberInfinityReply('-inf'), +// -Infinity +// ); +// }); +// }); + +// describe('transformNumberInfinityNullReply', () => { +// it('null', () => { +// assert.equal( +// transformNumberInfinityNullReply(null), +// null +// ); +// }); + +// it('1', () => { +// assert.equal( +// transformNumberInfinityNullReply('1'), +// 1 +// ); +// }); +// }); + +// describe('transformNumberInfinityArgument', () => { +// it('0.5', () => { +// assert.equal( +// transformNumberInfinityArgument(0.5), +// '0.5' +// ); +// }); + +// it('Infinity', () => { +// assert.equal( +// transformNumberInfinityArgument(Infinity), +// '+inf' +// ); +// }); + +// it('-Infinity', () => { +// assert.equal( +// transformNumberInfinityArgument(-Infinity), +// '-inf' +// ); +// }); +// }); + +// describe('transformStringNumberInfinityArgument', () => { +// it("'0.5'", () => { +// assert.equal( +// transformStringNumberInfinityArgument('0.5'), +// '0.5' +// ); +// }); + +// it('0.5', () => { +// assert.equal( +// transformStringNumberInfinityArgument(0.5), +// '0.5' +// ); +// }); +// }); + +// it('transformTuplesReply', () => { +// assert.deepEqual( +// transformTuplesReply(['key1', 'value1', 'key2', 'value2']), +// Object.create(null, { +// key1: { +// value: 'value1', +// configurable: true, +// enumerable: true +// }, +// key2: { 
+// value: 'value2', +// configurable: true, +// enumerable: true +// } +// }) +// ); +// }); + +// it('transformStreamMessagesReply', () => { +// assert.deepEqual( +// transformStreamMessagesReply([['0-0', ['0key', '0value']], ['1-0', ['1key', '1value']]]), +// [{ +// id: '0-0', +// message: Object.create(null, { +// '0key': { +// value: '0value', +// configurable: true, +// enumerable: true +// } +// }) +// }, { +// id: '1-0', +// message: Object.create(null, { +// '1key': { +// value: '1value', +// configurable: true, +// enumerable: true +// } +// }) +// }] +// ); +// }); + +// describe('transformStreamsMessagesReply', () => { +// it('null', () => { +// assert.equal( +// transformStreamsMessagesReply(null), +// null +// ); +// }); + +// it('with messages', () => { +// assert.deepEqual( +// transformStreamsMessagesReply([['stream1', [['0-1', ['11key', '11value']], ['1-1', ['12key', '12value']]]], ['stream2', [['0-2', ['2key1', '2value1', '2key2', '2value2']]]]]), +// [{ +// name: 'stream1', +// messages: [{ +// id: '0-1', +// message: Object.create(null, { +// '11key': { +// value: '11value', +// configurable: true, +// enumerable: true +// } +// }) +// }, { +// id: '1-1', +// message: Object.create(null, { +// '12key': { +// value: '12value', +// configurable: true, +// enumerable: true +// } +// }) +// }] +// }, { +// name: 'stream2', +// messages: [{ +// id: '0-2', +// message: Object.create(null, { +// '2key1': { +// value: '2value1', +// configurable: true, +// enumerable: true +// }, +// '2key2': { +// value: '2value2', +// configurable: true, +// enumerable: true +// } +// }) +// }] +// }] +// ); +// }); +// }); + +// it('transformSortedSetWithScoresReply', () => { +// assert.deepEqual( +// transformSortedSetWithScoresReply(['member1', '0.5', 'member2', '+inf', 'member3', '-inf']), +// [{ +// value: 'member1', +// score: 0.5 +// }, { +// value: 'member2', +// score: Infinity +// }, { +// value: 'member3', +// score: -Infinity +// }] +// ); +// }); + +// 
describe('pushGeoCountArgument', () => { +// it('undefined', () => { +// assert.deepEqual( +// pushGeoCountArgument([], undefined), +// [] +// ); +// }); + +// it('number', () => { +// assert.deepEqual( +// pushGeoCountArgument([], 1), +// ['COUNT', '1'] +// ); +// }); + +// describe('with COUNT', () => { +// it('number', () => { +// assert.deepEqual( +// pushGeoCountArgument([], 1), +// ['COUNT', '1'] +// ); +// }); + +// describe('object', () => { +// it('value', () => { +// assert.deepEqual( +// pushGeoCountArgument([], { value: 1 }), +// ['COUNT', '1'] +// ); +// }); + +// it('value, ANY', () => { +// assert.deepEqual( +// pushGeoCountArgument([], { +// value: 1, +// ANY: true +// }), +// ['COUNT', '1', 'ANY'] +// ); +// }); +// }); +// }); +// }); + +// describe('pushGeoSearchArguments', () => { +// it('FROMMEMBER, BYRADIUS', () => { +// assert.deepEqual( +// pushGeoSearchArguments([], 'key', 'member', { +// radius: 1, +// unit: 'm' +// }), +// ['key', 'FROMMEMBER', 'member', 'BYRADIUS', '1', 'm'] +// ); +// }); + +// it('FROMLONLAT, BYBOX', () => { +// assert.deepEqual( +// pushGeoSearchArguments([], 'key', { +// longitude: 1, +// latitude: 2 +// }, { +// width: 1, +// height: 2, +// unit: 'm' +// }), +// ['key', 'FROMLONLAT', '1', '2', 'BYBOX', '1', '2', 'm'] +// ); +// }); + +// it('with SORT', () => { +// assert.deepEqual( +// pushGeoSearchArguments([], 'key', 'member', { +// radius: 1, +// unit: 'm' +// }, { +// SORT: 'ASC' +// }), +// ['key', 'FROMMEMBER', 'member', 'BYRADIUS', '1', 'm', 'ASC'] +// ); +// }); +// }); + +// describe('transformGeoMembersWithReply', () => { +// it('DISTANCE', () => { +// assert.deepEqual( +// transformGeoMembersWithReply([ +// [ +// '1', +// '2' +// ], +// [ +// '3', +// '4' +// ] +// ], [GeoReplyWith.DISTANCE]), +// [{ +// member: '1', +// distance: '2' +// }, { +// member: '3', +// distance: '4' +// }] +// ); +// }); + +// it('HASH', () => { +// assert.deepEqual( +// transformGeoMembersWithReply([ +// [ +// '1', +// 2 +// 
], +// [ +// '3', +// 4 +// ] +// ], [GeoReplyWith.HASH]), +// [{ +// member: '1', +// hash: 2 +// }, { +// member: '3', +// hash: 4 +// }] +// ); +// }); + +// it('COORDINATES', () => { +// assert.deepEqual( +// transformGeoMembersWithReply([ +// [ +// '1', +// [ +// '2', +// '3' +// ] +// ], +// [ +// '4', +// [ +// '5', +// '6' +// ] +// ] +// ], [GeoReplyWith.COORDINATES]), +// [{ +// member: '1', +// coordinates: { +// longitude: '2', +// latitude: '3' +// } +// }, { +// member: '4', +// coordinates: { +// longitude: '5', +// latitude: '6' +// } +// }] +// ); +// }); + +// it('DISTANCE, HASH, COORDINATES', () => { +// assert.deepEqual( +// transformGeoMembersWithReply([ +// [ +// '1', +// '2', +// 3, +// [ +// '4', +// '5' +// ] +// ], +// [ +// '6', +// '7', +// 8, +// [ +// '9', +// '10' +// ] +// ] +// ], [GeoReplyWith.DISTANCE, GeoReplyWith.HASH, GeoReplyWith.COORDINATES]), +// [{ +// member: '1', +// distance: '2', +// hash: 3, +// coordinates: { +// longitude: '4', +// latitude: '5' +// } +// }, { +// member: '6', +// distance: '7', +// hash: 8, +// coordinates: { +// longitude: '9', +// latitude: '10' +// } +// }] +// ); +// }); +// }); + +// describe('transformEXAT', () => { +// it('number', () => { +// assert.equal( +// transformEXAT(1), +// '1' +// ); +// }); + +// it('date', () => { +// const d = new Date(); +// assert.equal( +// transformEXAT(d), +// Math.floor(d.getTime() / 1000).toString() +// ); +// }); +// }); + +// describe('transformPXAT', () => { +// it('number', () => { +// assert.equal( +// transformPXAT(1), +// '1' +// ); +// }); + +// it('date', () => { +// const d = new Date(); +// assert.equal( +// transformPXAT(d), +// d.getTime().toString() +// ); +// }); +// }); + +// describe('pushEvalArguments', () => { +// it('empty', () => { +// assert.deepEqual( +// pushEvalArguments([]), +// ['0'] +// ); +// }); + +// it('with keys', () => { +// assert.deepEqual( +// pushEvalArguments([], { +// keys: ['key'] +// }), +// ['1', 'key'] +// ); +// 
}); + +// it('with arguments', () => { +// assert.deepEqual( +// pushEvalArguments([], { +// arguments: ['argument'] +// }), +// ['0', 'argument'] +// ); +// }); + +// it('with keys and arguments', () => { +// assert.deepEqual( +// pushEvalArguments([], { +// keys: ['key'], +// arguments: ['argument'] +// }), +// ['1', 'key', 'argument'] +// ); +// }); +// }); + +// describe('pushVariadicArguments', () => { +// it('string', () => { +// assert.deepEqual( +// pushVariadicArguments([], 'string'), +// ['string'] +// ); +// }); + +// it('array', () => { +// assert.deepEqual( +// pushVariadicArguments([], ['1', '2']), +// ['1', '2'] +// ); +// }); +// }); + +// describe('pushVariadicNumberArguments', () => { +// it('number', () => { +// assert.deepEqual( +// pushVariadicNumberArguments([], 0), +// ['0'] +// ); +// }); + +// it('array', () => { +// assert.deepEqual( +// pushVariadicNumberArguments([], [0, 1]), +// ['0', '1'] +// ); +// }); +// }); + +// describe('pushVariadicArgument', () => { +// it('string', () => { +// assert.deepEqual( +// pushVariadicArgument([], 'string'), +// ['1', 'string'] +// ); +// }); + +// it('array', () => { +// assert.deepEqual( +// pushVariadicArgument([], ['1', '2']), +// ['2', '1', '2'] +// ); +// }); +// }); + +// describe('pushOptionalVariadicArgument', () => { +// it('undefined', () => { +// assert.deepEqual( +// pushOptionalVariadicArgument([], 'name', undefined), +// [] +// ); +// }); + +// it('string', () => { +// assert.deepEqual( +// pushOptionalVariadicArgument([], 'name', 'string'), +// ['name', '1', 'string'] +// ); +// }); + +// it('array', () => { +// assert.deepEqual( +// pushOptionalVariadicArgument([], 'name', ['1', '2']), +// ['name', '2', '1', '2'] +// ); +// }); +// }); + +// it('transformCommandReply', () => { +// assert.deepEqual( +// transformCommandReply([ +// 'ping', +// -1, +// [CommandFlags.STALE, CommandFlags.FAST], +// 0, +// 0, +// 0, +// [CommandCategories.FAST, CommandCategories.CONNECTION] +// ]), +// { 
+// name: 'ping', +// arity: -1, +// flags: new Set([CommandFlags.STALE, CommandFlags.FAST]), +// firstKeyIndex: 0, +// lastKeyIndex: 0, +// step: 0, +// categories: new Set([CommandCategories.FAST, CommandCategories.CONNECTION]) +// } +// ); +// }); + +// describe('pushSlotRangesArguments', () => { +// it('single range', () => { +// assert.deepEqual( +// pushSlotRangesArguments([], { +// start: 0, +// end: 1 +// }), +// ['0', '1'] +// ); +// }); + +// it('multiple ranges', () => { +// assert.deepEqual( +// pushSlotRangesArguments([], [{ +// start: 0, +// end: 1 +// }, { +// start: 2, +// end: 3 +// }]), +// ['0', '1', '2', '3'] +// ); +// }); +// }); +// }); diff --git a/packages/client/lib/commands/generic-transformers.ts b/packages/client/lib/commands/generic-transformers.ts new file mode 100644 index 00000000000..022339e4bb7 --- /dev/null +++ b/packages/client/lib/commands/generic-transformers.ts @@ -0,0 +1,682 @@ +import { BasicCommandParser, CommandParser } from '../client/parser'; +import { RESP_TYPES } from '../RESP/decoder'; +import { UnwrapReply, ArrayReply, BlobStringReply, BooleanReply, CommandArguments, DoubleReply, NullReply, NumberReply, RedisArgument, TuplesReply, MapReply, TypeMapping, Command } from '../RESP/types'; + +export function isNullReply(reply: unknown): reply is NullReply { + return reply === null; +} + +export function isArrayReply(reply: unknown): reply is ArrayReply { + return Array.isArray(reply); +} + +export const transformBooleanReply = { + 2: (reply: NumberReply<0 | 1>) => reply as unknown as UnwrapReply === 1, + 3: undefined as unknown as () => BooleanReply +}; + +export const transformBooleanArrayReply = { + 2: (reply: ArrayReply>) => { + return (reply as unknown as UnwrapReply).map(transformBooleanReply[2]); + }, + 3: undefined as unknown as () => ArrayReply +}; + +export type BitValue = 0 | 1; + +export function transformDoubleArgument(num: number): string { + switch (num) { + case Infinity: + return '+inf'; + + case 
-Infinity: + return '-inf'; + + default: + return num.toString(); + } +} + +export function transformStringDoubleArgument(num: RedisArgument | number): RedisArgument { + if (typeof num !== 'number') return num; + + return transformDoubleArgument(num); +} + +export const transformDoubleReply = { + 2: (reply: BlobStringReply, preserve?: any, typeMapping?: TypeMapping): DoubleReply => { + const double = typeMapping ? typeMapping[RESP_TYPES.DOUBLE] : undefined; + + switch (double) { + case String: { + return reply as unknown as DoubleReply; + } + default: { + let ret: number; + + switch (reply.toString()) { + case 'inf': + case '+inf': + ret = Infinity; + + case '-inf': + ret = -Infinity; + + case 'nan': + ret = NaN; + + default: + ret = Number(reply); + } + + return ret as unknown as DoubleReply; + } + } + }, + 3: undefined as unknown as () => DoubleReply +}; + +export function createTransformDoubleReplyResp2Func(preserve?: any, typeMapping?: TypeMapping) { + return (reply: BlobStringReply) => { + return transformDoubleReply[2](reply, preserve, typeMapping); + } +} + +export const transformDoubleArrayReply = { + 2: (reply: Array, preserve?: any, typeMapping?: TypeMapping) => { + return reply.map(createTransformDoubleReplyResp2Func(preserve, typeMapping)); + }, + 3: undefined as unknown as () => ArrayReply +} + +export function createTransformNullableDoubleReplyResp2Func(preserve?: any, typeMapping?: TypeMapping) { + return (reply: BlobStringReply | NullReply) => { + return transformNullableDoubleReply[2](reply, preserve, typeMapping); + } +} + +export const transformNullableDoubleReply = { + 2: (reply: BlobStringReply | NullReply, preserve?: any, typeMapping?: TypeMapping) => { + if (reply === null) return null; + + return transformDoubleReply[2](reply as BlobStringReply, preserve, typeMapping); + }, + 3: undefined as unknown as () => DoubleReply | NullReply +}; + +export interface Stringable { + toString(): string; +} + +export function transformTuplesToMap( + reply: 
UnwrapReply<ArrayReply<BlobStringReply>>,
  func: (elem: any) => T,
) {
  // Flat [field, value, field, value, ...] reply -> prototype-less record,
  // transforming each value with `func`.
  const message = Object.create(null);

  for (let i = 0; i < reply.length; i += 2) {
    message[reply[i].toString()] = func(reply[i + 1]);
  }

  return message;
}

// Binds preserve/typeMapping so the tuples transform can be used as a map callback.
export function createTransformTuplesReplyFunc(preserve?: any, typeMapping?: TypeMapping) {
  return (reply: ArrayReply<BlobStringReply>) => {
    return transformTuplesReply(reply, preserve, typeMapping);
  };
}

/**
 * Converts a flat [field, value, ...] RESP2 array into the container the
 * caller's type mapping selects for MAP replies: the raw array (Array),
 * a Map, or (default) a prototype-less plain object.
 */
export function transformTuplesReply(
  reply: ArrayReply<BlobStringReply>,
  preserve?: any,
  typeMapping?: TypeMapping
): MapReply<BlobStringReply, BlobStringReply> {
  const mapType = typeMapping ? typeMapping[RESP_TYPES.MAP] : undefined;

  const inferred = reply as unknown as UnwrapReply<typeof reply>;

  switch (mapType) {
    case Array: {
      // Caller wants the wire format untouched.
      return reply as unknown as MapReply<BlobStringReply, BlobStringReply>;
    }
    case Map: {
      const ret = new Map();

      for (let i = 0; i < inferred.length; i += 2) {
        ret.set(inferred[i].toString(), inferred[i + 1] as any);
      }

      return ret as unknown as MapReply<BlobStringReply, BlobStringReply>;
    }
    default: {
      const ret: Record<string, unknown> = Object.create(null);

      for (let i = 0; i < inferred.length; i += 2) {
        ret[inferred[i].toString()] = inferred[i + 1] as any;
      }

      return ret as unknown as MapReply<BlobStringReply, BlobStringReply>;
    }
  }
}

export interface SortedSetMember {
  value: RedisArgument;
  score: number;
}

export type SortedSetSide = 'MIN' | 'MAX';

/**
 * Sorted-set replies: RESP2 interleaves [value, score, ...] blob strings,
 * RESP3 returns [value, score] tuples; both normalize to
 * { value, score } objects.
 */
export const transformSortedSetReply = {
  2: (reply: ArrayReply<BlobStringReply>, preserve?: any, typeMapping?: TypeMapping) => {
    const inferred = reply as unknown as UnwrapReply<typeof reply>,
      members = [];
    for (let i = 0; i < inferred.length; i += 2) {
      members.push({
        value: inferred[i],
        score: transformDoubleReply[2](inferred[i + 1], preserve, typeMapping)
      });
    }

    return members;
  },
  3: (reply: ArrayReply<TuplesReply<[BlobStringReply, DoubleReply]>>) => {
    return (reply as unknown as UnwrapReply<typeof reply>).map(member => {
      const [value, score] = member as unknown as UnwrapReply<typeof member>;
      return {
        value,
        score
      };
    });
  }
}

export type ListSide = 'LEFT' | 'RIGHT';

// EXAT accepts an absolute unix time in seconds, or a Date (truncated to seconds).
export function transformEXAT(EXAT: number | Date): string {
  return (typeof EXAT === 'number'
? EXAT : Math.floor(EXAT.getTime() / 1000)).toString();
}

// PXAT accepts an absolute unix time in milliseconds, or a Date.
export function transformPXAT(PXAT: number | Date): string {
  const milliseconds = typeof PXAT === 'number' ? PXAT : PXAT.getTime();
  return milliseconds.toString();
}

export interface EvalOptions {
  keys?: Array<RedisArgument>;
  arguments?: Array<RedisArgument>;
}

// The first key (if any) determines the cluster slot for EVAL-family commands.
export function evalFirstKeyIndex(options?: EvalOptions): string | undefined {
  return options?.keys?.[0];
}

/**
 * Appends the EVAL-style argument tail: the key count, the keys
 * themselves, then any script arguments. With no keys, the count is '0'.
 */
export function pushEvalArguments(args: Array<RedisArgument>, options?: EvalOptions): Array<RedisArgument> {
  const keys = options?.keys;
  if (keys) {
    args.push(keys.length.toString(), ...keys);
  } else {
    args.push('0');
  }

  const scriptArguments = options?.arguments;
  if (scriptArguments) {
    args.push(...scriptArguments);
  }

  return args;
}

/**
 * Appends one value or many to the argument list. Arrays are appended via
 * concat rather than spread to sidestep engine argument-count limits on
 * very large inputs (https://github.com/redis/node-redis/pull/2160).
 */
export function pushVariadicArguments(args: CommandArguments, value: RedisVariadicArgument): CommandArguments {
  if (!Array.isArray(value)) {
    args.push(value);
    return args;
  }

  return args.concat(value);
}

// Like pushVariadicArguments, but stringifies numeric input first.
export function pushVariadicNumberArguments(
  args: CommandArguments,
  value: number | Array<number>
): CommandArguments {
  const values = Array.isArray(value) ? value : [value];
  for (const item of values) {
    args.push(item.toString());
  }

  return args;
}

export type RedisVariadicArgument = RedisArgument | Array<RedisArgument>;

// Appends a length-prefixed variadic argument: count first, then the value(s).
export function pushVariadicArgument(
  args: Array<RedisArgument>,
  value: RedisVariadicArgument
): CommandArguments {
  if (!Array.isArray(value)) {
    args.push('1', value);
    return args;
  }

  args.push(value.length.toString(), ...value);
  return args;
}

// Emits `name <count> <values...>` via the parser, or nothing when value is absent.
export function parseOptionalVariadicArgument(
  parser: CommandParser,
  name: RedisArgument,
  value?: RedisVariadicArgument
) {
  if (value === undefined) return;

  parser.push(name);

  parser.pushVariadicWithLength(value);
}

export enum CommandFlags {
  WRITE = 'write', // command may result in modifications
  READONLY = 'readonly', // command will never modify keys
  DENYOOM = 'denyoom', // reject command if
currently out of memory + ADMIN = 'admin', // server admin command + PUBSUB = 'pubsub', // pubsub-related command + NOSCRIPT = 'noscript', // deny this command from scripts + RANDOM = 'random', // command has random results, dangerous for scripts + SORT_FOR_SCRIPT = 'sort_for_script', // if called from script, sort output + LOADING = 'loading', // allow command while database is loading + STALE = 'stale', // allow command while replica has stale data + SKIP_MONITOR = 'skip_monitor', // do not show this command in MONITOR + ASKING = 'asking', // cluster related - accept even if importing + FAST = 'fast', // command operates in constant or log(N) time. Used for latency monitoring. + MOVABLEKEYS = 'movablekeys' // keys have no pre-determined position. You must discover keys yourself. +} + +export enum CommandCategories { + KEYSPACE = '@keyspace', + READ = '@read', + WRITE = '@write', + SET = '@set', + SORTEDSET = '@sortedset', + LIST = '@list', + HASH = '@hash', + STRING = '@string', + BITMAP = '@bitmap', + HYPERLOGLOG = '@hyperloglog', + GEO = '@geo', + STREAM = '@stream', + PUBSUB = '@pubsub', + ADMIN = '@admin', + FAST = '@fast', + SLOW = '@slow', + BLOCKING = '@blocking', + DANGEROUS = '@dangerous', + CONNECTION = '@connection', + TRANSACTION = '@transaction', + SCRIPTING = '@scripting' +} + +export type CommandRawReply = [ + name: string, + arity: number, + flags: Array, + firstKeyIndex: number, + lastKeyIndex: number, + step: number, + categories: Array +]; + +export type CommandReply = { + name: string, + arity: number, + flags: Set, + firstKeyIndex: number, + lastKeyIndex: number, + step: number, + categories: Set +}; + +export function transformCommandReply( + this: void, + [name, arity, flags, firstKeyIndex, lastKeyIndex, step, categories]: CommandRawReply +): CommandReply { + return { + name, + arity, + flags: new Set(flags), + firstKeyIndex, + lastKeyIndex, + step, + categories: new Set(categories) + }; +} + +export enum RedisFunctionFlags { + NO_WRITES = 
'no-writes', + ALLOW_OOM = 'allow-oom', + ALLOW_STALE = 'allow-stale', + NO_CLUSTER = 'no-cluster' +} + +export type FunctionListRawItemReply = [ + 'library_name', + string, + 'engine', + string, + 'functions', + Array<[ + 'name', + string, + 'description', + string | null, + 'flags', + Array + ]> +]; + +export interface FunctionListItemReply { + libraryName: string; + engine: string; + functions: Array<{ + name: string; + description: string | null; + flags: Array; + }>; +} + +export function transformFunctionListItemReply(reply: FunctionListRawItemReply): FunctionListItemReply { + return { + libraryName: reply[1], + engine: reply[3], + functions: reply[5].map(fn => ({ + name: fn[1], + description: fn[3], + flags: fn[5] + })) + }; +} + +export interface SlotRange { + start: number; + end: number; +} + +function parseSlotRangeArguments( + parser: CommandParser, + range: SlotRange +): void { + parser.push( + range.start.toString(), + range.end.toString() + ); +} + +export function parseSlotRangesArguments( + parser: CommandParser, + ranges: SlotRange | Array +) { + if (Array.isArray(ranges)) { + for (const range of ranges) { + parseSlotRangeArguments(parser, range); + } + } else { + parseSlotRangeArguments(parser, ranges); + } +} + +export type RawRangeReply = [ + start: number, + end: number +]; + +export interface RangeReply { + start: number; + end: number; +} + +export function transformRangeReply([start, end]: RawRangeReply): RangeReply { + return { + start, + end + }; +} + +export type ZKeyAndWeight = { + key: RedisArgument; + weight: number; +}; + +export type ZVariadicKeys = T | [T, ...Array]; + +export type ZKeys = ZVariadicKeys | ZVariadicKeys; + +export function parseZKeysArguments( + parser: CommandParser, + keys: ZKeys +) { + if (Array.isArray(keys)) { + parser.push(keys.length.toString()); + + if (keys.length) { + if (isPlainKeys(keys)) { + parser.pushKeys(keys); + } else { + for (let i = 0; i < keys.length; i++) { + parser.pushKey(keys[i].key) + } + 
parser.push('WEIGHTS'); + for (let i = 0; i < keys.length; i++) { + parser.push(transformDoubleArgument(keys[i].weight)); + } + } + } + } else { + parser.push('1'); + + if (isPlainKey(keys)) { + parser.pushKey(keys); + } else { + parser.pushKey(keys.key); + parser.push('WEIGHTS', transformDoubleArgument(keys.weight)); + } + } +} + +function isPlainKey(key: RedisArgument | ZKeyAndWeight): key is RedisArgument { + return typeof key === 'string' || key instanceof Buffer; +} + +function isPlainKeys(keys: Array | Array): keys is Array { + return isPlainKey(keys[0]); +} + +export type Tail = T extends [infer Head, ...infer Tail] ? Tail : never; + +/** + * @deprecated + */ +export function parseArgs(command: Command, ...args: Array): CommandArguments { + const parser = new BasicCommandParser(); + command.parseCommand!(parser, ...args); + + const redisArgs: CommandArguments = parser.redisArgs; + if (parser.preserve) { + redisArgs.preserve = parser.preserve; + } + return redisArgs; +} + +export type StreamMessageRawReply = TuplesReply<[ + id: BlobStringReply, + message: ArrayReply +]>; + +export type StreamMessageReply = { + id: BlobStringReply, + message: MapReply, +}; + +export function transformStreamMessageReply(typeMapping: TypeMapping | undefined, reply: StreamMessageRawReply): StreamMessageReply { + const [ id, message ] = reply as unknown as UnwrapReply; + return { + id: id, + message: transformTuplesReply(message, undefined, typeMapping) + }; +} + +export function transformStreamMessageNullReply(typeMapping: TypeMapping | undefined, reply: StreamMessageRawReply | NullReply) { + return isNullReply(reply) ? 
reply : transformStreamMessageReply(typeMapping, reply); +} + +export type StreamMessagesReply = Array; + +export type StreamsMessagesReply = Array<{ + name: BlobStringReply | string; + messages: StreamMessagesReply; +}> | null; + +export function transformStreamMessagesReply( + r: ArrayReply, + typeMapping?: TypeMapping +): StreamMessagesReply { + const reply = r as unknown as UnwrapReply; + + return reply.map(transformStreamMessageReply.bind(undefined, typeMapping)); +} + +type StreamMessagesRawReply = TuplesReply<[name: BlobStringReply, ArrayReply]>; +type StreamsMessagesRawReply2 = ArrayReply; + +export function transformStreamsMessagesReplyResp2( + reply: UnwrapReply, + preserve?: any, + typeMapping?: TypeMapping +): StreamsMessagesReply | NullReply { + // FUTURE: resposne type if resp3 was working, reverting to old v4 for now + //: MapReply | NullReply { + if (reply === null) return null as unknown as NullReply; + + switch (typeMapping? typeMapping[RESP_TYPES.MAP] : undefined) { +/* FUTURE: a response type for when resp3 is working properly + case Map: { + const ret = new Map(); + + for (let i=0; i < reply.length; i++) { + const stream = reply[i] as unknown as UnwrapReply; + + const name = stream[0]; + const rawMessages = stream[1]; + + ret.set(name.toString(), transformStreamMessagesReply(rawMessages, typeMapping)); + } + + return ret as unknown as MapReply; + } + case Array: { + const ret: Array = []; + + for (let i=0; i < reply.length; i++) { + const stream = reply[i] as unknown as UnwrapReply; + + const name = stream[0]; + const rawMessages = stream[1]; + + ret.push(name); + ret.push(transformStreamMessagesReply(rawMessages, typeMapping)); + } + + return ret as unknown as MapReply; + } + default: { + const ret: Record = Object.create(null); + + for (let i=0; i < reply.length; i++) { + const stream = reply[i] as unknown as UnwrapReply; + + const name = stream[0] as unknown as UnwrapReply; + const rawMessages = stream[1]; + + ret[name.toString()] = 
transformStreamMessagesReply(rawMessages); + } + + return ret as unknown as MapReply; + } +*/ + // V4 compatible response type + default: { + const ret: StreamsMessagesReply = []; + + for (let i=0; i < reply.length; i++) { + const stream = reply[i] as unknown as UnwrapReply; + + ret.push({ + name: stream[0], + messages: transformStreamMessagesReply(stream[1]) + }); + } + + return ret; + } + } +} + +type StreamsMessagesRawReply3 = MapReply>; + +export function transformStreamsMessagesReplyResp3(reply: UnwrapReply): MapReply | NullReply { + if (reply === null) return null as unknown as NullReply; + + if (reply instanceof Map) { + const ret = new Map(); + + for (const [n, rawMessages] of reply) { + const name = n as unknown as UnwrapReply; + + ret.set(name.toString(), transformStreamMessagesReply(rawMessages)); + } + + return ret as unknown as MapReply + } else if (reply instanceof Array) { + const ret = []; + + for (let i=0; i < reply.length; i += 2) { + const name = reply[i] as BlobStringReply; + const rawMessages = reply[i+1] as ArrayReply; + + ret.push(name); + ret.push(transformStreamMessagesReply(rawMessages)); + } + + return ret as unknown as MapReply + } else { + const ret = Object.create(null); + for (const [name, rawMessages] of Object.entries(reply)) { + ret[name] = transformStreamMessagesReply(rawMessages); + } + + return ret as unknown as MapReply + } +} + +export type RedisJSON = null | boolean | number | string | Date | Array | { + [key: string]: RedisJSON; + [key: number]: RedisJSON; +}; + +export function transformRedisJsonArgument(json: RedisJSON): string { + return JSON.stringify(json); +} + +export function transformRedisJsonReply(json: BlobStringReply): RedisJSON { + const res = JSON.parse((json as unknown as UnwrapReply).toString()); + return res; +} + +export function transformRedisJsonNullReply(json: NullReply | BlobStringReply): NullReply | RedisJSON { + return isNullReply(json) ? 
json : transformRedisJsonReply(json); +} diff --git a/packages/client/lib/commands/index.ts b/packages/client/lib/commands/index.ts new file mode 100644 index 00000000000..54ede43d011 --- /dev/null +++ b/packages/client/lib/commands/index.ts @@ -0,0 +1,1102 @@ +import type { RedisCommands } from '../RESP/types'; +import ACL_CAT from './ACL_CAT'; +import ACL_DELUSER from './ACL_DELUSER'; +import ACL_DRYRUN from './ACL_DRYRUN'; +import ACL_GENPASS from './ACL_GENPASS'; +import ACL_GETUSER from './ACL_GETUSER'; +import ACL_LIST from './ACL_LIST'; +import ACL_LOAD from './ACL_LOAD'; +import ACL_LOG_RESET from './ACL_LOG_RESET'; +import ACL_LOG from './ACL_LOG'; +import ACL_SAVE from './ACL_SAVE'; +import ACL_SETUSER from './ACL_SETUSER'; +import ACL_USERS from './ACL_USERS'; +import ACL_WHOAMI from './ACL_WHOAMI'; +import APPEND from './APPEND'; +import ASKING from './ASKING'; +import AUTH from './AUTH'; +import BGREWRITEAOF from './BGREWRITEAOF'; +import BGSAVE from './BGSAVE'; +import BITCOUNT from './BITCOUNT'; +import BITFIELD_RO from './BITFIELD_RO'; +import BITFIELD from './BITFIELD'; +import BITOP from './BITOP'; +import BITPOS from './BITPOS'; +import BLMOVE from './BLMOVE'; +import BLMPOP from './BLMPOP'; +import BLPOP from './BLPOP'; +import BRPOP from './BRPOP'; +import BRPOPLPUSH from './BRPOPLPUSH'; +import BZMPOP from './BZMPOP'; +import BZPOPMAX from './BZPOPMAX'; +import BZPOPMIN from './BZPOPMIN'; +import CLIENT_CACHING from './CLIENT_CACHING'; +import CLIENT_GETNAME from './CLIENT_GETNAME'; +import CLIENT_GETREDIR from './CLIENT_GETREDIR'; +import CLIENT_ID from './CLIENT_ID'; +import CLIENT_INFO from './CLIENT_INFO'; +import CLIENT_KILL, { CLIENT_KILL_FILTERS } from './CLIENT_KILL'; +import CLIENT_LIST from './CLIENT_LIST'; +import CLIENT_NO_EVICT from './CLIENT_NO-EVICT'; +import CLIENT_NO_TOUCH from './CLIENT_NO-TOUCH'; +import CLIENT_PAUSE from './CLIENT_PAUSE'; +import CLIENT_SETNAME from './CLIENT_SETNAME'; +import CLIENT_TRACKING from 
'./CLIENT_TRACKING'; +import CLIENT_TRACKINGINFO from './CLIENT_TRACKINGINFO'; +import CLIENT_UNPAUSE from './CLIENT_UNPAUSE'; +import CLUSTER_ADDSLOTS from './CLUSTER_ADDSLOTS'; +import CLUSTER_ADDSLOTSRANGE from './CLUSTER_ADDSLOTSRANGE'; +import CLUSTER_BUMPEPOCH from './CLUSTER_BUMPEPOCH'; +import CLUSTER_COUNT_FAILURE_REPORTS from './CLUSTER_COUNT-FAILURE-REPORTS'; +import CLUSTER_COUNTKEYSINSLOT from './CLUSTER_COUNTKEYSINSLOT'; +import CLUSTER_DELSLOTS from './CLUSTER_DELSLOTS'; +import CLUSTER_DELSLOTSRANGE from './CLUSTER_DELSLOTSRANGE'; +import CLUSTER_FAILOVER, { FAILOVER_MODES } from './CLUSTER_FAILOVER'; +import CLUSTER_FLUSHSLOTS from './CLUSTER_FLUSHSLOTS'; +import CLUSTER_FORGET from './CLUSTER_FORGET'; +import CLUSTER_GETKEYSINSLOT from './CLUSTER_GETKEYSINSLOT'; +import CLUSTER_INFO from './CLUSTER_INFO'; +import CLUSTER_KEYSLOT from './CLUSTER_KEYSLOT'; +import CLUSTER_LINKS from './CLUSTER_LINKS'; +import CLUSTER_MEET from './CLUSTER_MEET'; +import CLUSTER_MYID from './CLUSTER_MYID'; +import CLUSTER_MYSHARDID from './CLUSTER_MYSHARDID'; +import CLUSTER_NODES from './CLUSTER_NODES'; +import CLUSTER_REPLICAS from './CLUSTER_REPLICAS'; +import CLUSTER_REPLICATE from './CLUSTER_REPLICATE'; +import CLUSTER_RESET from './CLUSTER_RESET'; +import CLUSTER_SAVECONFIG from './CLUSTER_SAVECONFIG'; +import CLUSTER_SET_CONFIG_EPOCH from './CLUSTER_SET-CONFIG-EPOCH'; +import CLUSTER_SETSLOT, { CLUSTER_SLOT_STATES } from './CLUSTER_SETSLOT'; +import CLUSTER_SLOTS from './CLUSTER_SLOTS'; +import COMMAND_COUNT from './COMMAND_COUNT'; +import COMMAND_GETKEYS from './COMMAND_GETKEYS'; +import COMMAND_GETKEYSANDFLAGS from './COMMAND_GETKEYSANDFLAGS'; +import COMMAND_INFO from './COMMAND_INFO'; +import COMMAND_LIST, { COMMAND_LIST_FILTER_BY } from './COMMAND_LIST'; +import COMMAND from './COMMAND'; +import CONFIG_GET from './CONFIG_GET'; +import CONFIG_RESETASTAT from './CONFIG_RESETSTAT'; +import CONFIG_REWRITE from './CONFIG_REWRITE'; +import CONFIG_SET from 
'./CONFIG_SET'; +import COPY from './COPY'; +import DBSIZE from './DBSIZE'; +import DECR from './DECR'; +import DECRBY from './DECRBY'; +import DEL from './DEL'; +import DUMP from './DUMP'; +import ECHO from './ECHO'; +import EVAL_RO from './EVAL_RO'; +import EVAL from './EVAL'; +import EVALSHA_RO from './EVALSHA_RO'; +import EVALSHA from './EVALSHA'; +import GEOADD from './GEOADD'; +import GEODIST from './GEODIST'; +import GEOHASH from './GEOHASH'; +import GEOPOS from './GEOPOS'; +import GEORADIUS_RO_WITH from './GEORADIUS_RO_WITH'; +import GEORADIUS_RO from './GEORADIUS_RO'; +import GEORADIUS_STORE from './GEORADIUS_STORE'; +import GEORADIUS_WITH from './GEORADIUS_WITH'; +import GEORADIUS from './GEORADIUS'; +import GEORADIUSBYMEMBER_RO_WITH from './GEORADIUSBYMEMBER_RO_WITH'; +import GEORADIUSBYMEMBER_RO from './GEORADIUSBYMEMBER_RO'; +import GEORADIUSBYMEMBER_STORE from './GEORADIUSBYMEMBER_STORE'; +import GEORADIUSBYMEMBER_WITH from './GEORADIUSBYMEMBER_WITH'; +import GEORADIUSBYMEMBER from './GEORADIUSBYMEMBER'; +import GEOSEARCH_WITH from './GEOSEARCH_WITH'; +import GEOSEARCH from './GEOSEARCH'; +import GEOSEARCHSTORE from './GEOSEARCHSTORE'; +import GET from './GET'; +import GETBIT from './GETBIT'; +import GETDEL from './GETDEL'; +import GETEX from './GETEX'; +import GETRANGE from './GETRANGE'; +import GETSET from './GETSET'; +import EXISTS from './EXISTS'; +import EXPIRE from './EXPIRE'; +import EXPIREAT from './EXPIREAT'; +import EXPIRETIME from './EXPIRETIME'; +import FLUSHALL, { REDIS_FLUSH_MODES } from './FLUSHALL'; +import FLUSHDB from './FLUSHDB'; +import FCALL from './FCALL'; +import FCALL_RO from './FCALL_RO'; +import FUNCTION_DELETE from './FUNCTION_DELETE'; +import FUNCTION_DUMP from './FUNCTION_DUMP'; +import FUNCTION_FLUSH from './FUNCTION_FLUSH'; +import FUNCTION_KILL from './FUNCTION_KILL'; +import FUNCTION_LIST_WITHCODE from './FUNCTION_LIST_WITHCODE'; +import FUNCTION_LIST from './FUNCTION_LIST'; +import FUNCTION_LOAD from 
'./FUNCTION_LOAD'; +import FUNCTION_RESTORE from './FUNCTION_RESTORE'; +import FUNCTION_STATS from './FUNCTION_STATS'; +import HDEL from './HDEL'; +import HELLO from './HELLO'; +import HEXISTS from './HEXISTS'; +import HEXPIRE from './HEXPIRE'; +import HEXPIREAT from './HEXPIREAT'; +import HEXPIRETIME from './HEXPIRETIME'; +import HGET from './HGET'; +import HGETALL from './HGETALL'; +import HGETDEL from './HGETDEL'; +import HGETEX from './HGETEX'; +import HINCRBY from './HINCRBY'; +import HINCRBYFLOAT from './HINCRBYFLOAT'; +import HKEYS from './HKEYS'; +import HLEN from './HLEN'; +import HMGET from './HMGET'; +import HPERSIST from './HPERSIST'; +import HPEXPIRE from './HPEXPIRE'; +import HPEXPIREAT from './HPEXPIREAT'; +import HPEXPIRETIME from './HPEXPIRETIME'; +import HPTTL from './HPTTL'; +import HRANDFIELD_COUNT_WITHVALUES from './HRANDFIELD_COUNT_WITHVALUES'; +import HRANDFIELD_COUNT from './HRANDFIELD_COUNT'; +import HRANDFIELD from './HRANDFIELD'; +import HSCAN from './HSCAN'; +import HSCAN_NOVALUES from './HSCAN_NOVALUES'; +import HSET from './HSET'; +import HSETEX from './HSETEX'; +import HSETNX from './HSETNX'; +import HSTRLEN from './HSTRLEN'; +import HTTL from './HTTL'; +import HVALS from './HVALS'; +import INCR from './INCR'; +import INCRBY from './INCRBY'; +import INCRBYFLOAT from './INCRBYFLOAT'; +import INFO from './INFO'; +import KEYS from './KEYS'; +import LASTSAVE from './LASTSAVE'; +import LATENCY_DOCTOR from './LATENCY_DOCTOR'; +import LATENCY_GRAPH from './LATENCY_GRAPH'; +import LATENCY_HISTORY from './LATENCY_HISTORY'; +import LATENCY_LATEST from './LATENCY_LATEST'; +import LATENCY_RESET from './LATENCY_RESET'; +import LCS_IDX_WITHMATCHLEN from './LCS_IDX_WITHMATCHLEN'; +import LCS_IDX from './LCS_IDX'; +import LCS_LEN from './LCS_LEN'; +import LCS from './LCS'; +import LINDEX from './LINDEX'; +import LINSERT from './LINSERT'; +import LLEN from './LLEN'; +import LMOVE from './LMOVE'; +import LMPOP from './LMPOP'; +import LOLWUT from 
'./LOLWUT'; +import LPOP_COUNT from './LPOP_COUNT'; +import LPOP from './LPOP'; +import LPOS_COUNT from './LPOS_COUNT'; +import LPOS from './LPOS'; +import LPUSH from './LPUSH'; +import LPUSHX from './LPUSHX'; +import LRANGE from './LRANGE'; +import LREM from './LREM'; +import LSET from './LSET'; +import LTRIM from './LTRIM'; +import MEMORY_DOCTOR from './MEMORY_DOCTOR'; +import MEMORY_MALLOC_STATS from './MEMORY_MALLOC-STATS'; +import MEMORY_PURGE from './MEMORY_PURGE'; +import MEMORY_STATS from './MEMORY_STATS'; +import MEMORY_USAGE from './MEMORY_USAGE'; +import MGET from './MGET'; +import MIGRATE from './MIGRATE'; +import MODULE_LIST from './MODULE_LIST'; +import MODULE_LOAD from './MODULE_LOAD'; +import MODULE_UNLOAD from './MODULE_UNLOAD'; +import MOVE from './MOVE'; +import MSET from './MSET'; +import MSETNX from './MSETNX'; +import OBJECT_ENCODING from './OBJECT_ENCODING'; +import OBJECT_FREQ from './OBJECT_FREQ'; +import OBJECT_IDLETIME from './OBJECT_IDLETIME'; +import OBJECT_REFCOUNT from './OBJECT_REFCOUNT'; +import PERSIST from './PERSIST'; +import PEXPIRE from './PEXPIRE'; +import PEXPIREAT from './PEXPIREAT'; +import PEXPIRETIME from './PEXPIRETIME'; +import PFADD from './PFADD'; +import PFCOUNT from './PFCOUNT'; +import PFMERGE from './PFMERGE'; +import PING from './PING'; +import PSETEX from './PSETEX'; +import PTTL from './PTTL'; +import PUBLISH from './PUBLISH'; +import PUBSUB_CHANNELS from './PUBSUB_CHANNELS'; +import PUBSUB_NUMPAT from './PUBSUB_NUMPAT'; +import PUBSUB_NUMSUB from './PUBSUB_NUMSUB'; +import PUBSUB_SHARDNUMSUB from './PUBSUB_SHARDNUMSUB'; +import PUBSUB_SHARDCHANNELS from './PUBSUB_SHARDCHANNELS'; +import RANDOMKEY from './RANDOMKEY'; +import READONLY from './READONLY'; +import RENAME from './RENAME'; +import RENAMENX from './RENAMENX'; +import REPLICAOF from './REPLICAOF'; +import RESTORE_ASKING from './RESTORE-ASKING'; +import RESTORE from './RESTORE'; +import ROLE from './ROLE'; +import RPOP_COUNT from './RPOP_COUNT'; +import 
RPOP from './RPOP'; +import RPOPLPUSH from './RPOPLPUSH'; +import RPUSH from './RPUSH'; +import RPUSHX from './RPUSHX'; +import SADD from './SADD'; +import SCAN from './SCAN'; +import SCARD from './SCARD'; +import SCRIPT_DEBUG from './SCRIPT_DEBUG'; +import SCRIPT_EXISTS from './SCRIPT_EXISTS'; +import SCRIPT_FLUSH from './SCRIPT_FLUSH'; +import SCRIPT_KILL from './SCRIPT_KILL'; +import SCRIPT_LOAD from './SCRIPT_LOAD'; +import SDIFF from './SDIFF'; +import SDIFFSTORE from './SDIFFSTORE'; +import SET from './SET'; +import SETBIT from './SETBIT'; +import SETEX from './SETEX'; +import SETNX from './SETNX'; +import SETRANGE from './SETRANGE'; +import SINTER from './SINTER'; +import SINTERCARD from './SINTERCARD'; +import SINTERSTORE from './SINTERSTORE'; +import SISMEMBER from './SISMEMBER'; +import SMEMBERS from './SMEMBERS'; +import SMISMEMBER from './SMISMEMBER'; +import SMOVE from './SMOVE'; +import SORT_RO from './SORT_RO'; +import SORT_STORE from './SORT_STORE'; +import SORT from './SORT'; +import SPOP_COUNT from './SPOP_COUNT'; +import SPOP from './SPOP'; +import SPUBLISH from './SPUBLISH'; +import SRANDMEMBER_COUNT from './SRANDMEMBER_COUNT'; +import SRANDMEMBER from './SRANDMEMBER'; +import SREM from './SREM'; +import SSCAN from './SSCAN'; +import STRLEN from './STRLEN'; +import SUNION from './SUNION'; +import SUNIONSTORE from './SUNIONSTORE'; +import SWAPDB from './SWAPDB'; +import TIME from './TIME'; +import TOUCH from './TOUCH'; +import TTL from './TTL'; +import TYPE from './TYPE'; +import UNLINK from './UNLINK'; +import WAIT from './WAIT'; +import XACK from './XACK'; +import XACKDEL from './XACKDEL'; +import XADD_NOMKSTREAM from './XADD_NOMKSTREAM'; +import XADD from './XADD'; +import XAUTOCLAIM_JUSTID from './XAUTOCLAIM_JUSTID'; +import XAUTOCLAIM from './XAUTOCLAIM'; +import XCLAIM_JUSTID from './XCLAIM_JUSTID'; +import XCLAIM from './XCLAIM'; +import XDEL from './XDEL'; +import XDELEX from './XDELEX'; +import XGROUP_CREATE from './XGROUP_CREATE'; 
+import XGROUP_CREATECONSUMER from './XGROUP_CREATECONSUMER'; +import XGROUP_DELCONSUMER from './XGROUP_DELCONSUMER'; +import XGROUP_DESTROY from './XGROUP_DESTROY'; +import XGROUP_SETID from './XGROUP_SETID'; +import XINFO_CONSUMERS from './XINFO_CONSUMERS'; +import XINFO_GROUPS from './XINFO_GROUPS'; +import XINFO_STREAM from './XINFO_STREAM'; +import XLEN from './XLEN'; +import XPENDING_RANGE from './XPENDING_RANGE'; +import XPENDING from './XPENDING'; +import XRANGE from './XRANGE'; +import XREAD from './XREAD'; +import XREADGROUP from './XREADGROUP'; +import XREVRANGE from './XREVRANGE'; +import XSETID from './XSETID'; +import XTRIM from './XTRIM'; +import ZADD_INCR from './ZADD_INCR'; +import ZADD from './ZADD'; +import ZCARD from './ZCARD'; +import ZCOUNT from './ZCOUNT'; +import ZDIFF_WITHSCORES from './ZDIFF_WITHSCORES'; +import ZDIFF from './ZDIFF'; +import ZDIFFSTORE from './ZDIFFSTORE'; +import ZINCRBY from './ZINCRBY'; +import ZINTER_WITHSCORES from './ZINTER_WITHSCORES'; +import ZINTER from './ZINTER'; +import ZINTERCARD from './ZINTERCARD'; +import ZINTERSTORE from './ZINTERSTORE'; +import ZLEXCOUNT from './ZLEXCOUNT'; +import ZMPOP from './ZMPOP'; +import ZMSCORE from './ZMSCORE'; +import ZPOPMAX_COUNT from './ZPOPMAX_COUNT'; +import ZPOPMAX from './ZPOPMAX'; +import ZPOPMIN_COUNT from './ZPOPMIN_COUNT'; +import ZPOPMIN from './ZPOPMIN'; +import ZRANDMEMBER_COUNT_WITHSCORES from './ZRANDMEMBER_COUNT_WITHSCORES'; +import ZRANDMEMBER_COUNT from './ZRANDMEMBER_COUNT'; +import ZRANDMEMBER from './ZRANDMEMBER'; +import ZRANGE_WITHSCORES from './ZRANGE_WITHSCORES'; +import ZRANGE from './ZRANGE'; +import ZRANGEBYLEX from './ZRANGEBYLEX'; +import ZRANGEBYSCORE_WITHSCORES from './ZRANGEBYSCORE_WITHSCORES'; +import ZRANGEBYSCORE from './ZRANGEBYSCORE'; +import ZRANGESTORE from './ZRANGESTORE'; +import ZREMRANGEBYSCORE from './ZREMRANGEBYSCORE'; +import ZRANK_WITHSCORE from './ZRANK_WITHSCORE'; +import ZRANK from './ZRANK'; +import ZREM from './ZREM'; +import 
ZREMRANGEBYLEX from './ZREMRANGEBYLEX'; +import ZREMRANGEBYRANK from './ZREMRANGEBYRANK'; +import ZREVRANK from './ZREVRANK'; +import ZSCAN from './ZSCAN'; +import ZSCORE from './ZSCORE'; +import ZUNION_WITHSCORES from './ZUNION_WITHSCORES'; +import ZUNION from './ZUNION'; +import ZUNIONSTORE from './ZUNIONSTORE'; +import VADD from './VADD'; +import VCARD from './VCARD'; +import VDIM from './VDIM'; +import VEMB from './VEMB'; +import VEMB_RAW from './VEMB_RAW'; +import VGETATTR from './VGETATTR'; +import VINFO from './VINFO'; +import VLINKS from './VLINKS'; +import VLINKS_WITHSCORES from './VLINKS_WITHSCORES'; +import VRANDMEMBER from './VRANDMEMBER'; +import VREM from './VREM'; +import VSETATTR from './VSETATTR'; +import VSIM from './VSIM'; +import VSIM_WITHSCORES from './VSIM_WITHSCORES'; + +export { + CLIENT_KILL_FILTERS, + FAILOVER_MODES, + CLUSTER_SLOT_STATES, + COMMAND_LIST_FILTER_BY, + REDIS_FLUSH_MODES +}; + +export { SetOptions } from './SET'; + +export default { + ACL_CAT, + aclCat: ACL_CAT, + ACL_DELUSER, + aclDelUser: ACL_DELUSER, + ACL_DRYRUN, + aclDryRun: ACL_DRYRUN, + ACL_GENPASS, + aclGenPass: ACL_GENPASS, + ACL_GETUSER, + aclGetUser: ACL_GETUSER, + ACL_LIST, + aclList: ACL_LIST, + ACL_LOAD, + aclLoad: ACL_LOAD, + ACL_LOG_RESET, + aclLogReset: ACL_LOG_RESET, + ACL_LOG, + aclLog: ACL_LOG, + ACL_SAVE, + aclSave: ACL_SAVE, + ACL_SETUSER, + aclSetUser: ACL_SETUSER, + ACL_USERS, + aclUsers: ACL_USERS, + ACL_WHOAMI, + aclWhoAmI: ACL_WHOAMI, + APPEND, + append: APPEND, + ASKING, + asking: ASKING, + AUTH, + auth: AUTH, + BGREWRITEAOF, + bgRewriteAof: BGREWRITEAOF, + BGSAVE, + bgSave: BGSAVE, + BITCOUNT, + bitCount: BITCOUNT, + BITFIELD_RO, + bitFieldRo: BITFIELD_RO, + BITFIELD, + bitField: BITFIELD, + BITOP, + bitOp: BITOP, + BITPOS, + bitPos: BITPOS, + BLMOVE, + blMove: BLMOVE, + BLMPOP, + blmPop: BLMPOP, + BLPOP, + blPop: BLPOP, + BRPOP, + brPop: BRPOP, + BRPOPLPUSH, + brPopLPush: BRPOPLPUSH, + BZMPOP, + bzmPop: BZMPOP, + BZPOPMAX, + bzPopMax: BZPOPMAX, + 
BZPOPMIN, + bzPopMin: BZPOPMIN, + CLIENT_CACHING, + clientCaching: CLIENT_CACHING, + CLIENT_GETNAME, + clientGetName: CLIENT_GETNAME, + CLIENT_GETREDIR, + clientGetRedir: CLIENT_GETREDIR, + CLIENT_ID, + clientId: CLIENT_ID, + CLIENT_INFO, + clientInfo: CLIENT_INFO, + CLIENT_KILL, + clientKill: CLIENT_KILL, + CLIENT_LIST, + clientList: CLIENT_LIST, + 'CLIENT_NO-EVICT': CLIENT_NO_EVICT, + clientNoEvict: CLIENT_NO_EVICT, + 'CLIENT_NO-TOUCH': CLIENT_NO_TOUCH, + clientNoTouch: CLIENT_NO_TOUCH, + CLIENT_PAUSE, + clientPause: CLIENT_PAUSE, + CLIENT_SETNAME, + clientSetName: CLIENT_SETNAME, + CLIENT_TRACKING, + clientTracking: CLIENT_TRACKING, + CLIENT_TRACKINGINFO, + clientTrackingInfo: CLIENT_TRACKINGINFO, + CLIENT_UNPAUSE, + clientUnpause: CLIENT_UNPAUSE, + CLUSTER_ADDSLOTS, + clusterAddSlots: CLUSTER_ADDSLOTS, + CLUSTER_ADDSLOTSRANGE, + clusterAddSlotsRange: CLUSTER_ADDSLOTSRANGE, + CLUSTER_BUMPEPOCH, + clusterBumpEpoch: CLUSTER_BUMPEPOCH, + 'CLUSTER_COUNT-FAILURE-REPORTS': CLUSTER_COUNT_FAILURE_REPORTS, + clusterCountFailureReports: CLUSTER_COUNT_FAILURE_REPORTS, + CLUSTER_COUNTKEYSINSLOT, + clusterCountKeysInSlot: CLUSTER_COUNTKEYSINSLOT, + CLUSTER_DELSLOTS, + clusterDelSlots: CLUSTER_DELSLOTS, + CLUSTER_DELSLOTSRANGE, + clusterDelSlotsRange: CLUSTER_DELSLOTSRANGE, + CLUSTER_FAILOVER, + clusterFailover: CLUSTER_FAILOVER, + CLUSTER_FLUSHSLOTS, + clusterFlushSlots: CLUSTER_FLUSHSLOTS, + CLUSTER_FORGET, + clusterForget: CLUSTER_FORGET, + CLUSTER_GETKEYSINSLOT, + clusterGetKeysInSlot: CLUSTER_GETKEYSINSLOT, + CLUSTER_INFO, + clusterInfo: CLUSTER_INFO, + CLUSTER_KEYSLOT, + clusterKeySlot: CLUSTER_KEYSLOT, + CLUSTER_LINKS, + clusterLinks: CLUSTER_LINKS, + CLUSTER_MEET, + clusterMeet: CLUSTER_MEET, + CLUSTER_MYID, + clusterMyId: CLUSTER_MYID, + CLUSTER_MYSHARDID, + clusterMyShardId: CLUSTER_MYSHARDID, + CLUSTER_NODES, + clusterNodes: CLUSTER_NODES, + CLUSTER_REPLICAS, + clusterReplicas: CLUSTER_REPLICAS, + CLUSTER_REPLICATE, + clusterReplicate: CLUSTER_REPLICATE, + 
CLUSTER_RESET, + clusterReset: CLUSTER_RESET, + CLUSTER_SAVECONFIG, + clusterSaveConfig: CLUSTER_SAVECONFIG, + 'CLUSTER_SET-CONFIG-EPOCH': CLUSTER_SET_CONFIG_EPOCH, + clusterSetConfigEpoch: CLUSTER_SET_CONFIG_EPOCH, + CLUSTER_SETSLOT, + clusterSetSlot: CLUSTER_SETSLOT, + CLUSTER_SLOTS, + clusterSlots: CLUSTER_SLOTS, + COMMAND_COUNT, + commandCount: COMMAND_COUNT, + COMMAND_GETKEYS, + commandGetKeys: COMMAND_GETKEYS, + COMMAND_GETKEYSANDFLAGS, + commandGetKeysAndFlags: COMMAND_GETKEYSANDFLAGS, + COMMAND_INFO, + commandInfo: COMMAND_INFO, + COMMAND_LIST, + commandList: COMMAND_LIST, + COMMAND, + command: COMMAND, + CONFIG_GET, + configGet: CONFIG_GET, + CONFIG_RESETASTAT, + configResetStat: CONFIG_RESETASTAT, + CONFIG_REWRITE, + configRewrite: CONFIG_REWRITE, + CONFIG_SET, + configSet: CONFIG_SET, + COPY, + copy: COPY, + DBSIZE, + dbSize: DBSIZE, + DECR, + decr: DECR, + DECRBY, + decrBy: DECRBY, + DEL, + del: DEL, + DUMP, + dump: DUMP, + ECHO, + echo: ECHO, + EVAL_RO, + evalRo: EVAL_RO, + EVAL, + eval: EVAL, + EVALSHA_RO, + evalShaRo: EVALSHA_RO, + EVALSHA, + evalSha: EVALSHA, + EXISTS, + exists: EXISTS, + EXPIRE, + expire: EXPIRE, + EXPIREAT, + expireAt: EXPIREAT, + EXPIRETIME, + expireTime: EXPIRETIME, + FLUSHALL, + flushAll: FLUSHALL, + FLUSHDB, + flushDb: FLUSHDB, + FCALL, + fCall: FCALL, + FCALL_RO, + fCallRo: FCALL_RO, + FUNCTION_DELETE, + functionDelete: FUNCTION_DELETE, + FUNCTION_DUMP, + functionDump: FUNCTION_DUMP, + FUNCTION_FLUSH, + functionFlush: FUNCTION_FLUSH, + FUNCTION_KILL, + functionKill: FUNCTION_KILL, + FUNCTION_LIST_WITHCODE, + functionListWithCode: FUNCTION_LIST_WITHCODE, + FUNCTION_LIST, + functionList: FUNCTION_LIST, + FUNCTION_LOAD, + functionLoad: FUNCTION_LOAD, + FUNCTION_RESTORE, + functionRestore: FUNCTION_RESTORE, + FUNCTION_STATS, + functionStats: FUNCTION_STATS, + GEOADD, + geoAdd: GEOADD, + GEODIST, + geoDist: GEODIST, + GEOHASH, + geoHash: GEOHASH, + GEOPOS, + geoPos: GEOPOS, + GEORADIUS_RO_WITH, + geoRadiusRoWith: 
GEORADIUS_RO_WITH, + GEORADIUS_RO, + geoRadiusRo: GEORADIUS_RO, + GEORADIUS_STORE, + geoRadiusStore: GEORADIUS_STORE, + GEORADIUS_WITH, + geoRadiusWith: GEORADIUS_WITH, + GEORADIUS, + geoRadius: GEORADIUS, + GEORADIUSBYMEMBER_RO_WITH, + geoRadiusByMemberRoWith: GEORADIUSBYMEMBER_RO_WITH, + GEORADIUSBYMEMBER_RO, + geoRadiusByMemberRo: GEORADIUSBYMEMBER_RO, + GEORADIUSBYMEMBER_STORE, + geoRadiusByMemberStore: GEORADIUSBYMEMBER_STORE, + GEORADIUSBYMEMBER_WITH, + geoRadiusByMemberWith: GEORADIUSBYMEMBER_WITH, + GEORADIUSBYMEMBER, + geoRadiusByMember: GEORADIUSBYMEMBER, + GEOSEARCH_WITH, + geoSearchWith: GEOSEARCH_WITH, + GEOSEARCH, + geoSearch: GEOSEARCH, + GEOSEARCHSTORE, + geoSearchStore: GEOSEARCHSTORE, + GET, + get: GET, + GETBIT, + getBit: GETBIT, + GETDEL, + getDel: GETDEL, + GETEX, + getEx: GETEX, + GETRANGE, + getRange: GETRANGE, + GETSET, + getSet: GETSET, + HDEL, + hDel: HDEL, + HELLO, + hello: HELLO, + HEXISTS, + hExists: HEXISTS, + HEXPIRE, + hExpire: HEXPIRE, + HEXPIREAT, + hExpireAt: HEXPIREAT, + HEXPIRETIME, + hExpireTime: HEXPIRETIME, + HGET, + hGet: HGET, + HGETALL, + hGetAll: HGETALL, + HGETDEL, + hGetDel: HGETDEL, + HGETEX, + hGetEx: HGETEX, + HINCRBY, + hIncrBy: HINCRBY, + HINCRBYFLOAT, + hIncrByFloat: HINCRBYFLOAT, + HKEYS, + hKeys: HKEYS, + HLEN, + hLen: HLEN, + HMGET, + hmGet: HMGET, + HPERSIST, + hPersist: HPERSIST, + HPEXPIRE, + hpExpire: HPEXPIRE, + HPEXPIREAT, + hpExpireAt: HPEXPIREAT, + HPEXPIRETIME, + hpExpireTime: HPEXPIRETIME, + HPTTL, + hpTTL: HPTTL, + HRANDFIELD_COUNT_WITHVALUES, + hRandFieldCountWithValues: HRANDFIELD_COUNT_WITHVALUES, + HRANDFIELD_COUNT, + hRandFieldCount: HRANDFIELD_COUNT, + HRANDFIELD, + hRandField: HRANDFIELD, + HSCAN, + hScan: HSCAN, + HSCAN_NOVALUES, + hScanNoValues: HSCAN_NOVALUES, + HSET, + hSet: HSET, + HSETEX, + hSetEx: HSETEX, + HSETNX, + hSetNX: HSETNX, + HSTRLEN, + hStrLen: HSTRLEN, + HTTL, + hTTL: HTTL, + HVALS, + hVals: HVALS, + INCR, + incr: INCR, + INCRBY, + incrBy: INCRBY, + INCRBYFLOAT, + 
incrByFloat: INCRBYFLOAT, + INFO, + info: INFO, + KEYS, + keys: KEYS, + LASTSAVE, + lastSave: LASTSAVE, + LATENCY_DOCTOR, + latencyDoctor: LATENCY_DOCTOR, + LATENCY_GRAPH, + latencyGraph: LATENCY_GRAPH, + LATENCY_HISTORY, + latencyHistory: LATENCY_HISTORY, + LATENCY_LATEST, + latencyLatest: LATENCY_LATEST, + LATENCY_RESET, + latencyReset: LATENCY_RESET, + LCS_IDX_WITHMATCHLEN, + lcsIdxWithMatchLen: LCS_IDX_WITHMATCHLEN, + LCS_IDX, + lcsIdx: LCS_IDX, + LCS_LEN, + lcsLen: LCS_LEN, + LCS, + lcs: LCS, + LINDEX, + lIndex: LINDEX, + LINSERT, + lInsert: LINSERT, + LLEN, + lLen: LLEN, + LMOVE, + lMove: LMOVE, + LMPOP, + lmPop: LMPOP, + LOLWUT, + LPOP_COUNT, + lPopCount: LPOP_COUNT, + LPOP, + lPop: LPOP, + LPOS_COUNT, + lPosCount: LPOS_COUNT, + LPOS, + lPos: LPOS, + LPUSH, + lPush: LPUSH, + LPUSHX, + lPushX: LPUSHX, + LRANGE, + lRange: LRANGE, + LREM, + lRem: LREM, + LSET, + lSet: LSET, + LTRIM, + lTrim: LTRIM, + MEMORY_DOCTOR, + memoryDoctor: MEMORY_DOCTOR, + 'MEMORY_MALLOC-STATS': MEMORY_MALLOC_STATS, + memoryMallocStats: MEMORY_MALLOC_STATS, + MEMORY_PURGE, + memoryPurge: MEMORY_PURGE, + MEMORY_STATS, + memoryStats: MEMORY_STATS, + MEMORY_USAGE, + memoryUsage: MEMORY_USAGE, + MGET, + mGet: MGET, + MIGRATE, + migrate: MIGRATE, + MODULE_LIST, + moduleList: MODULE_LIST, + MODULE_LOAD, + moduleLoad: MODULE_LOAD, + MODULE_UNLOAD, + moduleUnload: MODULE_UNLOAD, + MOVE, + move: MOVE, + MSET, + mSet: MSET, + MSETNX, + mSetNX: MSETNX, + OBJECT_ENCODING, + objectEncoding: OBJECT_ENCODING, + OBJECT_FREQ, + objectFreq: OBJECT_FREQ, + OBJECT_IDLETIME, + objectIdleTime: OBJECT_IDLETIME, + OBJECT_REFCOUNT, + objectRefCount: OBJECT_REFCOUNT, + PERSIST, + persist: PERSIST, + PEXPIRE, + pExpire: PEXPIRE, + PEXPIREAT, + pExpireAt: PEXPIREAT, + PEXPIRETIME, + pExpireTime: PEXPIRETIME, + PFADD, + pfAdd: PFADD, + PFCOUNT, + pfCount: PFCOUNT, + PFMERGE, + pfMerge: PFMERGE, + PING, + /** + * ping jsdoc + */ + ping: PING, + PSETEX, + pSetEx: PSETEX, + PTTL, + pTTL: PTTL, + PUBLISH, + publish: 
PUBLISH, + PUBSUB_CHANNELS, + pubSubChannels: PUBSUB_CHANNELS, + PUBSUB_NUMPAT, + pubSubNumPat: PUBSUB_NUMPAT, + PUBSUB_NUMSUB, + pubSubNumSub: PUBSUB_NUMSUB, + PUBSUB_SHARDNUMSUB, + pubSubShardNumSub: PUBSUB_SHARDNUMSUB, + PUBSUB_SHARDCHANNELS, + pubSubShardChannels: PUBSUB_SHARDCHANNELS, + RANDOMKEY, + randomKey: RANDOMKEY, + READONLY, + readonly: READONLY, + RENAME, + rename: RENAME, + RENAMENX, + renameNX: RENAMENX, + REPLICAOF, + replicaOf: REPLICAOF, + 'RESTORE-ASKING': RESTORE_ASKING, + restoreAsking: RESTORE_ASKING, + RESTORE, + restore: RESTORE, + RPOP_COUNT, + rPopCount: RPOP_COUNT, + ROLE, + role: ROLE, + RPOP, + rPop: RPOP, + RPOPLPUSH, + rPopLPush: RPOPLPUSH, + RPUSH, + rPush: RPUSH, + RPUSHX, + rPushX: RPUSHX, + SADD, + sAdd: SADD, + SCAN, + scan: SCAN, + SCARD, + sCard: SCARD, + SCRIPT_DEBUG, + scriptDebug: SCRIPT_DEBUG, + SCRIPT_EXISTS, + scriptExists: SCRIPT_EXISTS, + SCRIPT_FLUSH, + scriptFlush: SCRIPT_FLUSH, + SCRIPT_KILL, + scriptKill: SCRIPT_KILL, + SCRIPT_LOAD, + scriptLoad: SCRIPT_LOAD, + SDIFF, + sDiff: SDIFF, + SDIFFSTORE, + sDiffStore: SDIFFSTORE, + SET, + set: SET, + SETBIT, + setBit: SETBIT, + SETEX, + setEx: SETEX, + SETNX, + setNX: SETNX, + SETRANGE, + setRange: SETRANGE, + SINTER, + sInter: SINTER, + SINTERCARD, + sInterCard: SINTERCARD, + SINTERSTORE, + sInterStore: SINTERSTORE, + SISMEMBER, + sIsMember: SISMEMBER, + SMEMBERS, + sMembers: SMEMBERS, + SMISMEMBER, + smIsMember: SMISMEMBER, + SMOVE, + sMove: SMOVE, + SORT_RO, + sortRo: SORT_RO, + SORT_STORE, + sortStore: SORT_STORE, + SORT, + sort: SORT, + SPOP_COUNT, + sPopCount: SPOP_COUNT, + SPOP, + sPop: SPOP, + SPUBLISH, + sPublish: SPUBLISH, + SRANDMEMBER_COUNT, + sRandMemberCount: SRANDMEMBER_COUNT, + SRANDMEMBER, + sRandMember: SRANDMEMBER, + SREM, + sRem: SREM, + SSCAN, + sScan: SSCAN, + STRLEN, + strLen: STRLEN, + SUNION, + sUnion: SUNION, + SUNIONSTORE, + sUnionStore: SUNIONSTORE, + SWAPDB, + swapDb: SWAPDB, + TIME, + time: TIME, + TOUCH, + touch: TOUCH, + TTL, + ttl: TTL, + 
TYPE, + type: TYPE, + UNLINK, + unlink: UNLINK, + WAIT, + wait: WAIT, + XACK, + xAck: XACK, + XACKDEL, + xAckDel: XACKDEL, + XADD_NOMKSTREAM, + xAddNoMkStream: XADD_NOMKSTREAM, + XADD, + xAdd: XADD, + XAUTOCLAIM_JUSTID, + xAutoClaimJustId: XAUTOCLAIM_JUSTID, + XAUTOCLAIM, + xAutoClaim: XAUTOCLAIM, + XCLAIM_JUSTID, + xClaimJustId: XCLAIM_JUSTID, + XCLAIM, + xClaim: XCLAIM, + XDEL, + xDel: XDEL, + XDELEX, + xDelEx: XDELEX, + XGROUP_CREATE, + xGroupCreate: XGROUP_CREATE, + XGROUP_CREATECONSUMER, + xGroupCreateConsumer: XGROUP_CREATECONSUMER, + XGROUP_DELCONSUMER, + xGroupDelConsumer: XGROUP_DELCONSUMER, + XGROUP_DESTROY, + xGroupDestroy: XGROUP_DESTROY, + XGROUP_SETID, + xGroupSetId: XGROUP_SETID, + XINFO_CONSUMERS, + xInfoConsumers: XINFO_CONSUMERS, + XINFO_GROUPS, + xInfoGroups: XINFO_GROUPS, + XINFO_STREAM, + xInfoStream: XINFO_STREAM, + XLEN, + xLen: XLEN, + XPENDING_RANGE, + xPendingRange: XPENDING_RANGE, + XPENDING, + xPending: XPENDING, + XRANGE, + xRange: XRANGE, + XREAD, + xRead: XREAD, + XREADGROUP, + xReadGroup: XREADGROUP, + XREVRANGE, + xRevRange: XREVRANGE, + XSETID, + xSetId: XSETID, + XTRIM, + xTrim: XTRIM, + ZADD_INCR, + zAddIncr: ZADD_INCR, + ZADD, + zAdd: ZADD, + ZCARD, + zCard: ZCARD, + ZCOUNT, + zCount: ZCOUNT, + ZDIFF_WITHSCORES, + zDiffWithScores: ZDIFF_WITHSCORES, + ZDIFF, + zDiff: ZDIFF, + ZDIFFSTORE, + zDiffStore: ZDIFFSTORE, + ZINCRBY, + zIncrBy: ZINCRBY, + ZINTER_WITHSCORES, + zInterWithScores: ZINTER_WITHSCORES, + ZINTER, + zInter: ZINTER, + ZINTERCARD, + zInterCard: ZINTERCARD, + ZINTERSTORE, + zInterStore: ZINTERSTORE, + ZLEXCOUNT, + zLexCount: ZLEXCOUNT, + ZMPOP, + zmPop: ZMPOP, + ZMSCORE, + zmScore: ZMSCORE, + ZPOPMAX_COUNT, + zPopMaxCount: ZPOPMAX_COUNT, + ZPOPMAX, + zPopMax: ZPOPMAX, + ZPOPMIN_COUNT, + zPopMinCount: ZPOPMIN_COUNT, + ZPOPMIN, + zPopMin: ZPOPMIN, + ZRANDMEMBER_COUNT_WITHSCORES, + zRandMemberCountWithScores: ZRANDMEMBER_COUNT_WITHSCORES, + ZRANDMEMBER_COUNT, + zRandMemberCount: ZRANDMEMBER_COUNT, + ZRANDMEMBER, + 
zRandMember: ZRANDMEMBER, + ZRANGE_WITHSCORES, + zRangeWithScores: ZRANGE_WITHSCORES, + ZRANGE, + zRange: ZRANGE, + ZRANGEBYLEX, + zRangeByLex: ZRANGEBYLEX, + ZRANGEBYSCORE_WITHSCORES, + zRangeByScoreWithScores: ZRANGEBYSCORE_WITHSCORES, + ZRANGEBYSCORE, + zRangeByScore: ZRANGEBYSCORE, + ZRANGESTORE, + zRangeStore: ZRANGESTORE, + ZRANK_WITHSCORE, + zRankWithScore: ZRANK_WITHSCORE, + ZRANK, + zRank: ZRANK, + ZREM, + zRem: ZREM, + ZREMRANGEBYLEX, + zRemRangeByLex: ZREMRANGEBYLEX, + ZREMRANGEBYRANK, + zRemRangeByRank: ZREMRANGEBYRANK, + ZREMRANGEBYSCORE, + zRemRangeByScore: ZREMRANGEBYSCORE, + ZREVRANK, + zRevRank: ZREVRANK, + ZSCAN, + zScan: ZSCAN, + ZSCORE, + zScore: ZSCORE, + ZUNION_WITHSCORES, + zUnionWithScores: ZUNION_WITHSCORES, + ZUNION, + zUnion: ZUNION, + ZUNIONSTORE, + zUnionStore: ZUNIONSTORE, + VADD, + vAdd: VADD, + VCARD, + vCard: VCARD, + VDIM, + vDim: VDIM, + VEMB, + vEmb: VEMB, + VEMB_RAW, + vEmbRaw: VEMB_RAW, + VGETATTR, + vGetAttr: VGETATTR, + VINFO, + vInfo: VINFO, + VLINKS, + vLinks: VLINKS, + VLINKS_WITHSCORES, + vLinksWithScores: VLINKS_WITHSCORES, + VRANDMEMBER, + vRandMember: VRANDMEMBER, + VREM, + vRem: VREM, + VSETATTR, + vSetAttr: VSETATTR, + VSIM, + vSim: VSIM, + VSIM_WITHSCORES, + vSimWithScores: VSIM_WITHSCORES +} as const satisfies RedisCommands; diff --git a/packages/client/lib/errors.ts b/packages/client/lib/errors.ts new file mode 100644 index 00000000000..4d9ddf7f2b1 --- /dev/null +++ b/packages/client/lib/errors.ts @@ -0,0 +1,101 @@ +export class AbortError extends Error { + constructor() { + super('The command was aborted'); + } +} + +export class WatchError extends Error { + constructor(message = 'One (or more) of the watched keys has been changed') { + super(message); + } +} + +export class ConnectionTimeoutError extends Error { + constructor() { + super('Connection timeout'); + } +} + +export class SocketTimeoutError extends Error { + constructor(timeout: number) { + super(`Socket timeout. 
Expecting data, but didn't receive any in ${timeout}ms.`); + } +} + +export class ClientClosedError extends Error { + constructor() { + super('The client is closed'); + } +} + +export class ClientOfflineError extends Error { + constructor() { + super('The client is offline'); + } +} + +export class DisconnectsClientError extends Error { + constructor() { + super('Disconnects client'); + } +} + +export class SocketClosedUnexpectedlyError extends Error { + constructor() { + super('Socket closed unexpectedly'); + } +} + +export class RootNodesUnavailableError extends Error { + constructor() { + super('All the root nodes are unavailable'); + } +} + +export class ReconnectStrategyError extends Error { + originalError: Error; + socketError: unknown; + + constructor(originalError: Error, socketError: unknown) { + super(originalError.message); + this.originalError = originalError; + this.socketError = socketError; + } +} + +export class ErrorReply extends Error {} + +export class SimpleError extends ErrorReply {} + +export class BlobError extends ErrorReply {} + +export class TimeoutError extends Error {} + +export class SocketTimeoutDuringMaintenanceError extends TimeoutError { + constructor(timeout: number) { + super(`Socket timeout during maintenance. Expecting data, but didn't receive any in ${timeout}ms.`); + } +} + +export class CommandTimeoutDuringMaintenanceError extends TimeoutError { + constructor(timeout: number) { + super(`Command timeout during maintenance. 
Waited to write command for more than ${timeout}ms.`); + } +} + +export class MultiErrorReply extends ErrorReply { + replies: Array<unknown>; + errorIndexes: Array<number>; + + constructor(replies: Array<unknown>, errorIndexes: Array<number>) { + super(`${errorIndexes.length} commands failed, see .replies and .errorIndexes for more information`); + this.replies = replies; + this.errorIndexes = errorIndexes; + } + + *errors() { + for (const index of this.errorIndexes) { + yield this.replies[index]; + } + } +} diff --git a/packages/client/lib/lua-script.ts b/packages/client/lib/lua-script.ts new file mode 100644 index 00000000000..6d395b71232 --- /dev/null +++ b/packages/client/lib/lua-script.ts @@ -0,0 +1,22 @@ +import { createHash } from 'node:crypto'; +import { Command } from './RESP/types'; + +export type RedisScriptConfig = Command & { + SCRIPT: string | Buffer; + NUMBER_OF_KEYS?: number; +} + +export interface SHA1 { + SHA1: string; +} + +export function defineScript<S extends RedisScriptConfig>(script: S): S & SHA1 { + return { + ...script, + SHA1: scriptSha1(script.SCRIPT) + }; +} + +export function scriptSha1(script: RedisScriptConfig['SCRIPT']): string { + return createHash('sha1').update(script).digest('hex'); +} diff --git a/packages/client/lib/multi-command.spec.ts b/packages/client/lib/multi-command.spec.ts new file mode 100644 index 00000000000..7e77f88d10b --- /dev/null +++ b/packages/client/lib/multi-command.spec.ts @@ -0,0 +1,77 @@ +import { strict as assert } from 'node:assert'; +import RedisMultiCommand from './multi-command'; +import { SQUARE_SCRIPT } from './client/index.spec'; + +describe('Multi Command', () => { + it('addCommand', () => { + const multi = new RedisMultiCommand(); + multi.addCommand(['PING']); + + assert.deepEqual( + multi.queue[0].args, + ['PING'] + ); + }); + + describe('addScript', () => { + const multi = new RedisMultiCommand(); + + it('should use EVAL', () => { + multi.addScript(SQUARE_SCRIPT, ['1']); + assert.deepEqual( + Array.from(multi.queue.at(-1).args), + ['EVAL', 
SQUARE_SCRIPT.SCRIPT, '1', '1'] + ); + }); + + it('should use EVALSHA', () => { + multi.addScript(SQUARE_SCRIPT, ['2']); + assert.deepEqual( + Array.from(multi.queue.at(-1).args), + ['EVALSHA', SQUARE_SCRIPT.SHA1, '1', '2'] + ); + }); + + it('without NUMBER_OF_KEYS', () => { + multi.addScript({ + ...SQUARE_SCRIPT, + NUMBER_OF_KEYS: undefined + }, ['2']); + assert.deepEqual( + Array.from(multi.queue.at(-1).args), + ['EVALSHA', SQUARE_SCRIPT.SHA1, '2'] + ); + }); + }); + + describe('exec', () => { + it('without commands', () => { + assert.deepEqual( + new RedisMultiCommand().queue, + [] + ); + }); + + it('with commands', () => { + const multi = new RedisMultiCommand(); + multi.addCommand(['PING']); + + assert.deepEqual( + multi.queue, + [{ + args: ['PING'], + transformReply: undefined + }] + ); + }); + }); + + it('transformReplies', () => { + const multi = new RedisMultiCommand(); + multi.addCommand(['PING'], (reply: string) => reply.substring(0, 2)); + assert.deepEqual( + multi.transformReplies(['PONG']), + ['PO'] + ); + }); +}); diff --git a/packages/client/lib/multi-command.ts b/packages/client/lib/multi-command.ts new file mode 100644 index 00000000000..bb30fddc29e --- /dev/null +++ b/packages/client/lib/multi-command.ts @@ -0,0 +1,77 @@ +import { CommandArguments, RedisScript, ReplyUnion, TransformReply, TypeMapping } from './RESP/types'; +import { ErrorReply, MultiErrorReply } from './errors'; + +export type MULTI_REPLY = { + GENERIC: 'generic'; + TYPED: 'typed'; +}; + +export type MULTI_MODE = { + TYPED: 'typed'; + UNTYPED: 'untyped'; +}; + +export type MultiMode = MULTI_MODE[keyof MULTI_MODE]; + +export type MultiReply = MULTI_REPLY[keyof MULTI_REPLY]; + +export type MultiReplyType = T extends MULTI_REPLY['TYPED'] ? 
REPLIES : Array<ReplyUnion>; + +export interface RedisMultiQueuedCommand { + args: CommandArguments; + transformReply?: TransformReply; +} + +export default class RedisMultiCommand { + private readonly typeMapping?: TypeMapping; + + constructor(typeMapping?: TypeMapping) { + this.typeMapping = typeMapping; + } + + readonly queue: Array<RedisMultiQueuedCommand> = []; + + readonly scriptsInUse = new Set<string>(); + + addCommand(args: CommandArguments, transformReply?: TransformReply) { + this.queue.push({ + args, + transformReply + }); + } + + addScript(script: RedisScript, args: CommandArguments, transformReply?: TransformReply) { + const redisArgs: CommandArguments = []; + redisArgs.preserve = args.preserve; + if (this.scriptsInUse.has(script.SHA1)) { + redisArgs.push('EVALSHA', script.SHA1); + } else { + this.scriptsInUse.add(script.SHA1); + redisArgs.push('EVAL', script.SCRIPT); + } + + if (script.NUMBER_OF_KEYS !== undefined) { + redisArgs.push(script.NUMBER_OF_KEYS.toString()); + } + + redisArgs.push(...args); + + this.addCommand(redisArgs, transformReply); + } + + transformReplies(rawReplies: Array<unknown>): Array<unknown> { + const errorIndexes: Array<number> = [], + replies = rawReplies.map((reply, i) => { + if (reply instanceof ErrorReply) { + errorIndexes.push(i); + return reply; + } + + const { transformReply, args } = this.queue[i]; + return transformReply ? 
transformReply(reply, args.preserve, this.typeMapping) : reply; + }); + + if (errorIndexes.length) throw new MultiErrorReply(replies, errorIndexes); + return replies; + } +} diff --git a/packages/client/lib/sentinel/commands/SENTINEL_MASTER.ts b/packages/client/lib/sentinel/commands/SENTINEL_MASTER.ts new file mode 100644 index 00000000000..842b86a0596 --- /dev/null +++ b/packages/client/lib/sentinel/commands/SENTINEL_MASTER.ts @@ -0,0 +1,18 @@ +import { RedisArgument, MapReply, BlobStringReply, Command } from '../../RESP/types'; +import { CommandParser } from '../../client/parser'; +import { transformTuplesReply } from '../../commands/generic-transformers'; + +export default { + /** + * Returns information about the specified master. + * @param parser - The Redis command parser. + * @param dbname - Name of the master. + */ + parseCommand(parser: CommandParser, dbname: RedisArgument) { + parser.push('SENTINEL', 'MASTER', dbname); + }, + transformReply: { + 2: transformTuplesReply, + 3: undefined as unknown as () => MapReply + } +} as const satisfies Command; diff --git a/packages/client/lib/sentinel/commands/SENTINEL_MONITOR.ts b/packages/client/lib/sentinel/commands/SENTINEL_MONITOR.ts new file mode 100644 index 00000000000..eed4f7e7233 --- /dev/null +++ b/packages/client/lib/sentinel/commands/SENTINEL_MONITOR.ts @@ -0,0 +1,17 @@ +import { CommandParser } from '../../client/parser'; +import { RedisArgument, SimpleStringReply, Command } from '../../RESP/types'; + +export default { + /** + * Instructs a Sentinel to monitor a new master with the specified parameters. + * @param parser - The Redis command parser. + * @param dbname - Name that identifies the master. + * @param host - Host of the master. + * @param port - Port of the master. + * @param quorum - Number of Sentinels that need to agree to trigger a failover. 
+ */ + parseCommand(parser: CommandParser, dbname: RedisArgument, host: RedisArgument, port: RedisArgument, quorum: RedisArgument) { + parser.push('SENTINEL', 'MONITOR', dbname, host, port, quorum); + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/client/lib/sentinel/commands/SENTINEL_REPLICAS.ts b/packages/client/lib/sentinel/commands/SENTINEL_REPLICAS.ts new file mode 100644 index 00000000000..4228a2123d9 --- /dev/null +++ b/packages/client/lib/sentinel/commands/SENTINEL_REPLICAS.ts @@ -0,0 +1,29 @@ +import { CommandParser } from '../../client/parser'; +import { RedisArgument, ArrayReply, BlobStringReply, MapReply, Command, TypeMapping, UnwrapReply } from '../../RESP/types'; +import { transformTuplesReply } from '../../commands/generic-transformers'; + +export default { + /** + * Returns a list of replicas for the specified master. + * @param parser - The Redis command parser. + * @param dbname - Name of the master. 
+ */ + parseCommand(parser: CommandParser, dbname: RedisArgument) { + parser.push('SENTINEL', 'REPLICAS', dbname); + }, + transformReply: { + 2: (reply: ArrayReply>, preserve?: any, typeMapping?: TypeMapping) => { + const inferred = reply as unknown as UnwrapReply; + const initial: Array> = []; + + return inferred.reduce( + (sentinels: Array>, x: ArrayReply) => { + sentinels.push(transformTuplesReply(x, undefined, typeMapping)); + return sentinels; + }, + initial + ); + }, + 3: undefined as unknown as () => ArrayReply> + } +} as const satisfies Command; diff --git a/packages/client/lib/sentinel/commands/SENTINEL_SENTINELS.ts b/packages/client/lib/sentinel/commands/SENTINEL_SENTINELS.ts new file mode 100644 index 00000000000..20cccbb76b6 --- /dev/null +++ b/packages/client/lib/sentinel/commands/SENTINEL_SENTINELS.ts @@ -0,0 +1,29 @@ +import { CommandParser } from '../../client/parser'; +import { RedisArgument, ArrayReply, MapReply, BlobStringReply, Command, TypeMapping, UnwrapReply } from '../../RESP/types'; +import { transformTuplesReply } from '../../commands/generic-transformers'; + +export default { + /** + * Returns a list of Sentinel instances for the specified master. + * @param parser - The Redis command parser. + * @param dbname - Name of the master. 
+ */ + parseCommand(parser: CommandParser, dbname: RedisArgument) { + parser.push('SENTINEL', 'SENTINELS', dbname); + }, + transformReply: { + 2: (reply: ArrayReply>, preserve?: any, typeMapping?: TypeMapping) => { + const inferred = reply as unknown as UnwrapReply; + const initial: Array> = []; + + return inferred.reduce( + (sentinels: Array>, x: ArrayReply) => { + sentinels.push(transformTuplesReply(x, undefined, typeMapping)); + return sentinels; + }, + initial + ); + }, + 3: undefined as unknown as () => ArrayReply> + } +} as const satisfies Command; diff --git a/packages/client/lib/sentinel/commands/SENTINEL_SET.ts b/packages/client/lib/sentinel/commands/SENTINEL_SET.ts new file mode 100644 index 00000000000..b2881c14e5a --- /dev/null +++ b/packages/client/lib/sentinel/commands/SENTINEL_SET.ts @@ -0,0 +1,24 @@ +import { CommandParser } from '../../client/parser'; +import { RedisArgument, SimpleStringReply, Command } from '../../RESP/types'; + +export type SentinelSetOptions = Array<{ + option: RedisArgument; + value: RedisArgument; +}>; + +export default { + /** + * Sets configuration parameters for a specific master. + * @param parser - The Redis command parser. + * @param dbname - Name of the master. + * @param options - Configuration options to set as option-value pairs. 
+ */ + parseCommand(parser: CommandParser, dbname: RedisArgument, options: SentinelSetOptions) { + parser.push('SENTINEL', 'SET', dbname); + + for (const option of options) { + parser.push(option.option, option.value); + } + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/client/lib/sentinel/commands/index.ts b/packages/client/lib/sentinel/commands/index.ts new file mode 100644 index 00000000000..1fc16f872f6 --- /dev/null +++ b/packages/client/lib/sentinel/commands/index.ts @@ -0,0 +1,19 @@ +import { RedisCommands } from '../../RESP/types'; +import SENTINEL_MASTER from './SENTINEL_MASTER'; +import SENTINEL_MONITOR from './SENTINEL_MONITOR'; +import SENTINEL_REPLICAS from './SENTINEL_REPLICAS'; +import SENTINEL_SENTINELS from './SENTINEL_SENTINELS'; +import SENTINEL_SET from './SENTINEL_SET'; + +export default { + SENTINEL_SENTINELS, + sentinelSentinels: SENTINEL_SENTINELS, + SENTINEL_MASTER, + sentinelMaster: SENTINEL_MASTER, + SENTINEL_REPLICAS, + sentinelReplicas: SENTINEL_REPLICAS, + SENTINEL_MONITOR, + sentinelMonitor: SENTINEL_MONITOR, + SENTINEL_SET, + sentinelSet: SENTINEL_SET +} as const satisfies RedisCommands; diff --git a/packages/client/lib/sentinel/index.spec.ts b/packages/client/lib/sentinel/index.spec.ts new file mode 100644 index 00000000000..ef1702eab13 --- /dev/null +++ b/packages/client/lib/sentinel/index.spec.ts @@ -0,0 +1,1044 @@ +import { strict as assert } from 'node:assert'; +import { setTimeout } from 'node:timers/promises'; +import testUtils, { GLOBAL, MATH_FUNCTION } from '../test-utils'; +import { RESP_TYPES } from '../RESP/decoder'; +import { WatchError } from "../errors"; +import { RedisSentinelConfig, SentinelFramework } from "./test-util"; +import { RedisSentinelEvent, RedisSentinelType, RedisSentinelClientType, RedisNode } from "./types"; +import RedisSentinel from "./index"; +import { RedisModules, RedisFunctions, RedisScripts, RespVersions, 
TypeMapping, NumberReply } from '../RESP/types'; +import { promisify } from 'node:util'; +import { exec } from 'node:child_process'; +import { BasicPooledClientSideCache } from '../client/cache' +import { once } from 'node:events' +const execAsync = promisify(exec); + +describe('RedisSentinel', () => { + describe('initialization', () => { + describe('clientSideCache validation', () => { + const clientSideCacheConfig = { ttl: 0, maxEntries: 0 }; + const options = { + name: 'mymaster', + sentinelRootNodes: [ + { host: 'localhost', port: 26379 } + ] + }; + + it('should throw error when clientSideCache is enabled with RESP 2', () => { + assert.throws( + () => RedisSentinel.create({ + ...options, + clientSideCache: clientSideCacheConfig, + RESP: 2 as const, + }), + new Error('Client Side Caching is only supported with RESP3') + ); + }); + + it('should throw error when clientSideCache is enabled with RESP undefined', () => { + assert.throws( + () => RedisSentinel.create({ + ...options, + clientSideCache: clientSideCacheConfig, + }), + new Error('Client Side Caching is only supported with RESP3') + ); + }); + + it('should not throw when clientSideCache is enabled with RESP 3', () => { + assert.doesNotThrow(() => + RedisSentinel.create({ + ...options, + clientSideCache: clientSideCacheConfig, + RESP: 3 as const, + }) + ); + }); + + testUtils.testWithClientSentinel('should successfully connect to sentinel', async () => { + }, { + ...GLOBAL.SENTINEL.OPEN, + sentinelOptions: { + RESP: 3, + clientSideCache: { ttl: 0, maxEntries: 0, evictPolicy: 'LRU'}, + }, + }) + + }); + }); +}); + +[GLOBAL.SENTINEL.OPEN, GLOBAL.SENTINEL.PASSWORD].forEach(testOptions => { + const passIndex = testOptions.serverArguments.indexOf('--requirepass')+1; + let password: string | undefined = undefined; + if (passIndex != 0) { + password = testOptions.serverArguments[passIndex]; + } + + describe(`test with password - ${password}`, () => { + testUtils.testWithClientSentinel('client should be 
authenticated', async sentinel => { + await assert.doesNotReject(sentinel.set('x', 1)); + }, testOptions); + + testUtils.testWithClientSentinel('try to connect multiple times', async sentinel => { + await assert.rejects(sentinel.connect()); + }, testOptions); + + + testUtils.testWithClientSentinel('should respect type mapping', async sentinel => { + const typeMapped = sentinel.withTypeMapping({ + [RESP_TYPES.SIMPLE_STRING]: Buffer + }); + + const resp = await typeMapped.ping(); + assert.deepEqual(resp, Buffer.from('PONG')); + }, testOptions); + + testUtils.testWithClientSentinel('many readers', async sentinel => { + await sentinel.set("x", 1); + for (let i = 0; i < 10; i++) { + if (await sentinel.get("x") == "1") { + break; + } + await setTimeout(1000); + } + + const promises: Array> = []; + for (let i = 0; i < 500; i++) { + promises.push(sentinel.get("x")); + } + + const resp = await Promise.all(promises); + assert.equal(resp.length, 500); + for (let i = 0; i < 500; i++) { + assert.equal(resp[i], "1", `failed on match at ${i}`); + } + }, testOptions); + + testUtils.testWithClientSentinel('use', async sentinel => { + await sentinel.use( + async (client: any ) => { + await assert.doesNotReject(client.get('x')); + } + ); + }, testOptions); + + testUtils.testWithClientSentinel('watch does not carry over leases', async sentinel => { + assert.equal(await sentinel.use(client => client.watch("x")), 'OK') + assert.equal(await sentinel.use(client => client.set('x', 1)), 'OK'); + assert.deepEqual(await sentinel.use(client => client.multi().get('x').exec()), ['1']); + }, testOptions); + + testUtils.testWithClientSentinel('plain pubsub - channel', async sentinel => { + let pubSubResolve; + const pubSubPromise = new Promise((res) => { + pubSubResolve = res; + }); + + let tester = false; + await sentinel.subscribe('test', () => { + tester = true; + pubSubResolve(1); + }) + + await sentinel.publish('test', 'hello world'); + await pubSubPromise; + assert.equal(tester, true); + + 
// now unsubscribe + tester = false; + await sentinel.unsubscribe('test') + await sentinel.publish('test', 'hello world'); + await setTimeout(1000); + + assert.equal(tester, false); + }, testOptions); + + testUtils.testWithClientSentinel('plain pubsub - pattern', async sentinel => { + let pubSubResolve; + const pubSubPromise = new Promise((res) => { + pubSubResolve = res; + }); + + let tester = false; + await sentinel.pSubscribe('test*', () => { + tester = true; + pubSubResolve(1); + }) + + await sentinel.publish('testy', 'hello world'); + await pubSubPromise; + assert.equal(tester, true); + + // now unsubscribe + tester = false; + await sentinel.pUnsubscribe('test*'); + await sentinel.publish('testy', 'hello world'); + await setTimeout(1000); + + assert.equal(tester, false); + }, testOptions) + }); +}); + +describe(`test with scripts`, () => { + testUtils.testWithClientSentinel('with script', async sentinel => { + const [, reply] = await Promise.all([ + sentinel.set('key', '2'), + sentinel.square('key') + ]); + + assert.equal(reply, 4); + }, GLOBAL.SENTINEL.WITH_SCRIPT); + + testUtils.testWithClientSentinel('with script multi', async sentinel => { + const reply = await sentinel.multi().set('key', 2).square('key').exec(); + assert.deepEqual(reply, ['OK', 4]); + }, GLOBAL.SENTINEL.WITH_SCRIPT); + + testUtils.testWithClientSentinel('use with script', async sentinel => { + const reply = await sentinel.use( + async (client: any) => { + assert.equal(await client.set('key', '2'), 'OK'); + assert.equal(await client.get('key'), '2'); + return client.square('key') + } + ); + }, GLOBAL.SENTINEL.WITH_SCRIPT) +}); + +describe(`test with functions`, () => { + testUtils.testWithClientSentinel('with function', async sentinel => { + await sentinel.functionLoad( + MATH_FUNCTION.code, + { REPLACE: true } + ); + + await sentinel.set('key', '2'); + const resp = await sentinel.math.square('key'); + + assert.equal(resp, 4); + }, GLOBAL.SENTINEL.WITH_FUNCTION); + + 
testUtils.testWithClientSentinel('with function multi', async sentinel => { + await sentinel.functionLoad( + MATH_FUNCTION.code, + { REPLACE: true } + ); + + const reply = await sentinel.multi().set('key', 2).math.square('key').exec(); + assert.deepEqual(reply, ['OK', 4]); + }, GLOBAL.SENTINEL.WITH_FUNCTION); + + testUtils.testWithClientSentinel('use with function', async sentinel => { + await sentinel.functionLoad( + MATH_FUNCTION.code, + { REPLACE: true } + ); + + const reply = await sentinel.use( + async (client: any) => { + await client.set('key', '2'); + return client.math.square('key'); + } + ); + + assert.equal(reply, 4); + }, GLOBAL.SENTINEL.WITH_FUNCTION); +}); + +describe(`test with modules`, () => { + testUtils.testWithClientSentinel('with module', async sentinel => { + const resp = await sentinel.bf.add('key', 'item') + assert.equal(resp, true); + }, GLOBAL.SENTINEL.WITH_MODULE); + + testUtils.testWithClientSentinel('with module multi', async sentinel => { + const resp = await sentinel.multi().bf.add('key', 'item').exec(); + assert.deepEqual(resp, [true]); + }, GLOBAL.SENTINEL.WITH_MODULE); + + testUtils.testWithClientSentinel('use with module', async sentinel => { + const reply = await sentinel.use( + async (client: any) => { + return client.bf.add('key', 'item'); + } + ); + + assert.equal(reply, true); + }, GLOBAL.SENTINEL.WITH_MODULE); +}); + +describe(`test with replica pool size 1`, () => { + testUtils.testWithClientSentinel('client lease', async sentinel => { + sentinel.on("error", () => { }); + + const clientLease = await sentinel.acquire(); + clientLease.set('x', 456); + + let matched = false; + /* waits for replication */ + for (let i = 0; i < 15; i++) { + try { + assert.equal(await sentinel.get("x"), '456'); + matched = true; + break; + } catch (err) { + await setTimeout(1000); + } + } + + clientLease.release(); + + assert.equal(matched, true); + }, GLOBAL.SENTINEL.WITH_REPLICA_POOL_SIZE_1); + + testUtils.testWithClientSentinel('block on 
pool', async sentinel => { + const promise = sentinel.use( + async client => { + await setTimeout(1000); + return await client.get("x"); + } + ) + + await sentinel.set("x", 1); + assert.equal(await promise, null); + }, GLOBAL.SENTINEL.WITH_REPLICA_POOL_SIZE_1); + + testUtils.testWithClientSentinel('pipeline', async sentinel => { + const resp = await sentinel.multi().set('x', 1).get('x').execAsPipeline(); + assert.deepEqual(resp, ['OK', '1']); + }, GLOBAL.SENTINEL.WITH_REPLICA_POOL_SIZE_1); +}); + +describe(`test with masterPoolSize 2, reserve client true`, () => { + // TODO: flaky test, sometimes fails with `promise1 === null` + testUtils.testWithClientSentinel('reserve client, takes a client out of pool', async sentinel => { + const promise1 = sentinel.use( + async client => { + const val = await client.get("x"); + await client.set("x", 2); + return val; + } + ) + + const promise2 = sentinel.use( + async client => { + return client.get("x"); + } + ) + + await sentinel.set("x", 1); + assert.equal(await promise1, "1"); + assert.equal(await promise2, "2"); + }, Object.assign(GLOBAL.SENTINEL.WITH_RESERVE_CLIENT_MASTER_POOL_SIZE_2, {skipTest: true})); +}); + +describe(`test with masterPoolSize 2`, () => { + testUtils.testWithClientSentinel('multple clients', async sentinel => { + sentinel.on("error", () => { }); + + const promise = sentinel.use( + async client => { + await sentinel!.set("x", 1); + await client.get("x"); + } + ) + + await assert.doesNotReject(promise); + }, GLOBAL.SENTINEL.WITH_MASTER_POOL_SIZE_2); + + testUtils.testWithClientSentinel('use - watch - clean', async sentinel => { + let promise = sentinel.use(async (client) => { + await client.set("x", 1); + await client.watch("x"); + return client.multi().get("x").exec(); + }); + + assert.deepEqual(await promise, ['1']); + }, GLOBAL.SENTINEL.WITH_MASTER_POOL_SIZE_2); + + testUtils.testWithClientSentinel('use - watch - dirty', async sentinel => { + let promise = sentinel.use(async (client) => { + await 
client.set('x', 1); + await client.watch('x'); + await sentinel!.set('x', 2); + return client.multi().get('x').exec(); + }); + + await assert.rejects(promise, new WatchError()); + }, GLOBAL.SENTINEL.WITH_MASTER_POOL_SIZE_2); + + testUtils.testWithClientSentinel('lease - watch - clean', async sentinel => { + const leasedClient = await sentinel.acquire(); + await leasedClient.set('x', 1); + await leasedClient.watch('x'); + assert.deepEqual(await leasedClient.multi().get('x').exec(), ['1']) + }, GLOBAL.SENTINEL.WITH_MASTER_POOL_SIZE_2); + + testUtils.testWithClientSentinel('lease - watch - dirty', async sentinel => { + const leasedClient = await sentinel.acquire(); + await leasedClient.set('x', 1); + await leasedClient.watch('x'); + await leasedClient.set('x', 2); + + await assert.rejects(leasedClient.multi().get('x').exec(), new WatchError()); + }, GLOBAL.SENTINEL.WITH_MASTER_POOL_SIZE_2); +}); + +async function steadyState(frame: SentinelFramework) { + // wait a bit to ensure that sentinels are seeing eachother + await setTimeout(2000) + let checkedMaster = false; + let checkedReplicas = false; + while (!checkedMaster || !checkedReplicas) { + if (!checkedMaster) { + const master = await frame.sentinelMaster(); + if (master?.flags === 'master') { + checkedMaster = true; + } + } + if (!checkedReplicas) { + const replicas = (await frame.sentinelReplicas()); + checkedReplicas = true; + for (const replica of replicas!) 
{ + checkedReplicas &&= (replica.flags === 'slave'); + } + } + } + let nodeResolve, nodeReject; + const nodePromise = new Promise((res, rej) => { + nodeResolve = res; + nodeReject = rej; + }) + const seenNodes = new Set(); + let sentinel: RedisSentinelType | undefined; + const tracer = []; + try { + sentinel = frame.getSentinelClient({ replicaPoolSize: 1, scanInterval: 2000 }, false) + .on('topology-change', (event: RedisSentinelEvent) => { + if (event.type == "MASTER_CHANGE" || event.type == "REPLICA_ADD") { + seenNodes.add(event.node.port); + if (seenNodes.size == frame.getAllNodesPort().length) { + nodeResolve(); + } + } + }).on('error', err => { }); + sentinel.setTracer(tracer); + await sentinel.connect(); + await nodePromise; + + await sentinel.flushAll(); + } finally { + if (sentinel !== undefined) { + sentinel.destroy(); + } + } +} + +describe('legacy tests', () => { + const config: RedisSentinelConfig = { sentinelName: "test", numberOfNodes: 3, password: undefined }; + const frame = new SentinelFramework(config); + let tracer = new Array(); + let stopMeasuringBlocking = false; + let longestDelta = 0; + let longestTestDelta = 0; + let last: number; + + + describe('Sentinel Client', function () { + let sentinel: RedisSentinelType | undefined; + + beforeEach(async function () { + this.timeout(15000); + + last = Date.now(); + + function deltaMeasurer() { + const delta = Date.now() - last; + if (delta > longestDelta) { + longestDelta = delta; + } + if (delta > longestTestDelta) { + longestTestDelta = delta; + } + if (!stopMeasuringBlocking) { + last = Date.now(); + setImmediate(deltaMeasurer); + } + } + setImmediate(deltaMeasurer); + await frame.spawnRedisSentinel(); + await frame.getAllRunning(); + await steadyState(frame); + longestTestDelta = 0; + }) + + afterEach(async function () { + this.timeout(60000); + // avoid errors in afterEach that end testing + if (sentinel !== undefined) { + sentinel.on('error', () => { }); + } + + if (this!.currentTest!.state === 
'failed') { + console.log(`longest event loop blocked delta: ${longestDelta}`); + console.log(`longest event loop blocked in failing test: ${longestTestDelta}`); + console.log("trace:"); + for (const line of tracer) { + console.log(line); + } + console.log(`sentinel object state:`) + console.log(`master: ${JSON.stringify(sentinel?.getMasterNode())}`) + console.log(`replicas: ${JSON.stringify(sentinel?.getReplicaNodes().entries)}`) + const results = await Promise.all([ + frame.sentinelSentinels(), + frame.sentinelMaster(), + frame.sentinelReplicas() + ]) + + console.log(`sentinel sentinels:\n${JSON.stringify(results[0], undefined, '\t')}`); + console.log(`sentinel master:\n${JSON.stringify(results[1], undefined, '\t')}`); + console.log(`sentinel replicas:\n${JSON.stringify(results[2], undefined, '\t')}`); + const { stdout, stderr } = await execAsync("docker ps -a"); + console.log(`docker stdout:\n${stdout}`); + const ids = frame.getAllDockerIds(); + console.log("docker logs"); + for (const [id, port] of ids) { + console.log(`${id}/${port}\n`); + const { stdout, stderr } = await execAsync(`docker logs ${id}`, {maxBuffer: 8192 * 8192 * 4}); + console.log(stdout); + } + } + tracer.length = 0; + + if (sentinel !== undefined) { + await sentinel.destroy(); + sentinel = undefined; + } + + stopMeasuringBlocking = true; + + await frame.cleanup(); + }) + + it('use', async function () { + this.timeout(60000); + + sentinel = frame.getSentinelClient({ replicaPoolSize: 1 }); + sentinel.on("error", () => { }); + await sentinel.connect(); + + await sentinel.use( + async (client: RedisSentinelClientType, ) => { + const masterNode = sentinel!.getMasterNode(); + await frame.stopNode(masterNode!.port.toString()); + await assert.doesNotReject(client.get('x')); + } + ); + }); + + // stops master to force sentinel to update + it('stop master', async function () { + this.timeout(60000); + + sentinel = frame.getSentinelClient(); + sentinel.setTracer(tracer); + sentinel.on("error", () => { 
}); + await sentinel.connect(); + + tracer.push(`connected`); + + let masterChangeResolve; + const masterChangePromise = new Promise((res) => { + masterChangeResolve = res; + }) + + const masterNode = await sentinel.getMasterNode(); + sentinel.on('topology-change', (event: RedisSentinelEvent) => { + tracer.push(`got topology-change event: ${JSON.stringify(event)}`); + if (event.type === "MASTER_CHANGE" && event.node.port != masterNode!.port) { + tracer.push(`got expected master change event`); + masterChangeResolve(event.node); + } + }); + + tracer.push(`stopping master node`); + await frame.stopNode(masterNode!.port.toString()); + tracer.push(`stopped master node`); + + tracer.push(`waiting on master change promise`); + const newMaster = await masterChangePromise as RedisNode; + tracer.push(`got new master node of ${newMaster.port}`); + assert.notEqual(masterNode!.port, newMaster.port); + }); + + // if master changes, client should make sure user knows watches are invalid + it('watch across master change', async function () { + this.timeout(60000); + + sentinel = frame.getSentinelClient({ masterPoolSize: 2 }); + sentinel.setTracer(tracer); + sentinel.on("error", () => { }); + await sentinel.connect(); + + tracer.push("connected"); + + const client = await sentinel.acquire(); + tracer.push("acquired lease"); + + await client.set("x", 1); + await client.watch("x"); + + tracer.push("did a watch on lease"); + + let resolve; + const promise = new Promise((res) => { + resolve = res; + }) + + const masterNode = sentinel.getMasterNode(); + tracer.push(`got masterPort as ${masterNode!.port}`); + + sentinel.on('topology-change', (event: RedisSentinelEvent) => { + tracer.push(`got topology-change event: ${JSON.stringify(event)}`); + if (event.type === "MASTER_CHANGE" && event.node.port != masterNode!.port) { + tracer.push("resolving promise"); + resolve(event.node); + } + }); + + tracer.push("stopping master node"); + await frame.stopNode(masterNode!.port.toString()); + 
tracer.push("stopped master node and waiting on promise"); + + const newMaster = await promise as RedisNode; + tracer.push(`promise returned, newMaster = ${JSON.stringify(newMaster)}`); + assert.notEqual(masterNode!.port, newMaster.port); + tracer.push(`newMaster does not equal old master`); + + tracer.push(`waiting to assert that a multi/exec now fails`); + await assert.rejects(async () => { await client.multi().get("x").exec() }, new Error("sentinel config changed in middle of a WATCH Transaction")); + tracer.push(`asserted that a multi/exec now fails`); + }); + + // same as above, but set a watch before and after master change, shouldn't change the fact that watches are invalid + it('watch before and after master change', async function () { + this.timeout(60000); + + sentinel = frame.getSentinelClient({ masterPoolSize: 2 }); + sentinel.setTracer(tracer); + sentinel.on("error", () => { }); + await sentinel.connect(); + tracer.push("connected"); + + const client = await sentinel.acquire(); + tracer.push("got leased client"); + await client.set("x", 1); + await client.watch("x"); + + tracer.push("set and watched x"); + + let resolve; + const promise = new Promise((res) => { + resolve = res; + }) + + const masterNode = sentinel.getMasterNode(); + tracer.push(`initial masterPort = ${masterNode!.port} `); + + sentinel.on('topology-change', (event: RedisSentinelEvent) => { + tracer.push(`got topology-change event: ${JSON.stringify(event)}`); + if (event.type === "MASTER_CHANGE" && event.node.port != masterNode!.port) { + tracer.push("got a master change event that is not the same as before"); + resolve(event.node); + } + }); + + tracer.push("stopping master"); + await frame.stopNode(masterNode!.port.toString()); + tracer.push("stopped master"); + + tracer.push("waiting on master change promise"); + const newMaster = await promise as RedisNode; + tracer.push(`got master change port as ${newMaster.port}`); + assert.notEqual(masterNode!.port, newMaster.port); + + 
tracer.push("watching again, shouldn't matter"); + await client.watch("y"); + + tracer.push("expecting multi to be rejected"); + await assert.rejects(async () => { await client.multi().get("x").exec() }, new Error("sentinel config changed in middle of a WATCH Transaction")); + tracer.push("multi was rejected"); + }); + + + // pubsub continues to work, even with a master change + it('pubsub - channel - with master change', async function () { + this.timeout(60000); + + sentinel = frame.getSentinelClient(); + sentinel.setTracer(tracer); + sentinel.on("error", () => { }); + await sentinel.connect(); + tracer.push(`connected`); + + let pubSubResolve; + const pubSubPromise = new Promise((res) => { + pubSubResolve = res; + }) + + let tester = false; + await sentinel.subscribe('test', () => { + tracer.push(`got pubsub message`); + tester = true; + pubSubResolve(1); + }) + + let masterChangeResolve; + const masterChangePromise = new Promise((res) => { + masterChangeResolve = res; + }) + + const masterNode = sentinel.getMasterNode(); + tracer.push(`got masterPort as ${masterNode!.port}`); + + sentinel.on('topology-change', (event: RedisSentinelEvent) => { + tracer.push(`got topology-change event: ${JSON.stringify(event)}`); + if (event.type === "MASTER_CHANGE" && event.node.port != masterNode!.port) { + tracer.push("got a master change event that is not the same as before"); + masterChangeResolve(event.node); + } + }); + + tracer.push("stopping master"); + await frame.stopNode(masterNode!.port.toString()); + tracer.push("stopped master and waiting on change promise"); + + const newMaster = await masterChangePromise as RedisNode; + tracer.push(`got master change port as ${newMaster.port}`); + assert.notEqual(masterNode!.port, newMaster.port); + + tracer.push(`publishing pubsub message`); + await sentinel.publish('test', 'hello world'); + tracer.push(`published pubsub message and waiting pn pubsub promise`); + await pubSubPromise; + tracer.push(`got pubsub promise`); + + 
assert.equal(tester, true); + + // now unsubscribe + tester = false + await sentinel.unsubscribe('test') + await sentinel.publish('test', 'hello world'); + await setTimeout(1000); + + assert.equal(tester, false); + }); + + it('pubsub - pattern - with master change', async function () { + this.timeout(60000); + + sentinel = frame.getSentinelClient(); + sentinel.setTracer(tracer); + sentinel.on("error", () => { }); + await sentinel.connect(); + tracer.push(`connected`); + + let pubSubResolve; + const pubSubPromise = new Promise((res) => { + pubSubResolve = res; + }) + + let tester = false; + await sentinel.pSubscribe('test*', () => { + tracer.push(`got pubsub message`); + tester = true; + pubSubResolve(1); + }) + + let masterChangeResolve; + const masterChangePromise = new Promise((res) => { + masterChangeResolve = res; + }) + + const masterNode = sentinel.getMasterNode(); + tracer.push(`got masterPort as ${masterNode!.port}`); + + sentinel.on('topology-change', (event: RedisSentinelEvent) => { + tracer.push(`got topology-change event: ${JSON.stringify(event)}`); + if (event.type === "MASTER_CHANGE" && event.node.port != masterNode!.port) { + tracer.push("got a master change event that is not the same as before"); + masterChangeResolve(event.node); + } + }); + + tracer.push("stopping master"); + await frame.stopNode(masterNode!.port.toString()); + tracer.push("stopped master and waiting on master change promise"); + + const newMaster = await masterChangePromise as RedisNode; + tracer.push(`got master change port as ${newMaster.port}`); + assert.notEqual(masterNode!.port, newMaster.port); + + tracer.push(`publishing pubsub message`); + await sentinel.publish('testy', 'hello world'); + tracer.push(`published pubsub message and waiting on pubsub promise`); + await pubSubPromise; + tracer.push(`got pubsub promise`); + assert.equal(tester, true); + + // now unsubscribe + tester = false + await sentinel.pUnsubscribe('test*'); + await sentinel.publish('testy', 'hello 
world'); + await setTimeout(1000); + + assert.equal(tester, false); + }); + + // if we stop a node, the comand should "retry" until we reconfigure topology and execute on new topology + it('command immeaditely after stopping master', async function () { + this.timeout(60000); + + sentinel = frame.getSentinelClient(); + sentinel.setTracer(tracer); + sentinel.on("error", () => { }); + await sentinel.connect(); + + tracer.push("connected"); + + let masterChangeResolve; + const masterChangePromise = new Promise((res) => { + masterChangeResolve = res; + }) + + const masterNode = sentinel.getMasterNode(); + tracer.push(`original master port = ${masterNode!.port}`); + + let changeCount = 0; + sentinel.on('topology-change', (event: RedisSentinelEvent) => { + tracer.push(`got topology-change event: ${JSON.stringify(event)}`); + if (event.type === "MASTER_CHANGE" && event.node.port != masterNode!.port) { + changeCount++; + tracer.push(`got topology-change event we expected`); + masterChangeResolve(event.node); + } + }); + + tracer.push(`stopping masterNode`); + await frame.stopNode(masterNode!.port.toString()); + tracer.push(`stopped masterNode`); + assert.equal(await sentinel.set('x', 123), 'OK'); + tracer.push(`did the set operation`); + const presumamblyNewMaster = sentinel.getMasterNode(); + tracer.push(`new master node seems to be ${presumamblyNewMaster?.port} and waiting on master change promise`); + + const newMaster = await masterChangePromise as RedisNode; + tracer.push(`got new masternode event saying master is at ${newMaster.port}`); + assert.notEqual(masterNode!.port, newMaster.port); + + tracer.push(`doing the get`); + const val = await sentinel.get('x'); + tracer.push(`did the get and got ${val}`); + const newestMaster = sentinel.getMasterNode() + tracer.push(`after get, we see master as ${newestMaster?.port}`); + + switch (changeCount) { + case 1: + // if we only changed masters once, we should have the proper value + assert.equal(val, '123'); + break; + case 
2: + // we changed masters twice quickly, so probably didn't replicate + // therefore, this is soewhat flakey, but the above is the common case + assert(val == '123' || val == null); + break; + default: + assert(false, "unexpected case"); + } + }); + + it('shutdown sentinel node', async function () { + this.timeout(60000); + sentinel = frame.getSentinelClient(); + sentinel.setTracer(tracer); + sentinel.on("error", () => { }); + await sentinel.connect(); + tracer.push("connected"); + + let sentinelChangeResolve; + const sentinelChangePromise = new Promise((res) => { + sentinelChangeResolve = res; + }) + + const sentinelNode = sentinel.getSentinelNode(); + tracer.push(`sentinelNode = ${sentinelNode?.port}`) + + sentinel.on('topology-change', (event: RedisSentinelEvent) => { + tracer.push(`got topology-change event: ${JSON.stringify(event)}`); + if (event.type === "SENTINEL_CHANGE") { + tracer.push("got sentinel change event"); + sentinelChangeResolve(event.node); + } + }); + + tracer.push("Stopping sentinel node"); + await frame.stopSentinel(sentinelNode!.port.toString()); + tracer.push("Stopped sentinel node and waiting on sentinel change promise"); + const newSentinel = await sentinelChangePromise as RedisNode; + tracer.push("got sentinel change promise"); + assert.notEqual(sentinelNode!.port, newSentinel.port); + }); + + it('timer works, and updates sentinel list', async function () { + this.timeout(60000); + + sentinel = frame.getSentinelClient({ scanInterval: 1000 }); + sentinel.setTracer(tracer); + await sentinel.connect(); + tracer.push("connected"); + + let sentinelChangeResolve; + const sentinelChangePromise = new Promise((res) => { + sentinelChangeResolve = res; + }) + + sentinel.on('topology-change', (event: RedisSentinelEvent) => { + tracer.push(`got topology-change event: ${JSON.stringify(event)}`); + if (event.type === "SENTINE_LIST_CHANGE" && event.size == 4) { + tracer.push(`got sentinel list change event with right size`); + 
sentinelChangeResolve(event.size); + } + }); + + tracer.push(`adding sentinel`); + await frame.addSentinel(); + tracer.push(`added sentinel and waiting on sentinel change promise`); + const newSentinelSize = await sentinelChangePromise as number; + + assert.equal(newSentinelSize, 4); + }); + + it('stop replica, bring back replica', async function () { + this.timeout(60000); + + sentinel = frame.getSentinelClient({ replicaPoolSize: 1 }); + sentinel.setTracer(tracer); + sentinel.on('error', err => { }); + await sentinel.connect(); + tracer.push("connected"); + + let sentinelRemoveResolve; + const sentinelRemovePromise = new Promise((res) => { + sentinelRemoveResolve = res; + }) + + const replicaPort = await frame.getRandonNonMasterNode(); + + sentinel.on('topology-change', (event: RedisSentinelEvent) => { + tracer.push(`got topology-change event: ${JSON.stringify(event)}`); + if (event.type === "REPLICA_REMOVE") { + if (event.node.port.toString() == replicaPort) { + tracer.push("got expected replica removed event"); + sentinelRemoveResolve(event.node); + } else { + tracer.push(`got replica removed event for a different node: ${event.node.port}`); + } + } + }); + + tracer.push(`replicaPort = ${replicaPort} and stopping it`); + await frame.stopNode(replicaPort); + tracer.push("stopped replica and waiting on sentinel removed promise"); + const stoppedNode = await sentinelRemovePromise as RedisNode; + tracer.push("got removed promise"); + assert.equal(stoppedNode.port, Number(replicaPort)); + + let sentinelRestartedResolve; + const sentinelRestartedPromise = new Promise((res) => { + sentinelRestartedResolve = res; + }) + + sentinel.on('topology-change', (event: RedisSentinelEvent) => { + tracer.push(`got topology-change event: ${JSON.stringify(event)}`); + if (event.type === "REPLICA_ADD") { + tracer.push("got replica added event"); + sentinelRestartedResolve(event.node); + } + }); + + tracer.push("restarting replica"); + await frame.restartNode(replicaPort); + 
tracer.push("restarted replica and waiting on restart promise"); + const restartedNode = await sentinelRestartedPromise as RedisNode; + tracer.push("got restarted promise"); + assert.equal(restartedNode.port, Number(replicaPort)); + }) + + it('add a node / new replica', async function () { + this.timeout(60000); + + sentinel = frame.getSentinelClient({ scanInterval: 2000, replicaPoolSize: 1 }); + sentinel.setTracer(tracer); + // need to handle errors, as the spawning a new docker node can cause existing connections to time out + sentinel.on('error', err => { }); + await sentinel.connect(); + tracer.push("connected"); + + let nodeAddedResolve: (value: RedisNode) => void; + const nodeAddedPromise = new Promise((res) => { + nodeAddedResolve = res as (value: RedisNode) => void; + }); + + const portSet = new Set(); + for (const port of frame.getAllNodesPort()) { + portSet.add(port); + } + + // "on" and not "once" as due to connection timeouts, can happen multiple times, and want right one + sentinel.on('topology-change', (event: RedisSentinelEvent) => { + tracer.push(`got topology-change event: ${JSON.stringify(event)}`); + if (event.type === "REPLICA_ADD") { + if (!portSet.has(event.node.port)) { + tracer.push("got expected replica added event"); + nodeAddedResolve(event.node); + } + } + }); + + tracer.push("adding node"); + await frame.addNode(); + tracer.push("added node and waiting on added promise"); + await nodeAddedPromise; + }) + + it('with client side caching', async function() { + this.timeout(30000); + const csc = new BasicPooledClientSideCache(); + + sentinel = frame.getSentinelClient({nodeClientOptions: {RESP: 3 as const}, RESP: 3 as const, clientSideCache: csc, masterPoolSize: 5}); + await sentinel.connect(); + + await sentinel.set('x', 1); + await sentinel.get('x'); + await sentinel.get('x'); + await sentinel.get('x'); + await sentinel.get('x'); + + assert.equal(1, csc.stats().missCount); + assert.equal(3, csc.stats().hitCount); + + const 
invalidatePromise = once(csc, 'invalidate'); + await sentinel.set('x', 2); + await invalidatePromise; + await sentinel.get('x'); + await sentinel.get('x'); + await sentinel.get('x'); + await sentinel.get('x'); + + assert.equal(csc.stats().missCount, 2); + assert.equal(csc.stats().hitCount, 6); + }) + }); +}); diff --git a/packages/client/lib/sentinel/index.ts b/packages/client/lib/sentinel/index.ts new file mode 100644 index 00000000000..a9a2b9a5e5d --- /dev/null +++ b/packages/client/lib/sentinel/index.ts @@ -0,0 +1,1588 @@ +import { EventEmitter } from 'node:events'; +import { CommandArguments, RedisFunctions, RedisModules, RedisScripts, ReplyUnion, RespVersions, TypeMapping } from '../RESP/types'; +import RedisClient, { RedisClientOptions, RedisClientType } from '../client'; +import { CommandOptions } from '../client/commands-queue'; +import { attachConfig } from '../commander'; +import COMMANDS from '../commands'; +import { ClientErrorEvent, NamespaceProxySentinel, NamespaceProxySentinelClient, ProxySentinel, ProxySentinelClient, RedisNode, RedisSentinelClientType, RedisSentinelEvent, RedisSentinelOptions, RedisSentinelType, SentinelCommander } from './types'; +import { clientSocketToNode, createCommand, createFunctionCommand, createModuleCommand, createNodeList, createScriptCommand, parseNode } from './utils'; +import { RedisMultiQueuedCommand } from '../multi-command'; +import RedisSentinelMultiCommand, { RedisSentinelMultiCommandType } from './multi-commands'; +import { PubSubListener } from '../client/pub-sub'; +import { PubSubProxy } from './pub-sub-proxy'; +import { setTimeout } from 'node:timers/promises'; +import RedisSentinelModule from './module' +import { RedisVariadicArgument } from '../commands/generic-transformers'; +import { WaitQueue } from './wait-queue'; +import { TcpNetConnectOpts } from 'node:net'; +import { RedisTcpSocketOptions } from '../client/socket'; +import { BasicPooledClientSideCache, PooledClientSideCacheProvider } from 
'../client/cache'; + +interface ClientInfo { + id: number; +} + +export class RedisSentinelClient< + M extends RedisModules, + F extends RedisFunctions, + S extends RedisScripts, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping +> { + #clientInfo: ClientInfo | undefined; + #internal: RedisSentinelInternal; + readonly _self: RedisSentinelClient; + + /** + * Indicates if the client connection is open + * + * @returns `true` if the client connection is open, `false` otherwise + */ + + get isOpen() { + return this._self.#internal.isOpen; + } + + /** + * Indicates if the client connection is ready to accept commands + * + * @returns `true` if the client connection is ready, `false` otherwise + */ + get isReady() { + return this._self.#internal.isReady; + } + + /** + * Gets the command options configured for this client + * + * @returns The command options for this client or `undefined` if none were set + */ + get commandOptions() { + return this._self.#commandOptions; + } + + #commandOptions?: CommandOptions; + + constructor( + internal: RedisSentinelInternal, + clientInfo: ClientInfo, + commandOptions?: CommandOptions + ) { + this._self = this; + this.#internal = internal; + this.#clientInfo = clientInfo; + this.#commandOptions = commandOptions; + } + + static factory< + M extends RedisModules = {}, + F extends RedisFunctions = {}, + S extends RedisScripts = {}, + RESP extends RespVersions = 2, + TYPE_MAPPING extends TypeMapping = {} + >(config?: SentinelCommander) { + const SentinelClient = attachConfig({ + BaseClass: RedisSentinelClient, + commands: COMMANDS, + createCommand: createCommand, + createModuleCommand: createModuleCommand, + createFunctionCommand: createFunctionCommand, + createScriptCommand: createScriptCommand, + config + }); + + SentinelClient.prototype.Multi = RedisSentinelMultiCommand.extend(config); + + return ( + internal: RedisSentinelInternal, + clientInfo: ClientInfo, + commandOptions?: CommandOptions + ) => { + // returning a 
"proxy" to prevent the namespaces._self to leak between "proxies" + return Object.create(new SentinelClient(internal, clientInfo, commandOptions)) as RedisSentinelClientType; + }; + } + + static create< + M extends RedisModules = {}, + F extends RedisFunctions = {}, + S extends RedisScripts = {}, + RESP extends RespVersions = 2, + TYPE_MAPPING extends TypeMapping = {} + >( + options: RedisSentinelOptions, + internal: RedisSentinelInternal, + clientInfo: ClientInfo, + commandOptions?: CommandOptions, + ) { + return RedisSentinelClient.factory(options)(internal, clientInfo, commandOptions); + } + + withCommandOptions< + OPTIONS extends CommandOptions, + TYPE_MAPPING extends TypeMapping + >(options: OPTIONS) { + const proxy = Object.create(this); + proxy._commandOptions = options; + return proxy as RedisSentinelClientType< + M, + F, + S, + RESP, + TYPE_MAPPING extends TypeMapping ? TYPE_MAPPING : {} + >; + } + + private _commandOptionsProxy< + K extends keyof CommandOptions, + V extends CommandOptions[K] + >( + key: K, + value: V + ) { + const proxy = Object.create(this); + proxy._commandOptions = Object.create(this._self.#commandOptions ?? null); + proxy._commandOptions[key] = value; + return proxy as RedisSentinelClientType< + M, + F, + S, + RESP, + K extends 'typeMapping' ? V extends TypeMapping ? 
V : {} : TYPE_MAPPING + >; + } + + /** + * Override the `typeMapping` command option + */ + withTypeMapping(typeMapping: TYPE_MAPPING) { + return this._commandOptionsProxy('typeMapping', typeMapping); + } + + async _execute( + isReadonly: boolean | undefined, + fn: (client: RedisClient) => Promise + ): Promise { + if (this._self.#clientInfo === undefined) { + throw new Error("Attempted execution on released RedisSentinelClient lease"); + } + + return await this._self.#internal.execute(fn, this._self.#clientInfo); + } + + async sendCommand( + isReadonly: boolean | undefined, + args: CommandArguments, + options?: CommandOptions, + ): Promise { + return this._execute( + isReadonly, + client => client.sendCommand(args, options) + ); + } + + /** + * @internal + */ + async _executePipeline( + isReadonly: boolean | undefined, + commands: Array + ) { + return this._execute( + isReadonly, + client => client._executePipeline(commands) + ); + } + + /**f + * @internal + */ + async _executeMulti( + isReadonly: boolean | undefined, + commands: Array + ) { + return this._execute( + isReadonly, + client => client._executeMulti(commands) + ); + } + + MULTI(): RedisSentinelMultiCommandType<[], M, F, S, RESP, TYPE_MAPPING> { + return new (this as any).Multi(this); + } + + multi = this.MULTI; + + WATCH(key: RedisVariadicArgument) { + if (this._self.#clientInfo === undefined) { + throw new Error("Attempted execution on released RedisSentinelClient lease"); + } + + return this._execute( + false, + client => client.watch(key) + ) + } + + watch = this.WATCH; + + UNWATCH() { + if (this._self.#clientInfo === undefined) { + throw new Error('Attempted execution on released RedisSentinelClient lease'); + } + + return this._execute( + false, + client => client.unwatch() + ) + } + + unwatch = this.UNWATCH; + + /** + * Releases the client lease back to the pool + * + * After calling this method, the client instance should no longer be used as it + * will be returned to the client pool and may be 
given to other operations. + * + * @returns A promise that resolves when the client is ready to be reused, or undefined + * if the client was immediately ready + * @throws Error if the lease has already been released + */ + release() { + if (this._self.#clientInfo === undefined) { + throw new Error('RedisSentinelClient lease already released'); + } + + const result = this._self.#internal.releaseClientLease(this._self.#clientInfo); + this._self.#clientInfo = undefined; + return result; + } +} + +export default class RedisSentinel< + M extends RedisModules, + F extends RedisFunctions, + S extends RedisScripts, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping +> extends EventEmitter { + readonly _self: RedisSentinel; + + #internal: RedisSentinelInternal; + #options: RedisSentinelOptions; + + /** + * Indicates if the sentinel connection is open + * + * @returns `true` if the sentinel connection is open, `false` otherwise + */ + get isOpen() { + return this._self.#internal.isOpen; + } + + /** + * Indicates if the sentinel connection is ready to accept commands + * + * @returns `true` if the sentinel connection is ready, `false` otherwise + */ + get isReady() { + return this._self.#internal.isReady; + } + + get commandOptions() { + return this._self.#commandOptions; + } + + #commandOptions?: CommandOptions; + + #trace: (msg: string) => unknown = () => { }; + + #reservedClientInfo?: ClientInfo; + #masterClientCount = 0; + #masterClientInfo?: ClientInfo; + + get clientSideCache() { + return this._self.#internal.clientSideCache; + } + + constructor(options: RedisSentinelOptions) { + super(); + + this._self = this; + + this.#options = options; + + if (options.commandOptions) { + this.#commandOptions = options.commandOptions; + } + + this.#internal = new RedisSentinelInternal(options); + this.#internal.on('error', err => this.emit('error', err)); + + /* pass through underling events */ + /* TODO: perhaps make this a struct and one vent, instead of multiple 
events */ + this.#internal.on('topology-change', (event: RedisSentinelEvent) => { + if (!this.emit('topology-change', event)) { + this._self.#trace(`RedisSentinel: re-emit for topology-change for ${event.type} event returned false`); + } + }); + } + + static factory< + M extends RedisModules = {}, + F extends RedisFunctions = {}, + S extends RedisScripts = {}, + RESP extends RespVersions = 2, + TYPE_MAPPING extends TypeMapping = {} + >(config?: SentinelCommander) { + const Sentinel = attachConfig({ + BaseClass: RedisSentinel, + commands: COMMANDS, + createCommand: createCommand, + createModuleCommand: createModuleCommand, + createFunctionCommand: createFunctionCommand, + createScriptCommand: createScriptCommand, + config + }); + + Sentinel.prototype.Multi = RedisSentinelMultiCommand.extend(config); + + return (options: Omit>) => { + // returning a "proxy" to prevent the namespaces.self to leak between "proxies" + return Object.create(new Sentinel(options)) as RedisSentinelType; + }; + } + + static create< + M extends RedisModules = {}, + F extends RedisFunctions = {}, + S extends RedisScripts = {}, + RESP extends RespVersions = 2, + TYPE_MAPPING extends TypeMapping = {} + >(options: RedisSentinelOptions) { + return RedisSentinel.factory(options)(options); + } + + withCommandOptions< + OPTIONS extends CommandOptions, + TYPE_MAPPING extends TypeMapping, + >(options: OPTIONS) { + const proxy = Object.create(this); + proxy._commandOptions = options; + return proxy as RedisSentinelType< + M, + F, + S, + RESP, + TYPE_MAPPING extends TypeMapping ? 
TYPE_MAPPING : {} + >; + } + + private _commandOptionsProxy< + K extends keyof CommandOptions, + V extends CommandOptions[K] + >( + key: K, + value: V + ) { + const proxy = Object.create(this); + // Create new commandOptions object with the inherited properties + proxy._self.#commandOptions = { + ...(this._self.#commandOptions || {}), + [key]: value + }; + return proxy as RedisSentinelType< + M, + F, + S, + RESP, + K extends 'typeMapping' ? V extends TypeMapping ? V : {} : TYPE_MAPPING + >; + } + + /** + * Override the `typeMapping` command option + */ + withTypeMapping(typeMapping: TYPE_MAPPING) { + return this._commandOptionsProxy('typeMapping', typeMapping); + } + + async connect() { + await this._self.#internal.connect(); + + if (this._self.#options.reserveClient) { + this._self.#reservedClientInfo = await this._self.#internal.getClientLease(); + } + + return this as unknown as RedisSentinelType; + } + + async _execute( + isReadonly: boolean | undefined, + fn: (client: RedisClient) => Promise + ): Promise { + let clientInfo: ClientInfo | undefined; + if (!isReadonly || !this._self.#internal.useReplicas) { + if (this._self.#reservedClientInfo) { + clientInfo = this._self.#reservedClientInfo; + } else { + this._self.#masterClientInfo ??= await this._self.#internal.getClientLease(); + clientInfo = this._self.#masterClientInfo; + this._self.#masterClientCount++; + } + } + + try { + return await this._self.#internal.execute(fn, clientInfo); + } finally { + if ( + clientInfo !== undefined && + clientInfo === this._self.#masterClientInfo && + --this._self.#masterClientCount === 0 + ) { + const promise = this._self.#internal.releaseClientLease(clientInfo); + this._self.#masterClientInfo = undefined; + if (promise) await promise; + } + } + } + + async use(fn: (sentinelClient: RedisSentinelClientType) => Promise) { + const clientInfo = await this._self.#internal.getClientLease(); + + try { + return await fn( + RedisSentinelClient.create(this._self.#options, 
this._self.#internal, clientInfo, this._self.#commandOptions) + ); + } finally { + const promise = this._self.#internal.releaseClientLease(clientInfo); + if (promise) await promise; + } + } + + async sendCommand( + isReadonly: boolean | undefined, + args: CommandArguments, + options?: CommandOptions, + ): Promise { + return this._execute( + isReadonly, + client => client.sendCommand(args, options) + ); + } + + /** + * @internal + */ + async _executePipeline( + isReadonly: boolean | undefined, + commands: Array + ) { + return this._execute( + isReadonly, + client => client._executePipeline(commands) + ); + } + + /**f + * @internal + */ + async _executeMulti( + isReadonly: boolean | undefined, + commands: Array + ) { + return this._execute( + isReadonly, + client => client._executeMulti(commands) + ); + } + + MULTI(): RedisSentinelMultiCommandType<[], M, F, S, RESP, TYPE_MAPPING> { + return new (this as any).Multi(this); + } + + multi = this.MULTI; + + async close() { + return this._self.#internal.close(); + } + + destroy() { + return this._self.#internal.destroy(); + } + + async SUBSCRIBE( + channels: string | Array, + listener: PubSubListener, + bufferMode?: T + ) { + return this._self.#internal.subscribe(channels, listener, bufferMode); + } + + subscribe = this.SUBSCRIBE; + + async UNSUBSCRIBE( + channels?: string | Array, + listener?: PubSubListener, + bufferMode?: T + ) { + return this._self.#internal.unsubscribe(channels, listener, bufferMode); + } + + unsubscribe = this.UNSUBSCRIBE; + + async PSUBSCRIBE( + patterns: string | Array, + listener: PubSubListener, + bufferMode?: T + ) { + return this._self.#internal.pSubscribe(patterns, listener, bufferMode); + } + + pSubscribe = this.PSUBSCRIBE; + + async PUNSUBSCRIBE( + patterns?: string | Array, + listener?: PubSubListener, + bufferMode?: T + ) { + return this._self.#internal.pUnsubscribe(patterns, listener, bufferMode); + } + + pUnsubscribe = this.PUNSUBSCRIBE; + + /** + * Acquires a master client lease for 
exclusive operations + * + * Used when multiple commands need to run on an exclusive client (for example, using `WATCH/MULTI/EXEC`). + * The returned client must be released after use with the `release()` method. + * + * @returns A promise that resolves to a Redis client connected to the master node + * @example + * ```javascript + * const clientLease = await sentinel.acquire(); + * + * try { + * await clientLease.watch('key'); + * const resp = await clientLease.multi() + * .get('key') + * .exec(); + * } finally { + * clientLease.release(); + * } + * ``` + */ + async acquire(): Promise> { + const clientInfo = await this._self.#internal.getClientLease(); + return RedisSentinelClient.create(this._self.#options, this._self.#internal, clientInfo, this._self.#commandOptions); + } + + getSentinelNode(): RedisNode | undefined { + return this._self.#internal.getSentinelNode(); + } + + getMasterNode(): RedisNode | undefined { + return this._self.#internal.getMasterNode(); + } + + getReplicaNodes(): Map { + return this._self.#internal.getReplicaNodes(); + } + + setTracer(tracer?: Array) { + if (tracer) { + this._self.#trace = (msg: string) => { tracer.push(msg) }; + } else { + this._self.#trace = () => { }; + } + + this._self.#internal.setTracer(tracer); + } +} + +class RedisSentinelInternal< + M extends RedisModules, + F extends RedisFunctions, + S extends RedisScripts, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping +> extends EventEmitter { + #isOpen = false; + + get isOpen() { + return this.#isOpen; + } + + #isReady = false; + + get isReady() { + return this.#isReady; + } + + readonly #name: string; + readonly #nodeClientOptions: RedisClientOptions; + readonly #sentinelClientOptions: RedisClientOptions; + readonly #scanInterval: number; + readonly #passthroughClientErrorEvents: boolean; + readonly #RESP?: RespVersions; + + #anotherReset = false; + + #configEpoch: number = 0; + + #sentinelRootNodes: Array; + #sentinelClient?: RedisClientType; + + 
#masterClients: Array> = []; + #masterClientQueue: WaitQueue; + readonly #masterPoolSize: number; + + #replicaClients: Array> = []; + #replicaClientsIdx: number = 0; + readonly #replicaPoolSize: number; + + get useReplicas() { + return this.#replicaPoolSize > 0; + } + + #connectPromise?: Promise; + #maxCommandRediscovers: number; + readonly #pubSubProxy: PubSubProxy; + + #scanTimer?: NodeJS.Timeout + + #destroy = false; + + #trace: (msg: string) => unknown = () => { }; + + #clientSideCache?: PooledClientSideCacheProvider; + get clientSideCache() { + return this.#clientSideCache; + } + + #validateOptions(options?: RedisSentinelOptions) { + if (options?.clientSideCache && options?.RESP !== 3) { + throw new Error('Client Side Caching is only supported with RESP3'); + } + } + + constructor(options: RedisSentinelOptions) { + super(); + + this.#validateOptions(options); + + this.#name = options.name; + + this.#RESP = options.RESP; + this.#sentinelRootNodes = Array.from(options.sentinelRootNodes); + this.#maxCommandRediscovers = options.maxCommandRediscovers ?? 16; + this.#masterPoolSize = options.masterPoolSize ?? 1; + this.#replicaPoolSize = options.replicaPoolSize ?? 0; + this.#scanInterval = options.scanInterval ?? 0; + this.#passthroughClientErrorEvents = options.passthroughClientErrorEvents ?? false; + + this.#nodeClientOptions = (options.nodeClientOptions ? 
{...options.nodeClientOptions} : {}) as RedisClientOptions; + if (this.#nodeClientOptions.url !== undefined) { + throw new Error("invalid nodeClientOptions for Sentinel"); + } + + if (options.clientSideCache) { + if (options.clientSideCache instanceof PooledClientSideCacheProvider) { + this.#clientSideCache = this.#nodeClientOptions.clientSideCache = options.clientSideCache; + } else { + const cscConfig = options.clientSideCache; + this.#clientSideCache = this.#nodeClientOptions.clientSideCache = new BasicPooledClientSideCache(cscConfig); +// this.#clientSideCache = this.#nodeClientOptions.clientSideCache = new PooledNoRedirectClientSideCache(cscConfig); + } + } + + this.#sentinelClientOptions = options.sentinelClientOptions ? Object.assign({} as RedisClientOptions, options.sentinelClientOptions) : {}; + this.#sentinelClientOptions.modules = RedisSentinelModule; + + if (this.#sentinelClientOptions.url !== undefined) { + throw new Error("invalid sentinelClientOptions for Sentinel"); + } + + this.#masterClientQueue = new WaitQueue(); + for (let i = 0; i < this.#masterPoolSize; i++) { + this.#masterClientQueue.push(i); + } + + /* persistent object for life of sentinel object */ + this.#pubSubProxy = new PubSubProxy( + this.#nodeClientOptions, + err => this.emit('error', err) + ); + } + + #createClient(node: RedisNode, clientOptions: RedisClientOptions, reconnectStrategy?: false) { + return RedisClient.create({ + //first take the globally set RESP + RESP: this.#RESP, + //then take the client options, which can in theory overwrite it + ...clientOptions, + socket: { + ...clientOptions.socket, + host: node.host, + port: node.port, + ...(reconnectStrategy !== undefined && { reconnectStrategy }) + } + }); + } + + /** + * Gets a client lease from the master client pool + * + * @returns A client info object or a promise that resolves to a client info object + * when a client becomes available + */ + getClientLease(): ClientInfo | Promise { + const id = 
this.#masterClientQueue.shift(); + if (id !== undefined) { + return { id }; + } + + return this.#masterClientQueue.wait().then(id => ({ id })); + } + + /** + * Releases a client lease back to the pool + * + * If the client was used for a transaction that might have left it in a dirty state, + * it will be reset before being returned to the pool. + * + * @param clientInfo The client info object representing the client to release + * @returns A promise that resolves when the client is ready to be reused, or undefined + * if the client was immediately ready or no longer exists + */ + releaseClientLease(clientInfo: ClientInfo) { + const client = this.#masterClients[clientInfo.id]; + // client can be undefined if releasing in middle of a reconfigure + if (client !== undefined) { + const dirtyPromise = client.resetIfDirty(); + if (dirtyPromise) { + return dirtyPromise + .then(() => this.#masterClientQueue.push(clientInfo.id)); + } + } + + this.#masterClientQueue.push(clientInfo.id); + } + + async connect() { + if (this.#isOpen) { + throw new Error("already attempting to open") + } + + try { + this.#isOpen = true; + + this.#connectPromise = this.#connect(); + await this.#connectPromise; + this.#isReady = true; + } finally { + this.#connectPromise = undefined; + if (this.#scanInterval > 0) { + this.#scanTimer = setInterval(this.#reset.bind(this), this.#scanInterval); + } + } + } + + async #connect() { + let count = 0; + while (true) { + this.#trace("starting connect loop"); + + count+=1; + if (this.#destroy) { + this.#trace("in #connect and want to destroy") + return; + } + try { + this.#anotherReset = false; + await this.transform(this.analyze(await this.observe())); + if (this.#anotherReset) { + this.#trace("#connect: anotherReset is true, so continuing"); + continue; + } + + this.#trace("#connect: returning"); + return; + } catch (e: any) { + this.#trace(`#connect: exception ${e.message}`); + if (!this.#isReady && count > this.#maxCommandRediscovers) { + throw e; + } + 
+ if (e.message !== 'no valid master node') { + console.log(e); + } + await setTimeout(1000); + } finally { + this.#trace("finished connect"); + } + } + } + + async execute( + fn: (client: RedisClientType) => Promise, + clientInfo?: ClientInfo + ): Promise { + let iter = 0; + + while (true) { + if (this.#connectPromise !== undefined) { + await this.#connectPromise; + } + + const client = this.#getClient(clientInfo); + + if (!client.isReady) { + await this.#reset(); + continue; + } + const sockOpts = client.options?.socket as TcpNetConnectOpts | undefined; + this.#trace("attemping to send command to " + sockOpts?.host + ":" + sockOpts?.port) + + try { + /* + // force testing of READONLY errors + if (clientInfo !== undefined) { + if (Math.floor(Math.random() * 10) < 1) { + console.log("throwing READONLY error"); + throw new Error("READONLY You can't write against a read only replica."); + } + } + */ + return await fn(client); + } catch (err) { + if (++iter > this.#maxCommandRediscovers || !(err instanceof Error)) { + throw err; + } + + /* + rediscover and retry if doing a command against a "master" + a) READONLY error (topology has changed) but we haven't been notified yet via pubsub + b) client is "not ready" (disconnected), which means topology might have changed, but sentinel might not see it yet + */ + if (clientInfo !== undefined && (err.message.startsWith('READONLY') || !client.isReady)) { + await this.#reset(); + continue; + } + + throw err; + } + } + } + + async #createPubSub(client: RedisClientType) { + /* Whenever sentinels or slaves get added, or when slave configuration changes, reconfigure */ + await client.pSubscribe(['switch-master', '[-+]sdown', '+slave', '+sentinel', '[-+]odown', '+slave-reconf-done'], (message, channel) => { + this.#handlePubSubControlChannel(channel, message); + }, true); + + return client; + } + + async #handlePubSubControlChannel(channel: Buffer, message: Buffer) { + this.#trace("pubsub control channel message on " + channel); + 
this.#reset(); + } + + // if clientInfo is defined, it corresponds to a master client in the #masterClients array, otherwise loop around replicaClients + #getClient(clientInfo?: ClientInfo): RedisClientType { + if (clientInfo !== undefined) { + return this.#masterClients[clientInfo.id]; + } + + if (this.#replicaClientsIdx >= this.#replicaClients.length) { + this.#replicaClientsIdx = 0; + } + + if (this.#replicaClients.length == 0) { + throw new Error("no replicas available for read"); + } + + return this.#replicaClients[this.#replicaClientsIdx++]; + } + + async #reset() { + /* closing / don't reset */ + if (this.#isReady == false || this.#destroy == true) { + return; + } + + // already in #connect() + if (this.#connectPromise !== undefined) { + this.#anotherReset = true; + return await this.#connectPromise; + } + + try { + this.#connectPromise = this.#connect(); + return await this.#connectPromise; + } finally { + this.#trace("finished reconfgure"); + this.#connectPromise = undefined; + } + } + + #handleSentinelFailure(node: RedisNode) { + const found = this.#sentinelRootNodes.findIndex( + (rootNode) => rootNode.host === node.host && rootNode.port === node.port + ); + if (found !== -1) { + this.#sentinelRootNodes.splice(found, 1); + } + this.#reset(); + } + + async close() { + this.#destroy = true; + + if (this.#connectPromise != undefined) { + await this.#connectPromise; + } + + this.#isReady = false; + + this.#clientSideCache?.onPoolClose(); + + if (this.#scanTimer) { + clearInterval(this.#scanTimer); + this.#scanTimer = undefined; + } + + const promises = []; + + if (this.#sentinelClient !== undefined) { + if (this.#sentinelClient.isOpen) { + promises.push(this.#sentinelClient.close()); + } + this.#sentinelClient = undefined; + } + + for (const client of this.#masterClients) { + if (client.isOpen) { + promises.push(client.close()); + } + } + + this.#masterClients = []; + + for (const client of this.#replicaClients) { + if (client.isOpen) { + 
promises.push(client.close()); + } + } + + this.#replicaClients = []; + + await Promise.all(promises); + + this.#pubSubProxy.destroy(); + + this.#isOpen = false; + } + + // destroy has to be async because its stopping others async events, timers and the like + // and shouldn't return until its finished. + async destroy() { + this.#destroy = true; + + if (this.#connectPromise != undefined) { + await this.#connectPromise; + } + + this.#isReady = false; + + this.#clientSideCache?.onPoolClose(); + + if (this.#scanTimer) { + clearInterval(this.#scanTimer); + this.#scanTimer = undefined; + } + + if (this.#sentinelClient !== undefined) { + if (this.#sentinelClient.isOpen) { + this.#sentinelClient.destroy(); + } + this.#sentinelClient = undefined; + } + + for (const client of this.#masterClients) { + if (client.isOpen) { + client.destroy(); + } + } + this.#masterClients = []; + + for (const client of this.#replicaClients) { + if (client.isOpen) { + client.destroy(); + } + } + this.#replicaClients = []; + + this.#pubSubProxy.destroy(); + + this.#isOpen = false + this.#destroy = false; + } + + async subscribe( + channels: string | Array, + listener: PubSubListener, + bufferMode?: T + ) { + return this.#pubSubProxy.subscribe(channels, listener, bufferMode); + } + + async unsubscribe( + channels?: string | Array, + listener?: PubSubListener, + bufferMode?: T + ) { + return this.#pubSubProxy.unsubscribe(channels, listener, bufferMode); + } + + async pSubscribe( + patterns: string | Array, + listener: PubSubListener, + bufferMode?: T + ) { + return this.#pubSubProxy.pSubscribe(patterns, listener, bufferMode); + } + + async pUnsubscribe( + patterns?: string | Array, + listener?: PubSubListener, + bufferMode?: T + ) { + return this.#pubSubProxy.pUnsubscribe(patterns, listener, bufferMode); + } + + // observe/analyze/transform remediation functions + async observe() { + for (const node of this.#sentinelRootNodes) { + let client: RedisClientType | undefined; + try { + 
this.#trace(`observe: trying to connect to sentinel: ${node.host}:${node.port}`) + client = this.#createClient(node, this.#sentinelClientOptions, false) as unknown as RedisClientType; + client.on('error', (err) => this.emit('error', `obseve client error: ${err}`)); + await client.connect(); + this.#trace(`observe: connected to sentinel`) + + const [sentinelData, masterData, replicaData] = await Promise.all([ + client.sentinel.sentinelSentinels(this.#name), + client.sentinel.sentinelMaster(this.#name), + client.sentinel.sentinelReplicas(this.#name) + ]); + + this.#trace("observe: got all sentinel data"); + + const ret = { + sentinelConnected: node, + sentinelData: sentinelData, + masterData: masterData, + replicaData: replicaData, + currentMaster: this.getMasterNode(), + currentReplicas: this.getReplicaNodes(), + currentSentinel: this.getSentinelNode(), + replicaPoolSize: this.#replicaPoolSize, + useReplicas: this.useReplicas + } + + return ret; + } catch (err) { + this.#trace(`observe: error ${err}`); + this.emit('error', err); + } finally { + if (client !== undefined && client.isOpen) { + this.#trace(`observe: destroying sentinel client`); + client.destroy(); + } + } + } + + this.#trace(`observe: none of the sentinels are available`); + throw new Error('None of the sentinels are available'); + } + + analyze(observed: Awaited["observe"]>>) { + let master = parseNode(observed.masterData); + if (master === undefined) { + this.#trace(`analyze: no valid master node because ${observed.masterData.flags}`); + throw new Error("no valid master node"); + } + + if (master.host === observed.currentMaster?.host && master.port === observed.currentMaster?.port) { + this.#trace(`analyze: master node hasn't changed from ${observed.currentMaster?.host}:${observed.currentMaster?.port}`); + master = undefined; + } else { + this.#trace(`analyze: master node has changed to ${master.host}:${master.port} from ${observed.currentMaster?.host}:${observed.currentMaster?.port}`); + } + + let 
sentinel: RedisNode | undefined = observed.sentinelConnected; + if (sentinel.host === observed.currentSentinel?.host && sentinel.port === observed.currentSentinel.port) { + this.#trace(`analyze: sentinel node hasn't changed`); + sentinel = undefined; + } else { + this.#trace(`analyze: sentinel node has changed to ${sentinel.host}:${sentinel.port}`); + } + + const replicasToClose: Array = []; + const replicasToOpen = new Map(); + + const desiredSet = new Set(); + const seen = new Set(); + + if (observed.useReplicas) { + const replicaList = createNodeList(observed.replicaData) + + for (const node of replicaList) { + desiredSet.add(JSON.stringify(node)); + } + + for (const [node, value] of observed.currentReplicas) { + if (!desiredSet.has(JSON.stringify(node))) { + replicasToClose.push(node); + this.#trace(`analyze: adding ${node.host}:${node.port} to replicsToClose`); + } else { + seen.add(JSON.stringify(node)); + if (value != observed.replicaPoolSize) { + replicasToOpen.set(node, observed.replicaPoolSize - value); + this.#trace(`analyze: adding ${node.host}:${node.port} to replicsToOpen`); + } + } + } + + for (const node of replicaList) { + if (!seen.has(JSON.stringify(node))) { + replicasToOpen.set(node, observed.replicaPoolSize); + this.#trace(`analyze: adding ${node.host}:${node.port} to replicsToOpen`); + } + } + } + + const ret = { + sentinelList: [observed.sentinelConnected].concat(createNodeList(observed.sentinelData)), + epoch: Number(observed.masterData['config-epoch']), + + sentinelToOpen: sentinel, + masterToOpen: master, + replicasToClose: replicasToClose, + replicasToOpen: replicasToOpen, + }; + + return ret; + } + + async transform(analyzed: ReturnType["analyze"]>) { + this.#trace("transform: enter"); + + let promises: Array> = []; + + if (analyzed.sentinelToOpen) { + this.#trace(`transform: opening a new sentinel`); + if (this.#sentinelClient !== undefined && this.#sentinelClient.isOpen) { + this.#trace(`transform: destroying old sentinel as open`); + 
this.#sentinelClient.destroy() + this.#sentinelClient = undefined; + } else { + this.#trace(`transform: not destroying old sentinel as not open`); + } + + this.#trace(`transform: creating new sentinel to ${analyzed.sentinelToOpen.host}:${analyzed.sentinelToOpen.port}`); + const node = analyzed.sentinelToOpen; + const client = this.#createClient(analyzed.sentinelToOpen, this.#sentinelClientOptions, false); + client.on('error', (err: Error) => { + if (this.#passthroughClientErrorEvents) { + this.emit('error', new Error(`Sentinel Client (${node.host}:${node.port}): ${err.message}`, { cause: err })); + } + const event: ClientErrorEvent = { + type: 'SENTINEL', + node: clientSocketToNode(client.options!.socket!), + error: err + }; + this.emit('client-error', event); + this.#handleSentinelFailure(node); + }) + .on('end', () => this.#handleSentinelFailure(node)); + this.#sentinelClient = client; + + this.#trace(`transform: adding sentinel client connect() to promise list`); + const promise = this.#sentinelClient.connect().then((client) => { return this.#createPubSub(client) }); + promises.push(promise); + + this.#trace(`created sentinel client to ${analyzed.sentinelToOpen.host}:${analyzed.sentinelToOpen.port}`); + const event: RedisSentinelEvent = { + type: "SENTINEL_CHANGE", + node: analyzed.sentinelToOpen + } + this.#trace(`transform: emiting topology-change event for sentinel_change`); + if (!this.emit('topology-change', event)) { + this.#trace(`transform: emit for topology-change for sentinel_change returned false`); + } + } + + if (analyzed.masterToOpen) { + this.#trace(`transform: opening a new master`); + const masterPromises = []; + const masterWatches: Array = []; + + this.#trace(`transform: destroying old masters if open`); + for (const client of this.#masterClients) { + masterWatches.push(client.isWatching || client.isDirtyWatch); + + if (client.isOpen) { + client.destroy() + } + } + + this.#masterClients = []; + + this.#trace(`transform: creating all master 
clients and adding connect promises`); + for (let i = 0; i < this.#masterPoolSize; i++) { + const node = analyzed.masterToOpen; + const client = this.#createClient(analyzed.masterToOpen, this.#nodeClientOptions); + client.on('error', (err: Error) => { + if (this.#passthroughClientErrorEvents) { + this.emit('error', new Error(`Master Client (${node.host}:${node.port}): ${err.message}`, { cause: err })); + } + const event: ClientErrorEvent = { + type: "MASTER", + node: clientSocketToNode(client.options!.socket!), + error: err + }; + this.emit('client-error', event); + }); + + if (masterWatches[i]) { + client.setDirtyWatch("sentinel config changed in middle of a WATCH Transaction"); + } + this.#masterClients.push(client); + masterPromises.push(client.connect()); + + this.#trace(`created master client to ${analyzed.masterToOpen.host}:${analyzed.masterToOpen.port}`); + } + + this.#trace(`transform: adding promise to change #pubSubProxy node`); + masterPromises.push(this.#pubSubProxy.changeNode(analyzed.masterToOpen)); + promises.push(...masterPromises); + const event: RedisSentinelEvent = { + type: "MASTER_CHANGE", + node: analyzed.masterToOpen + } + this.#trace(`transform: emiting topology-change event for master_change`); + if (!this.emit('topology-change', event)) { + this.#trace(`transform: emit for topology-change for master_change returned false`); + } + this.#configEpoch++; + } + + const replicaCloseSet = new Set(); + for (const node of analyzed.replicasToClose) { + const str = JSON.stringify(node); + replicaCloseSet.add(str); + } + + const newClientList: Array> = []; + const removedSet = new Set(); + + for (const replica of this.#replicaClients) { + const node = clientSocketToNode(replica.options!.socket!); + const str = JSON.stringify(node); + + if (replicaCloseSet.has(str) || !replica.isOpen) { + if (replica.isOpen) { + const sockOpts = replica.options?.socket as TcpNetConnectOpts | undefined; + this.#trace(`destroying replica client to 
${sockOpts?.host}:${sockOpts?.port}`); + replica.destroy() + } + if (!removedSet.has(str)) { + const event: RedisSentinelEvent = { + type: "REPLICA_REMOVE", + node: node + } + this.emit('topology-change', event); + removedSet.add(str); + } + } else { + newClientList.push(replica); + } + } + this.#replicaClients = newClientList; + + if (analyzed.replicasToOpen.size != 0) { + for (const [node, size] of analyzed.replicasToOpen) { + for (let i = 0; i < size; i++) { + const client = this.#createClient(node, this.#nodeClientOptions); + client.on('error', (err: Error) => { + if (this.#passthroughClientErrorEvents) { + this.emit('error', new Error(`Replica Client (${node.host}:${node.port}): ${err.message}`, { cause: err })); + } + const event: ClientErrorEvent = { + type: "REPLICA", + node: clientSocketToNode(client.options!.socket!), + error: err + }; + this.emit('client-error', event); + }); + + this.#replicaClients.push(client); + promises.push(client.connect()); + + this.#trace(`created replica client to ${node.host}:${node.port}`); + } + const event: RedisSentinelEvent = { + type: "REPLICA_ADD", + node: node + } + this.emit('topology-change', event); + } + } + + if (analyzed.sentinelList.length != this.#sentinelRootNodes.length) { + this.#sentinelRootNodes = analyzed.sentinelList; + const event: RedisSentinelEvent = { + type: "SENTINE_LIST_CHANGE", + size: analyzed.sentinelList.length + } + this.emit('topology-change', event); + } + + await Promise.all(promises); + this.#trace("transform: exit"); + } + + // introspection functions + getMasterNode(): RedisNode | undefined { + if (this.#masterClients.length == 0) { + return undefined; + } + + for (const master of this.#masterClients) { + if (master.isReady) { + return clientSocketToNode(master.options!.socket!); + } + } + + return undefined; + } + + getSentinelNode(): RedisNode | undefined { + if (this.#sentinelClient === undefined) { + return undefined; + } + + return 
clientSocketToNode(this.#sentinelClient.options!.socket!); + } + + getReplicaNodes(): Map { + const ret = new Map(); + const initialMap = new Map(); + + for (const replica of this.#replicaClients) { + const node = clientSocketToNode(replica.options!.socket!); + const hash = JSON.stringify(node); + + if (replica.isReady) { + initialMap.set(hash, (initialMap.get(hash) ?? 0) + 1); + } else { + if (!initialMap.has(hash)) { + initialMap.set(hash, 0); + } + } + } + + for (const [key, value] of initialMap) { + ret.set(JSON.parse(key) as RedisNode, value); + } + + return ret; + } + + setTracer(tracer?: Array) { + if (tracer) { + this.#trace = (msg: string) => { tracer.push(msg) }; + } else { + // empty function is faster than testing if something is defined or not + this.#trace = () => { }; + } + } +} + +export class RedisSentinelFactory extends EventEmitter { + options: RedisSentinelOptions; + #sentinelRootNodes: Array; + #replicaIdx: number = -1; + + constructor(options: RedisSentinelOptions) { + super(); + + this.options = options; + this.#sentinelRootNodes = options.sentinelRootNodes; + } + + async updateSentinelRootNodes() { + for (const node of this.#sentinelRootNodes) { + const client = RedisClient.create({ + ...this.options.sentinelClientOptions, + socket: { + ...this.options.sentinelClientOptions?.socket, + host: node.host, + port: node.port, + reconnectStrategy: false + }, + modules: RedisSentinelModule + }).on('error', (err) => this.emit(`updateSentinelRootNodes: ${err}`)); + try { + await client.connect(); + } catch { + if (client.isOpen) { + client.destroy(); + } + continue; + } + + try { + const sentinelData = await client.sentinel.sentinelSentinels(this.options.name); + this.#sentinelRootNodes = [node].concat(createNodeList(sentinelData)); + return; + } finally { + client.destroy(); + } + } + + throw new Error("Couldn't connect to any sentinel node"); + } + + async getMasterNode() { + let connected = false; + + for (const node of this.#sentinelRootNodes) { + 
const client = RedisClient.create({ + ...this.options.sentinelClientOptions, + socket: { + ...this.options.sentinelClientOptions?.socket, + host: node.host, + port: node.port, + reconnectStrategy: false + }, + modules: RedisSentinelModule + }).on('error', err => this.emit(`getMasterNode: ${err}`)); + + try { + await client.connect(); + } catch { + if (client.isOpen) { + client.destroy(); + } + continue; + } + + connected = true; + + try { + const masterData = await client.sentinel.sentinelMaster(this.options.name); + + let master = parseNode(masterData); + if (master === undefined) { + continue; + } + + return master; + } finally { + client.destroy(); + } + } + + if (connected) { + throw new Error("Master Node Not Enumerated"); + } + + throw new Error("couldn't connect to any sentinels"); + } + + async getMasterClient() { + const master = await this.getMasterNode(); + return RedisClient.create({ + ...this.options.nodeClientOptions, + socket: { + ...this.options.nodeClientOptions?.socket, + host: master.host, + port: master.port + } + }); + } + + async getReplicaNodes() { + let connected = false; + + for (const node of this.#sentinelRootNodes) { + const client = RedisClient.create({ + ...this.options.sentinelClientOptions, + socket: { + ...this.options.sentinelClientOptions?.socket, + host: node.host, + port: node.port, + reconnectStrategy: false + }, + modules: RedisSentinelModule + }).on('error', err => this.emit(`getReplicaNodes: ${err}`)); + + try { + await client.connect(); + } catch { + if (client.isOpen) { + client.destroy(); + } + continue; + } + + connected = true; + + try { + const replicaData = await client.sentinel.sentinelReplicas(this.options.name); + + const replicas = createNodeList(replicaData); + if (replicas.length == 0) { + continue; + } + + return replicas; + } finally { + client.destroy(); + } + } + + if (connected) { + throw new Error("No Replicas Nodes Enumerated"); + } + + throw new Error("couldn't connect to any sentinels"); + } + + async 
getReplicaClient() { + const replicas = await this.getReplicaNodes(); + if (replicas.length == 0) { + throw new Error("no available replicas"); + } + + this.#replicaIdx++; + if (this.#replicaIdx >= replicas.length) { + this.#replicaIdx = 0; + } + + return RedisClient.create({ + ...this.options.nodeClientOptions, + socket: { + ...this.options.nodeClientOptions?.socket, + host: replicas[this.#replicaIdx].host, + port: replicas[this.#replicaIdx].port + } + }); + } +} diff --git a/packages/client/lib/sentinel/module.ts b/packages/client/lib/sentinel/module.ts new file mode 100644 index 00000000000..e6e98e72f6d --- /dev/null +++ b/packages/client/lib/sentinel/module.ts @@ -0,0 +1,7 @@ + +import { RedisModules } from '../RESP/types'; +import sentinel from './commands'; + +export default { + sentinel +} as const satisfies RedisModules; diff --git a/packages/client/lib/sentinel/multi-commands.ts b/packages/client/lib/sentinel/multi-commands.ts new file mode 100644 index 00000000000..e70dc45c790 --- /dev/null +++ b/packages/client/lib/sentinel/multi-commands.ts @@ -0,0 +1,250 @@ +import COMMANDS from '../commands'; +import RedisMultiCommand, { MULTI_REPLY, MultiReply, MultiReplyType } from '../multi-command'; +import { ReplyWithTypeMapping, CommandReply, Command, CommandArguments, CommanderConfig, RedisFunctions, RedisModules, RedisScripts, RespVersions, TransformReply, RedisScript, RedisFunction, TypeMapping } from '../RESP/types'; +import { attachConfig, functionArgumentsPrefix, getTransformReply } from '../commander'; +import { RedisSentinelType } from './types'; +import { BasicCommandParser } from '../client/parser'; +import { Tail } from '../commands/generic-transformers'; + +type CommandSignature< + REPLIES extends Array, + C extends Command, + M extends RedisModules, + F extends RedisFunctions, + S extends RedisScripts, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping +> = (...args: Tail>) => RedisSentinelMultiCommandType< + [...REPLIES, 
ReplyWithTypeMapping, TYPE_MAPPING>], + M, + F, + S, + RESP, + TYPE_MAPPING +>; + +type WithCommands< + REPLIES extends Array, + M extends RedisModules, + F extends RedisFunctions, + S extends RedisScripts, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping +> = { + [P in keyof typeof COMMANDS]: CommandSignature; +}; + +type WithModules< + REPLIES extends Array, + M extends RedisModules, + F extends RedisFunctions, + S extends RedisScripts, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping +> = { + [P in keyof M]: { + [C in keyof M[P]]: CommandSignature; + }; +}; + +type WithFunctions< + REPLIES extends Array, + M extends RedisModules, + F extends RedisFunctions, + S extends RedisScripts, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping +> = { + [L in keyof F]: { + [C in keyof F[L]]: CommandSignature; + }; +}; + +type WithScripts< + REPLIES extends Array, + M extends RedisModules, + F extends RedisFunctions, + S extends RedisScripts, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping +> = { + [P in keyof S]: CommandSignature; +}; + +export type RedisSentinelMultiCommandType< + REPLIES extends Array, + M extends RedisModules, + F extends RedisFunctions, + S extends RedisScripts, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping +> = ( + RedisSentinelMultiCommand & + WithCommands & + WithModules & + WithFunctions & + WithScripts +); + +export default class RedisSentinelMultiCommand { + private static _createCommand(command: Command, resp: RespVersions) { + const transformReply = getTransformReply(command, resp); + + return function (this: RedisSentinelMultiCommand, ...args: Array) { + const parser = new BasicCommandParser(); + command.parseCommand(parser, ...args); + + const redisArgs: CommandArguments = parser.redisArgs; + redisArgs.preserve = parser.preserve; + + return this.addCommand( + command.IS_READ_ONLY, + redisArgs, + transformReply + ); + }; + } + + private static 
_createModuleCommand(command: Command, resp: RespVersions) { + const transformReply = getTransformReply(command, resp); + + return function (this: { _self: RedisSentinelMultiCommand }, ...args: Array) { + const parser = new BasicCommandParser(); + command.parseCommand(parser, ...args); + + const redisArgs: CommandArguments = parser.redisArgs; + redisArgs.preserve = parser.preserve; + + return this._self.addCommand( + command.IS_READ_ONLY, + redisArgs, + transformReply + ); + }; + } + + private static _createFunctionCommand(name: string, fn: RedisFunction, resp: RespVersions) { + const prefix = functionArgumentsPrefix(name, fn); + const transformReply = getTransformReply(fn, resp); + + return function (this: { _self: RedisSentinelMultiCommand }, ...args: Array) { + const parser = new BasicCommandParser(); + parser.push(...prefix); + fn.parseCommand(parser, ...args); + + const redisArgs: CommandArguments = parser.redisArgs; + redisArgs.preserve = parser.preserve; + + return this._self.addCommand( + fn.IS_READ_ONLY, + redisArgs, + transformReply + ); + }; + } + + private static _createScriptCommand(script: RedisScript, resp: RespVersions) { + const transformReply = getTransformReply(script, resp); + + return function (this: RedisSentinelMultiCommand, ...args: Array) { + const parser = new BasicCommandParser(); + script.parseCommand(parser, ...args); + + const scriptArgs: CommandArguments = parser.redisArgs; + scriptArgs.preserve = parser.preserve; + + return this.#addScript( + script.IS_READ_ONLY, + script, + scriptArgs, + transformReply + ); + }; + } + + static extend< + M extends RedisModules = Record, + F extends RedisFunctions = Record, + S extends RedisScripts = Record, + RESP extends RespVersions = 2 + >(config?: CommanderConfig) { + return attachConfig({ + BaseClass: RedisSentinelMultiCommand, + commands: COMMANDS, + createCommand: RedisSentinelMultiCommand._createCommand, + createModuleCommand: RedisSentinelMultiCommand._createModuleCommand, + 
createFunctionCommand: RedisSentinelMultiCommand._createFunctionCommand, + createScriptCommand: RedisSentinelMultiCommand._createScriptCommand, + config + }); + } + + readonly #multi = new RedisMultiCommand(); + readonly #sentinel: RedisSentinelType + #isReadonly: boolean | undefined = true; + + constructor(sentinel: RedisSentinelType, typeMapping: TypeMapping) { + this.#multi = new RedisMultiCommand(typeMapping); + this.#sentinel = sentinel; + } + + #setState( + isReadonly: boolean | undefined, + ) { + this.#isReadonly &&= isReadonly; + } + + addCommand( + isReadonly: boolean | undefined, + args: CommandArguments, + transformReply?: TransformReply + ) { + this.#setState(isReadonly); + this.#multi.addCommand(args, transformReply); + return this; + } + + #addScript( + isReadonly: boolean | undefined, + script: RedisScript, + args: CommandArguments, + transformReply?: TransformReply + ) { + this.#setState(isReadonly); + this.#multi.addScript(script, args, transformReply); + + return this; + } + + async exec(execAsPipeline = false) { + if (execAsPipeline) return this.execAsPipeline(); + + return this.#multi.transformReplies( + await this.#sentinel._executeMulti( + this.#isReadonly, + this.#multi.queue + ) + ) as MultiReplyType; + } + + EXEC = this.exec; + + execTyped(execAsPipeline = false) { + return this.exec(execAsPipeline); + } + + async execAsPipeline() { + if (this.#multi.queue.length === 0) return [] as MultiReplyType; + + return this.#multi.transformReplies( + await this.#sentinel._executePipeline( + this.#isReadonly, + this.#multi.queue + ) + ) as MultiReplyType; + } + + execAsPipelineTyped() { + return this.execAsPipeline(); + } +} diff --git a/packages/client/lib/sentinel/pub-sub-proxy.ts b/packages/client/lib/sentinel/pub-sub-proxy.ts new file mode 100644 index 00000000000..68a6c3b58e6 --- /dev/null +++ b/packages/client/lib/sentinel/pub-sub-proxy.ts @@ -0,0 +1,209 @@ +import EventEmitter from 'node:events'; +import { RedisModules, RedisFunctions, 
RedisScripts, RespVersions, TypeMapping } from '../RESP/types'; +import { RedisClientOptions } from '../client'; +import { PUBSUB_TYPE, PubSubListener, PubSubTypeListeners } from '../client/pub-sub'; +import { RedisNode } from './types'; +import RedisClient from '../client'; + +type Client = RedisClient< + RedisModules, + RedisFunctions, + RedisScripts, + RespVersions, + TypeMapping +>; + +type Subscriptions = Record< + PUBSUB_TYPE['CHANNELS'] | PUBSUB_TYPE['PATTERNS'], + PubSubTypeListeners +>; + +type PubSubState = { + client: Client; + connectPromise: Promise | undefined; +}; + +type OnError = (err: unknown) => unknown; + +export class PubSubProxy extends EventEmitter { + #clientOptions; + #onError; + + #node?: RedisNode; + #state?: PubSubState; + #subscriptions?: Subscriptions; + + constructor(clientOptions: RedisClientOptions, onError: OnError) { + super(); + + this.#clientOptions = clientOptions; + this.#onError = onError; + } + + #createClient() { + if (this.#node === undefined) { + throw new Error("pubSubProxy: didn't define node to do pubsub against"); + } + + return new RedisClient({ + ...this.#clientOptions, + socket: { + ...this.#clientOptions.socket, + host: this.#node.host, + port: this.#node.port + } + }); + } + + async #initiatePubSubClient(withSubscriptions = false) { + const client = this.#createClient() + .on('error', this.#onError); + + const connectPromise = client.connect() + .then(async client => { + if (this.#state?.client !== client) { + // if pubsub was deactivated while connecting (`this.#pubSubClient === undefined`) + // or if the node changed (`this.#pubSubClient.client !== client`) + client.destroy(); + return this.#state?.connectPromise; + } + + if (withSubscriptions && this.#subscriptions) { + await Promise.all([ + client.extendPubSubListeners(PUBSUB_TYPE.CHANNELS, this.#subscriptions[PUBSUB_TYPE.CHANNELS]), + client.extendPubSubListeners(PUBSUB_TYPE.PATTERNS, this.#subscriptions[PUBSUB_TYPE.PATTERNS]) + ]); + } + + if 
(this.#state.client !== client) { + // if the node changed (`this.#pubSubClient.client !== client`) + client.destroy(); + return this.#state?.connectPromise; + } + + this.#state!.connectPromise = undefined; + return client; + }) + .catch(err => { + this.#state = undefined; + throw err; + }); + + this.#state = { + client, + connectPromise + }; + + return connectPromise; + } + + #getPubSubClient() { + if (!this.#state) return this.#initiatePubSubClient(); + + return ( + this.#state.connectPromise ?? + this.#state.client + ); + } + + async changeNode(node: RedisNode) { + this.#node = node; + + if (!this.#state) return; + + // if `connectPromise` is undefined, `this.#subscriptions` is already set + // and `this.#state.client` might not have the listeners set yet + if (this.#state.connectPromise === undefined) { + this.#subscriptions = { + [PUBSUB_TYPE.CHANNELS]: this.#state.client.getPubSubListeners(PUBSUB_TYPE.CHANNELS), + [PUBSUB_TYPE.PATTERNS]: this.#state.client.getPubSubListeners(PUBSUB_TYPE.PATTERNS) + }; + + this.#state.client.destroy(); + } + + await this.#initiatePubSubClient(true); + } + + #executeCommand(fn: (client: Client) => T) { + const client = this.#getPubSubClient(); + if (client instanceof RedisClient) { + return fn(client); + } + + return client.then(client => { + // if pubsub was deactivated while connecting + if (client === undefined) return; + + return fn(client); + }).catch(err => { + if (this.#state?.client.isPubSubActive) { + this.#state.client.destroy(); + this.#state = undefined; + } + + throw err; + }); + } + + subscribe( + channels: string | Array, + listener: PubSubListener, + bufferMode?: T + ) { + return this.#executeCommand( + client => client.SUBSCRIBE(channels, listener, bufferMode) + ); + } + + #unsubscribe(fn: (client: Client) => Promise) { + return this.#executeCommand(async client => { + const reply = await fn(client); + + if (!client.isPubSubActive) { + client.destroy(); + this.#state = undefined; + } + + return reply; + }); + } 
+ + async unsubscribe( + channels?: string | Array, + listener?: PubSubListener, + bufferMode?: T + ) { + return this.#unsubscribe(client => client.UNSUBSCRIBE(channels, listener, bufferMode)); + } + + async pSubscribe( + patterns: string | Array, + listener: PubSubListener, + bufferMode?: T + ) { + return this.#executeCommand( + client => client.PSUBSCRIBE(patterns, listener, bufferMode) + ); + } + + async pUnsubscribe( + patterns?: string | Array, + listener?: PubSubListener, + bufferMode?: T + ) { + return this.#unsubscribe(client => client.PUNSUBSCRIBE(patterns, listener, bufferMode)); + } + + destroy() { + this.#subscriptions = undefined; + if (this.#state === undefined) return; + + // `connectPromise` already handles the case of `this.#pubSubState = undefined` + if (!this.#state.connectPromise) { + this.#state.client.destroy(); + } + + this.#state = undefined; + } +} diff --git a/packages/client/lib/sentinel/test-util.ts b/packages/client/lib/sentinel/test-util.ts new file mode 100644 index 00000000000..6998b31c7ff --- /dev/null +++ b/packages/client/lib/sentinel/test-util.ts @@ -0,0 +1,512 @@ +import { createConnection, Socket } from 'node:net'; +import { setTimeout } from 'node:timers/promises'; +import { once } from 'node:events'; +import { promisify } from 'node:util'; +import { exec } from 'node:child_process'; +import { RedisSentinelOptions, RedisSentinelType } from './types'; +import RedisClient, {RedisClientType} from '../client'; +import RedisSentinel from '.'; +import { RedisArgument, RedisFunctions, RedisModules, RedisScripts, RespVersions, TypeMapping } from '../RESP/types'; +const execAsync = promisify(exec); +import RedisSentinelModule from './module' +import TestUtils from '@redis/test-utils'; +import { DEBUG_MODE_ARGS } from '../test-utils' +interface ErrorWithCode extends Error { + code: string; +} + +async function isPortAvailable(port: number): Promise { + var socket: Socket | undefined = undefined; + try { + socket = createConnection({ 
port }); + await once(socket, 'connect'); + } catch (err) { + if (err instanceof Error && (err as ErrorWithCode).code === 'ECONNREFUSED') { + return true; + } + } finally { + if (socket !== undefined) { + socket.end(); + } + } + + return false; +} + +const portIterator = (async function* (): AsyncIterableIterator { + for (let i = 6379; i < 65535; i++) { + if (await isPortAvailable(i)) { + yield i; + } + } + + throw new Error('All ports are in use'); +})(); + +export interface RedisServerDockerConfig { + image: string; + version: string; +} + +export interface RedisServerDocker { + port: number; + dockerId: string; +} + +abstract class DockerBase { + async spawnRedisServerDocker({ image, version }: RedisServerDockerConfig, serverArguments: Array, environment?: string): Promise { + const port = (await portIterator.next()).value; + let cmdLine = `docker run --init -d --network host `; + if (environment !== undefined) { + cmdLine += `-e ${environment} `; + } + cmdLine += `${image}:${version} ${serverArguments.join(' ')}`; + cmdLine = cmdLine.replace('{port}', `--port ${port.toString()}`); + // console.log("spawnRedisServerDocker: cmdLine = " + cmdLine); + const { stdout, stderr } = await execAsync(cmdLine); + + if (!stdout) { + throw new Error(`docker run error - ${stderr}`); + } + + while (await isPortAvailable(port)) { + await setTimeout(50); + } + + return { + port, + dockerId: stdout.trim() + }; + } + + async dockerRemove(dockerId: string): Promise { + try { + await this.dockerStop(dockerId); `` + } catch (err) { + // its ok if stop failed, as we are just going to remove, will just be slower + console.log(`dockerStop failed in remove: ${err}`); + } + + const { stderr } = await execAsync(`docker rm -f ${dockerId}`); + if (stderr) { + console.log("docker rm failed"); + throw new Error(`docker rm error - ${stderr}`); + } + } + + async dockerStop(dockerId: string): Promise { + /* this is an optimization to get around slow docker stop times, but will fail if container 
is already stopped */ + try { + await execAsync(`docker exec ${dockerId} /bin/bash -c "kill -SIGINT 1"`); + } catch (err) { + /* this will fail if container is already not running, can be ignored */ + } + + let ret = await execAsync(`docker stop ${dockerId}`); + if (ret.stderr) { + throw new Error(`docker stop error - ${ret.stderr}`); + } + } + + async dockerStart(dockerId: string): Promise { + const { stderr } = await execAsync(`docker start ${dockerId}`); + if (stderr) { + throw new Error(`docker start error - ${stderr}`); + } + } +} + +export interface RedisSentinelConfig { + numberOfNodes?: number; + nodeDockerConfig?: RedisServerDockerConfig; + nodeServerArguments?: Array + + numberOfSentinels?: number; + sentinelDockerConfig?: RedisServerDockerConfig; + sentinelServerArgument?: Array + + sentinelName: string; + + password?: string; +} + +type ArrayElement = + ArrayType extends readonly (infer ElementType)[] ? ElementType : never; + +export interface SentinelController { + getMaster(): Promise; + getMasterPort(): Promise; + getRandomNode(): string; + getRandonNonMasterNode(): Promise; + getNodePort(id: string): number; + getAllNodesPort(): Array; + getSentinelPort(id: string): number; + getAllSentinelsPort(): Array; + getSetinel(i: number): string; + stopNode(id: string): Promise; + restartNode(id: string): Promise; + stopSentinel(id: string): Promise; + restartSentinel(id: string): Promise; + getSentinelClient(opts?: Partial>): RedisSentinelType<{}, {}, {}, 2, {}>; +} + +export class SentinelFramework extends DockerBase { + #testUtils: TestUtils; + #nodeList: Awaited> = []; + /* port -> docker info/client */ + #nodeMap: Map>>>; + #sentinelList: Awaited> = []; + /* port -> docker info/client */ + #sentinelMap: Map>>>; + + config: RedisSentinelConfig; + + #spawned: boolean = false; + + get spawned() { + return this.#spawned; + } + + constructor(config: RedisSentinelConfig) { + super(); + + this.config = config; + this.#testUtils = TestUtils.createFromConfig({ + 
dockerImageName: 'redislabs/client-libs-test', + dockerImageVersionArgument: 'redis-version', + defaultDockerVersion: '8.4-M01-pre' + }); + this.#nodeMap = new Map>>>(); + this.#sentinelMap = new Map>>>(); + } + + getSentinelClient(opts?: Partial>, errors = true) { + if (opts?.sentinelRootNodes !== undefined) { + throw new Error("cannot specify sentinelRootNodes here"); + } + if (opts?.name !== undefined) { + throw new Error("cannot specify sentinel db name here"); + } + + const options: RedisSentinelOptions = { + ...opts, + name: this.config.sentinelName, + sentinelRootNodes: this.#sentinelList.map((sentinel) => { return { host: '127.0.0.1', port: sentinel.port } }), + passthroughClientErrorEvents: errors + } + + if (this.config.password !== undefined) { + if (!options.nodeClientOptions) { + options.nodeClientOptions = {}; + } + options.nodeClientOptions.password = this.config.password; + + if (!options.sentinelClientOptions) { + options.sentinelClientOptions = {}; + } + options.sentinelClientOptions = {password: this.config.password}; + } + + return RedisSentinel.create(options); + } + + async spawnRedisSentinel() { + if (this.#spawned) { + return; + } + + if (this.#nodeMap.size != 0 || this.#sentinelMap.size != 0) { + throw new Error("inconsistent state with partial setup"); + } + + this.#nodeList = await this.spawnRedisSentinelNodes(2); + this.#nodeList.map((value) => this.#nodeMap.set(value.port.toString(), value)); + + this.#sentinelList = await this.spawnRedisSentinelSentinels(this.#nodeList[0].port, 3) + this.#sentinelList.map((value) => this.#sentinelMap.set(value.port.toString(), value)); + + this.#spawned = true; + } + + async cleanup() { + if (!this.#spawned) { + return; + } + + return Promise.all( + [...this.#nodeMap!.values(), ...this.#sentinelMap!.values()].map( + async ({ dockerId }) => { + this.dockerRemove(dockerId); + } + ) + ).finally(async () => { + this.#spawned = false; + this.#nodeMap.clear(); + this.#sentinelMap.clear(); + }); + } + + 
protected async spawnRedisSentinelNodes(replicasCount: number) { + const master = await this.#testUtils.spawnRedisServer({serverArguments: DEBUG_MODE_ARGS}) + + const replicas: Array = [] + for (let i = 0; i < replicasCount; i++) { + const replica = await this.#testUtils.spawnRedisServer({serverArguments: DEBUG_MODE_ARGS}) + replicas.push(replica) + + const client = RedisClient.create({ + socket: { + port: replica.port + } + }) + + await client.connect(); + await client.replicaOf("127.0.0.1", master.port); + await client.close(); + } + + return [ + master, + ...replicas + ] + } + + protected async spawnRedisSentinelSentinels(masterPort: number, sentinels: number) { + return this.#testUtils.spawnRedisSentinels({serverArguments: DEBUG_MODE_ARGS}, masterPort, this.config.sentinelName, sentinels) + } + + async getAllRunning() { + for (const port of this.getAllNodesPort()) { + let first = true; + while (await isPortAvailable(port)) { + if (!first) { + console.log(`problematic restart ${port}`); + await setTimeout(500); + } else { + first = false; + } + await this.restartNode(port.toString()); + } + } + + for (const port of this.getAllSentinelsPort()) { + let first = true; + while (await isPortAvailable(port)) { + if (!first) { + await setTimeout(500); + } else { + first = false; + } + await this.restartSentinel(port.toString()); + } + } + } + + async addSentinel() { + const nodes = await this.#testUtils.spawnRedisSentinels({serverArguments: DEBUG_MODE_ARGS}, this.#nodeList[0].port, this.config.sentinelName, 1) + this.#sentinelList.push(nodes[0]); + this.#sentinelMap.set(nodes[0].port.toString(), nodes[0]); + } + + async addNode() { + const masterPort = await this.getMasterPort(); + const replica = await this.#testUtils.spawnRedisServer({serverArguments: DEBUG_MODE_ARGS}) + + const client = RedisClient.create({ + socket: { + port: replica.port + } + }) + + await client.connect(); + await client.replicaOf("127.0.0.1", masterPort); + await client.close(); + + + 
this.#nodeList.push(replica); + this.#nodeMap.set(replica.port.toString(), replica); + } + + async getMaster(tracer?: Array): Promise { + const client = RedisClient.create({ + name: this.config.sentinelName, + socket: { + host: "127.0.0.1", + port: this.#sentinelList[0].port, + }, + modules: RedisSentinelModule, + }); + await client.connect() + const info = await client.sentinel.sentinelMaster(this.config.sentinelName); + await client.close() + + const master = this.#nodeMap.get(info.port); + if (master === undefined) { + throw new Error(`couldn't find master node for ${info.port}`); + } + + if (tracer) { + tracer.push(`getMaster: master port is either ${info.port} or ${master.port}`); + } + + return info.port; + } + + async getMasterPort(tracer?: Array): Promise { + const data = await this.getMaster(tracer) + + return this.#nodeMap.get(data!)!.port; + } + + getRandomNode() { + return this.#nodeList[Math.floor(Math.random() * this.#nodeList.length)].port.toString(); + } + + async getRandonNonMasterNode(): Promise { + const masterPort = await this.getMasterPort(); + while (true) { + const node = this.#nodeList[Math.floor(Math.random() * this.#nodeList.length)]; + if (node.port != masterPort) { + return node.port.toString(); + } + } + } + + async stopNode(id: string) { +// console.log(`stopping node ${id}`); + let node = this.#nodeMap.get(id); + if (node === undefined) { + throw new Error("unknown node: " + id); + } + + return await this.dockerStop(node.dockerId); + } + + async restartNode(id: string) { + let node = this.#nodeMap.get(id); + if (node === undefined) { + throw new Error("unknown node: " + id); + } + + await this.dockerStart(node.dockerId); + } + + async stopSentinel(id: string) { + let sentinel = this.#sentinelMap.get(id); + if (sentinel === undefined) { + throw new Error("unknown sentinel: " + id); + } + + return await this.dockerStop(sentinel.dockerId); + } + + async restartSentinel(id: string) { + let sentinel = this.#sentinelMap.get(id); + if 
(sentinel === undefined) { + throw new Error("unknown sentinel: " + id); + } + + await this.dockerStart(sentinel.dockerId); + } + + getNodePort(id: string) { + let node = this.#nodeMap.get(id); + if (node === undefined) { + throw new Error("unknown node: " + id); + } + + return node.port; + } + + getAllNodesPort() { + let ports: Array = []; + for (const node of this.#nodeList) { + ports.push(node.port); + } + + return ports + } + + getAllDockerIds() { + let ids = new Map(); + for (const node of this.#nodeList) { + ids.set(node.dockerId, node.port); + } + + return ids; + } + + getSentinelPort(id: string) { + let sentinel = this.#sentinelMap.get(id); + if (sentinel === undefined) { + throw new Error("unknown sentinel: " + id); + } + + return sentinel.port; + } + + getAllSentinelsPort() { + let ports: Array = []; + for (const sentinel of this.#sentinelList) { + ports.push(sentinel.port); + } + + return ports + } + + getSetinel(i: number): string { + return this.#sentinelList[i].port.toString(); + } + + async sentinelSentinels() { + const client = RedisClient.create({ + name: this.config.sentinelName, + socket: { + host: "127.0.0.1", + port: this.#sentinelList[0].port, + }, + modules: RedisSentinelModule, + }); + await client.connect() + const sentinels = client.sentinel.sentinelSentinels(this.config.sentinelName) + await client.close() + + return sentinels + } + + async sentinelMaster() { + const client = RedisClient.create({ + name: this.config.sentinelName, + socket: { + host: "127.0.0.1", + port: this.#sentinelList[0].port, + }, + modules: RedisSentinelModule, + }); + await client.connect() + const master = client.sentinel.sentinelMaster(this.config.sentinelName) + await client.close() + + return master + } + + async sentinelReplicas() { + const client = RedisClient.create({ + name: this.config.sentinelName, + socket: { + host: "127.0.0.1", + port: this.#sentinelList[0].port, + }, + modules: RedisSentinelModule, + }); + await client.connect() + const replicas = 
client.sentinel.sentinelReplicas(this.config.sentinelName) + await client.close() + + return replicas + } +} diff --git a/packages/client/lib/sentinel/types.ts b/packages/client/lib/sentinel/types.ts new file mode 100644 index 00000000000..e72f2eec2a0 --- /dev/null +++ b/packages/client/lib/sentinel/types.ts @@ -0,0 +1,219 @@ +import { RedisClientOptions } from '../client'; +import { CommandOptions } from '../client/commands-queue'; +import { CommandSignature, CommanderConfig, RedisFunctions, RedisModules, RedisScripts, RespVersions, TypeMapping } from '../RESP/types'; +import COMMANDS from '../commands'; +import RedisSentinel, { RedisSentinelClient } from '.'; +import { RedisTcpSocketOptions } from '../client/socket'; +import { ClientSideCacheConfig, PooledClientSideCacheProvider } from '../client/cache'; + +export interface RedisNode { + host: string; + port: number; +} + +export interface RedisSentinelOptions< + M extends RedisModules = RedisModules, + F extends RedisFunctions = RedisFunctions, + S extends RedisScripts = RedisScripts, + RESP extends RespVersions = RespVersions, + TYPE_MAPPING extends TypeMapping = TypeMapping +> extends SentinelCommander { + /** + * The sentinel identifier for a particular database cluster + */ + name: string; + /** + * An array of root nodes that are part of the sentinel cluster, which will be used to get the topology. Each element in the array is a client configuration object. There is no need to specify every node in the cluster: 3 should be enough to reliably connect and obtain the sentinel configuration from the server + */ + sentinelRootNodes: Array; + /** + * The maximum number of times a command will retry due to topology changes. + */ + maxCommandRediscovers?: number; + // TODO: omit properties that users shouldn't be able to specify for sentinel at this level + /** + * The configuration values for every node in the cluster. 
Use this for example when specifying an ACL user to connect with + */ + nodeClientOptions?: RedisClientOptions; + // TODO: omit properties that users shouldn't be able to specify for sentinel at this level + /** + * The configuration values for every sentinel in the cluster. Use this for example when specifying an ACL user to connect with + */ + sentinelClientOptions?: RedisClientOptions; + /** + * The number of clients connected to the master node + */ + masterPoolSize?: number; + /** + * The number of clients connected to each replica node. + * When greater than 0, the client will distribute the load by executing read-only commands (such as `GET`, `GEOSEARCH`, etc.) across all the cluster nodes. + */ + replicaPoolSize?: number; + /** + * Interval in milliseconds to periodically scan for changes in the sentinel topology. + * The client will query the sentinel for changes at this interval. + * + * Default: 10000 (10 seconds) + */ + scanInterval?: number; + /** + * When `true`, error events from client instances inside the sentinel will be propagated to the sentinel instance. + * This allows handling all client errors through a single error handler on the sentinel instance. + * + * Default: false + */ + passthroughClientErrorEvents?: boolean; + /** + * When `true`, one client will be reserved for the sentinel object. + * When `false`, the sentinel object will wait for the first available client from the pool. + */ + reserveClient?: boolean; + /** + * Client Side Caching configuration for the pool. + * + * Enables Redis Servers and Clients to work together to cache results from commands + * sent to a server. The server will notify the client when cached results are no longer valid. + * In pooled mode, the cache is shared across all clients in the pool. + * + * Note: Client Side Caching is only supported with RESP3. 
+ * + * @example Anonymous cache configuration + * ``` + * const client = createSentinel({ + * clientSideCache: { + * ttl: 0, + * maxEntries: 0, + * evictPolicy: "LRU" + * }, + * minimum: 5 + * }); + * ``` + * + * @example Using a controllable cache + * ``` + * const cache = new BasicPooledClientSideCache({ + * ttl: 0, + * maxEntries: 0, + * evictPolicy: "LRU" + * }); + * const client = createSentinel({ + * clientSideCache: cache, + * minimum: 5 + * }); + * ``` + */ + clientSideCache?: PooledClientSideCacheProvider | ClientSideCacheConfig; +} + +export interface SentinelCommander< + M extends RedisModules, + F extends RedisFunctions, + S extends RedisScripts, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping, + // POLICIES extends CommandPolicies +> extends CommanderConfig { + commandOptions?: CommandOptions; +} + +export type RedisSentinelClientOptions = Omit< + RedisClientOptions, + keyof SentinelCommander +>; + +type WithCommands< + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping +> = { + [P in keyof typeof COMMANDS]: CommandSignature<(typeof COMMANDS)[P], RESP, TYPE_MAPPING>; +}; + +type WithModules< + M extends RedisModules, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping +> = { + [P in keyof M]: { + [C in keyof M[P]]: CommandSignature; + }; +}; + +type WithFunctions< + F extends RedisFunctions, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping +> = { + [L in keyof F]: { + [C in keyof F[L]]: CommandSignature; + }; +}; + +type WithScripts< + S extends RedisScripts, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping +> = { + [P in keyof S]: CommandSignature; +}; + +export type RedisSentinelClientType< + M extends RedisModules = {}, + F extends RedisFunctions = {}, + S extends RedisScripts = {}, + RESP extends RespVersions = 2, + TYPE_MAPPING extends TypeMapping = {}, +> = ( + RedisSentinelClient & + WithCommands & + WithModules & + WithFunctions & + WithScripts +); + +export type 
RedisSentinelType< + M extends RedisModules = {}, + F extends RedisFunctions = {}, + S extends RedisScripts = {}, + RESP extends RespVersions = 2, + TYPE_MAPPING extends TypeMapping = {}, + // POLICIES extends CommandPolicies = {} +> = ( + RedisSentinel & + WithCommands & + WithModules & + WithFunctions & + WithScripts +); + +export interface SentinelCommandOptions< + TYPE_MAPPING extends TypeMapping = TypeMapping +> extends CommandOptions {} + +export type ProxySentinel = RedisSentinel; +export type ProxySentinelClient = RedisSentinelClient; +export type NamespaceProxySentinel = { _self: ProxySentinel }; +export type NamespaceProxySentinelClient = { _self: ProxySentinelClient }; + +export type NodeInfo = { + ip: any, + port: any, + flags: any, +}; + +export type RedisSentinelEvent = NodeChangeEvent | SizeChangeEvent; + +export type NodeChangeEvent = { + type: "SENTINEL_CHANGE" | "MASTER_CHANGE" | "REPLICA_ADD" | "REPLICA_REMOVE"; + node: RedisNode; +} + +export type SizeChangeEvent = { + type: "SENTINE_LIST_CHANGE"; + size: Number; +} + +export type ClientErrorEvent = { + type: 'MASTER' | 'REPLICA' | 'SENTINEL' | 'PUBSUBPROXY'; + node: RedisNode; + error: Error; +} diff --git a/packages/client/lib/sentinel/utils.ts b/packages/client/lib/sentinel/utils.ts new file mode 100644 index 00000000000..c124981e257 --- /dev/null +++ b/packages/client/lib/sentinel/utils.ts @@ -0,0 +1,98 @@ +import { ArrayReply, Command, RedisFunction, RedisScript, RespVersions, UnwrapReply } from '../RESP/types'; +import { BasicCommandParser } from '../client/parser'; +import { RedisSocketOptions, RedisTcpSocketOptions } from '../client/socket'; +import { functionArgumentsPrefix, getTransformReply, scriptArgumentsPrefix } from '../commander'; +import { NamespaceProxySentinel, NamespaceProxySentinelClient, ProxySentinel, ProxySentinelClient, RedisNode } from './types'; + +/* TODO: should use map interface, would need a transform reply probably? 
as resp2 is list form, which this depends on */
+/**
+ * Parses one sentinel node record into a RedisNode.
+ * Returns undefined for nodes that are not usable (subjectively down,
+ * disconnected, or currently failing over) so callers can skip them.
+ */
+export function parseNode(node: Record<string, string>): RedisNode | undefined {
+  if (node.flags.includes("s_down") || node.flags.includes("disconnected") || node.flags.includes("failover_in_progress")) {
+    return undefined;
+  }
+
+  return { host: node.ip, port: Number(node.port) };
+}
+
+/**
+ * Builds the list of usable RedisNodes from a raw sentinel reply,
+ * dropping entries that parseNode rejects.
+ */
+export function createNodeList(nodes: UnwrapReply<ArrayReply<Record<string, string>>>) {
+  const nodeList: Array<RedisNode> = [];
+
+  for (const nodeData of nodes) {
+    const node = parseNode(nodeData);
+    if (node === undefined) {
+      continue;
+    }
+    nodeList.push(node);
+  }
+
+  return nodeList;
+}
+
+/**
+ * Extracts the host/port pair from a (TCP) socket configuration.
+ * Assumes host and port are present — TODO confirm for non-TCP sockets.
+ */
+export function clientSocketToNode(socket: RedisSocketOptions): RedisNode {
+  const s = socket as RedisTcpSocketOptions;
+
+  return {
+    host: s.host!,
+    port: s.port!
+  };
+}
+
+/**
+ * Creates an executor for a plain command, routed through the proxy's
+ * _execute so read-only commands may be directed to replicas.
+ */
+export function createCommand<T extends ProxySentinel | ProxySentinelClient>(command: Command, resp: RespVersions) {
+  const transformReply = getTransformReply(command, resp);
+
+  return async function (this: T, ...args: Array<unknown>) {
+    const parser = new BasicCommandParser();
+    command.parseCommand(parser, ...args);
+
+    return this._self._execute(
+      command.IS_READ_ONLY,
+      client => client._executeCommand(command, parser, this.commandOptions, transformReply)
+    );
+  };
+}
+
+/**
+ * Creates an executor for a server-side function; the FCALL prefix is
+ * computed once and pushed before the user's arguments are parsed.
+ */
+export function createFunctionCommand<T extends NamespaceProxySentinel | NamespaceProxySentinelClient>(name: string, fn: RedisFunction, resp: RespVersions) {
+  const prefix = functionArgumentsPrefix(name, fn);
+  const transformReply = getTransformReply(fn, resp);
+
+  return async function (this: T, ...args: Array<unknown>) {
+    const parser = new BasicCommandParser();
+    parser.push(...prefix);
+    fn.parseCommand(parser, ...args);
+
+    return this._self._execute(
+      fn.IS_READ_ONLY,
+      client => client._executeCommand(fn, parser, this._self.commandOptions, transformReply)
+    );
+  };
+}
+
+/**
+ * Creates an executor for a module command on a namespace proxy.
+ */
+export function createModuleCommand<T extends NamespaceProxySentinel | NamespaceProxySentinelClient>(command: Command, resp: RespVersions) {
+  const transformReply = getTransformReply(command, resp);
+
+  return async function (this: T, ...args: Array<unknown>) {
+    const parser = new BasicCommandParser();
+    command.parseCommand(parser, ...args);
+
+    return
this._self._execute( + command.IS_READ_ONLY, + client => client._executeCommand(command, parser, this._self.commandOptions, transformReply) + ); + } +}; + +export function createScriptCommand(script: RedisScript, resp: RespVersions) { + const prefix = scriptArgumentsPrefix(script); + const transformReply = getTransformReply(script, resp); + + return async function (this: T, ...args: Array) { + const parser = new BasicCommandParser(); + parser.push(...prefix); + script.parseCommand(parser, ...args); + + return this._self._execute( + script.IS_READ_ONLY, + client => client._executeScript(script, parser, this.commandOptions, transformReply) + ); + }; +} diff --git a/packages/client/lib/sentinel/wait-queue.ts b/packages/client/lib/sentinel/wait-queue.ts new file mode 100644 index 00000000000..138801eb4d9 --- /dev/null +++ b/packages/client/lib/sentinel/wait-queue.ts @@ -0,0 +1,24 @@ +import { SinglyLinkedList } from '../client/linked-list'; + +export class WaitQueue { + #list = new SinglyLinkedList(); + #queue = new SinglyLinkedList<(item: T) => unknown>(); + + push(value: T) { + const resolve = this.#queue.shift(); + if (resolve !== undefined) { + resolve(value); + return; + } + + this.#list.push(value); + } + + shift() { + return this.#list.shift(); + } + + wait() { + return new Promise(resolve => this.#queue.push(resolve)); + } +} diff --git a/packages/client/lib/single-entry-cache.spec.ts b/packages/client/lib/single-entry-cache.spec.ts new file mode 100644 index 00000000000..ef535738eac --- /dev/null +++ b/packages/client/lib/single-entry-cache.spec.ts @@ -0,0 +1,85 @@ +import assert from 'node:assert'; +import SingleEntryCache from './single-entry-cache'; + +describe('SingleEntryCache', () => { + let cache: SingleEntryCache; + beforeEach(() => { + cache = new SingleEntryCache(); + }); + + it('should return undefined when getting from empty cache', () => { + assert.strictEqual(cache.get({ key: 'value' }), undefined); + }); + + it('should return the cached instance 
when getting with the same key object', () => { + const keyObj = { key: 'value' }; + const instance = { data: 'test data' }; + + cache.set(keyObj, instance); + assert.strictEqual(cache.get(keyObj), instance); + }); + + it('should return undefined when getting with a different key object', () => { + const keyObj1 = { key: 'value1' }; + const keyObj2 = { key: 'value2' }; + const instance = { data: 'test data' }; + + cache.set(keyObj1, instance); + assert.strictEqual(cache.get(keyObj2), undefined); + }); + + it('should update the cached instance when setting with the same key object', () => { + const keyObj = { key: 'value' }; + const instance1 = { data: 'test data 1' }; + const instance2 = { data: 'test data 2' }; + + cache.set(keyObj, instance1); + assert.strictEqual(cache.get(keyObj), instance1); + + cache.set(keyObj, instance2); + assert.strictEqual(cache.get(keyObj), instance2); + }); + + it('should handle undefined key object', () => { + const instance = { data: 'test data' }; + + cache.set(undefined, instance); + assert.strictEqual(cache.get(undefined), instance); + }); + + it('should handle complex objects as keys', () => { + const keyObj = { + id: 123, + nested: { + prop: 'value', + array: [1, 2, 3] + } + }; + const instance = { data: 'complex test data' }; + + cache.set(keyObj, instance); + assert.strictEqual(cache.get(keyObj), instance); + }); + + it('should consider objects with same properties but different order as different keys', () => { + const keyObj1 = { a: 1, b: 2 }; + const keyObj2 = { b: 2, a: 1 }; // Same properties but different order + const instance = { data: 'test data' }; + + cache.set(keyObj1, instance); + + assert.strictEqual(cache.get(keyObj2), undefined); + }); + + it('should handle circular structures', () => { + const keyObj: any = {}; + keyObj.self = keyObj; + + const instance = { data: 'test data' }; + + cache.set(keyObj, instance); + + assert.strictEqual(cache.get(keyObj), instance); + }); + +}); diff --git 
a/packages/client/lib/single-entry-cache.ts b/packages/client/lib/single-entry-cache.ts new file mode 100644 index 00000000000..5c65df96660 --- /dev/null +++ b/packages/client/lib/single-entry-cache.ts @@ -0,0 +1,37 @@ +export default class SingleEntryCache<K, V> {
+  #cached?: V;
+  #serializedKey?: string;
+
+  /**
+   * Retrieves an instance from the cache based on the provided key object.
+   *
+   * @param keyObj - The key object to look up in the cache.
+   * @returns The cached instance if found, undefined otherwise.
+   *
+   * @remarks
+   * This method uses JSON.stringify for comparison, which may not work correctly
+   * if the properties in the key object are rearranged or reordered.
+   */
+  get(keyObj?: K): V | undefined {
+    return JSON.stringify(keyObj, makeCircularReplacer()) === this.#serializedKey ? this.#cached : undefined;
+  }
+
+  /**
+   * Stores an instance in the cache, replacing any previous entry.
+   *
+   * @param keyObj - The key object the instance is cached under; serialized
+   * with JSON.stringify, so key identity is structural, not referential.
+   * @param obj - The instance to cache.
+   */
+  set(keyObj: K | undefined, obj: V) {
+    this.#cached = obj;
+    this.#serializedKey = JSON.stringify(keyObj, makeCircularReplacer());
+  }
+}
+
+// JSON.stringify replacer that serializes repeated (circular) object
+// references as the string 'circular' instead of throwing. A fresh
+// WeakSet is created per call, so each stringify pass starts clean.
+function makeCircularReplacer() {
+  const seen = new WeakSet();
+  return function serialize(_: string, value: any) {
+    if (value && typeof value === 'object') {
+      if (seen.has(value)) {
+        return 'circular';
+      }
+      seen.add(value);
+      return value;
+    }
+    return value;
+  };
+}
\ No newline at end of file diff --git a/packages/client/lib/test-utils.ts b/packages/client/lib/test-utils.ts new file mode 100644 index 00000000000..e9998f1350e --- /dev/null +++ b/packages/client/lib/test-utils.ts @@ -0,0 +1,198 @@ +import TestUtils from '@redis/test-utils'; +import { SinonSpy } from 'sinon'; +import { setTimeout } from 'node:timers/promises'; +import { CredentialsProvider } from './authx'; +import { Command, NumberReply } from './RESP/types'; +import { BasicCommandParser, CommandParser } from './client/parser'; +import { defineScript } from './lua-script'; +import RedisBloomModules from '@redis/bloom'; +const utils = TestUtils.createFromConfig({ + dockerImageName: 'redislabs/client-libs-test', + 
dockerImageVersionArgument: 'redis-version', + defaultDockerVersion: '8.4-M01-pre' +}); + +export default utils; + +export const DEBUG_MODE_ARGS = utils.isVersionGreaterThan([7]) ? + ['--enable-debug-command', 'yes'] : + []; + +const asyncBasicAuthCredentialsProvider: CredentialsProvider = + { + type: 'async-credentials-provider', + credentials: async () => ({ password: 'password' }) + } as const; + +const streamingCredentialsProvider: CredentialsProvider = + { + type: 'streaming-credentials-provider', + + subscribe : (observable) => ( Promise.resolve([ + { password: 'password' }, + { + dispose: () => { + console.log('disposing credentials provider subscription'); + } + } + ])), + + onReAuthenticationError: (error) => { + console.error('re-authentication error', error); + } + + } as const; + +const SQUARE_SCRIPT = defineScript({ + SCRIPT: + `local number = redis.call('GET', KEYS[1]) + return number * number`, + NUMBER_OF_KEYS: 1, + FIRST_KEY_INDEX: 0, + parseCommand(parser: CommandParser, key: string) { + parser.pushKey(key); + }, + transformReply: undefined as unknown as () => NumberReply +}); + +export const MATH_FUNCTION = { + name: 'math', + engine: 'LUA', + code: + `#!LUA name=math + redis.register_function { + function_name = "square", + callback = function(keys, args) + local number = redis.call('GET', keys[1]) + return number * number + end, + flags = { "no-writes" } + }`, + library: { + square: { + NAME: 'square', + IS_READ_ONLY: true, + NUMBER_OF_KEYS: 1, + FIRST_KEY_INDEX: 0, + parseCommand(parser: CommandParser, key: string) { + parser.pushKey(key); + }, + transformReply: undefined as unknown as () => NumberReply + } + } +}; + +export const GLOBAL = { + SERVERS: { + OPEN: { + serverArguments: [...DEBUG_MODE_ARGS] + }, + PASSWORD: { + serverArguments: ['--requirepass', 'password', ...DEBUG_MODE_ARGS], + clientOptions: { + password: 'password' + } + }, + OPEN_RESP_3: { + serverArguments: [...DEBUG_MODE_ARGS], + clientOptions: { + RESP: 3, + } + }, + 
ASYNC_BASIC_AUTH: { + serverArguments: ['--requirepass', 'password', ...DEBUG_MODE_ARGS], + clientOptions: { + credentialsProvider: asyncBasicAuthCredentialsProvider + } + }, + STREAMING_AUTH: { + serverArguments: ['--requirepass', 'password', ...DEBUG_MODE_ARGS], + clientOptions: { + credentialsProvider: streamingCredentialsProvider + } + } + }, + CLUSTERS: { + OPEN: { + serverArguments: [...DEBUG_MODE_ARGS] + }, + PASSWORD: { + serverArguments: ['--requirepass', 'password', ...DEBUG_MODE_ARGS], + clusterConfiguration: { + defaults: { + password: 'password' + } + } + }, + WITH_REPLICAS: { + serverArguments: [...DEBUG_MODE_ARGS], + numberOfMasters: 2, + numberOfReplicas: 1, + clusterConfiguration: { + useReplicas: true + } + } + }, + SENTINEL: { + OPEN: { + serverArguments: [...DEBUG_MODE_ARGS], + }, + PASSWORD: { + serverArguments: ['--requirepass', 'test_password', ...DEBUG_MODE_ARGS], + }, + WITH_SCRIPT: { + serverArguments: [...DEBUG_MODE_ARGS], + scripts: { + square: SQUARE_SCRIPT, + }, + }, + WITH_FUNCTION: { + serverArguments: [...DEBUG_MODE_ARGS], + functions: { + math: MATH_FUNCTION.library, + }, + }, + WITH_MODULE: { + serverArguments: [...DEBUG_MODE_ARGS], + modules: RedisBloomModules, + }, + WITH_REPLICA_POOL_SIZE_1: { + serverArguments: [...DEBUG_MODE_ARGS], + replicaPoolSize: 1, + }, + WITH_RESERVE_CLIENT_MASTER_POOL_SIZE_2: { + serverArguments: [...DEBUG_MODE_ARGS], + masterPoolSize: 2, + reserveClient: true, + }, + WITH_MASTER_POOL_SIZE_2: { + serverArguments: [...DEBUG_MODE_ARGS], + masterPoolSize: 2, + } + } +}; + +export async function waitTillBeenCalled(spy: SinonSpy): Promise { + const start = process.hrtime.bigint(), + calls = spy.callCount; + + do { + if (process.hrtime.bigint() - start > 1_000_000_000) { + throw new Error('Waiting for more than 1 second'); + } + + await setTimeout(50); + } while (spy.callCount === calls); +} + +export const BLOCKING_MIN_VALUE = ( + utils.isVersionGreaterThan([7]) ? 
Number.MIN_VALUE : + utils.isVersionGreaterThan([6]) ? 0.01 : + 1 +); + +export function parseFirstKey(command: Command, ...args: Array) { + const parser = new BasicCommandParser(); + command.parseCommand!(parser, ...args); + return parser.firstKey; +} diff --git a/packages/client/lib/tests/test-scenario/configuration.e2e.ts b/packages/client/lib/tests/test-scenario/configuration.e2e.ts new file mode 100644 index 00000000000..a352a8f10e0 --- /dev/null +++ b/packages/client/lib/tests/test-scenario/configuration.e2e.ts @@ -0,0 +1,201 @@ +import assert from "node:assert"; +import diagnostics_channel from "node:diagnostics_channel"; +import { DiagnosticsEvent } from "../../client/enterprise-maintenance-manager"; + +import { + RedisConnectionConfig, + createTestClient, + getDatabaseConfig, + getDatabaseConfigFromEnv, + getEnvConfig, +} from "./test-scenario.util"; +import { createClient } from "../../.."; +import { FaultInjectorClient } from "./fault-injector-client"; +import { MovingEndpointType } from "../../../lib/client/enterprise-maintenance-manager"; +import { RedisTcpSocketOptions } from "../../client/socket"; + +describe("Client Configuration and Handshake", () => { + let clientConfig: RedisConnectionConfig; + let client: ReturnType>; + let faultInjectorClient: FaultInjectorClient; + let log: DiagnosticsEvent[] = []; + + before(() => { + const envConfig = getEnvConfig(); + const redisConfig = getDatabaseConfigFromEnv( + envConfig.redisEndpointsConfigPath, + ); + + faultInjectorClient = new FaultInjectorClient(envConfig.faultInjectorUrl); + clientConfig = getDatabaseConfig(redisConfig); + + diagnostics_channel.subscribe("redis.maintenance", (event) => { + log.push(event as DiagnosticsEvent); + }); + }); + + beforeEach(() => { + log.length = 0; + }); + + afterEach(async () => { + if (client && client.isOpen) { + await client.flushAll(); + client.destroy(); + } + }); + + describe("Parameter Configuration", () => { + const endpoints: MovingEndpointType[] = [ + 
"auto", + // "internal-ip", + // "internal-fqdn", + "external-ip", + "external-fqdn", + "none", + ]; + + for (const endpointType of endpoints) { + it(`clientHandshakeWithEndpointType '${endpointType}'`, async () => { + try { + client = await createTestClient(clientConfig, { + maintEndpointType: endpointType + }); + client.on("error", () => {}); + + //need to copy those because they will be mutated later + const oldOptions = JSON.parse(JSON.stringify(client.options)); + assert.ok(oldOptions); + + const { action_id } = await faultInjectorClient.migrateAndBindAction({ + bdbId: clientConfig.bdbId, + clusterIndex: 0, + }); + + await faultInjectorClient.waitForAction(action_id); + + const movingEvent = log.find((event) => event.type === "MOVING"); + assert(!!movingEvent, "Didnt receive moving PN"); + + let endpoint: string | undefined; + try { + //@ts-ignore + endpoint = movingEvent.data.push[3]; + } catch (err) { + assert( + false, + `couldnt get endpoint from event ${JSON.stringify(movingEvent)}`, + ); + } + + assert(endpoint !== undefined, "no endpoint"); + + const newOptions = client.options; + assert.ok(newOptions); + + if (oldOptions?.url) { + if (endpointType === "none") { + assert.equal( + newOptions!.url, + oldOptions.url, + "For movingEndpointTpe 'none', we expect old and new url to be the same", + ); + } else { + assert.equal( + newOptions.url, + endpoint, + "Expected what came through the wire to be set in the new client", + ); + assert.notEqual( + newOptions!.url, + oldOptions.url, + `For movingEndpointTpe ${endpointType}, we expect old and new url to be different`, + ); + } + } else { + const oldSocket = oldOptions.socket as RedisTcpSocketOptions; + const newSocket = newOptions.socket as RedisTcpSocketOptions; + assert.ok(oldSocket); + assert.ok(newSocket); + + if (endpointType === "none") { + assert.equal( + newSocket.host, + oldSocket.host, + "For movingEndpointTpe 'none', we expect old and new host to be the same", + ); + } else { + assert.equal( + 
newSocket.host + ":" + newSocket.port, + endpoint, + "Expected what came through the wire to be set in the new client", + ); + assert.notEqual( + newSocket.host, + oldSocket.host, + `For movingEndpointTpe ${endpointType}, we expect old and new host to be different`, + ); + } + } + } catch (error: any) { + if ( + endpointType === "internal-fqdn" || + endpointType === "internal-ip" + ) { + // errors are expected here, because we cannot connect to internal endpoints unless we are deployed in the same place as the server + } else { + assert(false, error); + } + } + }); + } + }); + + describe("Feature Enablement", () => { + it("connectionHandshakeIncludesEnablingNotifications", async () => { + client = await createTestClient(clientConfig, { + maintNotifications: "enabled" + }); + + const { action_id } = await faultInjectorClient.migrateAndBindAction({ + bdbId: clientConfig.bdbId, + clusterIndex: 0, + }); + + await faultInjectorClient.waitForAction(action_id); + + let movingEvent = false; + let migratingEvent = false; + let migratedEvent = false; + for (const event of log) { + if (event.type === "MOVING") movingEvent = true; + if (event.type === "MIGRATING") migratingEvent = true; + if (event.type === "MIGRATED") migratedEvent = true; + } + assert.ok(movingEvent, "didnt receive MOVING PN"); + assert.ok(migratingEvent, "didnt receive MIGRATING PN"); + assert.ok(migratedEvent, "didnt receive MIGRATED PN"); + }); + + it("disabledDontReceiveNotifications", async () => { + try { + client = await createTestClient(clientConfig, { + maintNotifications: "disabled", + socket: { + reconnectStrategy: false + } + }); + client.on('error', console.log.bind(console)) + + const { action_id } = await faultInjectorClient.migrateAndBindAction({ + bdbId: clientConfig.bdbId, + clusterIndex: 0, + }); + + await faultInjectorClient.waitForAction(action_id); + + assert.equal(log.length, 0, "received a PN while feature is disabled"); + } catch (error: any) { } + }); + }); +}); diff --git 
a/packages/client/lib/tests/test-scenario/connection-handoff.e2e.ts b/packages/client/lib/tests/test-scenario/connection-handoff.e2e.ts new file mode 100644 index 00000000000..7a9a4c24df1 --- /dev/null +++ b/packages/client/lib/tests/test-scenario/connection-handoff.e2e.ts @@ -0,0 +1,177 @@ +import { FaultInjectorClient } from "./fault-injector-client"; +import { + createTestClient, + getDatabaseConfig, + getDatabaseConfigFromEnv, + getEnvConfig, + RedisConnectionConfig, +} from "./test-scenario.util"; +import { createClient, RedisClientOptions } from "../../.."; +import { before } from "mocha"; +import Sinon, { SinonSpy, spy, stub } from "sinon"; +import assert from "node:assert"; + +/** + * Creates a spy on a duplicated client method + * @param client - The Redis client instance + * @param funcName - The name of the method to spy on + * @returns Object containing the promise that resolves with the spy and restore function + */ +const spyOnTemporaryClientInstanceMethod = ( + client: ReturnType>, + methodName: string +) => { + const { promise, resolve } = ( + Promise as typeof Promise & { + withResolvers: () => { + promise: Promise<{ spy: SinonSpy; restore: () => void }>; + resolve: (value: any) => void; + }; + } + ).withResolvers(); + + const originalDuplicate = client.duplicate.bind(client); + + const duplicateStub: Sinon.SinonStub = stub( + // Temporary clients (in the context of hitless upgrade) + // are created by calling the duplicate method on the client. 
+ Object.getPrototypeOf(client), + "duplicate" + ).callsFake((opts) => { + const tmpClient = originalDuplicate(opts); + resolve({ + spy: spy(tmpClient, methodName), + restore: duplicateStub.restore, + }); + + return tmpClient; + }); + + return { + getSpy: () => promise, + }; +}; + +describe("Connection Handoff", () => { + let clientConfig: RedisConnectionConfig; + let client: ReturnType>; + let faultInjectorClient: FaultInjectorClient; + + before(() => { + const envConfig = getEnvConfig(); + const redisConfig = getDatabaseConfigFromEnv( + envConfig.redisEndpointsConfigPath + ); + + faultInjectorClient = new FaultInjectorClient(envConfig.faultInjectorUrl); + clientConfig = getDatabaseConfig(redisConfig); + }); + + afterEach(async () => { + if (client && client.isOpen) { + await client.flushAll(); + client.destroy(); + } + }); + + describe("New Connection Establishment & Traffic Resumption", () => { + const cases: Array<{ + name: string; + clientOptions: Partial; + }> = [ + { + name: "default options", + clientOptions: {}, + }, + { + name: "external-ip", + clientOptions: { + maintEndpointType: "external-ip", + }, + }, + { + name: "external-fqdn", + clientOptions: { + maintEndpointType: "external-fqdn", + }, + }, + { + name: "auto", + clientOptions: { + maintEndpointType: "auto", + }, + }, + { + name: "none", + clientOptions: { + maintEndpointType: "none", + }, + }, + ]; + + for (const { name, clientOptions } of cases) { + it(`should establish new connection and resume traffic afterwards - ${name}`, async () => { + client = await createTestClient(clientConfig, clientOptions); + + const spyObject = spyOnTemporaryClientInstanceMethod(client, "connect"); + + // PART 1 Establish initial connection + const { action_id: lowTimeoutBindAndMigrateActionId } = + await faultInjectorClient.migrateAndBindAction({ + bdbId: clientConfig.bdbId, + clusterIndex: 0, + }); + + await faultInjectorClient.waitForAction( + lowTimeoutBindAndMigrateActionId + ); + + const spyResult = await 
spyObject.getSpy(); + + assert.strictEqual(spyResult.spy.callCount, 1); + + // PART 2 Verify traffic resumption + const currentTime = Date.now().toString(); + await client.set("key", currentTime); + const result = await client.get("key"); + + assert.strictEqual(result, currentTime); + + spyResult.restore(); + }); + } + }); + + describe("TLS Connection Handoff", () => { + it.skip("TODO receiveMessagesWithTLSEnabledTest", async () => { + // + }); + it.skip("TODO connectionHandoffWithStaticInternalNameTest", async () => { + // + }); + it.skip("TODO connectionHandoffWithStaticExternalNameTest", async () => { + // + }); + }); + + describe("Connection Cleanup", () => { + it("should shut down old connection", async () => { + client = await createTestClient(clientConfig); + const spyObject = spyOnTemporaryClientInstanceMethod(client, "destroy"); + + const { action_id: lowTimeoutBindAndMigrateActionId } = + await faultInjectorClient.migrateAndBindAction({ + bdbId: clientConfig.bdbId, + clusterIndex: 0, + }); + + await faultInjectorClient.waitForAction(lowTimeoutBindAndMigrateActionId); + + const spyResult = await spyObject.getSpy(); + + assert.equal(spyResult.spy.callCount, 1); + + spyResult.restore(); + }); + }); +}); diff --git a/packages/client/lib/tests/test-scenario/fault-injector-client.ts b/packages/client/lib/tests/test-scenario/fault-injector-client.ts new file mode 100644 index 00000000000..c03fa1afa1e --- /dev/null +++ b/packages/client/lib/tests/test-scenario/fault-injector-client.ts @@ -0,0 +1,197 @@ +import { setTimeout } from "node:timers/promises"; + +export type ActionType = + | "dmc_restart" + | "failover" + | "reshard" + | "sequence_of_actions" + | "network_failure" + | "execute_rlutil_command" + | "execute_rladmin_command" + | "migrate" + | "bind" + | "update_cluster_config"; + +export interface ActionRequest { + type: ActionType; + parameters?: { + bdb_id?: string; + [key: string]: unknown; + }; +} + +export interface ActionStatus { + status: string; + 
error: unknown; + output: string; +} + +export class FaultInjectorClient { + private baseUrl: string; + #fetch: typeof fetch; + + constructor(baseUrl: string, fetchImpl: typeof fetch = fetch) { + this.baseUrl = baseUrl.replace(/\/+$/, ""); // trim trailing slash + this.#fetch = fetchImpl; + } + + /** + * Lists all available actions. + * @throws {Error} When the HTTP request fails or response cannot be parsed as JSON + */ + public listActions(): Promise { + return this.#request("GET", "/action"); + } + + /** + * Triggers a specific action. + * @param action The action request to trigger + * @throws {Error} When the HTTP request fails or response cannot be parsed as JSON + */ + public triggerAction( + action: ActionRequest + ): Promise { + return this.#request("POST", "/action", action); + } + + // public async printStatus() { + // const action = { + // type: 'execute_rladmin_command', + // parameters: { + // rladmin_command: "status", + // bdb_id: "1" + // } + // } + // const { action_id } = await this.#request<{action_id: string}>("POST", "/action", action); + // const status = await this.waitForAction(action_id); + // //@ts-ignore + // console.log(status.output.output); + // } + + /** + * Gets the status of a specific action. + * @param actionId The ID of the action to check + * @throws {Error} When the HTTP request fails or response cannot be parsed as JSON + */ + public getActionStatus(actionId: string): Promise { + return this.#request("GET", `/action/${actionId}`); + } + + /** + * Waits for an action to complete. 
+ * @param actionId The ID of the action to wait for + * @param options Optional timeout and max wait time + * @throws {Error} When the action does not complete within the max wait time + */ + public async waitForAction( + actionId: string, + { + timeoutMs, + maxWaitTimeMs, + }: { + timeoutMs?: number; + maxWaitTimeMs?: number; + } = {} + ): Promise { + const timeout = timeoutMs || 1000; + const maxWaitTime = maxWaitTimeMs || 60000; + + const startTime = Date.now(); + + while (Date.now() - startTime < maxWaitTime) { + const action = await this.getActionStatus(actionId); + + if (action.status === "failed") { + throw new Error( + `Action id: ${actionId} failed! Error: ${action.error}` + ); + } + + if (["finished", "success"].includes(action.status)) { + return action; + } + + await setTimeout(timeout); + } + + throw new Error(`Timeout waiting for action ${actionId}`); + } + + async migrateAndBindAction({ + bdbId, + clusterIndex, + }: { + bdbId: string | number; + clusterIndex: string | number; + }) { + const bdbIdStr = bdbId.toString(); + const clusterIndexStr = clusterIndex.toString(); + + return this.triggerAction<{ + action_id: string; + }>({ + type: "sequence_of_actions", + parameters: { + bdbId: bdbIdStr, + actions: [ + { + type: "migrate", + params: { + cluster_index: clusterIndexStr, + bdb_id: bdbIdStr, + }, + }, + { + type: "bind", + params: { + cluster_index: clusterIndexStr, + bdb_id: bdbIdStr, + }, + }, + ], + }, + }); + } + + async #request( + method: string, + path: string, + body?: Object | string + ): Promise { + const url = `${this.baseUrl}${path}`; + const headers: Record = { + "Content-Type": "application/json", + }; + + let payload: string | undefined; + + if (body) { + if (typeof body === "string") { + headers["Content-Type"] = "text/plain"; + payload = body; + } else { + headers["Content-Type"] = "application/json"; + payload = JSON.stringify(body); + } + } + + const response = await this.#fetch(url, { method, headers, body: payload }); + + if 
(!response.ok) {
+      // Read the body best-effort so the status-code error can carry it.
+      // (Previously the detailed error was thrown *inside* the try that
+      // read the body, so its own catch swallowed it and rethrew the
+      // bare `HTTP ${status}` message.)
+      let text = '';
+      try {
+        text = await response.text();
+      } catch {
+        // body could not be read; fall through with status only
+      }
+      throw new Error(text ? `HTTP ${response.status} - ${text}` : `HTTP ${response.status}`);
+    }
+
+    try {
+      return (await response.json()) as T;
+    } catch {
+      throw new Error(
+        `HTTP ${response.status} - Unable to parse response as JSON`
+      );
+    }
+  }
+}
 diff --git a/packages/client/lib/tests/test-scenario/negative-tests.e2e.ts b/packages/client/lib/tests/test-scenario/negative-tests.e2e.ts new file mode 100644 index 00000000000..51558777016 --- /dev/null +++ b/packages/client/lib/tests/test-scenario/negative-tests.e2e.ts @@ -0,0 +1,15 @@ +import assert from "assert"; +import { createClient } from "../../.."; + +describe("Negative tests", () => { + it("should only be enabled with RESP3", () => { + assert.throws( + () => + createClient({ + RESP: 2, + maintNotifications: "enabled", + }), + // Use a RegExp so the thrown message is actually validated; a plain + // string here would be treated as the assertion message and skipped. + /Graceful Maintenance is only supported with RESP3/, + ); + }); +}); diff --git a/packages/client/lib/tests/test-scenario/pn-failover.e2e.ts b/packages/client/lib/tests/test-scenario/pn-failover.e2e.ts new file mode 100644 index 00000000000..7b977f33a25 --- /dev/null +++ b/packages/client/lib/tests/test-scenario/pn-failover.e2e.ts @@ -0,0 +1,226 @@ +import assert from "node:assert"; +import diagnostics_channel from "node:diagnostics_channel"; +import { FaultInjectorClient } from "./fault-injector-client"; +import { + createTestClient, + getDatabaseConfig, + getDatabaseConfigFromEnv, + getEnvConfig, + RedisConnectionConfig, +} from "./test-scenario.util"; +import { createClient } from "../../.."; +import { DiagnosticsEvent } from "../../client/enterprise-maintenance-manager"; +import { before } from "mocha"; + +describe("Push Notifications", () => { + const createNotificationMessageHandler = ( + result: Record, + notifications: Array + ) => { + return (message: unknown) => { + if (notifications.includes((message as DiagnosticsEvent).type)) { + const event = message as DiagnosticsEvent; + 
result[event.type] = (result[event.type] ?? 0) + 1; + } + }; + }; + + let onMessageHandler: ReturnType; + let clientConfig: RedisConnectionConfig; + let client: ReturnType>; + let faultInjectorClient: FaultInjectorClient; + + before(() => { + const envConfig = getEnvConfig(); + const redisConfig = getDatabaseConfigFromEnv( + envConfig.redisEndpointsConfigPath + ); + + faultInjectorClient = new FaultInjectorClient(envConfig.faultInjectorUrl); + clientConfig = getDatabaseConfig(redisConfig); + }); + + afterEach(() => { + if (onMessageHandler!) { + diagnostics_channel.unsubscribe("redis.maintenance", onMessageHandler); + } + + if (client && client.isOpen) { + client.destroy(); + } + }); + + describe("Push Notifications Enabled", () => { + beforeEach(async () => { + client = await createTestClient(clientConfig); + + await client.flushAll(); + }); + + it("should receive FAILING_OVER and FAILED_OVER push notifications", async () => { + const notifications: Array = [ + "FAILING_OVER", + "FAILED_OVER", + ]; + + const diagnosticsMap: Record = {}; + + onMessageHandler = createNotificationMessageHandler( + diagnosticsMap, + notifications + ); + + diagnostics_channel.subscribe("redis.maintenance", onMessageHandler); + + const { action_id: failoverActionId } = + await faultInjectorClient.triggerAction({ + type: "failover", + parameters: { + bdb_id: clientConfig.bdbId.toString(), + cluster_index: 0, + }, + }); + + await faultInjectorClient.waitForAction(failoverActionId); + + assert.strictEqual( + diagnosticsMap.FAILING_OVER, + 1, + "Should have received exactly one FAILING_OVER notification" + ); + assert.strictEqual( + diagnosticsMap.FAILED_OVER, + 1, + "Should have received exactly one FAILED_OVER notification" + ); + }); + }); + + describe("Push Notifications Disabled - Client", () => { + beforeEach(async () => { + client = await createTestClient(clientConfig, { + maintNotifications: "disabled", + }); + + client.on("error", (_err) => { + // Expect the socket to be closed + 
// Ignore errors + }); + + await client.flushAll(); + }); + + it("should NOT receive FAILING_OVER and FAILED_OVER push notifications", async () => { + const notifications: Array = [ + "FAILING_OVER", + "FAILED_OVER", + ]; + + const diagnosticsMap: Record = {}; + + onMessageHandler = createNotificationMessageHandler( + diagnosticsMap, + notifications + ); + + diagnostics_channel.subscribe("redis.maintenance", onMessageHandler); + + const { action_id: failoverActionId } = + await faultInjectorClient.triggerAction({ + type: "failover", + parameters: { + bdb_id: clientConfig.bdbId.toString(), + cluster_index: 0, + }, + }); + + await faultInjectorClient.waitForAction(failoverActionId); + + assert.strictEqual( + diagnosticsMap.FAILING_OVER, + undefined, + "Should have received exactly one FAILING_OVER notification" + ); + assert.strictEqual( + diagnosticsMap.FAILED_OVER, + undefined, + "Should have received exactly one FAILED_OVER notification" + ); + }); + }); + + describe("Push Notifications Disabled - Server", () => { + beforeEach(async () => { + client = await createTestClient(clientConfig); + + client.on("error", (_err) => { + // Expect the socket to be closed + // Ignore errors + }); + + await client.flushAll(); + }); + + before(async () => { + const { action_id: disablePushNotificationsActionId } = + await faultInjectorClient.triggerAction({ + type: "update_cluster_config", + parameters: { + config: { client_maint_notifications: false }, + }, + }); + + await faultInjectorClient.waitForAction(disablePushNotificationsActionId); + }); + + after(async () => { + const { action_id: enablePushNotificationsActionId } = + await faultInjectorClient.triggerAction({ + type: "update_cluster_config", + parameters: { + config: { client_maint_notifications: true }, + }, + }); + + await faultInjectorClient.waitForAction(enablePushNotificationsActionId); + }); + + it("should NOT receive FAILING_OVER and FAILED_OVER push notifications", async () => { + const notifications: Array = [ 
+ "FAILING_OVER", + "FAILED_OVER", + ]; + + const diagnosticsMap: Record = {}; + + onMessageHandler = createNotificationMessageHandler( + diagnosticsMap, + notifications + ); + + diagnostics_channel.subscribe("redis.maintenance", onMessageHandler); + + const { action_id: failoverActionId } = + await faultInjectorClient.triggerAction({ + type: "failover", + parameters: { + bdb_id: clientConfig.bdbId.toString(), + cluster_index: 0, + }, + }); + + await faultInjectorClient.waitForAction(failoverActionId); + + assert.strictEqual( + diagnosticsMap.FAILING_OVER, + undefined, + "Should have received exactly one FAILING_OVER notification" + ); + assert.strictEqual( + diagnosticsMap.FAILED_OVER, + undefined, + "Should have received exactly one FAILED_OVER notification" + ); + }); + }); +}); diff --git a/packages/client/lib/tests/test-scenario/push-notification.e2e.ts b/packages/client/lib/tests/test-scenario/push-notification.e2e.ts new file mode 100644 index 00000000000..bfaef8351b4 --- /dev/null +++ b/packages/client/lib/tests/test-scenario/push-notification.e2e.ts @@ -0,0 +1,238 @@ +import assert from "node:assert"; +import diagnostics_channel from "node:diagnostics_channel"; +import { FaultInjectorClient } from "./fault-injector-client"; +import { + createTestClient, + getDatabaseConfig, + getDatabaseConfigFromEnv, + getEnvConfig, + RedisConnectionConfig, +} from "./test-scenario.util"; +import { createClient } from "../../.."; +import { DiagnosticsEvent } from "../../client/enterprise-maintenance-manager"; +import { before } from "mocha"; + +describe("Push Notifications", () => { + const createNotificationMessageHandler = ( + result: Record, + notifications: Array + ) => { + return (message: unknown) => { + if (notifications.includes((message as DiagnosticsEvent).type)) { + const event = message as DiagnosticsEvent; + result[event.type] = (result[event.type] ?? 
0) + 1; + } + }; + }; + + let onMessageHandler: ReturnType; + let clientConfig: RedisConnectionConfig; + let client: ReturnType>; + let faultInjectorClient: FaultInjectorClient; + + before(() => { + const envConfig = getEnvConfig(); + const redisConfig = getDatabaseConfigFromEnv( + envConfig.redisEndpointsConfigPath + ); + + faultInjectorClient = new FaultInjectorClient(envConfig.faultInjectorUrl); + clientConfig = getDatabaseConfig(redisConfig); + }); + + afterEach(() => { + if (onMessageHandler!) { + diagnostics_channel.unsubscribe("redis.maintenance", onMessageHandler); + } + + if (client && client.isOpen) { + client.destroy(); + } + }); + + describe("Push Notifications Enabled", () => { + beforeEach(async () => { + client = await createTestClient(clientConfig); + + await client.flushAll(); + }); + + it("should receive MOVING, MIGRATING, and MIGRATED push notifications", async () => { + const notifications: Array = [ + "MOVING", + "MIGRATING", + "MIGRATED", + ]; + + const diagnosticsMap: Record = {}; + + onMessageHandler = createNotificationMessageHandler( + diagnosticsMap, + notifications + ); + + diagnostics_channel.subscribe("redis.maintenance", onMessageHandler); + + const { action_id: bindAndMigrateActionId } = + await faultInjectorClient.migrateAndBindAction({ + bdbId: clientConfig.bdbId, + clusterIndex: 0, + }); + + await faultInjectorClient.waitForAction(bindAndMigrateActionId); + + assert.strictEqual( + diagnosticsMap.MOVING, + 1, + "Should have received exactly one MOVING notification" + ); + assert.strictEqual( + diagnosticsMap.MIGRATING, + 1, + "Should have received exactly one MIGRATING notification" + ); + assert.strictEqual( + diagnosticsMap.MIGRATED, + 1, + "Should have received exactly one MIGRATED notification" + ); + }); + + }); + + describe("Push Notifications Disabled - Client", () => { + beforeEach(async () => { + client = await createTestClient(clientConfig, { + maintNotifications: "disabled", + }); + + client.on("error", (_err) => { + // 
Expect the socket to be closed + // Ignore errors + }); + + await client.flushAll(); + }); + + it("should NOT receive MOVING, MIGRATING, and MIGRATED push notifications", async () => { + const notifications: Array = [ + "MOVING", + "MIGRATING", + "MIGRATED", + ]; + + const diagnosticsMap: Record = {}; + + onMessageHandler = createNotificationMessageHandler( + diagnosticsMap, + notifications + ); + + diagnostics_channel.subscribe("redis.maintenance", onMessageHandler); + + const { action_id: bindAndMigrateActionId } = + await faultInjectorClient.migrateAndBindAction({ + bdbId: clientConfig.bdbId, + clusterIndex: 0, + }); + + await faultInjectorClient.waitForAction(bindAndMigrateActionId); + + assert.strictEqual( + diagnosticsMap.MOVING, + undefined, + "Should NOT have received exactly one MOVING notification" + ); + assert.strictEqual( + diagnosticsMap.MIGRATING, + undefined, + "Should NOT have received exactly one MIGRATING notification" + ); + assert.strictEqual( + diagnosticsMap.MIGRATED, + undefined, + "Should NOT have received exactly one MIGRATED notification" + ); + }); + + }); + + describe("Push Notifications Disabled - Server", () => { + beforeEach(async () => { + client = await createTestClient(clientConfig); + + client.on("error", (_err) => { + // Expect the socket to be closed + // Ignore errors + }); + + await client.flushAll(); + }); + + before(async () => { + const { action_id: disablePushNotificationsActionId } = + await faultInjectorClient.triggerAction({ + type: "update_cluster_config", + parameters: { + config: { client_maint_notifications: false }, + }, + }); + + await faultInjectorClient.waitForAction(disablePushNotificationsActionId); + }); + + after(async () => { + const { action_id: enablePushNotificationsActionId } = + await faultInjectorClient.triggerAction({ + type: "update_cluster_config", + parameters: { + config: { client_maint_notifications: true }, + }, + }); + + await 
faultInjectorClient.waitForAction(enablePushNotificationsActionId); + }); + + it("should NOT receive MOVING, MIGRATING, and MIGRATED push notifications", async () => { + const notifications: Array = [ + "MOVING", + "MIGRATING", + "MIGRATED", + ]; + + const diagnosticsMap: Record = {}; + + onMessageHandler = createNotificationMessageHandler( + diagnosticsMap, + notifications + ); + + diagnostics_channel.subscribe("redis.maintenance", onMessageHandler); + + const { action_id: bindAndMigrateActionId } = + await faultInjectorClient.migrateAndBindAction({ + bdbId: clientConfig.bdbId, + clusterIndex: 0, + }); + + await faultInjectorClient.waitForAction(bindAndMigrateActionId); + + assert.strictEqual( + diagnosticsMap.MOVING, + undefined, + "Should NOT have received exactly one MOVING notification" + ); + assert.strictEqual( + diagnosticsMap.MIGRATING, + undefined, + "Should NOT have received exactly one MIGRATING notification" + ); + assert.strictEqual( + diagnosticsMap.MIGRATED, + undefined, + "Should NOT have received exactly one MIGRATED notification" + ); + }); + + }); +}); diff --git a/packages/client/lib/tests/test-scenario/sharded-pubsub/spubsub.e2e.ts b/packages/client/lib/tests/test-scenario/sharded-pubsub/spubsub.e2e.ts new file mode 100644 index 00000000000..46ef252da8e --- /dev/null +++ b/packages/client/lib/tests/test-scenario/sharded-pubsub/spubsub.e2e.ts @@ -0,0 +1,362 @@ +import type { Cluster, TestConfig } from "./utils/test.util"; +import { createClusterTestClient, getConfig } from "./utils/test.util"; +import { FaultInjectorClient } from "../fault-injector-client"; +import { TestCommandRunner } from "./utils/command-runner"; +import { CHANNELS, CHANNELS_BY_SLOT } from "./utils/test.util"; +import { MessageTracker } from "./utils/message-tracker"; +import assert from "node:assert"; +import { setTimeout } from "node:timers/promises"; + +describe("Sharded Pub/Sub E2E", () => { + let faultInjectorClient: FaultInjectorClient; + let config: TestConfig; + + 
before(() => { + config = getConfig(); + + faultInjectorClient = new FaultInjectorClient(config.faultInjectorUrl); + }); + + describe("Single Subscriber", () => { + let subscriber: Cluster; + let publisher: Cluster; + let messageTracker: MessageTracker; + + beforeEach(async () => { + messageTracker = new MessageTracker(CHANNELS); + subscriber = createClusterTestClient(config.clientConfig, {}); + publisher = createClusterTestClient(config.clientConfig, {}); + await Promise.all([subscriber.connect(), publisher.connect()]); + }); + + afterEach(async () => { + await Promise.all([subscriber.quit(), publisher.quit()]); + }); + + it("should receive messages published to multiple channels", async () => { + for (const channel of CHANNELS) { + await subscriber.sSubscribe(channel, (_msg, channel) => + messageTracker.incrementReceived(channel), + ); + } + const { controller, result } = + TestCommandRunner.publishMessagesUntilAbortSignal( + publisher, + CHANNELS, + messageTracker, + ); + // Wait for 10 seconds, while publishing messages + await setTimeout(10_000); + controller.abort(); + await result; + + for (const channel of CHANNELS) { + assert.strictEqual( + messageTracker.getChannelStats(channel)?.received, + messageTracker.getChannelStats(channel)?.sent, + ); + } + }); + + it("should resume publishing and receiving after failover", async () => { + for (const channel of CHANNELS) { + await subscriber.sSubscribe(channel, (_msg, channel) => { + messageTracker.incrementReceived(channel); + }); + } + + // Trigger failover twice + for (let i = 0; i < 2; i++) { + // Start publishing messages + const { controller: publishAbort, result: publishResult } = + TestCommandRunner.publishMessagesUntilAbortSignal( + publisher, + CHANNELS, + messageTracker, + ); + + // Trigger failover during publishing + const { action_id: failoverActionId } = + await faultInjectorClient.triggerAction({ + type: "failover", + parameters: { + bdb_id: config.clientConfig.bdbId.toString(), + cluster_index: 0, 
+ }, + }); + + // Wait for failover to complete + await faultInjectorClient.waitForAction(failoverActionId); + + publishAbort.abort(); + await publishResult; + + for (const channel of CHANNELS) { + const sent = messageTracker.getChannelStats(channel)!.sent; + const received = messageTracker.getChannelStats(channel)!.received; + + assert.ok( + received <= sent, + `Channel ${channel}: received (${received}) should be <= sent (${sent})`, + ); + } + + // Wait for 2 seconds before resuming publishing + await setTimeout(2_000); + messageTracker.reset(); + + const { + controller: afterFailoverController, + result: afterFailoverResult, + } = TestCommandRunner.publishMessagesUntilAbortSignal( + publisher, + CHANNELS, + messageTracker, + ); + + await setTimeout(10_000); + afterFailoverController.abort(); + await afterFailoverResult; + + for (const channel of CHANNELS) { + const sent = messageTracker.getChannelStats(channel)!.sent; + const received = messageTracker.getChannelStats(channel)!.received; + assert.ok(sent > 0, `Channel ${channel} should have sent messages`); + assert.ok( + received > 0, + `Channel ${channel} should have received messages`, + ); + assert.strictEqual( + messageTracker.getChannelStats(channel)!.received, + messageTracker.getChannelStats(channel)!.sent, + `Channel ${channel} received (${received}) should equal sent (${sent}) once resumed after failover`, + ); + } + } + }); + + it("should NOT receive messages after sunsubscribe", async () => { + for (const channel of CHANNELS) { + await subscriber.sSubscribe(channel, (_msg, channel) => messageTracker.incrementReceived(channel)); + } + + const { controller, result } = + TestCommandRunner.publishMessagesUntilAbortSignal( + publisher, + CHANNELS, + messageTracker, + ); + + // Wait for 5 seconds, while publishing messages + await setTimeout(5_000); + controller.abort(); + await result; + + for (const channel of CHANNELS) { + assert.strictEqual( + messageTracker.getChannelStats(channel)?.received, + 
messageTracker.getChannelStats(channel)?.sent, + ); + } + + // Reset message tracker + messageTracker.reset(); + + const unsubscribeChannels = [ + CHANNELS_BY_SLOT["1000"], + CHANNELS_BY_SLOT["8000"], + CHANNELS_BY_SLOT["16000"], + ]; + + for (const channel of unsubscribeChannels) { + await subscriber.sUnsubscribe(channel); + } + + const { + controller: afterUnsubscribeController, + result: afterUnsubscribeResult, + } = TestCommandRunner.publishMessagesUntilAbortSignal( + publisher, + CHANNELS, + messageTracker, + ); + + // Wait for 5 seconds, while publishing messages + await setTimeout(5_000); + afterUnsubscribeController.abort(); + await afterUnsubscribeResult; + + for (const channel of unsubscribeChannels) { + assert.strictEqual( + messageTracker.getChannelStats(channel)?.received, + 0, + `Channel ${channel} should not have received messages after unsubscribe`, + ); + } + + // All other channels should have received messages + const stillSubscribedChannels = CHANNELS.filter( + (channel) => !unsubscribeChannels.includes(channel as any), + ); + + for (const channel of stillSubscribedChannels) { + assert.ok( + messageTracker.getChannelStats(channel)!.received > 0, + `Channel ${channel} should have received messages`, + ); + } + }); + }); + + describe("Multiple Subscribers", () => { + let subscriber1: Cluster; + let subscriber2: Cluster; + + let publisher: Cluster; + + let messageTracker1: MessageTracker; + let messageTracker2: MessageTracker; + + beforeEach(async () => { + messageTracker1 = new MessageTracker(CHANNELS); + messageTracker2 = new MessageTracker(CHANNELS); + subscriber1 = createClusterTestClient(config.clientConfig); + subscriber2 = createClusterTestClient(config.clientConfig); + publisher = createClusterTestClient(config.clientConfig); + await Promise.all([ + subscriber1.connect(), + subscriber2.connect(), + publisher.connect(), + ]); + }); + + afterEach(async () => { + await Promise.all([ + subscriber1.quit(), + subscriber2.quit(), + 
publisher.quit(), + ]); + }); + + it("should receive messages published to multiple channels", async () => { + for (const channel of CHANNELS) { + await subscriber1.sSubscribe(channel, (_msg, channel) => { messageTracker1.incrementReceived(channel); }); + await subscriber2.sSubscribe(channel, (_msg, channel) => { messageTracker2.incrementReceived(channel); }); + } + + const { controller, result } = + TestCommandRunner.publishMessagesUntilAbortSignal( + publisher, + CHANNELS, + messageTracker1, // Use messageTracker1 for all publishing + ); + + // Wait for 10 seconds, while publishing messages + await setTimeout(10_000); + controller.abort(); + await result; + + for (const channel of CHANNELS) { + assert.strictEqual( + messageTracker1.getChannelStats(channel)?.received, + messageTracker1.getChannelStats(channel)?.sent, + ); + assert.strictEqual( + messageTracker2.getChannelStats(channel)?.received, + messageTracker1.getChannelStats(channel)?.sent, + ); + } + }); + + it("should resume publishing and receiving after failover", async () => { + for (const channel of CHANNELS) { + await subscriber1.sSubscribe(channel, (_msg, channel) => { messageTracker1.incrementReceived(channel); }); + await subscriber2.sSubscribe(channel, (_msg, channel) => { messageTracker2.incrementReceived(channel); }); + } + + // Start publishing messages + const { controller: publishAbort, result: publishResult } = + TestCommandRunner.publishMessagesUntilAbortSignal( + publisher, + CHANNELS, + messageTracker1, // Use messageTracker1 for all publishing + ); + + // Trigger failover during publishing + const { action_id: failoverActionId } = + await faultInjectorClient.triggerAction({ + type: "failover", + parameters: { + bdb_id: config.clientConfig.bdbId.toString(), + cluster_index: 0, + }, + }); + + // Wait for failover to complete + await faultInjectorClient.waitForAction(failoverActionId); + + publishAbort.abort(); + await publishResult; + + for (const channel of CHANNELS) { + const sent = 
messageTracker1.getChannelStats(channel)!.sent; + const received1 = messageTracker1.getChannelStats(channel)!.received; + + const received2 = messageTracker2.getChannelStats(channel)!.received; + + assert.ok( + received1 <= sent, + `Channel ${channel}: received (${received1}) should be <= sent (${sent})`, + ); + assert.ok( + received2 <= sent, + `Channel ${channel}: received2 (${received2}) should be <= sent (${sent})`, + ); + } + + // Wait for 2 seconds before resuming publishing + await setTimeout(2_000); + + messageTracker1.reset(); + messageTracker2.reset(); + + const { + controller: afterFailoverController, + result: afterFailoverResult, + } = TestCommandRunner.publishMessagesUntilAbortSignal( + publisher, + CHANNELS, + messageTracker1, + ); + + await setTimeout(10_000); + afterFailoverController.abort(); + await afterFailoverResult; + + for (const channel of CHANNELS) { + const sent = messageTracker1.getChannelStats(channel)!.sent; + const received1 = messageTracker1.getChannelStats(channel)!.received; + const received2 = messageTracker2.getChannelStats(channel)!.received; + assert.ok(sent > 0, `Channel ${channel} should have sent messages`); + assert.ok( + received1 > 0, + `Channel ${channel} should have received messages by subscriber 1`, + ); + assert.ok( + received2 > 0, + `Channel ${channel} should have received messages by subscriber 2`, + ); + assert.strictEqual( + received1, + sent, + `Channel ${channel} received (${received1}) should equal sent (${sent}) once resumed after failover by subscriber 1`, + ); + assert.strictEqual( + received2, + sent, + `Channel ${channel} received (${received2}) should equal sent (${sent}) once resumed after failover by subscriber 2`, + ); + } + }); + }); +}); diff --git a/packages/client/lib/tests/test-scenario/sharded-pubsub/utils/command-runner.ts b/packages/client/lib/tests/test-scenario/sharded-pubsub/utils/command-runner.ts new file mode 100644 index 00000000000..7b1a217bbfd --- /dev/null +++ 
b/packages/client/lib/tests/test-scenario/sharded-pubsub/utils/command-runner.ts @@ -0,0 +1,90 @@ +import type { MessageTracker } from "./message-tracker"; +import { Cluster } from "./test.util"; +import { setTimeout } from "timers/promises"; + +/** + * Options for the `publishMessagesUntilAbortSignal` method + */ +interface PublishMessagesUntilAbortSignalOptions { + /** + * Number of messages to publish in each batch + */ + batchSize: number; + /** + * Timeout between batches in milliseconds + */ + timeoutMs: number; + /** + * Function that generates the message content to be published + */ + createMessage: () => string; +} + +/** + * Utility class for running test commands until a stop signal is received + */ +export class TestCommandRunner { + private static readonly defaultPublishOptions: PublishMessagesUntilAbortSignalOptions = + { + batchSize: 10, + timeoutMs: 10, + createMessage: () => Date.now().toString(), + }; + + /** + * Continuously publishes messages to the given Redis channels until aborted. + * + * @param {Redis|Cluster} client - Redis client or cluster instance used to publish messages. + * @param {string[]} channels - List of channel names to publish messages to. + * @param {MessageTracker} messageTracker - Tracks sent and failed message counts per channel. + * @param {Partial} [options] - Optional overrides for batch size, timeout, and message factory. + * @param {AbortController} [externalAbortController] - Optional external abort controller to control publishing lifecycle. + * @returns {{ controller: AbortController, result: Promise }} + * An object containing the abort controller and a promise that resolves when publishing stops. 
+ */ + static publishMessagesUntilAbortSignal( + client: Cluster, + channels: string[], + messageTracker: MessageTracker, + options?: Partial, + externalAbortController?: AbortController, + ) { + const publishOptions = { + ...TestCommandRunner.defaultPublishOptions, + ...options, + }; + + const abortController = externalAbortController ?? new AbortController(); + + const result = async () => { + while (!abortController.signal.aborted) { + const batchPromises: Promise[] = []; + + for (let i = 0; i < publishOptions.batchSize; i++) { + for (const channel of channels) { + const message = publishOptions.createMessage(); + + const publishPromise = client + .sPublish(channel, message) + .then(() => { + messageTracker.incrementSent(channel); + }) + .catch(() => { + messageTracker.incrementFailed(channel); + }); + + batchPromises.push(publishPromise); + } + } + + await Promise.all(batchPromises); + await setTimeout(publishOptions.timeoutMs); + } + }; + + return { + controller: abortController, + result: result(), + }; + } +} diff --git a/packages/client/lib/tests/test-scenario/sharded-pubsub/utils/message-tracker.ts b/packages/client/lib/tests/test-scenario/sharded-pubsub/utils/message-tracker.ts new file mode 100644 index 00000000000..6393356c8c4 --- /dev/null +++ b/packages/client/lib/tests/test-scenario/sharded-pubsub/utils/message-tracker.ts @@ -0,0 +1,52 @@ +export interface MessageStats { + sent: number; + received: number; + failed: number; +} + +export class MessageTracker { + private stats: Record = {}; + + constructor(channels: string[]) { + this.initializeChannels(channels); + } + + private initializeChannels(channels: string[]): void { + this.stats = channels.reduce((acc, channel) => { + acc[channel] = { sent: 0, received: 0, failed: 0 }; + return acc; + }, {} as Record); + } + + reset(): void { + Object.keys(this.stats).forEach((channel) => { + this.stats[channel] = { sent: 0, received: 0, failed: 0 }; + }); + } + + incrementSent(channel: string): void { + if 
(this.stats[channel]) { + this.stats[channel].sent++; + } + } + + incrementReceived(channel: string): void { + if (this.stats[channel]) { + this.stats[channel].received++; + } + } + + incrementFailed(channel: string): void { + if (this.stats[channel]) { + this.stats[channel].failed++; + } + } + + getChannelStats(channel: string): MessageStats | undefined { + return this.stats[channel]; + } + + getAllStats(): Record { + return this.stats; + } +} diff --git a/packages/client/lib/tests/test-scenario/sharded-pubsub/utils/test.util.ts b/packages/client/lib/tests/test-scenario/sharded-pubsub/utils/test.util.ts new file mode 100644 index 00000000000..9ef683e5e58 --- /dev/null +++ b/packages/client/lib/tests/test-scenario/sharded-pubsub/utils/test.util.ts @@ -0,0 +1,211 @@ +import { readFileSync } from "fs"; +import RedisCluster, { + RedisClusterOptions, +} from "../../../../cluster"; + +interface DatabaseEndpoint { + addr: string[]; + addr_type: string; + dns_name: string; + oss_cluster_api_preferred_endpoint_type: string; + oss_cluster_api_preferred_ip_type: string; + port: number; + proxy_policy: string; + uid: string; +} + +interface DatabaseConfig { + bdb_id: number; + username: string; + password: string; + tls: boolean; + raw_endpoints: DatabaseEndpoint[]; + endpoints: string[]; +} + +type DatabasesConfig = Record; + +interface EnvConfig { + redisEndpointsConfigPath: string; + faultInjectorUrl: string; +} + +export interface RedisConnectionConfig { + host: string; + port: number; + username: string; + password: string; + tls: boolean; + bdbId: number; +} + +export interface TestConfig { + clientConfig: RedisConnectionConfig; + faultInjectorUrl: string; +} + +/** + * Reads environment variables required for the test scenario + * @returns Environment configuration object + * @throws Error if required environment variables are not set + */ +const getEnvConfig = (): EnvConfig => { + if (!process.env["REDIS_ENDPOINTS_CONFIG_PATH"]) { + throw new Error( + 
"REDIS_ENDPOINTS_CONFIG_PATH environment variable must be set", + ); + } + + if (!process.env["RE_FAULT_INJECTOR_URL"]) { + throw new Error("RE_FAULT_INJECTOR_URL environment variable must be set"); + } + + return { + redisEndpointsConfigPath: process.env["REDIS_ENDPOINTS_CONFIG_PATH"], + faultInjectorUrl: process.env["RE_FAULT_INJECTOR_URL"], + }; +}; + +/** + * Reads database configuration from a file + * @param filePath - The path to the database configuration file + * @returns Parsed database configuration object + * @throws Error if file doesn't exist or JSON is invalid + */ +const getDatabaseConfigFromEnv = (filePath: string): DatabasesConfig => { + try { + const fileContent = readFileSync(filePath, "utf8"); + return JSON.parse(fileContent) as DatabasesConfig; + } catch (_error) { + throw new Error(`Failed to read or parse database config from ${filePath}`); + } +}; + +/** + * Gets Redis connection parameters for a specific database + * @param databasesConfig - The parsed database configuration object + * @param databaseName - Optional name of the database to retrieve (defaults to the first one) + * @returns Redis connection configuration with host, port, username, password, and tls + * @throws Error if the specified database is not found in the configuration + */ +const getDatabaseConfig = ( + databasesConfig: DatabasesConfig, + databaseName?: string, +): RedisConnectionConfig => { + const dbConfig = databaseName + ? 
databasesConfig[databaseName] + : Object.values(databasesConfig)[0]; + + if (!dbConfig) { + throw new Error( + `Database ${databaseName || ""} not found in configuration`, + ); + } + + const endpoint = dbConfig.raw_endpoints[0]; // Use the first endpoint + + if (!endpoint) { + throw new Error(`No endpoints found for database ${databaseName}`); + } + + return { + host: endpoint.dns_name, + port: endpoint.port, + username: dbConfig.username, + password: dbConfig.password, + tls: dbConfig.tls, + bdbId: dbConfig.bdb_id, + }; +}; + +/** + * Gets Redis connection parameters for a specific database + * @returns Redis client config and fault injector URL + * @throws Error if required environment variables are not set or if database config is invalid + */ +export const getConfig = (): TestConfig => { + const envConfig = getEnvConfig(); + const redisConfig = getDatabaseConfigFromEnv( + envConfig.redisEndpointsConfigPath, + ); + + return { + clientConfig: getDatabaseConfig(redisConfig), + faultInjectorUrl: envConfig.faultInjectorUrl, + }; +}; + +/** + * Creates a test cluster client with the provided configuration, connects it and attaches an error handler listener + * @param clientConfig - The Redis connection configuration + * @param options - Optional cluster options + * @returns The created Redis Cluster client + */ +export const createClusterTestClient = ( + clientConfig: RedisConnectionConfig, + options: Partial = {}, +) => { + return RedisCluster.create({ + ...options, + rootNodes: [ + { + socket: { + host: clientConfig.host, + port: clientConfig.port, + }, + }, + ], + defaults: { + credentialsProvider: { + type: "async-credentials-provider", + credentials: async () => ({ + username: clientConfig.username, + password: clientConfig.password, + }), + }, + }, + }); +}; + +export type Cluster = ReturnType; + +/** + * A list of example Redis Cluster channel keys covering all slot ranges. 
+ */ +export const CHANNELS = [ + "channel:11kv:1000", + "channel:osy:2000", + "channel:jn6:3000", + "channel:l00:4000", + "channel:4ez:5000", + "channel:4ek:6000", + "channel:9vn:7000", + "channel:dw1:8000", + "channel:9zi:9000", + "channel:4vl:10000", + "channel:utl:11000", + "channel:lyo:12000", + "channel:jzn:13000", + "channel:14uc:14000", + "channel:mz:15000", + "channel:d0v:16000", +]; + +export const CHANNELS_BY_SLOT = { + 1000: "channel:11kv:1000", + 2000: "channel:osy:2000", + 3000: "channel:jn6:3000", + 4000: "channel:l00:4000", + 5000: "channel:4ez:5000", + 6000: "channel:4ek:6000", + 7000: "channel:9vn:7000", + 8000: "channel:dw1:8000", + 9000: "channel:9zi:9000", + 10000: "channel:4vl:10000", + 11000: "channel:utl:11000", + 12000: "channel:lyo:12000", + 13000: "channel:jzn:13000", + 14000: "channel:14uc:14000", + 15000: "channel:mz:15000", + 16000: "channel:d0v:16000", +} as const; diff --git a/packages/client/lib/tests/test-scenario/test-scenario.util.ts b/packages/client/lib/tests/test-scenario/test-scenario.util.ts new file mode 100644 index 00000000000..96df0acbd6d --- /dev/null +++ b/packages/client/lib/tests/test-scenario/test-scenario.util.ts @@ -0,0 +1,174 @@ +import { readFileSync } from "fs"; +import { createClient, RedisClientOptions } from "../../.."; +import { stub } from "sinon"; + +type DatabaseEndpoint = { + addr: string[]; + addr_type: string; + dns_name: string; + oss_cluster_api_preferred_endpoint_type: string; + oss_cluster_api_preferred_ip_type: string; + port: number; + proxy_policy: string; + uid: string; +}; + +type DatabaseConfig = { + bdb_id: number; + username: string; + password: string; + tls: boolean; + raw_endpoints: DatabaseEndpoint[]; + endpoints: string[]; +}; + +type DatabasesConfig = { + [databaseName: string]: DatabaseConfig; +}; + +type EnvConfig = { + redisEndpointsConfigPath: string; + faultInjectorUrl: string; +}; + +/** + * Reads environment variables required for the test scenario + * @returns Environment 
configuration object + * @throws Error if required environment variables are not set + */ +export function getEnvConfig(): EnvConfig { + if (!process.env.REDIS_ENDPOINTS_CONFIG_PATH) { + throw new Error( + "REDIS_ENDPOINTS_CONFIG_PATH environment variable must be set" + ); + } + + if (!process.env.RE_FAULT_INJECTOR_URL) { + throw new Error("RE_FAULT_INJECTOR_URL environment variable must be set"); + } + + return { + redisEndpointsConfigPath: process.env.REDIS_ENDPOINTS_CONFIG_PATH, + faultInjectorUrl: process.env.RE_FAULT_INJECTOR_URL, + }; +} + +/** + * Reads database configuration from a file + * @param filePath - The path to the database configuration file + * @returns Parsed database configuration object + * @throws Error if file doesn't exist or JSON is invalid + */ +export function getDatabaseConfigFromEnv(filePath: string): DatabasesConfig { + try { + const fileContent = readFileSync(filePath, "utf8"); + return JSON.parse(fileContent) as DatabasesConfig; + } catch (error) { + throw new Error(`Failed to read or parse database config from ${filePath}`); + } +} + +export interface RedisConnectionConfig { + host: string; + port: number; + username: string; + password: string; + tls: boolean; + bdbId: number; +} + +/** + * Gets Redis connection parameters for a specific database + * @param databasesConfig - The parsed database configuration object + * @param databaseName - Optional name of the database to retrieve (defaults to the first one) + * @returns Redis connection configuration with host, port, username, password, and tls + * @throws Error if the specified database is not found in the configuration + */ +export function getDatabaseConfig( + databasesConfig: DatabasesConfig, + databaseName = process.env.DATABASE_NAME +): RedisConnectionConfig { + const dbConfig = databaseName + ? databasesConfig[databaseName] + : Object.values(databasesConfig)[0]; + + if (!dbConfig) { + throw new Error( + `Database ${databaseName ? 
databaseName : ""} not found in configuration` + ); + } + + const endpoint = dbConfig.raw_endpoints[0]; // Use the first endpoint + + return { + host: endpoint.dns_name, + port: endpoint.port, + username: dbConfig.username, + password: dbConfig.password, + tls: dbConfig.tls, + bdbId: dbConfig.bdb_id, + }; +} + +/** + * Executes the provided function in a context where setImmediate is stubbed to not do anything. + * This blocks setImmediate callbacks from executing + * + * @param command - The command to execute + * @returns The error and duration of the command execution + */ +export async function blockCommand(command: () => Promise) { + let error: any; + + const start = performance.now(); + + let setImmediateStub: any; + + try { + setImmediateStub = stub(global, "setImmediate"); + setImmediateStub.callsFake(() => { + //Dont call the callback, effectively blocking execution + }); + await command(); + } catch (err: any) { + error = err; + } finally { + if (setImmediateStub) { + setImmediateStub.restore(); + } + } + + return { + error, + duration: performance.now() - start, + }; +} + +/** + * Creates a test client with the provided configuration, connects it and attaches an error handler listener + * @param clientConfig - The Redis connection configuration + * @param options - Optional client options + * @returns The created Redis client + */ +export async function createTestClient( + clientConfig: RedisConnectionConfig, + options: Partial = {} +) { + const client = createClient({ + socket: { + host: clientConfig.host, + port: clientConfig.port, + ...(clientConfig.tls === true ? 
{ tls: true } : {}), + }, + password: clientConfig.password, + username: clientConfig.username, + RESP: 3, + maintNotifications: "auto", + maintEndpointType: "auto", + ...options, + }); + + await client.connect(); + + return client; +} diff --git a/packages/client/lib/tests/test-scenario/timeout-during-notifications.e2e.ts b/packages/client/lib/tests/test-scenario/timeout-during-notifications.e2e.ts new file mode 100644 index 00000000000..848e17f4506 --- /dev/null +++ b/packages/client/lib/tests/test-scenario/timeout-during-notifications.e2e.ts @@ -0,0 +1,212 @@ +import assert from "node:assert"; + +import { FaultInjectorClient } from "./fault-injector-client"; +import { + getDatabaseConfig, + getDatabaseConfigFromEnv, + getEnvConfig, + RedisConnectionConfig, + blockCommand, + createTestClient, +} from "./test-scenario.util"; +import { createClient } from "../../.."; +import { before } from "mocha"; +import diagnostics_channel from "node:diagnostics_channel"; +import { DiagnosticsEvent } from "../../client/enterprise-maintenance-manager"; + +describe("Timeout Handling During Notifications", () => { + let clientConfig: RedisConnectionConfig; + let faultInjectorClient: FaultInjectorClient; + let client: ReturnType>; + + const NORMAL_COMMAND_TIMEOUT = 50; + const RELAXED_COMMAND_TIMEOUT = 2000; + + /** + * Creates a handler for the `redis.maintenance` channel that will execute and block a command on the client + * when a notification is received and save the result in the `result` object. + * This is used to test that the command timeout is relaxed during notifications. 
+ */ + const createNotificationMessageHandler = ( + client: ReturnType>, + result: Record, + notifications: Array + ) => { + return (message: unknown) => { + if (notifications.includes((message as DiagnosticsEvent).type)) { + setImmediate(async () => { + result[(message as DiagnosticsEvent).type] = await blockCommand( + async () => { + await client.set("key", "value"); + } + ); + }); + } + }; + }; + + before(() => { + const envConfig = getEnvConfig(); + const redisConfig = getDatabaseConfigFromEnv( + envConfig.redisEndpointsConfigPath + ); + + clientConfig = getDatabaseConfig(redisConfig); + faultInjectorClient = new FaultInjectorClient(envConfig.faultInjectorUrl); + }); + + beforeEach(async () => { + client = await createTestClient(clientConfig, { + commandOptions: { timeout: NORMAL_COMMAND_TIMEOUT }, + maintRelaxedCommandTimeout: RELAXED_COMMAND_TIMEOUT, + }); + + await client.flushAll(); + }); + + afterEach(() => { + if (client && client.isOpen) { + client.destroy(); + } + }); + + it("should relax command timeout on MOVING, MIGRATING", async () => { + // PART 1 + // Normal command timeout + const { error, duration } = await blockCommand(async () => { + await client.set("key", "value"); + }); + + assert.ok( + error instanceof Error, + "Command Timeout error should be instanceof Error" + ); + assert.ok( + duration >= NORMAL_COMMAND_TIMEOUT && + duration < NORMAL_COMMAND_TIMEOUT * 1.2, + `Normal command should timeout within normal timeout ms` + ); + assert.strictEqual( + error?.constructor?.name, + "TimeoutError", + "Command Timeout error should be TimeoutError" + ); + + // PART 2 + // Command timeout during maintenance + const notifications: Array = [ + "MOVING", + "MIGRATING", + ]; + + const result: Record< + DiagnosticsEvent["type"], + { error: any; duration: number } + > = {}; + + const onMessageHandler = createNotificationMessageHandler( + client, + result, + notifications + ); + + diagnostics_channel.subscribe("redis.maintenance", onMessageHandler); + + 
const { action_id: bindAndMigrateActionId } = + await faultInjectorClient.migrateAndBindAction({ + bdbId: clientConfig.bdbId, + clusterIndex: 0, + }); + + await faultInjectorClient.waitForAction(bindAndMigrateActionId); + + diagnostics_channel.unsubscribe("redis.maintenance", onMessageHandler); + + notifications.forEach((notification) => { + assert.ok( + result[notification]?.error instanceof Error, + `${notification} notification error should be instanceof Error` + ); + assert.ok( + result[notification]?.duration >= RELAXED_COMMAND_TIMEOUT && + result[notification]?.duration < RELAXED_COMMAND_TIMEOUT * 1.2, + `${notification} notification should timeout within relaxed timeout` + ); + assert.strictEqual( + result[notification]?.error?.constructor?.name, + "CommandTimeoutDuringMaintenanceError", + `${notification} notification error should be CommandTimeoutDuringMaintenanceError` + ); + }); + }); + + it("should unrelax command timeout after MIGRATED and MOVING", async () => { + const { action_id: migrateActionId } = + await faultInjectorClient.triggerAction({ + type: "migrate", + parameters: { + cluster_index: 0, + bdb_id: clientConfig.bdbId.toString(), + }, + }); + + await faultInjectorClient.waitForAction(migrateActionId); + + // PART 1 + // After migration + const { error: errorMigrate, duration: durationMigrate } = + await blockCommand(async () => { + await client.set("key", "value"); + }); + + assert.ok( + errorMigrate instanceof Error, + "Command Timeout error should be instanceof Error" + ); + assert.ok( + durationMigrate >= NORMAL_COMMAND_TIMEOUT && + durationMigrate < NORMAL_COMMAND_TIMEOUT * 1.2, + `Normal command should timeout within normal timeout ms` + ); + assert.strictEqual( + errorMigrate?.constructor?.name, + "TimeoutError", + "Command Timeout error should be TimeoutError" + ); + + const { action_id: bindActionId } = await faultInjectorClient.triggerAction( + { + type: "bind", + parameters: { + bdb_id: clientConfig.bdbId.toString(), + 
cluster_index: 0, + }, + } + ); + + await faultInjectorClient.waitForAction(bindActionId); + + // PART 2 + // After bind + const { error: errorBind, duration: durationBind } = await blockCommand( + async () => { + await client.set("key", "value"); + } + ); + + assert.ok( + errorBind instanceof Error, + "Command Timeout error should be instanceof Error" + ); + assert.ok( + durationBind >= NORMAL_COMMAND_TIMEOUT && + durationBind < NORMAL_COMMAND_TIMEOUT * 1.2, + `Normal command should timeout within normal timeout ms` + ); + assert.strictEqual( + errorBind?.constructor?.name, + "TimeoutError", + "Command Timeout error should be TimeoutError" + ); + }); +}); diff --git a/packages/client/lib/tests/test-scenario/to-failover.e2e.ts b/packages/client/lib/tests/test-scenario/to-failover.e2e.ts new file mode 100644 index 00000000000..765859bfc8a --- /dev/null +++ b/packages/client/lib/tests/test-scenario/to-failover.e2e.ts @@ -0,0 +1,151 @@ +import assert from "node:assert"; + +import { FaultInjectorClient } from "./fault-injector-client"; +import { + getDatabaseConfig, + getDatabaseConfigFromEnv, + getEnvConfig, + RedisConnectionConfig, + blockCommand, + createTestClient, +} from "./test-scenario.util"; +import { createClient } from "../../.."; +import { before } from "mocha"; +import diagnostics_channel from "node:diagnostics_channel"; +import { DiagnosticsEvent } from "../../client/enterprise-maintenance-manager"; + +describe("Timeout Handling During Notifications", () => { + let clientConfig: RedisConnectionConfig; + let faultInjectorClient: FaultInjectorClient; + let client: ReturnType>; + + const NORMAL_COMMAND_TIMEOUT = 50; + const RELAXED_COMMAND_TIMEOUT = 2000; + + /** + * Creates a handler for the `redis.maintenance` channel that will execute and block a command on the client + * when a notification is received and save the result in the `result` object. + * This is used to test that the command timeout is relaxed during notifications. 
+ */ + const createNotificationMessageHandler = ( + client: ReturnType>, + result: Record, + notifications: Array + ) => { + return (message: unknown) => { + if (notifications.includes((message as DiagnosticsEvent).type)) { + setImmediate(async () => { + result[(message as DiagnosticsEvent).type] = await blockCommand( + async () => { + await client.set("key", "value"); + } + ); + }); + } + }; + }; + + before(() => { + const envConfig = getEnvConfig(); + const redisConfig = getDatabaseConfigFromEnv( + envConfig.redisEndpointsConfigPath + ); + + clientConfig = getDatabaseConfig(redisConfig); + faultInjectorClient = new FaultInjectorClient(envConfig.faultInjectorUrl); + }); + + beforeEach(async () => { + client = await createTestClient(clientConfig, { + commandOptions: { timeout: NORMAL_COMMAND_TIMEOUT }, + maintRelaxedCommandTimeout: RELAXED_COMMAND_TIMEOUT, + }); + + await client.flushAll(); + }); + + afterEach(() => { + if (client && client.isOpen) { + client.destroy(); + } + }); + + it("should relax command timeout on FAILING_OVER", async () => { + const notifications: Array = ["FAILING_OVER"]; + + const result: Record< + DiagnosticsEvent["type"], + { error: any; duration: number } + > = {}; + + const onMessageHandler = createNotificationMessageHandler( + client, + result, + notifications + ); + + diagnostics_channel.subscribe("redis.maintenance", onMessageHandler); + + const { action_id: failoverActionId } = + await faultInjectorClient.triggerAction({ + type: "failover", + parameters: { + bdb_id: clientConfig.bdbId.toString(), + cluster_index: 0, + }, + }); + + await faultInjectorClient.waitForAction(failoverActionId); + + diagnostics_channel.unsubscribe("redis.maintenance", onMessageHandler); + + notifications.forEach((notification) => { + assert.ok( + result[notification]?.error instanceof Error, + `${notification} notification error should be instanceof Error` + ); + assert.ok( + result[notification]?.duration >= RELAXED_COMMAND_TIMEOUT && + 
result[notification]?.duration < RELAXED_COMMAND_TIMEOUT * 1.2, + `${notification} notification should timeout within relaxed timeout` + ); + assert.strictEqual( + result[notification]?.error?.constructor?.name, + "CommandTimeoutDuringMaintenanceError", + `${notification} notification error should be CommandTimeoutDuringMaintenanceError` + ); + }); + }); + + it("should unrelax command timeout after FAILED_OVER", async () => { + const { action_id: failoverActionId } = + await faultInjectorClient.triggerAction({ + type: "failover", + parameters: { + bdb_id: clientConfig.bdbId.toString(), + cluster_index: 0, + }, + }); + + await faultInjectorClient.waitForAction(failoverActionId); + + const { error, duration } = await blockCommand(async () => { + await client.set("key", "value"); + }); + + assert.ok( + error instanceof Error, + "Command Timeout error should be instanceof Error" + ); + assert.ok( + duration >= NORMAL_COMMAND_TIMEOUT && + duration < NORMAL_COMMAND_TIMEOUT * 1.2, + `Normal command should timeout within normal timeout ms` + ); + assert.strictEqual( + error?.constructor?.name, + "TimeoutError", + "Command Timeout error should be TimeoutError" + ); + }); +}); diff --git a/packages/client/package.json b/packages/client/package.json new file mode 100644 index 00000000000..cbe6274d25d --- /dev/null +++ b/packages/client/package.json @@ -0,0 +1,37 @@ +{ + "name": "@redis/client", + "version": "5.9.0-beta.2", + "license": "MIT", + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "files": [ + "dist/", + "!dist/tsconfig.tsbuildinfo" + ], + "scripts": { + "test": "nyc -r text-summary -r lcov mocha -r tsx './lib/**/*.spec.ts'", + "release": "release-it" + }, + "dependencies": { + "cluster-key-slot": "1.1.2" + }, + "devDependencies": { + "@redis/test-utils": "*", + "@types/sinon": "^17.0.3", + "sinon": "^17.0.1" + }, + "engines": { + "node": ">= 18" + }, + "repository": { + "type": "git", + "url": "git://github.com/redis/node-redis.git" + }, 
+ "bugs": { + "url": "https://github.com/redis/node-redis/issues" + }, + "homepage": "https://github.com/redis/node-redis/tree/master/packages/client", + "keywords": [ + "redis" + ] +} diff --git a/packages/client/tsconfig.json b/packages/client/tsconfig.json new file mode 100644 index 00000000000..f87c7d4f533 --- /dev/null +++ b/packages/client/tsconfig.json @@ -0,0 +1,25 @@ +{ + "extends": "../../tsconfig.base.json", + "compilerOptions": { + "outDir": "./dist", + }, + "include": [ + "./index.ts", + "./lib/**/*.ts", + "./package.json" + ], + "exclude": [ + "./lib/test-utils.ts", + "./lib/**/*.spec.ts", + "./lib/sentinel/test-util.ts", + "./lib/tests/**/*.ts" + ], + "typedocOptions": { + "entryPoints": [ + "./index.ts", + "./lib" + ], + "entryPointStrategy": "expand", + "out": "../../documentation/client", + } +} diff --git a/packages/entraid/.nycrc.json b/packages/entraid/.nycrc.json new file mode 100644 index 00000000000..848af2b5a27 --- /dev/null +++ b/packages/entraid/.nycrc.json @@ -0,0 +1,10 @@ +{ + "extends": "@istanbuljs/nyc-config-typescript", + "exclude": [ + "integration-tests", + "samples", + "dist", + "**/*.spec.ts", + "lib/test-utils.ts" + ] +} diff --git a/packages/entraid/.release-it.json b/packages/entraid/.release-it.json new file mode 100644 index 00000000000..a0609a242f7 --- /dev/null +++ b/packages/entraid/.release-it.json @@ -0,0 +1,22 @@ +{ + "npm": { + "publish": true, + "publishArgs": ["--access", "public"] + }, + "git": { + "tagName": "entraid@${version}", + "tagMatch": "entraid@*", + "commitMessage": "Release ${tagName}", + "tagAnnotation": "Release ${tagName}", + "commitArgs": "--all" + }, + "plugins": { + "@release-it/bumper": { + "out": { + "file": "package.json", + "path": ["peerDependencies.@redis/client"], + "versionPrefix": "^" + } + } + } +} diff --git a/packages/entraid/README.md b/packages/entraid/README.md new file mode 100644 index 00000000000..733cf895a7e --- /dev/null +++ 
b/packages/entraid/README.md @@ -0,0 +1,188 @@ +# @redis/entraid + +Secure token-based authentication for Redis clients using Microsoft Entra ID (formerly Azure Active Directory). + +## Features + +- Token-based authentication using Microsoft Entra ID +- Automatic token refresh before expiration +- Automatic re-authentication of all connections after token refresh +- Support for multiple authentication flows: + - Managed identities (system-assigned and user-assigned) + - Service principals (with or without certificates) + - Authorization Code with PKCE flow + - DefaultAzureCredential from @azure/identity +- Built-in retry mechanisms for transient failures + +## Installation + + +```bash +npm install "@redis/client@5.0.0-next.7" +npm install "@redis/entraid@5.0.0-next.7" +``` + +## Getting Started + +The first step to using @redis/entraid is choosing the right credentials provider for your authentication needs. The `EntraIdCredentialsProviderFactory` class provides several factory methods to create the appropriate provider: + +- `createForSystemAssignedManagedIdentity`: Use when your application runs in Azure with a system-assigned managed identity +- `createForUserAssignedManagedIdentity`: Use when your application runs in Azure with a user-assigned managed identity +- `createForClientCredentials`: Use when authenticating with a service principal using client secret +- `createForClientCredentialsWithCertificate`: Use when authenticating with a service principal using a certificate +- `createForAuthorizationCodeWithPKCE`: Use for interactive authentication flows in user applications +- `createForDefaultAzureCredential`: Use when you want to leverage Azure Identity's DefaultAzureCredential + +## Usage Examples + +### Service Principal Authentication + +```typescript +import { createClient } from '@redis/client'; +import { EntraIdCredentialsProviderFactory } from '@redis/entraid'; + +const provider = EntraIdCredentialsProviderFactory.createForClientCredentials({ + 
clientId: 'your-client-id', + clientSecret: 'your-client-secret', + authorityConfig: { + type: 'multi-tenant', + tenantId: 'your-tenant-id' + }, + tokenManagerConfig: { + expirationRefreshRatio: 0.8 // Refresh token after 80% of its lifetime + } +}); + +const client = createClient({ + url: 'redis://your-host', + credentialsProvider: provider +}); + +await client.connect(); +``` + +### System-Assigned Managed Identity + +```typescript +const provider = EntraIdCredentialsProviderFactory.createForSystemAssignedManagedIdentity({ + clientId: 'your-client-id', + tokenManagerConfig: { + expirationRefreshRatio: 0.8 + } +}); +``` + +### User-Assigned Managed Identity + +```typescript +const provider = EntraIdCredentialsProviderFactory.createForUserAssignedManagedIdentity({ + clientId: 'your-client-id', + userAssignedClientId: 'your-user-assigned-client-id', + tokenManagerConfig: { + expirationRefreshRatio: 0.8 + } +}); +``` + +### DefaultAzureCredential Authentication + +tip: see a real sample here: [samples/interactive-browser/index.ts](./samples/interactive-browser/index.ts) + +The DefaultAzureCredential from @azure/identity provides a simplified authentication experience that automatically tries different authentication methods based on the environment. This is especially useful for applications that need to work in different environments (local development, CI/CD, and production). 
+ +```typescript +import { createClient } from '@redis/client'; +import { getDefaultAzureCredential } from '@azure/identity'; +import { EntraIdCredentialsProviderFactory, REDIS_SCOPE_DEFAULT } from '@redis/entraid'; + +// Create a DefaultAzureCredential instance +const credential = getDefaultAzureCredential(); + +// Create a provider using DefaultAzureCredential +const provider = EntraIdCredentialsProviderFactory.createForDefaultAzureCredential({ + // Use the same parameters you would pass to credential.getToken() + credential, + scopes: REDIS_SCOPE_DEFAULT, // The Redis scope + // Optional additional parameters for getToken + options: { + // Any options you would normally pass to credential.getToken() + }, + tokenManagerConfig: { + expirationRefreshRatio: 0.8 + } +}); + +const client = createClient({ + url: 'redis://your-host', + credentialsProvider: provider +}); + +await client.connect(); +``` + +#### Important Notes on Using DefaultAzureCredential + +When using the `createForDefaultAzureCredential` method, you need to: + +1. Create your own instance of `DefaultAzureCredential` +2. Pass the same parameters to the factory method that you would use with the `getToken()` method: + - `scopes`: The Redis scope (use the exported `REDIS_SCOPE_DEFAULT` constant) + - `options`: Any additional options for the getToken method + +This factory method creates a wrapper around DefaultAzureCredential that adapts it to the Redis client's +authentication system, while maintaining all the flexibility of the original Azure Identity authentication. + +## Important Limitations + +### RESP2 PUB/SUB Limitations + +When using RESP2 (Redis Serialization Protocol 2), there are important limitations with PUB/SUB: + +- **No Re-Authentication in PUB/SUB Mode**: In RESP2, once a connection enters PUB/SUB mode, the socket is blocked and cannot process out-of-band commands like AUTH. This means that connections in PUB/SUB mode cannot be re-authenticated when tokens are refreshed. 
+- **Connection Eviction**: As a result, PUB/SUB connections will be evicted by the Redis proxy when their tokens expire. The client will need to establish new connections with fresh tokens. + +### Transaction Safety + +When using token-based authentication, special care must be taken with Redis transactions. The token manager runs in the background and may attempt to re-authenticate connections at any time by sending AUTH commands. This can interfere with manually constructed transactions. + +#### βœ… Recommended: Use the Official Transaction API + +Always use the official transaction API provided by the client: + +```typescript +// Correct way to handle transactions +const multi = client.multi(); +multi.set('key1', 'value1'); +multi.set('key2', 'value2'); +await multi.exec(); +``` + +#### ❌ Avoid: Manual Transaction Construction + +Do not manually construct transactions by sending individual MULTI/EXEC commands: + +```typescript +// Incorrect and potentially dangerous +await client.sendCommand(['MULTI']); +await client.sendCommand(['SET', 'key1', 'value1']); +await client.sendCommand(['SET', 'key2', 'value2']); +await client.sendCommand(['EXEC']); // Risk of AUTH command being injected before EXEC +``` + +## Error Handling + +The provider includes built-in retry mechanisms for transient errors: + +```typescript +const provider = EntraIdCredentialsProviderFactory.createForClientCredentials({ + // ... other config ... 
+ tokenManagerConfig: { + retry: { + maxAttempts: 3, + initialDelayMs: 100, + maxDelayMs: 1000, + backoffMultiplier: 2 + } + } +}); +``` diff --git a/packages/entraid/index.ts b/packages/entraid/index.ts new file mode 100644 index 00000000000..303b5dc6e14 --- /dev/null +++ b/packages/entraid/index.ts @@ -0,0 +1 @@ +export * from './lib/index' \ No newline at end of file diff --git a/packages/entraid/integration-tests/entraid-integration.spec.ts b/packages/entraid/integration-tests/entraid-integration.spec.ts new file mode 100644 index 00000000000..4d078a01ede --- /dev/null +++ b/packages/entraid/integration-tests/entraid-integration.spec.ts @@ -0,0 +1,262 @@ +import { DefaultAzureCredential, EnvironmentCredential } from '@azure/identity'; +import { BasicAuth } from '@redis/client/dist/lib/authx'; +import { createClient } from '@redis/client'; +import { EntraIdCredentialsProviderFactory, REDIS_SCOPE_DEFAULT } from '../lib/entra-id-credentials-provider-factory'; +import { strict as assert } from 'node:assert'; +import { spy, SinonSpy } from 'sinon'; +import { randomUUID } from 'crypto'; +import { loadFromFile, RedisEndpointsConfig } from '@redis/test-utils/lib/cae-client-testing'; +import { EntraidCredentialsProvider } from '../lib/entraid-credentials-provider'; +import * as crypto from 'node:crypto'; + +describe('EntraID Integration Tests', () => { + + it('client configured with client secret should be able to authenticate/re-authenticate', async () => { + const config = await readConfigFromEnv(); + await runAuthenticationTest(() => + EntraIdCredentialsProviderFactory.createForClientCredentials({ + clientId: config.clientId, + clientSecret: config.clientSecret, + authorityConfig: { type: 'multi-tenant', tenantId: config.tenantId }, + tokenManagerConfig: { + expirationRefreshRatio: 0.0001 + } + }) + ); + }); + + it('client configured with client certificate should be able to authenticate/re-authenticate', async () => { + const config = await readConfigFromEnv(); + 
await runAuthenticationTest(() => + EntraIdCredentialsProviderFactory.createForClientCredentialsWithCertificate({ + clientId: config.clientId, + certificate: convertCertsForMSAL(config.cert, config.privateKey), + authorityConfig: { type: 'multi-tenant', tenantId: config.tenantId }, + tokenManagerConfig: { + expirationRefreshRatio: 0.0001 + } + }) + ); + }); + + it('client with system managed identity should be able to authenticate/re-authenticate', async () => { + const config = await readConfigFromEnv(); + await runAuthenticationTest(() => + EntraIdCredentialsProviderFactory.createForSystemAssignedManagedIdentity({ + clientId: config.clientId, + authorityConfig: { type: 'multi-tenant', tenantId: config.tenantId }, + tokenManagerConfig: { + expirationRefreshRatio: 0.00001 + } + }) + ); + }); + + it('client with DefaultAzureCredential should be able to authenticate/re-authenticate', async () => { + + const azureCredential = new DefaultAzureCredential(); + + await runAuthenticationTest(() => + EntraIdCredentialsProviderFactory.createForDefaultAzureCredential({ + credential: azureCredential, + scopes: REDIS_SCOPE_DEFAULT, + tokenManagerConfig: { + expirationRefreshRatio: 0.00001 + } + }) + , { testingDefaultAzureCredential: true }); + }); + + it('client with EnvironmentCredential should be able to authenticate/re-authenticate', async () => { + const envCredential = new EnvironmentCredential(); + + await runAuthenticationTest(() => + EntraIdCredentialsProviderFactory.createForDefaultAzureCredential({ + credential: envCredential, + scopes: REDIS_SCOPE_DEFAULT, + tokenManagerConfig: { + expirationRefreshRatio: 0.00001 + } + }) + , { testingDefaultAzureCredential: true }); + }); + + interface TestConfig { + clientId: string; + clientSecret: string; + authority: string; + tenantId: string; + redisScopes: string; + cert: string; + privateKey: string; + userAssignedManagedId: string; + endpoints: RedisEndpointsConfig; + } + + const readConfigFromEnv = async (): Promise => { 
+ const requiredEnvVars = { + AZURE_CLIENT_ID: process.env.AZURE_CLIENT_ID, + AZURE_CLIENT_SECRET: process.env.AZURE_CLIENT_SECRET, + AZURE_AUTHORITY: process.env.AZURE_AUTHORITY, + AZURE_TENANT_ID: process.env.AZURE_TENANT_ID, + AZURE_REDIS_SCOPES: process.env.AZURE_REDIS_SCOPES, + AZURE_CERT: process.env.AZURE_CERT, + AZURE_PRIVATE_KEY: process.env.AZURE_PRIVATE_KEY, + AZURE_USER_ASSIGNED_MANAGED_ID: process.env.AZURE_USER_ASSIGNED_MANAGED_ID, + REDIS_ENDPOINTS_CONFIG_PATH: process.env.REDIS_ENDPOINTS_CONFIG_PATH + }; + + Object.entries(requiredEnvVars).forEach(([key, value]) => { + if (value == undefined) { + throw new Error(`${key} environment variable must be set`); + } + }); + + return { + endpoints: await loadFromFile(requiredEnvVars.REDIS_ENDPOINTS_CONFIG_PATH as string), + clientId: requiredEnvVars.AZURE_CLIENT_ID as string, + clientSecret: requiredEnvVars.AZURE_CLIENT_SECRET as string, + authority: requiredEnvVars.AZURE_AUTHORITY as string, + tenantId: requiredEnvVars.AZURE_TENANT_ID as string, + redisScopes: requiredEnvVars.AZURE_REDIS_SCOPES as string, + cert: requiredEnvVars.AZURE_CERT as string, + privateKey: requiredEnvVars.AZURE_PRIVATE_KEY as string, + userAssignedManagedId: requiredEnvVars.AZURE_USER_ASSIGNED_MANAGED_ID as string + }; + }; + + interface TokenDetail { + token: string; + exp: number; + iat: number; + lifetime: number; + uti: string; + } + + const setupTestClient = async (credentialsProvider: EntraidCredentialsProvider) => { + const config = await readConfigFromEnv(); + const client = createClient({ + url: config.endpoints['standalone-entraid-acl'].endpoints[0], + credentialsProvider + }); + + const clientInstance = (client as any)._self; + const reAuthSpy: SinonSpy = spy(clientInstance, 'reAuthenticate'); + + return { client, reAuthSpy }; + }; + + const runClientOperations = async (client: any) => { + const startTime = Date.now(); + while (Date.now() - startTime < 1000) { + const key = randomUUID(); + await client.set(key, 'value'); 
+ const value = await client.get(key); + assert.equal(value, 'value'); + await client.del(key); + } + }; + + /** + * Validates authentication tokens generated during re-authentication + * + * @param reAuthSpy - The Sinon spy on the reAuthenticate method + * @param skipUniqueCheckForDefaultAzureCredential - Skip the unique check for DefaultAzureCredential as there are no guarantees that the tokens will be unique + * if the test is using default azure credential + */ + const validateTokens = (reAuthSpy: SinonSpy, skipUniqueCheckForDefaultAzureCredential: boolean) => { + assert(reAuthSpy.callCount >= 1, + `reAuthenticate should have been called at least once, but was called ${reAuthSpy.callCount} times`); + + const tokenDetails: TokenDetail[] = reAuthSpy.getCalls().map(call => { + const creds = call.args[0] as BasicAuth; + if (!creds.password) { + throw new Error('Expected password to be set in BasicAuth credentials'); + } + const tokenPayload = JSON.parse( + Buffer.from(creds.password.split('.')[1], 'base64').toString() + ); + + return { + token: creds.password, + exp: tokenPayload.exp, + iat: tokenPayload.iat, + lifetime: tokenPayload.exp - tokenPayload.iat, + uti: tokenPayload.uti + }; + }); + + // we can't guarantee that the tokens will be unique when using DefaultAzureCredential + if (!skipUniqueCheckForDefaultAzureCredential) { + // Verify unique tokens + const uniqueTokens = new Set(tokenDetails.map(detail => detail.token)); + assert.equal( + uniqueTokens.size, + reAuthSpy.callCount, + `Expected ${reAuthSpy.callCount} different tokens, but got ${uniqueTokens.size} unique tokens` + ); + + // Verify all tokens are not cached (i.e. 
have the same lifetime) + const uniqueLifetimes = new Set(tokenDetails.map(detail => detail.lifetime)); + assert.equal( + uniqueLifetimes.size, + 1, + `Expected all tokens to have the same lifetime, but found ${uniqueLifetimes.size} different lifetimes: ${(Array.from(uniqueLifetimes).join(','))} seconds` + ); + + // Verify that all tokens have different uti (unique token identifier) + const uniqueUti = new Set(tokenDetails.map(detail => detail.uti)); + assert.equal( + uniqueUti.size, + reAuthSpy.callCount, + `Expected all tokens to have different uti, but found ${uniqueUti.size} different uti in: ${(Array.from(uniqueUti).join(','))}` + ); + } + }; + + const runAuthenticationTest = async (setupCredentialsProvider: () => any, options: { + testingDefaultAzureCredential: boolean + } = { testingDefaultAzureCredential: false }) => { + const { client, reAuthSpy } = await setupTestClient(setupCredentialsProvider()); + + try { + await client.connect(); + await runClientOperations(client); + validateTokens(reAuthSpy, options.testingDefaultAzureCredential); + } finally { + await client.destroy(); + } + }; + +}); + +function getCertificate(certBase64) { + try { + const decodedCert = Buffer.from(certBase64, 'base64'); + const cert = new crypto.X509Certificate(decodedCert); + return cert; + } catch (error) { + console.error('Error parsing certificate:', error); + throw error; + } +} + +function getCertificateThumbprint(certBase64) { + const cert = getCertificate(certBase64); + return cert.fingerprint.replace(/:/g, ''); +} + +function convertCertsForMSAL(certBase64, privateKeyBase64) { + const thumbprint = getCertificateThumbprint(certBase64); + + const privateKeyPEM = `-----BEGIN PRIVATE KEY-----\n${privateKeyBase64}\n-----END PRIVATE KEY-----`; + + return { + thumbprint: thumbprint, + privateKey: privateKeyPEM, + x5c: certBase64 + } + +} + + diff --git a/packages/entraid/lib/azure-identity-provider.ts b/packages/entraid/lib/azure-identity-provider.ts new file mode 100644 index 
00000000000..d522c9d4b89 --- /dev/null +++ b/packages/entraid/lib/azure-identity-provider.ts @@ -0,0 +1,22 @@ +import type { AccessToken } from '@azure/core-auth'; + +import { IdentityProvider, TokenResponse } from '@redis/client/dist/lib/authx'; + +export class AzureIdentityProvider implements IdentityProvider { + private readonly getToken: () => Promise; + + constructor(getToken: () => Promise) { + this.getToken = getToken; + } + + async requestToken(): Promise> { + const result = await this.getToken(); + return { + token: result, + ttlMs: result.expiresOnTimestamp - Date.now() + }; + } + +} + + diff --git a/packages/entraid/lib/entra-id-credentials-provider-factory.ts b/packages/entraid/lib/entra-id-credentials-provider-factory.ts new file mode 100644 index 00000000000..98a3a11078a --- /dev/null +++ b/packages/entraid/lib/entra-id-credentials-provider-factory.ts @@ -0,0 +1,417 @@ +import type { GetTokenOptions, TokenCredential } from '@azure/core-auth'; +import { NetworkError } from '@azure/msal-common'; +import { + LogLevel, + ManagedIdentityApplication, + ManagedIdentityConfiguration, + AuthenticationResult, + PublicClientApplication, + ConfidentialClientApplication, AuthorizationUrlRequest, AuthorizationCodeRequest, CryptoProvider, Configuration, NodeAuthOptions, AccountInfo +} from '@azure/msal-node'; +import { RetryPolicy, TokenManager, TokenManagerConfig, ReAuthenticationError, BasicAuth } from '@redis/client/dist/lib/authx'; +import { AzureIdentityProvider } from './azure-identity-provider'; +import { AuthenticationResponse, DEFAULT_CREDENTIALS_MAPPER, EntraidCredentialsProvider, OID_CREDENTIALS_MAPPER } from './entraid-credentials-provider'; +import { MSALIdentityProvider } from './msal-identity-provider'; + +/** + * This class is used to create credentials providers for different types of authentication flows. 
+ */ +export class EntraIdCredentialsProviderFactory { + + /** + * This method is used to create a ManagedIdentityProvider for both system-assigned and user-assigned managed identities. + * + * @param params + * @param userAssignedClientId For user-assigned managed identities, the developer needs to pass either the client ID, + * full resource identifier, or the object ID of the managed identity when creating ManagedIdentityApplication. + * + */ + public static createManagedIdentityProvider( + params: CredentialParams, userAssignedClientId?: string + ): EntraidCredentialsProvider { + const config: ManagedIdentityConfiguration = { + // For user-assigned identity, include the client ID + ...(userAssignedClientId && { + managedIdentityIdParams: { + userAssignedClientId + } + }), + system: { + loggerOptions + } + }; + + const client = new ManagedIdentityApplication(config); + + const idp = new MSALIdentityProvider( + () => client.acquireToken({ + resource: params.scopes?.[0] ?? REDIS_SCOPE, + forceRefresh: true + }).then(x => x === null ? Promise.reject('Token is null') : x) + ); + + return new EntraidCredentialsProvider( + new TokenManager(idp, params.tokenManagerConfig), + idp, + { + onReAuthenticationError: params.onReAuthenticationError, + credentialsMapper: params.credentialsMapper ?? OID_CREDENTIALS_MAPPER, + onRetryableError: params.onRetryableError + } + ); + } + + /** + * This method is used to create a credentials provider for system-assigned managed identities. + * @param params + */ + static createForSystemAssignedManagedIdentity( + params: CredentialParams + ): EntraidCredentialsProvider { + return this.createManagedIdentityProvider(params); + } + + /** + * This method is used to create a credentials provider for user-assigned managed identities. + * It will include the client ID as the userAssignedClientId in the ManagedIdentityConfiguration. 
+ * @param params + */ + static createForUserAssignedManagedIdentity( + params: CredentialParams & { userAssignedClientId: string } + ): EntraidCredentialsProvider { + return this.createManagedIdentityProvider(params, params.userAssignedClientId); + } + + static #createForClientCredentials( + authConfig: NodeAuthOptions, + params: CredentialParams + ): EntraidCredentialsProvider { + const config: Configuration = { + auth: { + ...authConfig, + authority: this.getAuthority(params.authorityConfig ?? { type: 'default' }) + }, + system: { + loggerOptions + } + }; + + const client = new ConfidentialClientApplication(config); + + const idp = new MSALIdentityProvider( + () => client.acquireTokenByClientCredential({ + skipCache: true, + scopes: params.scopes ?? [REDIS_SCOPE_DEFAULT] + }).then(x => x === null ? Promise.reject('Token is null') : x) + ); + + return new EntraidCredentialsProvider(new TokenManager(idp, params.tokenManagerConfig), idp, + { + onReAuthenticationError: params.onReAuthenticationError, + credentialsMapper: params.credentialsMapper ?? OID_CREDENTIALS_MAPPER, + onRetryableError: params.onRetryableError + }); + } + + /** + * This method is used to create a credentials provider for service principals using certificate. + * @param params + */ + static createForClientCredentialsWithCertificate( + params: ClientCredentialsWithCertificateParams + ): EntraidCredentialsProvider { + return this.#createForClientCredentials( + { + clientId: params.clientId, + clientCertificate: params.certificate + }, + params + ); + } + + /** + * This method is used to create a credentials provider for service principals using client secret. 
+ * @param params + */ + static createForClientCredentials( + params: ClientSecretCredentialsParams + ): EntraidCredentialsProvider { + return this.#createForClientCredentials( + { + clientId: params.clientId, + clientSecret: params.clientSecret + }, + params + ); + } + + /** + * This method is used to create a credentials provider using DefaultAzureCredential. + * + * The user needs to create a configured instance of DefaultAzureCredential ( or any other class that implements TokenCredential )and pass it to this method. + * + * The default credentials mapper for this method is OID_CREDENTIALS_MAPPER which extracts the object ID from JWT + * encoded token. + * + * Depending on the actual flow that DefaultAzureCredential uses, the user may need to provide different + * credential mapper via the credentialsMapper parameter. + * + */ + static createForDefaultAzureCredential( + { + credential, + scopes, + options, + tokenManagerConfig, + onReAuthenticationError, + credentialsMapper, + onRetryableError + }: DefaultAzureCredentialsParams + ): EntraidCredentialsProvider { + + const idp = new AzureIdentityProvider( + () => credential.getToken(scopes, options).then(x => x === null ? Promise.reject('Token is null') : x) + ); + + return new EntraidCredentialsProvider(new TokenManager(idp, tokenManagerConfig), idp, + { + onReAuthenticationError: onReAuthenticationError, + credentialsMapper: credentialsMapper ?? OID_CREDENTIALS_MAPPER, + onRetryableError: onRetryableError + }); + } + + /** + * This method is used to create a credentials provider for the Authorization Code Flow with PKCE. 
+ * @param params + */ + static createForAuthorizationCodeWithPKCE( + params: AuthCodePKCEParams + ): { + getPKCECodes: () => Promise<{ + verifier: string; + challenge: string; + challengeMethod: string; + }>; + getAuthCodeUrl: ( + pkceCodes: { challenge: string; challengeMethod: string } + ) => Promise; + createCredentialsProvider: ( + params: PKCEParams + ) => EntraidCredentialsProvider; + } { + + const requiredScopes = ['user.read', 'offline_access']; + const scopes = [...new Set([...(params.scopes || []), ...requiredScopes])]; + + const authFlow = AuthCodeFlowHelper.create({ + clientId: params.clientId, + redirectUri: params.redirectUri, + scopes: scopes, + authorityConfig: params.authorityConfig + }); + + return { + getPKCECodes: AuthCodeFlowHelper.generatePKCE, + getAuthCodeUrl: (pkceCodes) => authFlow.getAuthCodeUrl(pkceCodes), + createCredentialsProvider: (pkceParams) => { + + // This is used to store the initial credentials account to be used + // for silent token acquisition after the initial token acquisition. + let initialCredentialsAccount: AccountInfo | null = null; + + const idp = new MSALIdentityProvider( + async () => { + if (!initialCredentialsAccount) { + let authResult = await authFlow.acquireTokenByCode(pkceParams); + initialCredentialsAccount = authResult.account; + return authResult; + } else { + return authFlow.client.acquireTokenSilent({ + forceRefresh: true, + account: initialCredentialsAccount, + scopes + }); + } + + } + ); + const tm = new TokenManager(idp, params.tokenManagerConfig); + return new EntraidCredentialsProvider(tm, idp, { + onReAuthenticationError: params.onReAuthenticationError, + credentialsMapper: params.credentialsMapper ?? 
DEFAULT_CREDENTIALS_MAPPER, + onRetryableError: params.onRetryableError + }); + } + }; + } + + static getAuthority(config: AuthorityConfig): string { + switch (config.type) { + case 'multi-tenant': + return `https://login.microsoftonline.com/${config.tenantId}`; + case 'custom': + return config.authorityUrl; + case 'default': + return 'https://login.microsoftonline.com/common'; + default: + throw new Error('Invalid authority configuration'); + } + } + +} + +export const REDIS_SCOPE_DEFAULT = 'https://redis.azure.com/.default'; +export const REDIS_SCOPE = 'https://redis.azure.com' + +export type AuthorityConfig = + | { type: 'multi-tenant'; tenantId: string } + | { type: 'custom'; authorityUrl: string } + | { type: 'default' }; + +export type PKCEParams = { + code: string; + verifier: string; + clientInfo?: string; +} + +export type CredentialParams = { + clientId: string; + scopes?: string[]; + authorityConfig?: AuthorityConfig; + + tokenManagerConfig: TokenManagerConfig + onReAuthenticationError?: (error: ReAuthenticationError) => void + credentialsMapper?: (token: AuthenticationResponse) => BasicAuth + onRetryableError?: (error: string) => void +} + +export type DefaultAzureCredentialsParams = { + scopes: string | string[], + options?: GetTokenOptions, + credential: TokenCredential + tokenManagerConfig: TokenManagerConfig + onReAuthenticationError?: (error: ReAuthenticationError) => void + credentialsMapper?: (token: AuthenticationResponse) => BasicAuth + onRetryableError?: (error: string) => void +} + +export type AuthCodePKCEParams = CredentialParams & { + redirectUri: string; +}; + +export type ClientSecretCredentialsParams = CredentialParams & { + clientSecret: string; +}; + +export type ClientCredentialsWithCertificateParams = CredentialParams & { + certificate: { + thumbprint: string; + privateKey: string; + x5c?: string; + }; +}; + +const loggerOptions = { + loggerCallback(loglevel: LogLevel, message: string, containsPii: boolean) { + if (!containsPii) 
console.log(message); + }, + piiLoggingEnabled: false, + logLevel: LogLevel.Error +} + +/** + * The most important part of the RetryPolicy is the `isRetryable` function. This function is used to determine if a request should be retried based + * on the error returned from the identity provider. The default for is to retry on network errors only. + */ +export const DEFAULT_RETRY_POLICY: RetryPolicy = { + // currently only retry on network errors + isRetryable: (error: unknown) => error instanceof NetworkError, + maxAttempts: 10, + initialDelayMs: 100, + maxDelayMs: 100000, + backoffMultiplier: 2, + jitterPercentage: 0.1 + +}; + +export const DEFAULT_TOKEN_MANAGER_CONFIG: TokenManagerConfig = { + retry: DEFAULT_RETRY_POLICY, + expirationRefreshRatio: 0.7 // Refresh token when 70% of the token has expired +} + +/** + * This class is used to help with the Authorization Code Flow with PKCE. + * It provides methods to generate PKCE codes, get the authorization URL, and create the credential provider. 
+ */ +export class AuthCodeFlowHelper { + private constructor( + readonly client: PublicClientApplication, + readonly scopes: string[], + readonly redirectUri: string + ) {} + + async getAuthCodeUrl(pkceCodes: { + challenge: string; + challengeMethod: string; + }): Promise { + const authCodeUrlParameters: AuthorizationUrlRequest = { + scopes: this.scopes, + redirectUri: this.redirectUri, + codeChallenge: pkceCodes.challenge, + codeChallengeMethod: pkceCodes.challengeMethod + }; + + return this.client.getAuthCodeUrl(authCodeUrlParameters); + } + + async acquireTokenByCode(params: PKCEParams): Promise { + const tokenRequest: AuthorizationCodeRequest = { + code: params.code, + scopes: this.scopes, + redirectUri: this.redirectUri, + codeVerifier: params.verifier, + clientInfo: params.clientInfo + }; + + return this.client.acquireTokenByCode(tokenRequest); + } + + static async generatePKCE(): Promise<{ + verifier: string; + challenge: string; + challengeMethod: string; + }> { + const cryptoProvider = new CryptoProvider(); + const { verifier, challenge } = await cryptoProvider.generatePkceCodes(); + return { + verifier, + challenge, + challengeMethod: 'S256' + }; + } + + static create(params: { + clientId: string; + redirectUri: string; + scopes?: string[]; + authorityConfig?: AuthorityConfig; + }): AuthCodeFlowHelper { + const config = { + auth: { + clientId: params.clientId, + authority: EntraIdCredentialsProviderFactory.getAuthority(params.authorityConfig ?? { type: 'default' }) + }, + system: { + loggerOptions + } + }; + + return new AuthCodeFlowHelper( + new PublicClientApplication(config), + params.scopes ?? 
['user.read'], + params.redirectUri + ); + } +} + diff --git a/packages/entraid/lib/entraid-credentials-provider.spec.ts b/packages/entraid/lib/entraid-credentials-provider.spec.ts new file mode 100644 index 00000000000..1bdf4e9b65f --- /dev/null +++ b/packages/entraid/lib/entraid-credentials-provider.spec.ts @@ -0,0 +1,199 @@ +import { AuthenticationResult } from '@azure/msal-node'; +import { IdentityProvider, TokenManager, TokenResponse, BasicAuth } from '@redis/client/dist/lib/authx'; +import { EntraidCredentialsProvider } from './entraid-credentials-provider'; +import { setTimeout } from 'timers/promises'; +import { strict as assert } from 'node:assert'; +import { GLOBAL, testUtils } from './test-utils' + + +describe('EntraID authentication in cluster mode', () => { + + testUtils.testWithCluster('sendCommand', async cluster => { + assert.equal( + await cluster.sendCommand(undefined, true, ['PING']), + 'PONG' + ); + }, GLOBAL.CLUSTERS.PASSWORD_WITH_REPLICAS); +}) + +describe('EntraID CredentialsProvider Subscription Behavior', () => { + + it('should properly handle token refresh sequence for multiple subscribers', async () => { + const networkDelay = 20; + const tokenTTL = 100; + const refreshRatio = 0.5; // Refresh at 50% of TTL + + const idp = new SequenceEntraIDProvider(tokenTTL, networkDelay); + const tokenManager = new TokenManager(idp, { + expirationRefreshRatio: refreshRatio + }); + const entraid = new EntraidCredentialsProvider(tokenManager, idp); + + // Create two initial subscribers + const subscriber1 = new TestSubscriber('subscriber1'); + const subscriber2 = new TestSubscriber('subscriber2'); + + assert.equal(entraid.hasActiveSubscriptions(), false, 'There should be no active subscriptions'); + assert.equal(entraid.getSubscriptionsCount(), 0, 'There should be 0 subscriptions'); + + // Start the first two subscriptions almost simultaneously + const [sub1Initial, sub2Initial] = await Promise.all([ + entraid.subscribe(subscriber1), + 
entraid.subscribe(subscriber2)] + ); + + assertCredentials(sub1Initial[0], 'initial-token', 'Subscriber 1 should receive initial token'); + assertCredentials(sub2Initial[0], 'initial-token', 'Subscriber 2 should receive initial token'); + + assert.equal(entraid.hasActiveSubscriptions(), true, 'There should be active subscriptions'); + assert.equal(entraid.getSubscriptionsCount(), 2, 'There should be 2 subscriptions'); + + // add a third subscriber after a very short delay + const subscriber3 = new TestSubscriber('subscriber3'); + await setTimeout(1); + const sub3Initial = await entraid.subscribe(subscriber3) + + assert.equal(entraid.hasActiveSubscriptions(), true, 'There should be active subscriptions'); + assert.equal(entraid.getSubscriptionsCount(), 3, 'There should be 3 subscriptions'); + + // make sure the third subscriber gets the initial token as well + assertCredentials(sub3Initial[0], 'initial-token', 'Subscriber 3 should receive initial token'); + + // Wait for first refresh (50% of TTL + network delay + small buffer) + await setTimeout((tokenTTL * refreshRatio) + networkDelay + 15); + + // All 3 subscribers should receive refresh-token-1 + assertCredentials(subscriber1.credentials[0], 'refresh-token-1', 'Subscriber 1 should receive first refresh token'); + assertCredentials(subscriber2.credentials[0], 'refresh-token-1', 'Subscriber 2 should receive first refresh token'); + assertCredentials(subscriber3.credentials[0], 'refresh-token-1', 'Subscriber 3 should receive first refresh token'); + + // Add a late subscriber - should immediately get refresh-token-1 + const subscriber4 = new TestSubscriber('subscriber4'); + const sub4Initial = await entraid.subscribe(subscriber4); + + assert.equal(entraid.hasActiveSubscriptions(), true, 'There should be active subscriptions'); + assert.equal(entraid.getSubscriptionsCount(), 4, 'There should be 4 subscriptions'); + + assertCredentials(sub4Initial[0], 'refresh-token-1', 'Late subscriber should receive 
refresh-token-1'); + + // Wait for second refresh + await setTimeout((tokenTTL * refreshRatio) + networkDelay + 15); + + assertCredentials(subscriber1.credentials[1], 'refresh-token-2', 'Subscriber 1 should receive second refresh token'); + assertCredentials(subscriber2.credentials[1], 'refresh-token-2', 'Subscriber 2 should receive second refresh token'); + assertCredentials(subscriber3.credentials[1], 'refresh-token-2', 'Subscriber 3 should receive second refresh token'); + + assertCredentials(subscriber4.credentials[0], 'refresh-token-2', 'Subscriber 4 should receive second refresh token'); + + // Verify refreshes happen after minimum expected time + const minimumRefreshInterval = tokenTTL * 0.4; // 40% of TTL as safety margin + + verifyRefreshTiming(subscriber1, minimumRefreshInterval); + verifyRefreshTiming(subscriber2, minimumRefreshInterval); + verifyRefreshTiming(subscriber3, minimumRefreshInterval); + verifyRefreshTiming(subscriber4, minimumRefreshInterval); + + // Cleanup + + assert.equal(tokenManager.isRunning(), true); + sub1Initial[1].dispose(); + sub2Initial[1].dispose(); + sub3Initial[1].dispose(); + assert.equal(entraid.hasActiveSubscriptions(), true, 'There should be active subscriptions'); + assert.equal(entraid.getSubscriptionsCount(), 1, 'There should be 1 subscriptions'); + sub4Initial[1].dispose(); + assert.equal(entraid.hasActiveSubscriptions(), false, 'There should be no active subscriptions'); + assert.equal(entraid.getSubscriptionsCount(), 0, 'There should be 0 subscriptions'); + assert.equal(tokenManager.isRunning(), false) + }); + + const verifyRefreshTiming = ( + subscriber: TestSubscriber, + expectedMinimumInterval: number, + message?: string + ) => { + const intervals = []; + for (let i = 1; i < subscriber.timestamps.length; i++) { + intervals.push(subscriber.timestamps[i] - subscriber.timestamps[i - 1]); + } + + intervals.forEach((interval, index) => { + assert.ok( + interval > expectedMinimumInterval, + message || `Refresh ${index + 
1} for ${subscriber.name} should happen after minimum interval of ${expectedMinimumInterval}ms` + ); + }); + }; + + class SequenceEntraIDProvider implements IdentityProvider { + private currentIndex = 0; + + constructor( + private readonly tokenTTL: number = 100, + private tokenDeliveryDelayMs: number = 0, + private readonly tokenSequence: AuthenticationResult[] = [ + { + accessToken: 'initial-token', + uniqueId: 'test-user' + } as AuthenticationResult, + { + accessToken: 'refresh-token-1', + uniqueId: 'test-user' + } as AuthenticationResult, + { + accessToken: 'refresh-token-2', + uniqueId: 'test-user' + } as AuthenticationResult + ] + ) {} + + setTokenDeliveryDelay(delayMs: number): void { + this.tokenDeliveryDelayMs = delayMs; + } + + async requestToken(): Promise> { + if (this.tokenDeliveryDelayMs > 0) { + await setTimeout(this.tokenDeliveryDelayMs); + } + + if (this.currentIndex >= this.tokenSequence.length) { + throw new Error('No more tokens in sequence'); + } + + return { + token: this.tokenSequence[this.currentIndex++], + ttlMs: this.tokenTTL + }; + } + } + + class TestSubscriber { + public readonly credentials: Array = []; + public readonly errors: Error[] = []; + public readonly timestamps: number[] = []; + + constructor(public readonly name: string = 'unnamed') {} + + onNext = (creds: BasicAuth) => { + this.credentials.push(creds); + this.timestamps.push(Date.now()); + } + + onError = (error: Error) => { + this.errors.push(error); + } + } + + /** + * Assert that the actual credentials match the expected token + * @param actual + * @param expectedToken + * @param message + */ + const assertCredentials = (actual: BasicAuth, expectedToken: string, message: string) => { + assert.deepEqual(actual, { + username: 'test-user', + password: expectedToken + }, message); + }; +}); \ No newline at end of file diff --git a/packages/entraid/lib/entraid-credentials-provider.ts b/packages/entraid/lib/entraid-credentials-provider.ts new file mode 100644 index 
00000000000..465c9e8a975 --- /dev/null +++ b/packages/entraid/lib/entraid-credentials-provider.ts @@ -0,0 +1,195 @@ +import { AuthenticationResult } from '@azure/msal-common/node'; +import { AccessToken } from '@azure/core-auth'; +import { + BasicAuth, StreamingCredentialsProvider, IdentityProvider, TokenManager, + ReAuthenticationError, StreamingCredentialsListener, IDPError, Token, Disposable +} from '@redis/client/dist/lib/authx'; + +/** + * A streaming credentials provider that uses the Entraid identity provider to provide credentials. + * Please use one of the factory functions in `entraid-credetfactories.ts` to create an instance of this class for the different + * type of authentication flows. + */ + +export type AuthenticationResponse = AuthenticationResult | AccessToken + +export class EntraidCredentialsProvider implements StreamingCredentialsProvider { + readonly type = 'streaming-credentials-provider'; + + readonly #listeners: Set> = new Set(); + + #tokenManagerDisposable: Disposable | null = null; + #isStarting: boolean = false; + + #pendingSubscribers: Array<{ + resolve: (value: [BasicAuth, Disposable]) => void; + reject: (error: Error) => void; + pendingListener: StreamingCredentialsListener; + }> = []; + + constructor( + public readonly tokenManager: TokenManager, + public readonly idp: IdentityProvider, + private readonly options: { + onReAuthenticationError?: (error: ReAuthenticationError) => void; + credentialsMapper?: (token: AuthenticationResponse) => BasicAuth; + onRetryableError?: (error: string) => void; + } = {} + ) { + this.onReAuthenticationError = options.onReAuthenticationError ?? DEFAULT_ERROR_HANDLER; + this.#credentialsMapper = options.credentialsMapper ?? 
DEFAULT_CREDENTIALS_MAPPER; + } + + async subscribe( + listener: StreamingCredentialsListener + ): Promise<[BasicAuth, Disposable]> { + + const currentToken = this.tokenManager.getCurrentToken(); + + if (currentToken) { + return [this.#credentialsMapper(currentToken.value), this.#createDisposable(listener)]; + } + + if (this.#isStarting) { + return new Promise((resolve, reject) => { + this.#pendingSubscribers.push({ resolve, reject, pendingListener: listener }); + }); + } + + this.#isStarting = true; + try { + const initialToken = await this.#startTokenManagerAndObtainInitialToken(); + + this.#pendingSubscribers.forEach(({ resolve, pendingListener }) => { + resolve([this.#credentialsMapper(initialToken.value), this.#createDisposable(pendingListener)]); + }); + this.#pendingSubscribers = []; + + return [this.#credentialsMapper(initialToken.value), this.#createDisposable(listener)]; + } finally { + this.#isStarting = false; + } + } + + onReAuthenticationError: (error: ReAuthenticationError) => void; + + #credentialsMapper: (token: AuthenticationResponse) => BasicAuth; + + #createTokenManagerListener(subscribers: Set>) { + return { + onError: (error: IDPError): void => { + if (!error.isRetryable) { + subscribers.forEach(listener => listener.onError(error)); + } else { + this.options.onRetryableError?.(error.message); + } + }, + onNext: (token: { value: AuthenticationResult | AccessToken }): void => { + const credentials = this.#credentialsMapper(token.value); + subscribers.forEach(listener => listener.onNext(credentials)); + } + }; + } + + #createDisposable(listener: StreamingCredentialsListener): Disposable { + this.#listeners.add(listener); + + return { + dispose: () => { + this.#listeners.delete(listener); + if (this.#listeners.size === 0 && this.#tokenManagerDisposable) { + this.#tokenManagerDisposable.dispose(); + this.#tokenManagerDisposable = null; + } + } + }; + } + + async #startTokenManagerAndObtainInitialToken(): Promise> { + const { ttlMs, token: 
initialToken } = await this.idp.requestToken(); + + const token = this.tokenManager.wrapAndSetCurrentToken(initialToken, ttlMs); + this.#tokenManagerDisposable = this.tokenManager.start( + this.#createTokenManagerListener(this.#listeners), + this.tokenManager.calculateRefreshTime(token) + ); + return token; + } + + public hasActiveSubscriptions(): boolean { + return this.#tokenManagerDisposable !== null && this.#listeners.size > 0; + } + + public getSubscriptionsCount(): number { + return this.#listeners.size; + } + + public getTokenManager() { + return this.tokenManager; + } + + public getCurrentCredentials(): BasicAuth | null { + const currentToken = this.tokenManager.getCurrentToken(); + return currentToken ? this.#credentialsMapper(currentToken.value) : null; + } + +} + +export const DEFAULT_CREDENTIALS_MAPPER = (token: AuthenticationResponse): BasicAuth => { + if (isAuthenticationResult(token)) { + return { + username: token.uniqueId, + password: token.accessToken + } + } else { + return OID_CREDENTIALS_MAPPER(token) + } +}; + +const DEFAULT_ERROR_HANDLER = (error: ReAuthenticationError) => + console.error('ReAuthenticationError', error); + +export const OID_CREDENTIALS_MAPPER = (token: (AuthenticationResult | AccessToken)) => { + + if (isAuthenticationResult(token)) { + // Client credentials flow is app-only authentication (no user context), + // so only access token is provided without user-specific claims (uniqueId, idToken, ...) 
+ // this means that we need to extract the oid from the access token manually + const accessToken = JSON.parse(Buffer.from(token.accessToken.split('.')[1], 'base64').toString()); + + return ({ + username: accessToken.oid, + password: token.accessToken + }) + } else { + const accessToken = JSON.parse(Buffer.from(token.token.split('.')[1], 'base64').toString()); + + return ({ + username: accessToken.oid, + password: token.token + }) + } + +} + +/** + * Type guard to check if a token is an MSAL AuthenticationResult + * + * @param auth - The token to check + * @returns true if the token is an AuthenticationResult + */ +export function isAuthenticationResult(auth: AuthenticationResult | AccessToken): auth is AuthenticationResult { + return typeof (auth as AuthenticationResult).accessToken === 'string' && + !('token' in auth) +} + +/** + * Type guard to check if a token is an Azure Identity AccessToken + * + * @param auth - The token to check + * @returns true if the token is an AccessToken + */ +export function isAccessToken(auth: AuthenticationResult | AccessToken): auth is AccessToken { + return typeof (auth as AccessToken).token === 'string' && + !('accessToken' in auth); +} \ No newline at end of file diff --git a/packages/entraid/lib/index.ts b/packages/entraid/lib/index.ts new file mode 100644 index 00000000000..4873c9935c5 --- /dev/null +++ b/packages/entraid/lib/index.ts @@ -0,0 +1,3 @@ +export * from './entra-id-credentials-provider-factory'; +export * from './entraid-credentials-provider'; +export * from './msal-identity-provider'; \ No newline at end of file diff --git a/packages/entraid/lib/msal-identity-provider.ts b/packages/entraid/lib/msal-identity-provider.ts new file mode 100644 index 00000000000..0f15e01fcdc --- /dev/null +++ b/packages/entraid/lib/msal-identity-provider.ts @@ -0,0 +1,25 @@ +import { + AuthenticationResult +} from '@azure/msal-node'; +import { IdentityProvider, TokenResponse } from '@redis/client/dist/lib/authx'; + +export class 
MSALIdentityProvider implements IdentityProvider { + private readonly getToken: () => Promise; + + constructor(getToken: () => Promise) { + this.getToken = getToken; + } + + async requestToken(): Promise> { + const result = await this.getToken(); + + if (!result?.accessToken || !result?.expiresOn) { + throw new Error('Invalid token response'); + } + return { + token: result, + ttlMs: result.expiresOn.getTime() - Date.now() + }; + } + +} diff --git a/packages/entraid/lib/test-utils.ts b/packages/entraid/lib/test-utils.ts new file mode 100644 index 00000000000..add48e79d74 --- /dev/null +++ b/packages/entraid/lib/test-utils.ts @@ -0,0 +1,46 @@ +import { AuthenticationResult } from '@azure/msal-node'; +import { IdentityProvider, StreamingCredentialsProvider, TokenManager, TokenResponse } from '@redis/client/dist/lib/authx'; +import TestUtils from '@redis/test-utils'; +import { EntraidCredentialsProvider } from './entraid-credentials-provider'; + +export const testUtils = TestUtils.createFromConfig({ + dockerImageName: 'redislabs/client-libs-test', + dockerImageVersionArgument: 'redis-version', + defaultDockerVersion: '8.4-M01-pre' +}); + +const DEBUG_MODE_ARGS = testUtils.isVersionGreaterThan([7]) ? 
+ ['--enable-debug-command', 'yes'] : + []; + +const idp: IdentityProvider = { + requestToken(): Promise> { + // @ts-ignore + return Promise.resolve({ + ttlMs: 100000, + token: { + accessToken: 'password' + } + }) + } +} + +const tokenManager = new TokenManager(idp, { expirationRefreshRatio: 0.8 }); +const entraIdCredentialsProvider: StreamingCredentialsProvider = new EntraidCredentialsProvider(tokenManager, idp) + +const PASSWORD_WITH_REPLICAS = { + serverArguments: ['--requirepass', 'password', ...DEBUG_MODE_ARGS], + numberOfMasters: 2, + numberOfReplicas: 1, + clusterConfiguration: { + defaults: { + credentialsProvider: entraIdCredentialsProvider + } + } +} + +export const GLOBAL = { + CLUSTERS: { + PASSWORD_WITH_REPLICAS + } +} diff --git a/packages/entraid/package.json b/packages/entraid/package.json new file mode 100644 index 00000000000..555e2c79411 --- /dev/null +++ b/packages/entraid/package.json @@ -0,0 +1,50 @@ +{ + "name": "@redis/entraid", + "version": "5.9.0-beta.2", + "license": "MIT", + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "files": [ + "dist/", + "!dist/tsconfig.tsbuildinfo" + ], + "scripts": { + "clean": "rimraf dist", + "build": "npm run clean && tsc", + "start:auth-pkce": "tsx --tsconfig tsconfig.samples.json ./samples/auth-code-pkce/index.ts", + "start:interactive-browser": "tsx --tsconfig tsconfig.samples.json ./samples/interactive-browser/index.ts", + "test-integration": "mocha -r tsx --tsconfig tsconfig.integration-tests.json './integration-tests/**/*.spec.ts'", + "test": "nyc -r text-summary -r lcov mocha -r tsx './lib/**/*.spec.ts'", + "release": "release-it" + }, + "dependencies": { + "@azure/identity": "^4.7.0", + "@azure/msal-node": "^2.16.1" + }, + "peerDependencies": { + "@redis/client": "^5.9.0-beta.2" + }, + "devDependencies": { + "@types/express": "^4.17.21", + "@types/express-session": "^1.18.0", + "@types/node": "^22.9.0", + "dotenv": "^16.3.1", + "express": "^4.21.1", + "express-session": "^1.18.1", + 
"@redis/test-utils": "*" + }, + "engines": { + "node": ">= 18" + }, + "repository": { + "type": "git", + "url": "git://github.com/redis/node-redis.git" + }, + "bugs": { + "url": "https://github.com/redis/node-redis/issues" + }, + "homepage": "https://github.com/redis/node-redis/tree/master/packages/entraid", + "keywords": [ + "redis" + ] +} diff --git a/packages/entraid/samples/auth-code-pkce/index.ts b/packages/entraid/samples/auth-code-pkce/index.ts new file mode 100644 index 00000000000..25429269c44 --- /dev/null +++ b/packages/entraid/samples/auth-code-pkce/index.ts @@ -0,0 +1,153 @@ +import express, { Request, Response } from 'express'; +import session from 'express-session'; +import dotenv from 'dotenv'; +import { DEFAULT_TOKEN_MANAGER_CONFIG, EntraIdCredentialsProviderFactory } from '../../lib/entra-id-credentials-provider-factory'; + +dotenv.config(); + +if (!process.env.SESSION_SECRET) { + throw new Error('SESSION_SECRET environment variable must be set'); +} + +interface PKCESession extends session.Session { + pkceCodes?: { + verifier: string; + challenge: string; + challengeMethod: string; + }; +} + +interface AuthRequest extends Request { + session: PKCESession; +} + +const app = express(); + +const sessionConfig = { + secret: process.env.SESSION_SECRET, + resave: false, + saveUninitialized: false, + cookie: { + secure: process.env.NODE_ENV === 'production', // Only use secure in production + httpOnly: true, + sameSite: 'lax', + maxAge: 3600000 // 1 hour + } +} as const; + +app.use(session(sessionConfig)); + +if (!process.env.MSAL_CLIENT_ID || !process.env.MSAL_TENANT_ID) { + throw new Error('MSAL_CLIENT_ID and MSAL_TENANT_ID environment variables must be set'); +} + +// Initialize MSAL provider with authorization code PKCE flow +const { + getPKCECodes, + createCredentialsProvider, + getAuthCodeUrl +} = EntraIdCredentialsProviderFactory.createForAuthorizationCodeWithPKCE({ + clientId: process.env.MSAL_CLIENT_ID, 
+ redirectUri: process.env.REDIRECT_URI || 'http://localhost:3000/redirect', + authorityConfig: { type: 'multi-tenant', tenantId: process.env.MSAL_TENANT_ID }, + tokenManagerConfig: DEFAULT_TOKEN_MANAGER_CONFIG +}); + +app.get('/login', async (req: AuthRequest, res: Response) => { + try { + // Generate PKCE Codes before starting the authorization flow + const pkceCodes = await getPKCECodes(); + + // Store PKCE codes in session + req.session.pkceCodes = pkceCodes + + await new Promise((resolve, reject) => { + req.session.save((err) => { + if (err) reject(err); + else resolve(); + }); + }); + + const authUrl = await getAuthCodeUrl({ + challenge: pkceCodes.challenge, + challengeMethod: pkceCodes.challengeMethod + }); + + res.redirect(authUrl); + } catch (error) { + console.error('Login flow failed:', error); + res.status(500).send('Authentication failed'); + } +}); + +app.get('/redirect', async (req: AuthRequest, res: Response) => { + try { + + // The authorization code is in req.query.code + const { code, client_info } = req.query; + const { pkceCodes } = req.session; + + if (!pkceCodes) { + console.error('Session state:', { + hasSession: !!req.session, + sessionID: req.sessionID, + pkceCodes: req.session.pkceCodes + }); + return res.status(400).send('PKCE codes not found in session'); + } + + // Check both possible error scenarios + if (req.query.error) { + console.error('OAuth error:', req.query.error, req.query.error_description); + return res.status(400).send(`OAuth error: ${req.query.error_description || req.query.error}`); + } + + if (!code) { + console.error('Missing authorization code. Query parameters received:', req.query); + return res.status(400).send('Authorization code not found in request. 
Query params: ' + JSON.stringify(req.query)); + } + + // Configure with the received code + const entraidCredentialsProvider = createCredentialsProvider( + { + code: code as string, + verifier: pkceCodes.verifier, + clientInfo: client_info as string | undefined + }, + ); + + const initialCredentials = entraidCredentialsProvider.subscribe({ + onNext: (token) => { + console.log('Token acquired:', token); + }, + onError: (error) => { + console.error('Token acquisition failed:', error); + } + }); + + const [credentials] = await initialCredentials; + + console.log('Credentials acquired:', credentials) + + // Clear sensitive data + delete req.session.pkceCodes; + + await new Promise((resolve, reject) => { + req.session.save((err) => { + if (err) reject(err); + else resolve(); + }); + }); + + res.json({ message: 'Authentication successful' }); + } catch (error) { + console.error('Token acquisition failed:', error); + res.status(500).send('Failed to acquire token'); + } +}); + +const PORT = process.env.PORT || 3000; +app.listen(PORT, () => { + console.log(`Server running on port ${PORT}`); + console.log(`Login URL: http://localhost:${PORT}/login`); +}); \ No newline at end of file diff --git a/packages/entraid/samples/interactive-browser/index.ts b/packages/entraid/samples/interactive-browser/index.ts new file mode 100644 index 00000000000..f458ad9e190 --- /dev/null +++ b/packages/entraid/samples/interactive-browser/index.ts @@ -0,0 +1,111 @@ +import express, { Request, Response } from 'express'; +import session from 'express-session'; +import dotenv from 'dotenv'; +import { DEFAULT_TOKEN_MANAGER_CONFIG, EntraIdCredentialsProviderFactory } from '../../lib/entra-id-credentials-provider-factory'; +import { InteractiveBrowserCredential } from '@azure/identity'; + +dotenv.config(); + +if (!process.env.SESSION_SECRET) { + throw new Error('SESSION_SECRET environment variable must be set'); +} + +const app = express(); + +const sessionConfig = { + secret: 
process.env.SESSION_SECRET, + resave: false, + saveUninitialized: false, + cookie: { + secure: process.env.NODE_ENV === 'production', // Only use secure in production + httpOnly: true, + sameSite: 'lax', + maxAge: 3600000 // 1 hour + } +} as const; + +app.use(session(sessionConfig)); + +if (!process.env.MSAL_CLIENT_ID || !process.env.MSAL_TENANT_ID) { + throw new Error('MSAL_CLIENT_ID and MSAL_TENANT_ID environment variables must be set'); +} + + +app.get('/login', async (req: Request, res: Response) => { + try { + // Create an instance of InteractiveBrowserCredential + const credential = new InteractiveBrowserCredential({ + clientId: process.env.MSAL_CLIENT_ID!, + tenantId: process.env.MSAL_TENANT_ID!, + loginStyle: 'popup', + redirectUri: 'http://localhost:3000/redirect' + }); + + // Create Redis client using the EntraID credentials provider + const entraidCredentialsProvider = EntraIdCredentialsProviderFactory.createForDefaultAzureCredential({ + credential, + scopes: ['user.read'], + tokenManagerConfig: DEFAULT_TOKEN_MANAGER_CONFIG + }); + + // Subscribe to credentials updates + const initialCredentials = entraidCredentialsProvider.subscribe({ + onNext: (token) => { + // Never log the full token in production + console.log('Token acquired successfully'); + console.log('Username:', token.username); + + }, + onError: (error) => { + console.error('Token acquisition failed:', error); + } + }); + + // Wait for the initial credentials + const [credentials] = await initialCredentials; + + // Return success response + res.json({ + status: 'success', + message: 'Authentication successful', + credentials: { + username: credentials.username, + password: credentials.password + } + }); + } catch (error) { + console.error('Authentication failed:', error); + res.status(500).json({ + status: 'error', + message: 'Authentication failed', + error: error instanceof Error ? 
error.message : String(error) + }); + } +}); + +// Create a simple status page +app.get('/', (req: Request, res: Response) => { + res.send(` + + + Interactive Browser Credential Demo + + + +

Interactive Browser Credential Demo

+

This example demonstrates using the InteractiveBrowserCredential from @azure/identity to authenticate with Microsoft Entra ID.

+

When you click the button below, you'll be redirected to the Microsoft login page.

+ Login with Microsoft + + + `); +}); + +const PORT = process.env.PORT || 3000; +app.listen(PORT, () => { + console.log(`Server running on port ${PORT}`); + console.log(`Open http://localhost:${PORT} in your browser to start`); +}); diff --git a/packages/entraid/tsconfig.integration-tests.json b/packages/entraid/tsconfig.integration-tests.json new file mode 100644 index 00000000000..5d15f4f2753 --- /dev/null +++ b/packages/entraid/tsconfig.integration-tests.json @@ -0,0 +1,10 @@ +{ + "extends": "./tsconfig.json", + "include": [ + "./integration-tests/**/*.ts", + "./lib/**/*.ts" + ], + "compilerOptions": { + "noEmit": true + }, +} \ No newline at end of file diff --git a/packages/entraid/tsconfig.json b/packages/entraid/tsconfig.json new file mode 100644 index 00000000000..47100f5b87d --- /dev/null +++ b/packages/entraid/tsconfig.json @@ -0,0 +1,21 @@ +{ + "extends": "../../tsconfig.base.json", + "compilerOptions": { + "outDir": "./dist" + }, + "include": [ + "./lib/**/*.ts", + "./index.ts" + ], + "exclude": [ + "./lib/**/*.spec.ts", + "./lib/test-util.ts", + ], + "typedocOptions": { + "entryPoints": [ + "./lib" + ], + "entryPointStrategy": "expand", + "out": "../../documentation/entraid" + } +} diff --git a/packages/entraid/tsconfig.samples.json b/packages/entraid/tsconfig.samples.json new file mode 100644 index 00000000000..0eb936369ff --- /dev/null +++ b/packages/entraid/tsconfig.samples.json @@ -0,0 +1,10 @@ +{ + "extends": "./tsconfig.json", + "include": [ + "./samples/**/*.ts", + "./lib/**/*.ts" + ], + "compilerOptions": { + "noEmit": true + } +} \ No newline at end of file diff --git a/packages/json/.nycrc.json b/packages/json/.nycrc.json new file mode 100644 index 00000000000..367a89ad32c --- /dev/null +++ b/packages/json/.nycrc.json @@ -0,0 +1,4 @@ +{ + "extends": "@istanbuljs/nyc-config-typescript", + "exclude": ["dist", "**/*.spec.ts", "lib/test-utils.ts"] +} diff --git a/packages/json/.release-it.json b/packages/json/.release-it.json new file mode 100644 
index 00000000000..a384e3e3d4d --- /dev/null +++ b/packages/json/.release-it.json @@ -0,0 +1,22 @@ +{ + "npm": { + "publish": true, + "publishArgs": ["--access", "public"] + }, + "git": { + "tagName": "json@${version}", + "tagMatch": "json@*", + "commitMessage": "Release ${tagName}", + "tagAnnotation": "Release ${tagName}", + "commitArgs": "--all" + }, + "plugins": { + "@release-it/bumper": { + "out": { + "file": "package.json", + "path": ["peerDependencies.@redis/client"], + "versionPrefix": "^" + } + } + } +} diff --git a/packages/json/README.md b/packages/json/README.md new file mode 100644 index 00000000000..ed60a64351a --- /dev/null +++ b/packages/json/README.md @@ -0,0 +1,76 @@ +# @redis/json + +This package provides support for the [RedisJSON](https://redis.io/docs/latest/develop/data-types/json/) module, which adds JSON as a native data type to Redis. + +Should be used with [`redis`/`@redis/client`](https://github.com/redis/node-redis). + +:warning: To use these extra commands, your Redis server must have the RedisJSON module installed. + +## Usage + +For a complete example, see [`managing-json.js`](https://github.com/redis/node-redis/blob/master/examples/managing-json.js) in the [examples folder](https://github.com/redis/node-redis/tree/master/examples). + +### Storing JSON Documents in Redis + +The [`JSON.SET`](https://redis.io/commands/json.set/) command stores a JSON value at a given JSON Path in a Redis key. + +Here, we'll store a JSON document in the root of the Redis key "`noderedis:jsondata`": + +```javascript +await client.json.set('noderedis:jsondata', '$', { + name: 'Roberta McDonald', + pets: [{ + name: 'Rex', + species: 'dog', + age: 3, + isMammal: true + }, { + name: 'Goldie', + species: 'fish', + age: 2, + isMammal: false + }] +}); +``` + +For more information about RedisJSON's path syntax, [check out the documentation](https://redis.io/docs/latest/develop/data-types/json/path).
+ +### Retrieving JSON Documents from Redis + +With RedisJSON, we can retrieve all or part(s) of a JSON document using the [`JSON.GET`](https://redis.io/commands/json.get/) command and one or more JSON Paths. Let's get the name and age of one of the pets: + +```javascript +const results = await client.json.get('noderedis:jsondata', { + path: [ + '.pets[1].name', + '.pets[1].age' + ] +}); +``` + +`results` will contain the following: + +```javascript + { '.pets[1].name': 'Goldie', '.pets[1].age': 2 } +``` + +### Performing Atomic Updates on JSON Documents Stored in Redis + +RedisJSON includes commands that can atomically update values in a JSON document, in place in Redis without having to first retrieve the entire document. + +Using the [`JSON.NUMINCRBY`](https://redis.io/commands/json.numincrby/) command, we can update the age of one of the pets like this: + +```javascript +await client.json.numIncrBy('noderedis:jsondata', '.pets[1].age', 1); +``` + +And we can add a new object to the pets array with the [`JSON.ARRAPPEND`](https://redis.io/commands/json.arrappend/) command: + +```javascript +await client.json.arrAppend('noderedis:jsondata', '.pets', { + name: 'Robin', + species: 'bird', + age: 1, + isMammal: false +}); +``` diff --git a/packages/json/lib/commands/ARRAPPEND.spec.ts b/packages/json/lib/commands/ARRAPPEND.spec.ts new file mode 100644 index 00000000000..b2c22e0b9c0 --- /dev/null +++ b/packages/json/lib/commands/ARRAPPEND.spec.ts @@ -0,0 +1,31 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ARRAPPEND from './ARRAPPEND'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('JSON.ARRAPPEND', () => { + describe('transformArguments', () => { + it('single element', () => { + assert.deepEqual( + parseArgs(ARRAPPEND, 'key', '$', 'value'), + ['JSON.ARRAPPEND', 'key', '$', '"value"'] + ); + }); + + it('multiple elements', () => { + assert.deepEqual( + 
parseArgs(ARRAPPEND, 'key', '$', 1, 2), + ['JSON.ARRAPPEND', 'key', '$', '1', '2'] + ); + }); + }); + + testUtils.testWithClient('client.json.arrAppend', async client => { + const [, reply] = await Promise.all([ + client.json.set('key', '$', []), + client.json.arrAppend('key', '$', 'value') + ]); + + assert.deepEqual(reply, [1]); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/json/lib/commands/ARRAPPEND.ts b/packages/json/lib/commands/ARRAPPEND.ts new file mode 100644 index 00000000000..b98d1532b4e --- /dev/null +++ b/packages/json/lib/commands/ARRAPPEND.ts @@ -0,0 +1,33 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisJSON, transformRedisJsonArgument } from '@redis/client/dist/lib/commands/generic-transformers'; +import { RedisArgument, NumberReply, ArrayReply, NullReply, Command } from '@redis/client/dist/lib/RESP/types'; + +export default { + IS_READ_ONLY: false, + /** + * Appends one or more values to the end of an array in a JSON document. + * Returns the new array length after append, or null if the path does not exist. 
+ * + * @param parser - The Redis command parser + * @param key - The key to append to + * @param path - Path to the array in the JSON document + * @param json - The first value to append + * @param jsons - Additional values to append + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + path: RedisArgument, + json: RedisJSON, + ...jsons: Array + ) { + parser.push('JSON.ARRAPPEND'); + parser.pushKey(key); + parser.push(path, transformRedisJsonArgument(json)); + + for (let i = 0; i < jsons.length; i++) { + parser.push(transformRedisJsonArgument(jsons[i])); + } + }, + transformReply: undefined as unknown as () => NumberReply | ArrayReply +} as const satisfies Command; diff --git a/packages/json/lib/commands/ARRINDEX.spec.ts b/packages/json/lib/commands/ARRINDEX.spec.ts new file mode 100644 index 00000000000..3c1377354f1 --- /dev/null +++ b/packages/json/lib/commands/ARRINDEX.spec.ts @@ -0,0 +1,49 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ARRINDEX from './ARRINDEX'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('JSON.ARRINDEX', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(ARRINDEX, 'key', '$', 'value'), + ['JSON.ARRINDEX', 'key', '$', '"value"'] + ); + }); + + describe('with range', () => { + it('start only', () => { + assert.deepEqual( + parseArgs(ARRINDEX, 'key', '$', 'value', { + range: { + start: 0 + } + }), + ['JSON.ARRINDEX', 'key', '$', '"value"', '0'] + ); + }); + + it('with start and stop', () => { + assert.deepEqual( + parseArgs(ARRINDEX, 'key', '$', 'value', { + range: { + start: 0, + stop: 1 + } + }), + ['JSON.ARRINDEX', 'key', '$', '"value"', '0', '1'] + ); + }); + }); + }); + + testUtils.testWithClient('client.json.arrIndex', async client => { + const [, reply] = await Promise.all([ + client.json.set('key', '$', []), + client.json.arrIndex('key', '$', 'value') + ]); + 
+ assert.deepEqual(reply, [-1]); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/json/lib/commands/ARRINDEX.ts b/packages/json/lib/commands/ARRINDEX.ts new file mode 100644 index 00000000000..1437fab4d56 --- /dev/null +++ b/packages/json/lib/commands/ARRINDEX.ts @@ -0,0 +1,46 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, NumberReply, ArrayReply, NullReply, Command } from '@redis/client/dist/lib/RESP/types'; +import { RedisJSON, transformRedisJsonArgument } from '@redis/client/dist/lib/commands/generic-transformers'; + +export interface JsonArrIndexOptions { + range?: { + start: number; + stop?: number; + }; +} + +export default { + IS_READ_ONLY: true, + /** + * Returns the index of the first occurrence of a value in a JSON array. + * If the specified value is not found, it returns -1, or null if the path does not exist. + * + * @param parser - The Redis command parser + * @param key - The key containing the array + * @param path - Path to the array in the JSON document + * @param json - The value to search for + * @param options - Optional range parameters for the search + * @param options.range.start - Starting index for the search + * @param options.range.stop - Optional ending index for the search + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + path: RedisArgument, + json: RedisJSON, + options?: JsonArrIndexOptions + ) { + parser.push('JSON.ARRINDEX'); + parser.pushKey(key); + parser.push(path, transformRedisJsonArgument(json)); + + if (options?.range) { + parser.push(options.range.start.toString()); + + if (options.range.stop !== undefined) { + parser.push(options.range.stop.toString()); + } + } + }, + transformReply: undefined as unknown as () => NumberReply | ArrayReply +} as const satisfies Command; diff --git a/packages/json/lib/commands/ARRINSERT.spec.ts b/packages/json/lib/commands/ARRINSERT.spec.ts new file mode 100644 index 00000000000..bf9c8a2a051 --- /dev/null +++ 
b/packages/json/lib/commands/ARRINSERT.spec.ts @@ -0,0 +1,31 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ARRINSERT from './ARRINSERT'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('JSON.ARRINSERT', () => { + describe('transformArguments', () => { + it('single element', () => { + assert.deepEqual( + parseArgs(ARRINSERT, 'key', '$', 0, 'value'), + ['JSON.ARRINSERT', 'key', '$', '0', '"value"'] + ); + }); + + it('multiple elements', () => { + assert.deepEqual( + parseArgs(ARRINSERT, 'key', '$', 0, '1', '2'), + ['JSON.ARRINSERT', 'key', '$', '0', '"1"', '"2"'] + ); + }); + }); + + testUtils.testWithClient('client.json.arrInsert', async client => { + const [, reply] = await Promise.all([ + client.json.set('key', '$', []), + client.json.arrInsert('key', '$', 0, 'value') + ]); + + assert.deepEqual(reply, [1]); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/json/lib/commands/ARRINSERT.ts b/packages/json/lib/commands/ARRINSERT.ts new file mode 100644 index 00000000000..7a5ab945892 --- /dev/null +++ b/packages/json/lib/commands/ARRINSERT.ts @@ -0,0 +1,35 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, NumberReply, ArrayReply, NullReply, Command } from '@redis/client/dist/lib/RESP/types'; +import { RedisJSON, transformRedisJsonArgument } from '@redis/client/dist/lib/commands/generic-transformers'; + +export default { + IS_READ_ONLY: false, + /** + * Inserts one or more values into an array at the specified index. + * Returns the new array length after insert, or null if the path does not exist. 
+ * + * @param parser - The Redis command parser + * @param key - The key containing the array + * @param path - Path to the array in the JSON document + * @param index - The position where to insert the values + * @param json - The first value to insert + * @param jsons - Additional values to insert + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + path: RedisArgument, + index: number, + json: RedisJSON, + ...jsons: Array + ) { + parser.push('JSON.ARRINSERT'); + parser.pushKey(key); + parser.push(path, index.toString(), transformRedisJsonArgument(json)); + + for (let i = 0; i < jsons.length; i++) { + parser.push(transformRedisJsonArgument(jsons[i])); + } + }, + transformReply: undefined as unknown as () => NumberReply | ArrayReply +} as const satisfies Command; diff --git a/packages/json/lib/commands/ARRLEN.spec.ts b/packages/json/lib/commands/ARRLEN.spec.ts new file mode 100644 index 00000000000..dcf7d35acb0 --- /dev/null +++ b/packages/json/lib/commands/ARRLEN.spec.ts @@ -0,0 +1,33 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ARRLEN from './ARRLEN'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('JSON.ARRLEN', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(ARRLEN, 'key'), + ['JSON.ARRLEN', 'key'] + ); + }); + + it('with path', () => { + assert.deepEqual( + parseArgs(ARRLEN, 'key', { + path: '$' + }), + ['JSON.ARRLEN', 'key', '$'] + ); + }); + }); + + testUtils.testWithClient('client.json.arrLen', async client => { + const [, reply] = await Promise.all([ + client.json.set('key', '$', []), + client.json.arrLen('key') + ]); + + assert.equal(reply, 0); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/json/lib/commands/ARRLEN.ts b/packages/json/lib/commands/ARRLEN.ts new file mode 100644 index 00000000000..f2111986c0e --- /dev/null +++ b/packages/json/lib/commands/ARRLEN.ts @@ 
-0,0 +1,27 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, ArrayReply, NumberReply, NullReply, Command } from '@redis/client/dist/lib/RESP/types'; + +export interface JsonArrLenOptions { + path?: RedisArgument; +} + +export default { + IS_READ_ONLY: true, + /** + * Returns the length of an array in a JSON document. + * Returns null if the path does not exist or the value is not an array. + * + * @param parser - The Redis command parser + * @param key - The key containing the array + * @param options - Optional parameters + * @param options.path - Path to the array in the JSON document + */ + parseCommand(parser: CommandParser, key: RedisArgument, options?: JsonArrLenOptions) { + parser.push('JSON.ARRLEN'); + parser.pushKey(key); + if (options?.path !== undefined) { + parser.push(options.path); + } + }, + transformReply: undefined as unknown as () => NumberReply | ArrayReply +} as const satisfies Command; diff --git a/packages/json/lib/commands/ARRPOP.spec.ts b/packages/json/lib/commands/ARRPOP.spec.ts new file mode 100644 index 00000000000..f823e7fc08a --- /dev/null +++ b/packages/json/lib/commands/ARRPOP.spec.ts @@ -0,0 +1,67 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ARRPOP from './ARRPOP'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('JSON.ARRPOP', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(ARRPOP, 'key'), + ['JSON.ARRPOP', 'key'] + ); + }); + + it('with path', () => { + assert.deepEqual( + parseArgs(ARRPOP, 'key', { + path: '$' + }), + ['JSON.ARRPOP', 'key', '$'] + ); + }); + + it('with path and index', () => { + assert.deepEqual( + parseArgs(ARRPOP, 'key', { + path: '$', + index: 0 + }), + ['JSON.ARRPOP', 'key', '$', '0'] + ); + }); + }); + + describe('client.json.arrPop', () => { + testUtils.testWithClient('without path and value', async 
client => { + const [, reply] = await Promise.all([ + client.json.set('key', '$', []), + client.json.arrPop('key') + ]); + + assert.equal(reply, null); + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithClient('. path with value', async client => { + const [, reply] = await Promise.all([ + client.json.set('key', '.', ['value']), + client.json.arrPop('key', { + path: '.' + }) + ]); + + assert.equal(reply, 'value'); + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithClient('$ path with value', async client => { + const [, reply] = await Promise.all([ + client.json.set('key', '$', ['value']), + client.json.arrPop('key', { + path: '$' + }) + ]); + + assert.deepEqual(reply, ['value']); + }, GLOBAL.SERVERS.OPEN); + }); +}); diff --git a/packages/json/lib/commands/ARRPOP.ts b/packages/json/lib/commands/ARRPOP.ts new file mode 100644 index 00000000000..88e4da96980 --- /dev/null +++ b/packages/json/lib/commands/ARRPOP.ts @@ -0,0 +1,40 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, ArrayReply, NullReply, BlobStringReply, Command, UnwrapReply } from '@redis/client/dist/lib/RESP/types'; +import { isArrayReply, transformRedisJsonNullReply } from '@redis/client/dist/lib/commands/generic-transformers'; + +export interface RedisArrPopOptions { + path: RedisArgument; + index?: number; +} + +export default { + IS_READ_ONLY: false, + /** + * Removes and returns an element from an array in a JSON document. + * Returns null if the path does not exist or the value is not an array. + * + * @param parser - The Redis command parser + * @param key - The key containing the array + * @param options - Optional parameters + * @param options.path - Path to the array in the JSON document + * @param options.index - Optional index to pop from. 
Default is -1 (last element) + */ + parseCommand(parser: CommandParser, key: RedisArgument, options?: RedisArrPopOptions) { + parser.push('JSON.ARRPOP'); + parser.pushKey(key); + + if (options) { + parser.push(options.path); + + if (options.index !== undefined) { + parser.push(options.index.toString()); + } + } + }, + transformReply(reply: NullReply | BlobStringReply | ArrayReply) { + return isArrayReply(reply) ? + (reply as unknown as UnwrapReply).map(item => transformRedisJsonNullReply(item)) : + transformRedisJsonNullReply(reply); + } +} as const satisfies Command; + diff --git a/packages/json/lib/commands/ARRTRIM.spec.ts b/packages/json/lib/commands/ARRTRIM.spec.ts new file mode 100644 index 00000000000..e346716e8df --- /dev/null +++ b/packages/json/lib/commands/ARRTRIM.spec.ts @@ -0,0 +1,22 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ARRTRIM from './ARRTRIM'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('JSON.ARRTRIM', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(ARRTRIM, 'key', '$', 0, 1), + ['JSON.ARRTRIM', 'key', '$', '0', '1'] + ); + }); + + testUtils.testWithClient('client.json.arrTrim', async client => { + const [, reply] = await Promise.all([ + client.json.set('key', '$', []), + client.json.arrTrim('key', '$', 0, 1) + ]); + + assert.deepEqual(reply, [0]); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/json/lib/commands/ARRTRIM.ts b/packages/json/lib/commands/ARRTRIM.ts new file mode 100644 index 00000000000..bfcb1da14aa --- /dev/null +++ b/packages/json/lib/commands/ARRTRIM.ts @@ -0,0 +1,22 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, ArrayReply, NumberReply, NullReply, Command } from '@redis/client/dist/lib/RESP/types'; + +export default { + IS_READ_ONLY: false, + /** + * Trims an array in a JSON document to include only elements within the specified 
range. + * Returns the new array length after trimming, or null if the path does not exist. + * + * @param parser - The Redis command parser + * @param key - The key containing the array + * @param path - Path to the array in the JSON document + * @param start - Starting index (inclusive) + * @param stop - Ending index (inclusive) + */ + parseCommand(parser: CommandParser, key: RedisArgument, path: RedisArgument, start: number, stop: number) { + parser.push('JSON.ARRTRIM'); + parser.pushKey(key); + parser.push(path, start.toString(), stop.toString()); + }, + transformReply: undefined as unknown as () => NumberReply | ArrayReply +} as const satisfies Command; diff --git a/packages/json/lib/commands/CLEAR.spec.ts b/packages/json/lib/commands/CLEAR.spec.ts new file mode 100644 index 00000000000..c1786cc1dde --- /dev/null +++ b/packages/json/lib/commands/CLEAR.spec.ts @@ -0,0 +1,33 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import CLEAR from './CLEAR'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('JSON.CLEAR', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(CLEAR, 'key'), + ['JSON.CLEAR', 'key'] + ); + }); + + it('with path', () => { + assert.deepEqual( + parseArgs(CLEAR, 'key', { + path: '$' + }), + ['JSON.CLEAR', 'key', '$'] + ); + }); + }); + + testUtils.testWithClient('client.json.clear', async client => { + const [, reply] = await Promise.all([ + client.json.set('key', '$', null), + client.json.clear('key') + ]); + + assert.equal(reply, 0); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/json/lib/commands/CLEAR.ts b/packages/json/lib/commands/CLEAR.ts new file mode 100644 index 00000000000..281c5e6abae --- /dev/null +++ b/packages/json/lib/commands/CLEAR.ts @@ -0,0 +1,28 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, NumberReply, Command } from 
'@redis/client/dist/lib/RESP/types'; + +export interface JsonClearOptions { + path?: RedisArgument; +} + +export default { + IS_READ_ONLY: false, + /** + * Clears container values (arrays/objects) in a JSON document. + * Returns the number of values cleared (may be more than one when a JSONPath matches multiple containers). + * + * @param parser - The Redis command parser + * @param key - The key containing the JSON document + * @param options - Optional parameters + * @param options.path - Path to the container to clear + */ + parseCommand(parser: CommandParser, key: RedisArgument, options?: JsonClearOptions) { + parser.push('JSON.CLEAR'); + parser.pushKey(key); + + if (options?.path !== undefined) { + parser.push(options.path); + } + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/json/lib/commands/DEBUG_MEMORY.spec.ts b/packages/json/lib/commands/DEBUG_MEMORY.spec.ts new file mode 100644 index 00000000000..09c29328d8e --- /dev/null +++ b/packages/json/lib/commands/DEBUG_MEMORY.spec.ts @@ -0,0 +1,31 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import DEBUG_MEMORY from './DEBUG_MEMORY'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('JSON.DEBUG MEMORY', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(DEBUG_MEMORY, 'key'), + ['JSON.DEBUG', 'MEMORY', 'key'] + ); + }); + + it('with path', () => { + assert.deepEqual( + parseArgs(DEBUG_MEMORY, 'key', { + path: '$' + }), + ['JSON.DEBUG', 'MEMORY', 'key', '$'] + ); + }); + }); + + testUtils.testWithClient('client.json.debugMemory', async client => { + assert.equal( + await client.json.debugMemory('key'), + 0 + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/json/lib/commands/DEBUG_MEMORY.ts b/packages/json/lib/commands/DEBUG_MEMORY.ts new file mode 100644 index 00000000000..cf0f2c8f215 --- /dev/null +++
b/packages/json/lib/commands/DEBUG_MEMORY.ts @@ -0,0 +1,28 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, NumberReply, Command } from '@redis/client/dist/lib/RESP/types'; + +export interface JsonDebugMemoryOptions { + path?: RedisArgument; +} + +export default { + IS_READ_ONLY: false, + /** + * Reports memory usage details for a JSON document value. + * Returns size in bytes of the value, or null if the key or path does not exist. + * + * @param parser - The Redis command parser + * @param key - The key containing the JSON document + * @param options - Optional parameters + * @param options.path - Path to the value to examine + */ + parseCommand(parser: CommandParser, key: RedisArgument, options?: JsonDebugMemoryOptions) { + parser.push('JSON.DEBUG', 'MEMORY'); + parser.pushKey(key); + + if (options?.path !== undefined) { + parser.push(options.path); + } + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/json/lib/commands/DEL.spec.ts b/packages/json/lib/commands/DEL.spec.ts new file mode 100644 index 00000000000..a008c3b9b2b --- /dev/null +++ b/packages/json/lib/commands/DEL.spec.ts @@ -0,0 +1,32 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import DEL from './DEL'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('JSON.DEL', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(DEL, 'key'), + ['JSON.DEL', 'key'] + ); + }); + + it('with path', () => { + assert.deepEqual( + parseArgs(DEL, 'key', { + path: '$.path' + }), + ['JSON.DEL', 'key', '$.path'] + ); + }); + }); + + testUtils.testWithClient('client.json.del', async client => { + assert.equal( + await client.json.del('key'), + 0 + ); + }, GLOBAL.SERVERS.OPEN); +}); + diff --git a/packages/json/lib/commands/DEL.ts 
b/packages/json/lib/commands/DEL.ts new file mode 100644 index 00000000000..4d768927088 --- /dev/null +++ b/packages/json/lib/commands/DEL.ts @@ -0,0 +1,28 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, NumberReply, Command } from '@redis/client/dist/lib/RESP/types'; + +export interface JsonDelOptions { + path?: RedisArgument +} + +export default { + IS_READ_ONLY: false, + /** + * Deletes a value from a JSON document. + * Returns the number of paths deleted (0 if the key or path does not exist; can be more than 1 when a JSONPath matches multiple values). + * + * @param parser - The Redis command parser + * @param key - The key containing the JSON document + * @param options - Optional parameters + * @param options.path - Path to the value to delete + */ + parseCommand(parser: CommandParser, key: RedisArgument, options?: JsonDelOptions) { + parser.push('JSON.DEL'); + parser.pushKey(key); + + if (options?.path !== undefined) { + parser.push(options.path); + } + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/json/lib/commands/FORGET.spec.ts b/packages/json/lib/commands/FORGET.spec.ts new file mode 100644 index 00000000000..888fff5659b --- /dev/null +++ b/packages/json/lib/commands/FORGET.spec.ts @@ -0,0 +1,31 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import FORGET from './FORGET'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('JSON.FORGET', () => { + describe('transformArguments', () => { + it('key', () => { + assert.deepEqual( + parseArgs(FORGET, 'key'), + ['JSON.FORGET', 'key'] + ); + }); + + it('key, path', () => { + assert.deepEqual( + parseArgs(FORGET, 'key', { + path: '$.path' + }), + ['JSON.FORGET', 'key', '$.path'] + ); + }); + }); + + testUtils.testWithClient('client.json.forget', async client => { + assert.equal( + await client.json.forget('key'), + 0 + ); + }, GLOBAL.SERVERS.OPEN); +});
diff --git a/packages/json/lib/commands/FORGET.ts b/packages/json/lib/commands/FORGET.ts new file mode 100644 index 00000000000..ea924c74247 --- /dev/null +++ b/packages/json/lib/commands/FORGET.ts @@ -0,0 +1,28 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, NumberReply, Command } from '@redis/client/dist/lib/RESP/types'; + +export interface JsonForgetOptions { + path?: RedisArgument; +} + +export default { + IS_READ_ONLY: false, + /** + * Alias for JSON.DEL - Deletes a value from a JSON document. + * Returns the number of paths deleted (0 or 1), or null if the key does not exist. + * + * @param parser - The Redis command parser + * @param key - The key containing the JSON document + * @param options - Optional parameters + * @param options.path - Path to the value to delete + */ + parseCommand(parser: CommandParser, key: RedisArgument, options?: JsonForgetOptions) { + parser.push('JSON.FORGET'); + parser.pushKey(key); + + if (options?.path !== undefined) { + parser.push(options.path); + } + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/json/lib/commands/GET.spec.ts b/packages/json/lib/commands/GET.spec.ts new file mode 100644 index 00000000000..6b4f44871cb --- /dev/null +++ b/packages/json/lib/commands/GET.spec.ts @@ -0,0 +1,44 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import GET from './GET'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('JSON.GET', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(GET, 'key'), + ['JSON.GET', 'key'] + ); + }); + + describe('with path', () => { + it('string', () => { + assert.deepEqual( + parseArgs(GET, 'key', { path: '$' }), + ['JSON.GET', 'key', '$'] + ); + }); + + it('array', () => { + assert.deepEqual( + parseArgs(GET, 'key', { path: ['$.1', '$.2'] 
}), + ['JSON.GET', 'key', '$.1', '$.2'] + ); + }); + }); + }); + + testUtils.testWithClient('client.json.get', async client => { + assert.equal( + await client.json.get('key'), + null + ); + + await client.json.set('noderedis:users:1', '$', { name: 'Alice', age: 32, }) + const res = await client.json.get('noderedis:users:1'); + assert.equal(typeof res, 'object') + assert.deepEqual(res, { name: 'Alice', age: 32, }) + + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/json/lib/commands/GET.ts b/packages/json/lib/commands/GET.ts new file mode 100644 index 00000000000..14ec46a53af --- /dev/null +++ b/packages/json/lib/commands/GET.ts @@ -0,0 +1,32 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, Command } from '@redis/client/dist/lib/RESP/types'; +import { RedisVariadicArgument, transformRedisJsonNullReply } from '@redis/client/dist/lib/commands/generic-transformers'; + +export interface JsonGetOptions { + path?: RedisVariadicArgument; +} + +export default { + IS_READ_ONLY: false, + /** + * Gets values from a JSON document. + * Returns the value at the specified path, or null if the key or path does not exist. 
+ * + * @param parser - The Redis command parser + * @param key - The key containing the JSON document + * @param options - Optional parameters + * @param options.path - Path(s) to the value(s) to retrieve + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + options?: JsonGetOptions + ) { + parser.push('JSON.GET'); + parser.pushKey(key); + if (options?.path !== undefined) { + parser.pushVariadic(options.path); + } + }, + transformReply: transformRedisJsonNullReply +} as const satisfies Command; \ No newline at end of file diff --git a/packages/json/lib/commands/MERGE.spec.ts b/packages/json/lib/commands/MERGE.spec.ts new file mode 100644 index 00000000000..30a092035c5 --- /dev/null +++ b/packages/json/lib/commands/MERGE.spec.ts @@ -0,0 +1,20 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import MERGE from './MERGE'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('JSON.MERGE', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(MERGE, 'key', '$', 'value'), + ['JSON.MERGE', 'key', '$', '"value"'] + ); + }); + + testUtils.testWithClient('client.json.merge', async client => { + assert.equal( + await client.json.merge('key', '$', 'value'), + 'OK' + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/json/lib/commands/MERGE.ts b/packages/json/lib/commands/MERGE.ts new file mode 100644 index 00000000000..1a4b54fc4ba --- /dev/null +++ b/packages/json/lib/commands/MERGE.ts @@ -0,0 +1,22 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { SimpleStringReply, Command, RedisArgument } from '@redis/client/dist/lib/RESP/types'; +import { RedisJSON, transformRedisJsonArgument } from '@redis/client/dist/lib/commands/generic-transformers'; + +export default { + IS_READ_ONLY: false, + /** + * Merges a given JSON value into a JSON document. + * Returns OK on success, or null if the key does not exist. 
+ * + * @param parser - The Redis command parser + * @param key - The key containing the JSON document + * @param path - Path to merge into + * @param value - JSON value to merge + */ + parseCommand(parser: CommandParser, key: RedisArgument, path: RedisArgument, value: RedisJSON) { + parser.push('JSON.MERGE'); + parser.pushKey(key); + parser.push(path, transformRedisJsonArgument(value)); + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/json/lib/commands/MGET.spec.ts b/packages/json/lib/commands/MGET.spec.ts new file mode 100644 index 00000000000..2d8efafde71 --- /dev/null +++ b/packages/json/lib/commands/MGET.spec.ts @@ -0,0 +1,20 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import MGET from './MGET'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('JSON.MGET', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(MGET, ['1', '2'], '$'), + ['JSON.MGET', '1', '2', '$'] + ); + }); + + testUtils.testWithClient('client.json.mGet', async client => { + assert.deepEqual( + await client.json.mGet(['1', '2'], '$'), + [null, null] + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/json/lib/commands/MGET.ts b/packages/json/lib/commands/MGET.ts new file mode 100644 index 00000000000..01a7783b922 --- /dev/null +++ b/packages/json/lib/commands/MGET.ts @@ -0,0 +1,23 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, UnwrapReply, ArrayReply, NullReply, BlobStringReply, Command } from '@redis/client/dist/lib/RESP/types'; +import { transformRedisJsonNullReply } from '@redis/client/dist/lib/commands/generic-transformers'; + +export default { + IS_READ_ONLY: true, + /** + * Gets values at a specific path from multiple JSON documents. + * Returns an array of values at the path from each key, null for missing keys/paths. 
+ * + * @param parser - The Redis command parser + * @param keys - Array of keys containing JSON documents + * @param path - Path to retrieve from each document + */ + parseCommand(parser: CommandParser, keys: Array<RedisArgument>, path: RedisArgument) { + parser.push('JSON.MGET'); + parser.pushKeys(keys); + parser.push(path); + }, + transformReply(reply: UnwrapReply<ArrayReply<NullReply | BlobStringReply>>) { + return reply.map(json => transformRedisJsonNullReply(json)) + } +} as const satisfies Command; diff --git a/packages/json/lib/commands/MSET.spec.ts b/packages/json/lib/commands/MSET.spec.ts new file mode 100644 index 00000000000..38e8b077e81 --- /dev/null +++ b/packages/json/lib/commands/MSET.spec.ts @@ -0,0 +1,36 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import MSET from './MSET'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('JSON.MSET', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(MSET, [{ + key: '1', + path: '$', + value: 1 + }, { + key: '2', + path: '$', + value: '2' + }]), + ['JSON.MSET', '1', '$', '1', '2', '$', '"2"'] + ); + }); + + testUtils.testWithClient('client.json.mSet', async client => { + assert.equal( + await client.json.mSet([{ + key: '1', + path: '$', + value: 1 + }, { + key: '2', + path: '$', + value: '2' + }]), + 'OK' + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/json/lib/commands/MSET.ts b/packages/json/lib/commands/MSET.ts new file mode 100644 index 00000000000..81e8d4c6bdf --- /dev/null +++ b/packages/json/lib/commands/MSET.ts @@ -0,0 +1,32 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, SimpleStringReply, Command } from '@redis/client/dist/lib/RESP/types'; +import { RedisJSON, transformRedisJsonArgument } from '@redis/client/dist/lib/commands/generic-transformers'; + +export interface JsonMSetItem { + key: RedisArgument; + path: RedisArgument; + value: RedisJSON; +} + +export default { + 
IS_READ_ONLY: false, + /** + * Sets multiple JSON values in multiple documents. + * Returns OK on success. + * + * @param parser - The Redis command parser + * @param items - Array of objects containing key, path, and value to set + * @param items[].key - The key containing the JSON document + * @param items[].path - Path in the document to set + * @param items[].value - JSON value to set at the path + */ + parseCommand(parser: CommandParser, items: Array<JsonMSetItem>) { + parser.push('JSON.MSET'); + + for (let i = 0; i < items.length; i++) { + parser.pushKey(items[i].key); + parser.push(items[i].path, transformRedisJsonArgument(items[i].value)); + } + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/json/lib/commands/NUMINCRBY.spec.ts b/packages/json/lib/commands/NUMINCRBY.spec.ts new file mode 100644 index 00000000000..b438069e80f --- /dev/null +++ b/packages/json/lib/commands/NUMINCRBY.spec.ts @@ -0,0 +1,22 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import NUMINCRBY from './NUMINCRBY'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('JSON.NUMINCRBY', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(NUMINCRBY, 'key', '$', 1), + ['JSON.NUMINCRBY', 'key', '$', '1'] + ); + }); + + testUtils.testWithClient('client.json.numIncrBy', async client => { + const [, reply] = await Promise.all([ + client.json.set('key', '$', 0), + client.json.numIncrBy('key', '$', 1) + ]); + + assert.deepEqual(reply, [1]); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/json/lib/commands/NUMINCRBY.ts b/packages/json/lib/commands/NUMINCRBY.ts new file mode 100644 index 00000000000..d8884385354 --- /dev/null +++ b/packages/json/lib/commands/NUMINCRBY.ts @@ -0,0 +1,26 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, ArrayReply, NumberReply,
DoubleReply, NullReply, BlobStringReply, UnwrapReply, Command } from '@redis/client/dist/lib/RESP/types'; + +export default { + IS_READ_ONLY: false, + /** + * Increments a numeric value stored in a JSON document by a given number. + * Returns the value after increment, or null if the key/path doesn't exist or value is not numeric. + * + * @param parser - The Redis command parser + * @param key - The key containing the JSON document + * @param path - Path to the numeric value + * @param by - Amount to increment by + */ + parseCommand(parser: CommandParser, key: RedisArgument, path: RedisArgument, by: number) { + parser.push('JSON.NUMINCRBY'); + parser.pushKey(key); + parser.push(path, by.toString()); + }, + transformReply: { + 2: (reply: UnwrapReply<BlobStringReply>) => { + return JSON.parse(reply.toString()) as number | Array<number | null>; + }, + 3: undefined as unknown as () => ArrayReply<NumberReply | DoubleReply | NullReply> + } +} as const satisfies Command; diff --git a/packages/json/lib/commands/NUMMULTBY.spec.ts b/packages/json/lib/commands/NUMMULTBY.spec.ts new file mode 100644 index 00000000000..24ee932e952 --- /dev/null +++ b/packages/json/lib/commands/NUMMULTBY.spec.ts @@ -0,0 +1,22 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import NUMMULTBY from './NUMMULTBY'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('JSON.NUMMULTBY', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(NUMMULTBY, 'key', '$', 2), + ['JSON.NUMMULTBY', 'key', '$', '2'] + ); + }); + + testUtils.testWithClient('client.json.numMultBy', async client => { + const [, reply] = await Promise.all([ + client.json.set('key', '$', 1), + client.json.numMultBy('key', '$', 2) + ]); + + assert.deepEqual(reply, [2]); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/json/lib/commands/NUMMULTBY.ts b/packages/json/lib/commands/NUMMULTBY.ts new file mode 100644 index 00000000000..22b25640d4b --- /dev/null +++ 
b/packages/json/lib/commands/NUMMULTBY.ts @@ -0,0 +1,22 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, Command } from '@redis/client/dist/lib/RESP/types'; +import NUMINCRBY from './NUMINCRBY'; + +export default { + IS_READ_ONLY: false, + /** + * Multiplies a numeric value stored in a JSON document by a given number. + * Returns the value after multiplication, or null if the key/path doesn't exist or value is not numeric. + * + * @param parser - The Redis command parser + * @param key - The key containing the JSON document + * @param path - Path to the numeric value + * @param by - Amount to multiply by + */ + parseCommand(parser: CommandParser, key: RedisArgument, path: RedisArgument, by: number) { + parser.push('JSON.NUMMULTBY'); + parser.pushKey(key); + parser.push(path, by.toString()); + }, + transformReply: NUMINCRBY.transformReply +} as const satisfies Command; diff --git a/packages/json/lib/commands/OBJKEYS.spec.ts b/packages/json/lib/commands/OBJKEYS.spec.ts new file mode 100644 index 00000000000..0d2176248e4 --- /dev/null +++ b/packages/json/lib/commands/OBJKEYS.spec.ts @@ -0,0 +1,31 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import OBJKEYS from './OBJKEYS'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('JSON.OBJKEYS', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(OBJKEYS, 'key'), + ['JSON.OBJKEYS', 'key'] + ); + }); + + it('with path', () => { + assert.deepEqual( + parseArgs(OBJKEYS, 'key', { + path: '$' + }), + ['JSON.OBJKEYS', 'key', '$'] + ); + }); + }); + + testUtils.testWithClient('client.json.objKeys', async client => { + assert.equal( + await client.json.objKeys('key'), + null + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/json/lib/commands/OBJKEYS.ts b/packages/json/lib/commands/OBJKEYS.ts new file mode 100644 index 
00000000000..9f8abdc42c4 --- /dev/null +++ b/packages/json/lib/commands/OBJKEYS.ts @@ -0,0 +1,27 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, ArrayReply, BlobStringReply, NullReply, Command } from '@redis/client/dist/lib/RESP/types'; + +export interface JsonObjKeysOptions { + path?: RedisArgument; +} + +export default { + IS_READ_ONLY: false, + /** + * Returns the keys in the object stored in a JSON document. + * Returns array of keys, array of arrays for multiple paths, or null if path doesn't exist. + * + * @param parser - The Redis command parser + * @param key - The key containing the JSON document + * @param options - Optional parameters + * @param options.path - Path to the object to examine + */ + parseCommand(parser: CommandParser, key: RedisArgument, options?: JsonObjKeysOptions) { + parser.push('JSON.OBJKEYS'); + parser.pushKey(key); + if (options?.path !== undefined) { + parser.push(options.path); + } + }, + transformReply: undefined as unknown as () => ArrayReply | ArrayReply | NullReply> +} as const satisfies Command; diff --git a/packages/json/lib/commands/OBJLEN.spec.ts b/packages/json/lib/commands/OBJLEN.spec.ts new file mode 100644 index 00000000000..a5664a4d6bc --- /dev/null +++ b/packages/json/lib/commands/OBJLEN.spec.ts @@ -0,0 +1,31 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import OBJLEN from './OBJLEN'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('JSON.OBJLEN', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(OBJLEN, 'key'), + ['JSON.OBJLEN', 'key'] + ); + }); + + it('with path', () => { + assert.deepEqual( + parseArgs(OBJLEN, 'key', { + path: '$' + }), + ['JSON.OBJLEN', 'key', '$'] + ); + }); + }); + + testUtils.testWithClient('client.json.objLen', async client => { + assert.equal( + await client.json.objLen('key'), + null + ); 
+ }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/json/lib/commands/OBJLEN.ts b/packages/json/lib/commands/OBJLEN.ts new file mode 100644 index 00000000000..0cee11770c1 --- /dev/null +++ b/packages/json/lib/commands/OBJLEN.ts @@ -0,0 +1,27 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, NumberReply, ArrayReply, NullReply, Command } from '@redis/client/dist/lib/RESP/types'; + +export interface JsonObjLenOptions { + path?: RedisArgument; +} + +export default { + IS_READ_ONLY: true, + /** + * Returns the number of keys in the object stored in a JSON document. + * Returns length of object, array of lengths for multiple paths, or null if path doesn't exist. + * + * @param parser - The Redis command parser + * @param key - The key containing the JSON document + * @param options - Optional parameters + * @param options.path - Path to the object to examine + */ + parseCommand(parser: CommandParser, key: RedisArgument, options?: JsonObjLenOptions) { + parser.push('JSON.OBJLEN'); + parser.pushKey(key); + if (options?.path !== undefined) { + parser.push(options.path); + } + }, + transformReply: undefined as unknown as () => NumberReply | ArrayReply<NumberReply | NullReply> +} as const satisfies Command; diff --git a/packages/json/lib/commands/RESP.spec.ts b/packages/json/lib/commands/RESP.spec.ts new file mode 100644 index 00000000000..2cb3e9e15c3 --- /dev/null +++ b/packages/json/lib/commands/RESP.spec.ts @@ -0,0 +1,29 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import RESP from './RESP'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('RESP', () => { + describe('transformArguments', () => { + it('without path', () => { + assert.deepEqual( + parseArgs(RESP, 'key'), + ['JSON.RESP', 'key'] + ); + }); + + it('with path', () => { + assert.deepEqual( + parseArgs(RESP, 'key', '$'), + ['JSON.RESP', 'key', '$'] + ); + }); + }); + + // 
testUtils.testWithClient('client.json.resp', async client => { + // assert.deepEqual( + // await client.json.resp('key', '$'), + // [null] + // ); + // }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/json/lib/commands/RESP.ts b/packages/json/lib/commands/RESP.ts new file mode 100644 index 00000000000..79cc2dac8d1 --- /dev/null +++ b/packages/json/lib/commands/RESP.ts @@ -0,0 +1,24 @@ +import { CommandParser } from "@redis/client/dist/lib/client/parser"; +import { Command, RedisArgument } from "@redis/client/dist/lib/RESP/types"; + +type RESPReply = Array<string | number | RESPReply>; + +export default { + IS_READ_ONLY: true, + /** + * Returns the JSON value at the specified path in RESP (Redis Serialization Protocol) format. + * Returns the value in RESP form, useful for language-independent processing. + * + * @param parser - The Redis command parser + * @param key - The key containing the JSON document + * @param path - Optional path to the value in the document + */ + parseCommand(parser: CommandParser, key: RedisArgument, path?: string) { + parser.push('JSON.RESP'); + parser.pushKey(key); + if (path !== undefined) { + parser.push(path); + } + }, + transformReply: undefined as unknown as () => RESPReply + } as const satisfies Command; \ No newline at end of file diff --git a/packages/json/lib/commands/SET.spec.ts b/packages/json/lib/commands/SET.spec.ts new file mode 100644 index 00000000000..7bd927f08e4 --- /dev/null +++ b/packages/json/lib/commands/SET.spec.ts @@ -0,0 +1,36 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import SET from './SET'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('JSON.SET', () => { + describe('transformArguments', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(SET, 'key', '$', 'json'), + ['JSON.SET', 'key', '$', '"json"'] + ); + }); + + it('NX', () => { + assert.deepEqual( + parseArgs(SET, 'key', '$', 'json', { NX: true }), + 
['JSON.SET', 'key', '$', '"json"', 'NX'] + ); + }); + + it('XX', () => { + assert.deepEqual( + parseArgs(SET, 'key', '$', 'json', { XX: true }), + ['JSON.SET', 'key', '$', '"json"', 'XX'] + ); + }); + }); + + testUtils.testWithClient('client.json.set', async client => { + assert.equal( + await client.json.set('key', '$', 'json'), + 'OK' + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/json/lib/commands/SET.ts b/packages/json/lib/commands/SET.ts new file mode 100644 index 00000000000..9ab680b4898 --- /dev/null +++ b/packages/json/lib/commands/SET.ts @@ -0,0 +1,52 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, SimpleStringReply, NullReply, Command } from '@redis/client/dist/lib/RESP/types'; +import { RedisJSON, transformRedisJsonArgument } from '@redis/client/dist/lib/commands/generic-transformers'; + +export interface JsonSetOptions { + condition?: 'NX' | 'XX'; + /** + * @deprecated Use `{ condition: 'NX' }` instead. + */ + NX?: boolean; + /** + * @deprecated Use `{ condition: 'XX' }` instead. + */ + XX?: boolean; +} + +export default { + IS_READ_ONLY: false, + /** + * Sets a JSON value at a specific path in a JSON document. + * Returns OK on success, or null if condition (NX/XX) is not met. 
+ * + * @param parser - The Redis command parser + * @param key - The key containing the JSON document + * @param path - Path in the document to set + * @param json - JSON value to set at the path + * @param options - Optional parameters + * @param options.condition - Set condition: NX (only if doesn't exist) or XX (only if exists) + * @deprecated options.NX - Use options.condition instead + * @deprecated options.XX - Use options.condition instead + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + path: RedisArgument, + json: RedisJSON, + options?: JsonSetOptions + ) { + parser.push('JSON.SET'); + parser.pushKey(key); + parser.push(path, transformRedisJsonArgument(json)); + + if (options?.condition) { + parser.push(options?.condition); + } else if (options?.NX) { + parser.push('NX'); + } else if (options?.XX) { + parser.push('XX'); + } + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> | NullReply +} as const satisfies Command; diff --git a/packages/json/lib/commands/STRAPPEND.spec.ts b/packages/json/lib/commands/STRAPPEND.spec.ts new file mode 100644 index 00000000000..ebd539130e1 --- /dev/null +++ b/packages/json/lib/commands/STRAPPEND.spec.ts @@ -0,0 +1,33 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import STRAPPEND from './STRAPPEND'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('JSON.STRAPPEND', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(STRAPPEND, 'key', 'append'), + ['JSON.STRAPPEND', 'key', '"append"'] + ); + }); + + it('with path', () => { + assert.deepEqual( + parseArgs(STRAPPEND, 'key', 'append', { + path: '$' + }), + ['JSON.STRAPPEND', 'key', '$', '"append"'] + ); + }); + }); + + testUtils.testWithClient('client.json.strAppend', async client => { + const [, reply] = await Promise.all([ + client.json.set('key', '$', ''), + 
client.json.strAppend('key', 'append') + ]); + + assert.deepEqual(reply, 6); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/json/lib/commands/STRAPPEND.ts b/packages/json/lib/commands/STRAPPEND.ts new file mode 100644 index 00000000000..b3115f684c3 --- /dev/null +++ b/packages/json/lib/commands/STRAPPEND.ts @@ -0,0 +1,32 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, Command, NullReply, NumberReply, ArrayReply } from '@redis/client/dist/lib/RESP/types'; +import { transformRedisJsonArgument } from '@redis/client/dist/lib/commands/generic-transformers'; + +export interface JsonStrAppendOptions { + path?: RedisArgument; +} + +export default { + IS_READ_ONLY: false, + /** + * Appends a string to a string value stored in a JSON document. + * Returns new string length after append, or null if the path doesn't exist or value is not a string. + * + * @param parser - The Redis command parser + * @param key - The key containing the JSON document + * @param append - String to append + * @param options - Optional parameters + * @param options.path - Path to the string value + */ + parseCommand(parser: CommandParser, key: RedisArgument, append: string, options?: JsonStrAppendOptions) { + parser.push('JSON.STRAPPEND'); + parser.pushKey(key); + + if (options?.path !== undefined) { + parser.push(options.path); + } + + parser.push(transformRedisJsonArgument(append)); + }, + transformReply: undefined as unknown as () => NumberReply | ArrayReply<NumberReply | NullReply> +} as const satisfies Command; diff --git a/packages/json/lib/commands/STRLEN.spec.ts b/packages/json/lib/commands/STRLEN.spec.ts new file mode 100644 index 00000000000..b6881b5bd52 --- /dev/null +++ b/packages/json/lib/commands/STRLEN.spec.ts @@ -0,0 +1,33 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import STRLEN from './STRLEN'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + 
+describe('JSON.STRLEN', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(STRLEN, 'key'), + ['JSON.STRLEN', 'key'] + ); + }); + + it('with path', () => { + assert.deepEqual( + parseArgs(STRLEN, 'key', { + path: '$' + }), + ['JSON.STRLEN', 'key', '$'] + ); + }); + }); + + testUtils.testWithClient('client.json.strLen', async client => { + const [, reply] = await Promise.all([ + client.json.set('key', '$', ''), + client.json.strLen('key') + ]); + + assert.deepEqual(reply, 0); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/json/lib/commands/STRLEN.ts b/packages/json/lib/commands/STRLEN.ts new file mode 100644 index 00000000000..ca1923d7c6a --- /dev/null +++ b/packages/json/lib/commands/STRLEN.ts @@ -0,0 +1,28 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, ArrayReply, NumberReply, NullReply, Command } from '@redis/client/dist/lib/RESP/types'; + +export interface JsonStrLenOptions { + path?: RedisArgument; +} + +export default { + IS_READ_ONLY: true, + /** + * Returns the length of a string value stored in a JSON document. + * Returns string length, array of lengths for multiple paths, or null if path doesn't exist. 
+ * + * @param parser - The Redis command parser + * @param key - The key containing the JSON document + * @param options - Optional parameters + * @param options.path - Path to the string value + */ + parseCommand(parser: CommandParser, key: RedisArgument, options?: JsonStrLenOptions) { + parser.push('JSON.STRLEN'); + parser.pushKey(key); + + if (options?.path) { + parser.push(options.path); + } + }, + transformReply: undefined as unknown as () => NumberReply | ArrayReply<NumberReply | NullReply> +} as const satisfies Command; diff --git a/packages/json/lib/commands/TOGGLE.spec.ts b/packages/json/lib/commands/TOGGLE.spec.ts new file mode 100644 index 00000000000..173c7708f4a --- /dev/null +++ b/packages/json/lib/commands/TOGGLE.spec.ts @@ -0,0 +1,22 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import TOGGLE from './TOGGLE'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('JSON.TOGGLE', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(TOGGLE, 'key', '$'), + ['JSON.TOGGLE', 'key', '$'] + ); + }); + + testUtils.testWithClient('client.json.toggle', async client => { + const [, reply] = await Promise.all([ + client.json.set('key', '$', true), + client.json.toggle('key', '$') + ]); + + assert.deepEqual(reply, [0]); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/json/lib/commands/TOGGLE.ts b/packages/json/lib/commands/TOGGLE.ts new file mode 100644 index 00000000000..2d93d391164 --- /dev/null +++ b/packages/json/lib/commands/TOGGLE.ts @@ -0,0 +1,20 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, ArrayReply, NumberReply, NullReply, Command, } from '@redis/client/dist/lib/RESP/types'; + +export default { + IS_READ_ONLY: false, + /** + * Toggles a boolean value stored in a JSON document. + * Returns 1 if value was toggled to true, 0 if toggled to false, or null if path doesn't exist. 
+ * + * @param parser - The Redis command parser + * @param key - The key containing the JSON document + * @param path - Path to the boolean value + */ + parseCommand(parser: CommandParser, key: RedisArgument, path: RedisArgument) { + parser.push('JSON.TOGGLE'); + parser.pushKey(key); + parser.push(path); + }, + transformReply: undefined as unknown as () => NumberReply | NullReply | ArrayReply<NumberReply | NullReply> +} as const satisfies Command; diff --git a/packages/json/lib/commands/TYPE.spec.ts b/packages/json/lib/commands/TYPE.spec.ts new file mode 100644 index 00000000000..1b6ad109816 --- /dev/null +++ b/packages/json/lib/commands/TYPE.spec.ts @@ -0,0 +1,31 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import TYPE from './TYPE'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('JSON.TYPE', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(TYPE, 'key'), + ['JSON.TYPE', 'key'] + ); + }); + + it('with path', () => { + assert.deepEqual( + parseArgs(TYPE, 'key', { + path: '$' + }), + ['JSON.TYPE', 'key', '$'] + ); + }); + }); + + testUtils.testWithClient('client.json.type', async client => { + assert.equal( + await client.json.type('key'), + null + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/json/lib/commands/TYPE.ts b/packages/json/lib/commands/TYPE.ts new file mode 100644 index 00000000000..758335a7361 --- /dev/null +++ b/packages/json/lib/commands/TYPE.ts @@ -0,0 +1,34 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { NullReply, BlobStringReply, ArrayReply, Command, RedisArgument, UnwrapReply } from '@redis/client/dist/lib/RESP/types'; + +export interface JsonTypeOptions { + path?: RedisArgument; +} + +export default { + IS_READ_ONLY: true, + /** + * Returns the type of JSON value at a specific path in a JSON document. 
+ * Returns the type as a string, array of types for multiple paths, or null if path doesn't exist. + * + * @param parser - The Redis command parser + * @param key - The key containing the JSON document + * @param options - Optional parameters + * @param options.path - Path to examine + */ + parseCommand(parser: CommandParser, key: RedisArgument, options?: JsonTypeOptions) { + parser.push('JSON.TYPE'); + parser.pushKey(key); + + if (options?.path) { + parser.push(options.path); + } + }, + transformReply: { + 2: undefined as unknown as () => NullReply | BlobStringReply | ArrayReply<BlobStringReply | NullReply>, + // TODO: RESP3 wraps the response in another array, but only returns 1 + 3: (reply: UnwrapReply<ArrayReply<ArrayReply<BlobStringReply | NullReply>>>) => { + return reply[0]; + } + }, +} as const satisfies Command; diff --git a/packages/json/lib/commands/index.ts b/packages/json/lib/commands/index.ts new file mode 100644 index 00000000000..0e29bdd648d --- /dev/null +++ b/packages/json/lib/commands/index.ts @@ -0,0 +1,86 @@ +import ARRAPPEND from './ARRAPPEND'; +import ARRINDEX from './ARRINDEX'; +import ARRINSERT from './ARRINSERT'; +import ARRLEN from './ARRLEN'; +import ARRPOP from './ARRPOP'; +import ARRTRIM from './ARRTRIM'; +import CLEAR from './CLEAR'; +import DEBUG_MEMORY from './DEBUG_MEMORY'; +import DEL from './DEL'; +import FORGET from './FORGET'; +import GET from './GET'; +import MERGE from './MERGE'; +import MGET from './MGET'; +import MSET from './MSET'; +import NUMINCRBY from './NUMINCRBY'; +import NUMMULTBY from './NUMMULTBY'; +import OBJKEYS from './OBJKEYS'; +import OBJLEN from './OBJLEN'; +// import RESP from './RESP'; +import SET from './SET'; +import STRAPPEND from './STRAPPEND'; +import STRLEN from './STRLEN'; +import TOGGLE from './TOGGLE'; +import TYPE from './TYPE'; + +// Re-export helper types and functions from client package +export type { RedisJSON } from '@redis/client/dist/lib/commands/generic-transformers'; +export { transformRedisJsonArgument, transformRedisJsonReply, transformRedisJsonNullReply } from 
'@redis/client/dist/lib/commands/generic-transformers'; + +export default { + ARRAPPEND, + arrAppend: ARRAPPEND, + ARRINDEX, + arrIndex: ARRINDEX, + ARRINSERT, + arrInsert: ARRINSERT, + ARRLEN, + arrLen: ARRLEN, + ARRPOP, + arrPop: ARRPOP, + ARRTRIM, + arrTrim: ARRTRIM, + CLEAR, + clear: CLEAR, + DEBUG_MEMORY, + debugMemory: DEBUG_MEMORY, + DEL, + del: DEL, + FORGET, + forget: FORGET, + GET, + get: GET, + MERGE, + merge: MERGE, + MGET, + mGet: MGET, + MSET, + mSet: MSET, + NUMINCRBY, + numIncrBy: NUMINCRBY, + /** + * @deprecated since JSON version 2.0 + */ + NUMMULTBY, + /** + * @deprecated since JSON version 2.0 + */ + numMultBy: NUMMULTBY, + OBJKEYS, + objKeys: OBJKEYS, + OBJLEN, + objLen: OBJLEN, + // RESP, + // resp: RESP, + SET, + set: SET, + STRAPPEND, + strAppend: STRAPPEND, + STRLEN, + strLen: STRLEN, + TOGGLE, + toggle: TOGGLE, + TYPE, + type: TYPE +}; + diff --git a/packages/json/lib/index.ts b/packages/json/lib/index.ts new file mode 100644 index 00000000000..1993f9ef42d --- /dev/null +++ b/packages/json/lib/index.ts @@ -0,0 +1,2 @@ +export { default } from './commands'; +export type { RedisJSON } from './commands'; diff --git a/packages/json/lib/test-utils.ts b/packages/json/lib/test-utils.ts new file mode 100644 index 00000000000..41e743b7132 --- /dev/null +++ b/packages/json/lib/test-utils.ts @@ -0,0 +1,21 @@ +import TestUtils from '@redis/test-utils'; +import RedisJSON from '.'; + +export default TestUtils.createFromConfig({ + dockerImageName: 'redislabs/client-libs-test', + dockerImageVersionArgument: 'redis-version', + defaultDockerVersion: '8.4-M01-pre' +}); + +export const GLOBAL = { + SERVERS: { + OPEN: { + serverArguments: [], + clientOptions: { + modules: { + json: RedisJSON + } + } + } + } +}; diff --git a/packages/json/package.json b/packages/json/package.json new file mode 100644 index 00000000000..826e65bc846 --- /dev/null +++ b/packages/json/package.json @@ -0,0 +1,36 @@ +{ + "name": "@redis/json", + "version": "5.9.0-beta.2", + 
"license": "MIT", + "main": "./dist/lib/index.js", + "types": "./dist/lib/index.d.ts", + "files": [ + "dist/", + "!dist/tsconfig.tsbuildinfo" + ], + "scripts": { + "test": "nyc -r text-summary -r lcov mocha -r tsx './lib/**/*.spec.ts'", + "release": "release-it" + }, + "peerDependencies": { + "@redis/client": "^5.9.0-beta.2" + }, + "devDependencies": { + "@redis/test-utils": "*" + }, + "engines": { + "node": ">= 18" + }, + "repository": { + "type": "git", + "url": "git://github.com/redis/node-redis.git" + }, + "bugs": { + "url": "https://github.com/redis/node-redis/issues" + }, + "homepage": "https://github.com/redis/node-redis/tree/master/packages/json", + "keywords": [ + "redis", + "RedisJSON" + ] +} diff --git a/packages/json/tsconfig.json b/packages/json/tsconfig.json new file mode 100644 index 00000000000..367b3ef16c4 --- /dev/null +++ b/packages/json/tsconfig.json @@ -0,0 +1,20 @@ +{ + "extends": "../../tsconfig.base.json", + "compilerOptions": { + "outDir": "./dist" + }, + "include": [ + "./lib/**/*.ts" + ], + "exclude": [ + "./lib/test-utils.ts", + "./lib/**/*.spec.ts" + ], + "typedocOptions": { + "entryPoints": [ + "./lib" + ], + "entryPointStrategy": "expand", + "out": "../../documentation/json" + } +} diff --git a/packages/redis/.release-it.json b/packages/redis/.release-it.json new file mode 100644 index 00000000000..af127d79ae0 --- /dev/null +++ b/packages/redis/.release-it.json @@ -0,0 +1,27 @@ +{ + "npm": { + "publish": true, + "publishArgs": ["--access", "public"] + }, + "git": { + "tagName": "redis@${version}", + "tagMatch": "redis@*", + "commitMessage": "Release ${tagName}", + "tagAnnotation": "Release ${tagName}", + "commitArgs": "--all" + }, + "plugins": { + "@release-it/bumper": { + "out": { + "file": "package.json", + "path": [ + "dependencies.@redis/client", + "dependencies.@redis/bloom", + "dependencies.@redis/json", + "dependencies.@redis/search", + "dependencies.@redis/time-series" + ] + } + } + } 
+} diff --git a/packages/redis/README.md b/packages/redis/README.md new file mode 100644 index 00000000000..ab6b4707e6f --- /dev/null +++ b/packages/redis/README.md @@ -0,0 +1,330 @@ +# Node-Redis + +[![Tests](https://img.shields.io/github/actions/workflow/status/redis/node-redis/tests.yml?branch=master)](https://github.com/redis/node-redis/actions/workflows/tests.yml) +[![Coverage](https://codecov.io/gh/redis/node-redis/branch/master/graph/badge.svg?token=xcfqHhJC37)](https://codecov.io/gh/redis/node-redis) +[![License](https://img.shields.io/github/license/redis/node-redis.svg)](https://github.com/redis/node-redis/blob/master/LICENSE) + +[![Discord](https://img.shields.io/discord/697882427875393627.svg?style=social&logo=discord)](https://discord.gg/redis) +[![Twitch](https://img.shields.io/twitch/status/redisinc?style=social)](https://www.twitch.tv/redisinc) +[![YouTube](https://img.shields.io/youtube/channel/views/UCD78lHSwYqMlyetR0_P4Vig?style=social)](https://www.youtube.com/redisinc) +[![Twitter](https://img.shields.io/twitter/follow/redisinc?style=social)](https://twitter.com/redisinc) + +node-redis is a modern, high performance [Redis](https://redis.io) client for Node.js. + +## How do I Redis? + +[Learn for free at Redis University](https://university.redis.com/) + +[Build faster with the Redis Launchpad](https://launchpad.redis.com/) + +[Try the Redis Cloud](https://redis.com/try-free/) + +[Dive in developer tutorials](https://developer.redis.com/) + +[Join the Redis community](https://redis.com/community/) + +[Work at Redis](https://redis.com/company/careers/jobs/) + +## Installation + +Start a redis via docker: + +```bash +docker run -p 6379:6379 -d redis:8.0-rc1 +``` + +To install node-redis, simply: + +```bash +npm install redis +``` +> "redis" is the "whole in one" package that includes all the other packages. If you only need a subset of the commands, +> you can install the individual packages. See the list below. 
+ +## Packages + +| Name | Description | +| ---------------------------------------------- | ------------------------------------------------------------------------------------------- | +| [`redis`](https://github.com/redis/node-redis/tree/master/packages/redis) | The client with all the ["redis-stack"](https://github.com/redis-stack/redis-stack) modules | +| [`@redis/client`](https://github.com/redis/node-redis/tree/master/packages/client) | The base clients (i.e `RedisClient`, `RedisCluster`, etc.) | +| [`@redis/bloom`](https://github.com/redis/node-redis/tree/master/packages/bloom) | [Redis Bloom](https://redis.io/docs/data-types/probabilistic/) commands | +| [`@redis/json`](https://github.com/redis/node-redis/tree/master/packages/json) | [Redis JSON](https://redis.io/docs/data-types/json/) commands | +| [`@redis/search`](https://github.com/redis/node-redis/tree/master/packages/search) | [RediSearch](https://redis.io/docs/interact/search-and-query/) commands | +| [`@redis/time-series`](https://github.com/redis/node-redis/tree/master/packages/time-series) | [Redis Time-Series](https://redis.io/docs/data-types/timeseries/) commands | +| [`@redis/entraid`](https://github.com/redis/node-redis/tree/master/packages/entraid) | Secure token-based authentication for Redis clients using Microsoft Entra ID | + +> Looking for a high-level library to handle object mapping? +> See [redis-om-node](https://github.com/redis/redis-om-node)! + + +## Usage + +### Basic Example + +```typescript +import { createClient } from "redis"; + +const client = await createClient() + .on("error", (err) => console.log("Redis Client Error", err)) + .connect(); + +await client.set("key", "value"); +const value = await client.get("key"); +client.destroy(); +``` + +The above code connects to localhost on port 6379. 
To connect to a different host or port, use a connection string in +the format `redis[s]://[[username][:password]@][host][:port][/db-number]`: + +```typescript +createClient({ + url: "redis://alice:foobared@awesome.redis.server:6380", +}); +``` + +You can also use discrete parameters, UNIX sockets, and even TLS to connect. Details can be found in +the [client configuration guide](https://github.com/redis/node-redis/blob/master/docs/client-configuration.md). + +To check if the client is connected and ready to send commands, use `client.isReady` which returns a boolean. +`client.isOpen` is also available. This returns `true` when the client's underlying socket is open, and `false` when it +isn't (for example when the client is still connecting or reconnecting after a network error). + +### Redis Commands + +There is built-in support for all of the [out-of-the-box Redis commands](https://redis.io/commands). They are exposed +using the raw Redis command names (`HSET`, `HGETALL`, etc.) 
and a friendlier camel-cased version (`hSet`, `hGetAll`, +etc.): + +```typescript +// raw Redis commands +await client.HSET("key", "field", "value"); +await client.HGETALL("key"); + +// friendly JavaScript commands +await client.hSet("key", "field", "value"); +await client.hGetAll("key"); +``` + +Modifiers to commands are specified using a JavaScript object: + +```typescript +await client.set("key", "value", { + EX: 10, + NX: true, +}); +``` + +Replies will be transformed into useful data structures: + +```typescript +await client.hGetAll("key"); // { field1: 'value1', field2: 'value2' } +await client.hVals("key"); // ['value1', 'value2'] +``` + +`Buffer`s are supported as well: + +```typescript +const client = createClient().withTypeMapping({ + [RESP_TYPES.BLOB_STRING]: Buffer +}); + +await client.hSet("key", "field", Buffer.from("value")); // 'OK' +await client.hGet("key", "field"); // { field: } + +``` + +### Unsupported Redis Commands + +If you want to run commands and/or use arguments that Node Redis doesn't know about (yet!) use `.sendCommand()`: + +```typescript +await client.sendCommand(["SET", "key", "value", "NX"]); // 'OK' + +await client.sendCommand(["HGETALL", "key"]); // ['key1', 'field1', 'key2', 'field2'] +``` + +### Transactions (Multi/Exec) + +Start a [transaction](https://redis.io/topics/transactions) by calling `.multi()`, then chaining your commands. When +you're done, call `.exec()` and you'll get an array back with your results: + +```typescript +await client.set("another-key", "another-value"); + +const [setKeyReply, otherKeyValue] = await client + .multi() + .set("key", "value") + .get("another-key") + .exec(); // ['OK', 'another-value'] +``` + +You can also [watch](https://redis.io/topics/transactions#optimistic-locking-using-check-and-set) keys by calling +`.watch()`. Your transaction will abort if any of the watched keys change. 
+ + +### Blocking Commands + +In v4, `RedisClient` had the ability to create a pool of connections using an "Isolation Pool" on top of the "main" +connection. However, there was no way to use the pool without a "main" connection: + +```javascript +const client = await createClient() + .on("error", (err) => console.error(err)) + .connect(); + +await client.ping(client.commandOptions({ isolated: true })); +``` + +In v5 we've extracted this pool logic into its own classβ€”`RedisClientPool`: + +```javascript +const pool = await createClientPool() + .on("error", (err) => console.error(err)) + .connect(); + +await pool.ping(); +``` + + +### Pub/Sub + +See the [Pub/Sub overview](https://github.com/redis/node-redis/blob/master/docs/pub-sub.md). + +### Scan Iterator + +[`SCAN`](https://redis.io/commands/scan) results can be looped over +using [async iterators](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Symbol/asyncIterator): + +```typescript +for await (const key of client.scanIterator()) { + // use the key! + await client.get(key); +} +``` + +This works with `HSCAN`, `SSCAN`, and `ZSCAN` too: + +```typescript +for await (const { field, value } of client.hScanIterator("hash")) { +} +for await (const member of client.sScanIterator("set")) { +} +for await (const { score, value } of client.zScanIterator("sorted-set")) { +} +``` + +You can override the default options by providing a configuration object: + +```typescript +client.scanIterator({ + TYPE: "string", // `SCAN` only + MATCH: "patter*", + COUNT: 100, +}); +``` + +### Disconnecting + +The `QUIT` command has been deprecated in Redis 7.2 and should now also be considered deprecated in Node-Redis. Instead +of sending a `QUIT` command to the server, the client can simply close the network connection. + +`client.QUIT/quit()` is replaced by `client.close()`. and, to avoid confusion, `client.disconnect()` has been renamed to +`client.destroy()`. 
+ +```typescript +client.destroy(); +``` +### Client Side Caching + +Node Redis v5 adds support for [Client Side Caching](https://redis.io/docs/manual/client-side-caching/), which enables clients to cache query results locally. The Redis server will notify the client when cached results are no longer valid. + +```typescript +// Enable client side caching with RESP3 +const client = createClient({ + RESP: 3, + clientSideCache: { + ttl: 0, // Time-to-live (0 = no expiration) + maxEntries: 0, // Maximum entries (0 = unlimited) + evictPolicy: "LRU" // Eviction policy: "LRU" or "FIFO" + } +}); +``` + +See the [V5 documentation](https://github.com/redis/node-redis/blob/master/docs/v5.md#client-side-caching) for more details and advanced usage. + +### Auto-Pipelining + +Node Redis will automatically pipeline requests that are made during the same "tick". + +```typescript +client.set("Tm9kZSBSZWRpcw==", "users:1"); +client.sAdd("users:1:tokens", "Tm9kZSBSZWRpcw=="); +``` + +Of course, if you don't do something with your Promises you're certain to +get [unhandled Promise exceptions](https://nodejs.org/api/process.html#process_event_unhandledrejection). To take +advantage of auto-pipelining and handle your Promises, use `Promise.all()`. + +```typescript +await Promise.all([ + client.set("Tm9kZSBSZWRpcw==", "users:1"), + client.sAdd("users:1:tokens", "Tm9kZSBSZWRpcw=="), +]); +``` + +### Programmability + +See the [Programmability overview](https://github.com/redis/node-redis/blob/master/docs/programmability.md). + +### Clustering + +Check out the [Clustering Guide](https://github.com/redis/node-redis/blob/master/docs/clustering.md) when using Node Redis to connect to a Redis Cluster. 
+ +### Events + +The Node Redis client class is a Node.js EventEmitter and it emits an event each time the network status changes: + +| Name | When | Listener arguments | +| ----------------------- | ---------------------------------------------------------------------------------- | --------------------------------------------------------- | +| `connect` | Initiating a connection to the server | _No arguments_ | +| `ready` | Client is ready to use | _No arguments_ | +| `end` | Connection has been closed (via `.disconnect()`) | _No arguments_ | +| `error` | An error has occurred—usually a network issue such as "Socket closed unexpectedly" | `(error: Error)` | +| `reconnecting` | Client is trying to reconnect to the server | _No arguments_ | +| `sharded-channel-moved` | See [here](https://github.com/redis/node-redis/blob/master/docs/pub-sub.md#sharded-channel-moved-event) | See [here](https://github.com/redis/node-redis/blob/master/docs/pub-sub.md#sharded-channel-moved-event) | + +> :warning: You **MUST** listen to `error` events. If a client doesn't have at least one `error` listener registered and +> an `error` occurs, that error will be thrown and the Node.js process will exit. See the [`EventEmitter` docs](https://nodejs.org/api/events.html#events_error_events) for more details. + +> The client will not emit [any other events](https://github.com/redis/node-redis/blob/master/docs/v3-to-v4.md#all-the-removed-events) beyond those listed above. + +## Supported Redis versions + +Node Redis is supported with the following versions of Redis: + +| Version | Supported | +| ------- | ------------------ | +| 8.0.z | :heavy_check_mark: | +| 7.4.z | :heavy_check_mark: | +| 7.2.z | :heavy_check_mark: | +| < 7.2 | :x: | + +> Node Redis should work with older versions of Redis, but it is not fully tested and we cannot offer support. 
+ +## Migration + +- [From V3 to V4](https://github.com/redis/node-redis/blob/master/docs/v3-to-v4.md) +- [From V4 to V5](https://github.com/redis/node-redis/blob/master/docs/v4-to-v5.md) +- [V5](https://github.com/redis/node-redis/blob/master/docs/v5.md) + +## Contributing + +If you'd like to contribute, check out the [contributing guide](https://github.com/redis/node-redis/blob/master/CONTRIBUTING.md). + +Thank you to all the people who already contributed to Node Redis! + +[![Contributors](https://contrib.rocks/image?repo=redis/node-redis)](https://github.com/redis/node-redis/graphs/contributors) + +## License + +This repository is licensed under the "MIT" license. See [LICENSE](https://github.com/redis/node-redis/blob/master/LICENSE). diff --git a/packages/redis/index.ts b/packages/redis/index.ts new file mode 100644 index 00000000000..f4341bbf48f --- /dev/null +++ b/packages/redis/index.ts @@ -0,0 +1,133 @@ +import { + RedisModules, + RedisFunctions, + RedisScripts, + RespVersions, + TypeMapping, + createClient as genericCreateClient, + RedisClientOptions, + RedisClientType as GenericRedisClientType, + createCluster as genericCreateCluster, + RedisClusterOptions, + RedisClusterType as genericRedisClusterType, + RedisSentinelOptions, + RedisSentinelType as genericRedisSentinelType, + createSentinel as genericCreateSentinel, + createClientPool as genericCreateClientPool, + RedisClientPoolType as GenericRedisClientPoolType, + RedisPoolOptions, +} from '@redis/client'; +import RedisBloomModules from '@redis/bloom'; +import RedisJSON from '@redis/json'; +import RediSearch from '@redis/search'; +import RedisTimeSeries from '@redis/time-series'; + +export * from '@redis/client'; +export * from '@redis/bloom'; +export * from '@redis/json'; +export * from '@redis/search'; +export * from '@redis/time-series'; + +const modules = { + ...RedisBloomModules, + json: RedisJSON, + ft: 
RediSearch, + ts: RedisTimeSeries +}; + +export type RedisDefaultModules = typeof modules; + +export type RedisClientType< + M extends RedisModules = RedisDefaultModules, + F extends RedisFunctions = {}, + S extends RedisScripts = {}, + RESP extends RespVersions = 2, + TYPE_MAPPING extends TypeMapping = {} +> = GenericRedisClientType; + +export function createClient< + M extends RedisModules, + F extends RedisFunctions, + S extends RedisScripts, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping +>( + options?: RedisClientOptions +): GenericRedisClientType { + return genericCreateClient({ + ...options, + modules: { + ...modules, + ...(options?.modules as M) + } + }); +} + +export function createClientPool< + M extends RedisModules, + F extends RedisFunctions, + S extends RedisScripts, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping = {} +>(clientOptions?: Omit, "clientSideCache">, + options?: Partial): GenericRedisClientPoolType { + return genericCreateClientPool({ + ...clientOptions, + modules: { + ...modules, + ...(clientOptions?.modules as M) + } + }, options); +} + +export type RedisClusterType< + M extends RedisModules = RedisDefaultModules, + F extends RedisFunctions = {}, + S extends RedisScripts = {}, + RESP extends RespVersions = 2, + TYPE_MAPPING extends TypeMapping = {} +> = genericRedisClusterType; + +export function createCluster< + M extends RedisModules, + F extends RedisFunctions, + S extends RedisScripts, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping +>( + options: RedisClusterOptions +): RedisClusterType { + return genericCreateCluster({ + ...options, + modules: { + ...modules, + ...(options?.modules as M) + } + }); +} + +export type RedisSentinelType< + M extends RedisModules = RedisDefaultModules, + F extends RedisFunctions = {}, + S extends RedisScripts = {}, + RESP extends RespVersions = 2, + TYPE_MAPPING extends TypeMapping = {} +> = genericRedisSentinelType; + +export function createSentinel< + 
M extends RedisModules, + F extends RedisFunctions, + S extends RedisScripts, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping +>( + options: RedisSentinelOptions +): RedisSentinelType { + return genericCreateSentinel({ + ...options, + modules: { + ...modules, + ...(options?.modules as M) + } + }); +} diff --git a/packages/redis/package.json b/packages/redis/package.json new file mode 100644 index 00000000000..c2939f2c5ac --- /dev/null +++ b/packages/redis/package.json @@ -0,0 +1,36 @@ +{ + "name": "redis", + "description": "A modern, high performance Redis client", + "version": "5.9.0-beta.2", + "license": "MIT", + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "files": [ + "dist/", + "!dist/tsconfig.tsbuildinfo" + ], + "scripts": { + "release": "release-it" + }, + "dependencies": { + "@redis/bloom": "5.9.0-beta.2", + "@redis/client": "5.9.0-beta.2", + "@redis/json": "5.9.0-beta.2", + "@redis/search": "5.9.0-beta.2", + "@redis/time-series": "5.9.0-beta.2" + }, + "engines": { + "node": ">= 18" + }, + "repository": { + "type": "git", + "url": "git://github.com/redis/node-redis.git" + }, + "bugs": { + "url": "https://github.com/redis/node-redis/issues" + }, + "homepage": "https://github.com/redis/node-redis", + "keywords": [ + "redis" + ] +} diff --git a/packages/redis/tsconfig.json b/packages/redis/tsconfig.json new file mode 100644 index 00000000000..50da0ba733a --- /dev/null +++ b/packages/redis/tsconfig.json @@ -0,0 +1,9 @@ +{ + "extends": "../../tsconfig.base.json", + "compilerOptions": { + "outDir": "./dist" + }, + "include": [ + "./index.ts" + ] +} diff --git a/packages/search/.nycrc.json b/packages/search/.nycrc.json new file mode 100644 index 00000000000..367a89ad32c --- /dev/null +++ b/packages/search/.nycrc.json @@ -0,0 +1,4 @@ +{ + "extends": "@istanbuljs/nyc-config-typescript", + "exclude": ["dist", "**/*.spec.ts", "lib/test-utils.ts"] +} diff --git a/packages/search/.release-it.json 
b/packages/search/.release-it.json new file mode 100644 index 00000000000..85d55c087de --- /dev/null +++ b/packages/search/.release-it.json @@ -0,0 +1,22 @@ +{ + "npm": { + "publish": true, + "publishArgs": ["--access", "public"] + }, + "git": { + "tagName": "search@${version}", + "tagMatch": "search@*", + "commitMessage": "Release ${tagName}", + "tagAnnotation": "Release ${tagName}", + "commitArgs": "--all" + }, + "plugins": { + "@release-it/bumper": { + "out": { + "file": "package.json", + "path": ["peerDependencies.@redis/client"], + "versionPrefix": "^" + } + } + } +} diff --git a/packages/search/README.md b/packages/search/README.md new file mode 100644 index 00000000000..37597e6580a --- /dev/null +++ b/packages/search/README.md @@ -0,0 +1,121 @@ +# @redis/search + +This package provides support for the [RediSearch](https://redis.io/docs/interact/search-and-query/) module, which adds indexing and querying support for data stored in Redis Hashes or as JSON documents with the [RedisJSON](https://redis.io/docs/data-types/json/) module. + +Should be used with [`redis`/`@redis/client`](https://github.com/redis/node-redis). + +:warning: To use these extra commands, your Redis server must have the RediSearch module installed. To index and query JSON documents, you'll also need to add the RedisJSON module. + +## Usage + +For complete examples, see [`search-hashes.js`](https://github.com/redis/node-redis/blob/master/examples/search-hashes.js) and [`search-json.js`](https://github.com/redis/node-redis/blob/master/examples/search-json.js) in the [examples folder](https://github.com/redis/node-redis/tree/master/examples). + +### Indexing and Querying Data in Redis Hashes + +#### Creating an Index + +Before we can perform any searches, we need to tell RediSearch how to index our data, and which Redis keys to find that data in. The [FT.CREATE](https://redis.io/commands/ft.create) command creates a RediSearch index. 
Here's how to use it to create an index we'll call `idx:animals` where we want to index hashes containing `name`, `species` and `age` fields, and whose key names in Redis begin with the prefix `noderedis:animals`: + +```javascript +await client.ft.create('idx:animals', { + name: { + type: SCHEMA_FIELD_TYPE.TEXT, + SORTABLE: true + }, + species: SCHEMA_FIELD_TYPE.TAG, + age: SCHEMA_FIELD_TYPE.NUMERIC +}, { + ON: 'HASH', + PREFIX: 'noderedis:animals' +}); +``` + +See the [`FT.CREATE` documentation](https://redis.io/commands/ft.create/#description) for information about the different field types and additional options. + +#### Querying the Index + +Once we've created an index, and added some data to Redis hashes whose keys begin with the prefix `noderedis:animals`, we can start writing some search queries. RediSearch supports a rich query syntax for full-text search, faceted search, aggregation and more. Check out the [`FT.SEARCH` documentation](https://redis.io/commands/ft.search) and the [query syntax reference](https://redis.io/docs/interact/search-and-query/query) for more information. + +Let's write a query to find all the animals where the `species` field has the value `dog`: + +```javascript +const results = await client.ft.search('idx:animals', '@species:{dog}'); +``` + +`results` looks like this: + +```javascript +{ + total: 2, + documents: [ + { + id: 'noderedis:animals:4', + value: { + name: 'Fido', + species: 'dog', + age: '7' + } + }, + { + id: 'noderedis:animals:3', + value: { + name: 'Rover', + species: 'dog', + age: '9' + } + } + ] +} +``` + +### Indexing and Querying Data with RedisJSON + +RediSearch can also index and query JSON documents stored in Redis using the RedisJSON module. The approach is similar to that for indexing and searching data in hashes, but we can now use JSON Path like syntax and the data no longer has to be flat name/value pairs - it can contain nested objects and arrays. 
+ +#### Creating an Index + +As before, we create an index with the `FT.CREATE` command, this time specifying we want to index JSON documents that look like this: + +```javascript +{ + name: 'Alice', + age: 32, + coins: 100 +} +``` + +Each document represents a user in some system, and users have name, age and coins properties. + +One way we might choose to index these documents is as follows: + +```javascript +await client.ft.create('idx:users', { + '$.name': { + type: SCHEMA_FIELD_TYPE.TEXT, + SORTABLE: 'UNF' + }, + '$.age': { + type: SCHEMA_FIELD_TYPE.NUMERIC, + AS: 'age' + }, + '$.coins': { + type: SCHEMA_FIELD_TYPE.NUMERIC, + AS: 'coins' + } +}, { + ON: 'JSON', + PREFIX: 'noderedis:users' +}); +``` + +Note that we're using JSON Path to specify where the fields to index are in our JSON documents, and the `AS` clause to define a name/alias for each field. We'll use these when writing queries. + +#### Querying the Index + +Now we have an index and some data stored as JSON documents in Redis (see the [JSON package documentation](https://github.com/redis/node-redis/tree/master/packages/json) for examples of how to store JSON), we can write some queries... + +We'll use the [RediSearch query language](https://redis.io/docs/interact/search-and-query/query) and [`FT.SEARCH`](https://redis.io/commands/ft.search) command. 
Here's a query to find users under the age of 30: + +```javascript +await client.ft.search('idx:users', '@age:[0 30]'); +``` diff --git a/packages/search/lib/commands/AGGREGATE.spec.ts b/packages/search/lib/commands/AGGREGATE.spec.ts new file mode 100644 index 00000000000..420911c5600 --- /dev/null +++ b/packages/search/lib/commands/AGGREGATE.spec.ts @@ -0,0 +1,522 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import AGGREGATE from './AGGREGATE'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; +import { DEFAULT_DIALECT } from '../dialect/default'; + +describe('AGGREGATE', () => { + describe('transformArguments', () => { + it('without options', () => { + assert.deepEqual( + parseArgs(AGGREGATE, 'index', '*'), + ['FT.AGGREGATE', 'index', '*', 'DIALECT', DEFAULT_DIALECT] + ); + }); + + it('with VERBATIM', () => { + assert.deepEqual( + parseArgs(AGGREGATE, 'index', '*', { + VERBATIM: true + }), + ['FT.AGGREGATE', 'index', '*', 'VERBATIM', 'DIALECT', DEFAULT_DIALECT] + ); + }); + + it('with ADDSCORES', () => { + assert.deepEqual( + parseArgs(AGGREGATE, 'index', '*', { ADDSCORES: true }), + ['FT.AGGREGATE', 'index', '*', 'ADDSCORES', 'DIALECT', DEFAULT_DIALECT] + ); + }); + + describe('with LOAD', () => { + describe('single', () => { + describe('without alias', () => { + it('string', () => { + assert.deepEqual( + parseArgs(AGGREGATE, 'index', '*', { + LOAD: '@property' + }), + ['FT.AGGREGATE', 'index', '*', 'LOAD', '1', '@property', 'DIALECT', DEFAULT_DIALECT] + ); + }); + + it('{ identifier: string }', () => { + assert.deepEqual( + parseArgs(AGGREGATE, 'index', '*', { + LOAD: { + identifier: '@property' + } + }), + ['FT.AGGREGATE', 'index', '*', 'LOAD', '1', '@property', 'DIALECT', DEFAULT_DIALECT] + ); + }); + }); + + it('with alias', () => { + assert.deepEqual( + parseArgs(AGGREGATE, 'index', '*', { + LOAD: { + identifier: '@property', + AS: 'alias' + } + }), + ['FT.AGGREGATE', 
'index', '*', 'LOAD', '3', '@property', 'AS', 'alias', 'DIALECT', DEFAULT_DIALECT] + ); + }); + }); + + it('multiple', () => { + assert.deepEqual( + parseArgs(AGGREGATE, 'index', '*', { + LOAD: ['@1', '@2'] + }), + ['FT.AGGREGATE', 'index', '*', 'LOAD', '2', '@1', '@2', 'DIALECT', DEFAULT_DIALECT] + ); + }); + }); + + describe('with STEPS', () => { + describe('GROUPBY', () => { + describe('COUNT', () => { + describe('without properties', () => { + it('without alias', () => { + assert.deepEqual( + parseArgs(AGGREGATE, 'index', '*', { + STEPS: [{ + type: 'GROUPBY', + REDUCE: { + type: 'COUNT' + } + }] + }), + ['FT.AGGREGATE', 'index', '*', 'GROUPBY', '0', 'REDUCE', 'COUNT', '0', 'DIALECT', DEFAULT_DIALECT] + ); + }); + + it('with alias', () => { + assert.deepEqual( + parseArgs(AGGREGATE, 'index', '*', { + STEPS: [{ + type: 'GROUPBY', + REDUCE: { + type: 'COUNT', + AS: 'count' + } + }] + }), + ['FT.AGGREGATE', 'index', '*', 'GROUPBY', '0', 'REDUCE', 'COUNT', '0', 'AS', 'count', 'DIALECT', DEFAULT_DIALECT] + ); + }); + }); + + describe('with properties', () => { + it('single', () => { + assert.deepEqual( + parseArgs(AGGREGATE, 'index', '*', { + STEPS: [{ + type: 'GROUPBY', + properties: '@property', + REDUCE: { + type: 'COUNT' + } + }] + }), + ['FT.AGGREGATE', 'index', '*', 'GROUPBY', '1', '@property', 'REDUCE', 'COUNT', '0', 'DIALECT', DEFAULT_DIALECT] + ); + }); + + it('multiple', () => { + assert.deepEqual( + parseArgs(AGGREGATE, 'index', '*', { + STEPS: [{ + type: 'GROUPBY', + properties: ['@1', '@2'], + REDUCE: { + type: 'COUNT' + } + }] + }), + ['FT.AGGREGATE', 'index', '*', 'GROUPBY', '2', '@1', '@2', 'REDUCE', 'COUNT', '0', 'DIALECT', DEFAULT_DIALECT] + ); + }); + }); + }); + + it('COUNT_DISTINCT', () => { + assert.deepEqual( + parseArgs(AGGREGATE, 'index', '*', { + STEPS: [{ + type: 'GROUPBY', + REDUCE: { + type: 'COUNT_DISTINCT', + property: '@property' + } + }] + }), + ['FT.AGGREGATE', 'index', '*', 'GROUPBY', '0', 'REDUCE', 'COUNT_DISTINCT', '1', 
'@property', 'DIALECT', DEFAULT_DIALECT] + ); + }); + + it('COUNT_DISTINCTISH', () => { + assert.deepEqual( + parseArgs(AGGREGATE, 'index', '*', { + STEPS: [{ + type: 'GROUPBY', + REDUCE: { + type: 'COUNT_DISTINCTISH', + property: '@property' + } + }] + }), + ['FT.AGGREGATE', 'index', '*', 'GROUPBY', '0', 'REDUCE', 'COUNT_DISTINCTISH', '1', '@property', 'DIALECT', DEFAULT_DIALECT] + ); + }); + + it('SUM', () => { + assert.deepEqual( + parseArgs(AGGREGATE, 'index', '*', { + STEPS: [{ + type: 'GROUPBY', + REDUCE: { + type: 'SUM', + property: '@property' + } + }] + }), + ['FT.AGGREGATE', 'index', '*', 'GROUPBY', '0', 'REDUCE', 'SUM', '1', '@property', 'DIALECT', DEFAULT_DIALECT] + ); + }); + + it('MIN', () => { + assert.deepEqual( + parseArgs(AGGREGATE, 'index', '*', { + STEPS: [{ + type: 'GROUPBY', + REDUCE: { + type: 'MIN', + property: '@property' + } + }] + }), + ['FT.AGGREGATE', 'index', '*', 'GROUPBY', '0', 'REDUCE', 'MIN', '1', '@property', 'DIALECT', DEFAULT_DIALECT] + ); + }); + + it('MAX', () => { + assert.deepEqual( + parseArgs(AGGREGATE, 'index', '*', { + STEPS: [{ + type: 'GROUPBY', + REDUCE: { + type: 'MAX', + property: '@property' + } + }] + }), + ['FT.AGGREGATE', 'index', '*', 'GROUPBY', '0', 'REDUCE', 'MAX', '1', '@property', 'DIALECT', DEFAULT_DIALECT] + ); + }); + + it('AVG', () => { + assert.deepEqual( + parseArgs(AGGREGATE, 'index', '*', { + STEPS: [{ + type: 'GROUPBY', + REDUCE: { + type: 'AVG', + property: '@property' + } + }] + }), + ['FT.AGGREGATE', 'index', '*', 'GROUPBY', '0', 'REDUCE', 'AVG', '1', '@property', 'DIALECT', DEFAULT_DIALECT] + ); + }); + it('STDDEV', () => { + assert.deepEqual( + parseArgs(AGGREGATE, 'index', '*', { + STEPS: [{ + type: 'GROUPBY', + REDUCE: { + type: 'STDDEV', + property: '@property' + } + }] + }), + ['FT.AGGREGATE', 'index', '*', 'GROUPBY', '0', 'REDUCE', 'STDDEV', '1', '@property', 'DIALECT', DEFAULT_DIALECT] + ); + }); + + it('QUANTILE', () => { + assert.deepEqual( + parseArgs(AGGREGATE, 'index', '*', { + 
STEPS: [{ + type: 'GROUPBY', + REDUCE: { + type: 'QUANTILE', + property: '@property', + quantile: 0.5 + } + }] + }), + ['FT.AGGREGATE', 'index', '*', 'GROUPBY', '0', 'REDUCE', 'QUANTILE', '2', '@property', '0.5', 'DIALECT', DEFAULT_DIALECT] + ); + }); + + it('TOLIST', () => { + assert.deepEqual( + parseArgs(AGGREGATE, 'index', '*', { + STEPS: [{ + type: 'GROUPBY', + REDUCE: { + type: 'TOLIST', + property: '@property' + } + }] + }), + ['FT.AGGREGATE', 'index', '*', 'GROUPBY', '0', 'REDUCE', 'TOLIST', '1', '@property', 'DIALECT', DEFAULT_DIALECT] + ); + }); + + describe('FIRST_VALUE', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(AGGREGATE, 'index', '*', { + STEPS: [{ + type: 'GROUPBY', + REDUCE: { + type: 'FIRST_VALUE', + property: '@property' + } + }] + }), + ['FT.AGGREGATE', 'index', '*', 'GROUPBY', '0', 'REDUCE', 'FIRST_VALUE', '1', '@property', 'DIALECT', DEFAULT_DIALECT] + ); + }); + + describe('with BY', () => { + describe('without direction', () => { + it('string', () => { + assert.deepEqual( + parseArgs(AGGREGATE, 'index', '*', { + STEPS: [{ + type: 'GROUPBY', + REDUCE: { + type: 'FIRST_VALUE', + property: '@property', + BY: '@by' + } + }] + }), + ['FT.AGGREGATE', 'index', '*', 'GROUPBY', '0', 'REDUCE', 'FIRST_VALUE', '3', '@property', 'BY', '@by', 'DIALECT', DEFAULT_DIALECT] + ); + }); + + + it('{ property: string }', () => { + assert.deepEqual( + parseArgs(AGGREGATE, 'index', '*', { + STEPS: [{ + type: 'GROUPBY', + REDUCE: { + type: 'FIRST_VALUE', + property: '@property', + BY: { + property: '@by' + } + } + }] + }), + ['FT.AGGREGATE', 'index', '*', 'GROUPBY', '0', 'REDUCE', 'FIRST_VALUE', '3', '@property', 'BY', '@by', 'DIALECT', DEFAULT_DIALECT] + ); + }); + }); + + it('with direction', () => { + assert.deepEqual( + parseArgs(AGGREGATE, 'index', '*', { + STEPS: [{ + type: 'GROUPBY', + REDUCE: { + type: 'FIRST_VALUE', + property: '@property', + BY: { + property: '@by', + direction: 'ASC' + } + } + }] + }), + ['FT.AGGREGATE', 'index', '*', 
'GROUPBY', '0', 'REDUCE', 'FIRST_VALUE', '4', '@property', 'BY', '@by', 'ASC', 'DIALECT', DEFAULT_DIALECT] + ); + }); + }); + }); + + it('RANDOM_SAMPLE', () => { + assert.deepEqual( + parseArgs(AGGREGATE, 'index', '*', { + STEPS: [{ + type: 'GROUPBY', + REDUCE: { + type: 'RANDOM_SAMPLE', + property: '@property', + sampleSize: 1 + } + }] + }), + ['FT.AGGREGATE', 'index', '*', 'GROUPBY', '0', 'REDUCE', 'RANDOM_SAMPLE', '2', '@property', '1', 'DIALECT', DEFAULT_DIALECT] + ); + }); + }); + + describe('SORTBY', () => { + it('string', () => { + assert.deepEqual( + parseArgs(AGGREGATE, 'index', '*', { + STEPS: [{ + type: 'SORTBY', + BY: '@by' + }] + }), + ['FT.AGGREGATE', 'index', '*', 'SORTBY', '1', '@by', 'DIALECT', DEFAULT_DIALECT] + ); + }); + + it('Array', () => { + assert.deepEqual( + parseArgs(AGGREGATE, 'index', '*', { + STEPS: [{ + type: 'SORTBY', + BY: ['@1', '@2'] + }] + }), + ['FT.AGGREGATE', 'index', '*', 'SORTBY', '2', '@1', '@2', 'DIALECT', DEFAULT_DIALECT] + ); + }); + + it('with MAX', () => { + assert.deepEqual( + parseArgs(AGGREGATE, 'index', '*', { + STEPS: [{ + type: 'SORTBY', + BY: '@by', + MAX: 1 + }] + }), + ['FT.AGGREGATE', 'index', '*', 'SORTBY', '3', '@by', 'MAX', '1', 'DIALECT', DEFAULT_DIALECT] + ); + }); + }); + + describe('APPLY', () => { + assert.deepEqual( + parseArgs(AGGREGATE, 'index', '*', { + STEPS: [{ + type: 'APPLY', + expression: '@field + 1', + AS: 'as' + }] + }), + ['FT.AGGREGATE', 'index', '*', 'APPLY', '@field + 1', 'AS', 'as', 'DIALECT', DEFAULT_DIALECT] + ); + }); + + describe('LIMIT', () => { + assert.deepEqual( + parseArgs(AGGREGATE, 'index', '*', { + STEPS: [{ + type: 'LIMIT', + from: 0, + size: 1 + }] + }), + ['FT.AGGREGATE', 'index', '*', 'LIMIT', '0', '1', 'DIALECT', DEFAULT_DIALECT] + ); + }); + + describe('FILTER', () => { + assert.deepEqual( + parseArgs(AGGREGATE, 'index', '*', { + STEPS: [{ + type: 'FILTER', + expression: '@field != ""' + }] + }), + ['FT.AGGREGATE', 'index', '*', 'FILTER', '@field != ""', 'DIALECT', 
DEFAULT_DIALECT] + ); + }); + }); + + it('with PARAMS', () => { + assert.deepEqual( + parseArgs(AGGREGATE, 'index', '*', { + PARAMS: { + param: 'value' + } + }), + ['FT.AGGREGATE', 'index', '*', 'PARAMS', '2', 'param', 'value', 'DIALECT', DEFAULT_DIALECT] + ); + }); + + it('with DIALECT', () => { + assert.deepEqual( + parseArgs(AGGREGATE, 'index', '*', { + DIALECT: 1 + }), + ['FT.AGGREGATE', 'index', '*', 'DIALECT', '1'] + ); + }); + + it('with TIMEOUT', () => { + assert.deepEqual( + parseArgs(AGGREGATE, 'index', '*', { TIMEOUT: 10 }), + ['FT.AGGREGATE', 'index', '*', 'TIMEOUT', '10', 'DIALECT', DEFAULT_DIALECT] + ); + }); + }); + + testUtils.testWithClient('client.ft.aggregate', async client => { + await Promise.all([ + client.ft.create('index', { + field: 'NUMERIC' + }), + client.hSet('1', 'field', '1'), + client.hSet('2', 'field', '2') + ]); + + assert.deepEqual( + await client.ft.aggregate('index', '*', { + STEPS: [{ + type: 'GROUPBY', + REDUCE: [{ + type: 'SUM', + property: '@field', + AS: 'sum' + }, { + type: 'AVG', + property: '@field', + AS: 'avg' + }] + }] + }), + { + total: 1, + results: [ + Object.create(null, { + sum: { + value: '3', + configurable: true, + enumerable: true + }, + avg: { + value: '1.5', + configurable: true, + enumerable: true + } + }) + ] + } + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/search/lib/commands/AGGREGATE.ts b/packages/search/lib/commands/AGGREGATE.ts new file mode 100644 index 00000000000..9e8fb7810d6 --- /dev/null +++ b/packages/search/lib/commands/AGGREGATE.ts @@ -0,0 +1,348 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { ArrayReply, BlobStringReply, Command, MapReply, NumberReply, RedisArgument, ReplyUnion, TypeMapping, UnwrapReply } from '@redis/client/dist/lib/RESP/types'; +import { RediSearchProperty } from './CREATE'; +import { FtSearchParams, parseParamsArgument } from './SEARCH'; +import { transformTuplesReply } from 
'@redis/client/dist/lib/commands/generic-transformers'; +import { DEFAULT_DIALECT } from '../dialect/default'; + +type LoadField = RediSearchProperty | { + identifier: RediSearchProperty; + AS?: RedisArgument; +} + +export const FT_AGGREGATE_STEPS = { + GROUPBY: 'GROUPBY', + SORTBY: 'SORTBY', + APPLY: 'APPLY', + LIMIT: 'LIMIT', + FILTER: 'FILTER' +} as const; + +type FT_AGGREGATE_STEPS = typeof FT_AGGREGATE_STEPS; + +export type FtAggregateStep = FT_AGGREGATE_STEPS[keyof FT_AGGREGATE_STEPS]; + +interface AggregateStep { + type: T; +} + +export const FT_AGGREGATE_GROUP_BY_REDUCERS = { + COUNT: 'COUNT', + COUNT_DISTINCT: 'COUNT_DISTINCT', + COUNT_DISTINCTISH: 'COUNT_DISTINCTISH', + SUM: 'SUM', + MIN: 'MIN', + MAX: 'MAX', + AVG: 'AVG', + STDDEV: 'STDDEV', + QUANTILE: 'QUANTILE', + TOLIST: 'TOLIST', + FIRST_VALUE: 'FIRST_VALUE', + RANDOM_SAMPLE: 'RANDOM_SAMPLE' +} as const; + +type FT_AGGREGATE_GROUP_BY_REDUCERS = typeof FT_AGGREGATE_GROUP_BY_REDUCERS; + +export type FtAggregateGroupByReducer = FT_AGGREGATE_GROUP_BY_REDUCERS[keyof FT_AGGREGATE_GROUP_BY_REDUCERS]; + +interface GroupByReducer { + type: T; + AS?: RedisArgument; +} + +interface GroupByReducerWithProperty extends GroupByReducer { + property: RediSearchProperty; +} + +type CountReducer = GroupByReducer; + +type CountDistinctReducer = GroupByReducerWithProperty; + +type CountDistinctishReducer = GroupByReducerWithProperty; + +type SumReducer = GroupByReducerWithProperty; + +type MinReducer = GroupByReducerWithProperty; + +type MaxReducer = GroupByReducerWithProperty; + +type AvgReducer = GroupByReducerWithProperty; + +type StdDevReducer = GroupByReducerWithProperty; + +interface QuantileReducer extends GroupByReducerWithProperty { + quantile: number; +} + +type ToListReducer = GroupByReducerWithProperty; + +interface FirstValueReducer extends GroupByReducerWithProperty { + BY?: RediSearchProperty | { + property: RediSearchProperty; + direction?: 'ASC' | 'DESC'; + }; +} + +interface RandomSampleReducer extends 
GroupByReducerWithProperty { + sampleSize: number; +} + +type GroupByReducers = CountReducer | CountDistinctReducer | CountDistinctishReducer | SumReducer | MinReducer | MaxReducer | AvgReducer | StdDevReducer | QuantileReducer | ToListReducer | FirstValueReducer | RandomSampleReducer; + +interface GroupByStep extends AggregateStep { + properties?: RediSearchProperty | Array; + REDUCE: GroupByReducers | Array; +} + +type SortByProperty = RedisArgument | { + BY: RediSearchProperty; + DIRECTION?: 'ASC' | 'DESC'; +}; + +interface SortStep extends AggregateStep { + BY: SortByProperty | Array; + MAX?: number; +} + +interface ApplyStep extends AggregateStep { + expression: RedisArgument; + AS: RedisArgument; +} + +interface LimitStep extends AggregateStep { + from: number; + size: number; +} + +interface FilterStep extends AggregateStep { + expression: RedisArgument; +} + +export interface FtAggregateOptions { + VERBATIM?: boolean; + ADDSCORES?: boolean; + LOAD?: LoadField | Array; + TIMEOUT?: number; + STEPS?: Array; + PARAMS?: FtSearchParams; + DIALECT?: number; +} + +export type AggregateRawReply = [ + total: UnwrapReply, + ...results: UnwrapReply>> +]; + +export interface AggregateReply { + total: number; + results: Array>; +}; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: false, + /** + * Performs an aggregation query on a RediSearch index. 
+ * @param parser - The command parser + * @param index - The index name to query + * @param query - The text query to use as filter, use * to indicate no filtering + * @param options - Optional parameters for aggregation: + * - VERBATIM: disable stemming in query evaluation + * - LOAD: specify fields to load from documents + * - STEPS: sequence of aggregation steps (GROUPBY, SORTBY, APPLY, LIMIT, FILTER) + * - PARAMS: bind parameters for query evaluation + * - TIMEOUT: maximum time to run the query + */ + parseCommand(parser: CommandParser, index: RedisArgument, query: RedisArgument, options?: FtAggregateOptions) { + parser.push('FT.AGGREGATE', index, query); + + return parseAggregateOptions(parser, options); + }, + transformReply: { + 2: (rawReply: AggregateRawReply, preserve?: any, typeMapping?: TypeMapping): AggregateReply => { + const results: Array> = []; + for (let i = 1; i < rawReply.length; i++) { + results.push( + transformTuplesReply(rawReply[i] as ArrayReply, preserve, typeMapping) + ); + } + + return { + // https://redis.io/docs/latest/commands/ft.aggregate/#return + // FT.AGGREGATE returns an array reply where each row is an array reply and represents a single aggregate result. + // The integer reply at position 1 does not represent a valid value. 
+ total: Number(rawReply[0]), + results + }; + }, + 3: undefined as unknown as () => ReplyUnion + }, + unstableResp3: true +} as const satisfies Command; + +export function parseAggregateOptions(parser: CommandParser , options?: FtAggregateOptions) { + if (options?.VERBATIM) { + parser.push('VERBATIM'); + } + + if (options?.ADDSCORES) { + parser.push('ADDSCORES'); + } + + if (options?.LOAD) { + const args: Array = []; + + if (Array.isArray(options.LOAD)) { + for (const load of options.LOAD) { + pushLoadField(args, load); + } + } else { + pushLoadField(args, options.LOAD); + } + + parser.push('LOAD'); + parser.pushVariadicWithLength(args); + } + + if (options?.TIMEOUT !== undefined) { + parser.push('TIMEOUT', options.TIMEOUT.toString()); + } + + if (options?.STEPS) { + for (const step of options.STEPS) { + parser.push(step.type); + switch (step.type) { + case FT_AGGREGATE_STEPS.GROUPBY: + if (!step.properties) { + parser.push('0'); + } else { + parser.pushVariadicWithLength(step.properties); + } + + if (Array.isArray(step.REDUCE)) { + for (const reducer of step.REDUCE) { + parseGroupByReducer(parser, reducer); + } + } else { + parseGroupByReducer(parser, step.REDUCE); + } + + break; + + case FT_AGGREGATE_STEPS.SORTBY: + const args: Array = []; + + if (Array.isArray(step.BY)) { + for (const by of step.BY) { + pushSortByProperty(args, by); + } + } else { + pushSortByProperty(args, step.BY); + } + + if (step.MAX) { + args.push('MAX', step.MAX.toString()); + } + + parser.pushVariadicWithLength(args); + + break; + + case FT_AGGREGATE_STEPS.APPLY: + parser.push(step.expression, 'AS', step.AS); + break; + + case FT_AGGREGATE_STEPS.LIMIT: + parser.push(step.from.toString(), step.size.toString()); + break; + + case FT_AGGREGATE_STEPS.FILTER: + parser.push(step.expression); + break; + } + } + } + + parseParamsArgument(parser, options?.PARAMS); + + if (options?.DIALECT) { + parser.push('DIALECT', options.DIALECT.toString()); + } else { + parser.push('DIALECT', 
DEFAULT_DIALECT); + } +} + +function pushLoadField(args: Array, toLoad: LoadField) { + if (typeof toLoad === 'string' || toLoad instanceof Buffer) { + args.push(toLoad); + } else { + args.push(toLoad.identifier); + + if (toLoad.AS) { + args.push('AS', toLoad.AS); + } + } +} + +function parseGroupByReducer(parser: CommandParser, reducer: GroupByReducers) { + parser.push('REDUCE', reducer.type); + + switch (reducer.type) { + case FT_AGGREGATE_GROUP_BY_REDUCERS.COUNT: + parser.push('0'); + break; + + case FT_AGGREGATE_GROUP_BY_REDUCERS.COUNT_DISTINCT: + case FT_AGGREGATE_GROUP_BY_REDUCERS.COUNT_DISTINCTISH: + case FT_AGGREGATE_GROUP_BY_REDUCERS.SUM: + case FT_AGGREGATE_GROUP_BY_REDUCERS.MIN: + case FT_AGGREGATE_GROUP_BY_REDUCERS.MAX: + case FT_AGGREGATE_GROUP_BY_REDUCERS.AVG: + case FT_AGGREGATE_GROUP_BY_REDUCERS.STDDEV: + case FT_AGGREGATE_GROUP_BY_REDUCERS.TOLIST: + parser.push('1', reducer.property); + break; + + case FT_AGGREGATE_GROUP_BY_REDUCERS.QUANTILE: + parser.push('2', reducer.property, reducer.quantile.toString()); + break; + + case FT_AGGREGATE_GROUP_BY_REDUCERS.FIRST_VALUE: { + const args: Array = [reducer.property]; + + if (reducer.BY) { + args.push('BY'); + if (typeof reducer.BY === 'string' || reducer.BY instanceof Buffer) { + args.push(reducer.BY); + } else { + args.push(reducer.BY.property); + if (reducer.BY.direction) { + args.push(reducer.BY.direction); + } + } + } + + parser.pushVariadicWithLength(args); + break; + } + + case FT_AGGREGATE_GROUP_BY_REDUCERS.RANDOM_SAMPLE: + parser.push('2', reducer.property, reducer.sampleSize.toString()); + break; + } + + if (reducer.AS) { + parser.push('AS', reducer.AS); + } +} + +function pushSortByProperty(args: Array, sortBy: SortByProperty) { + if (typeof sortBy === 'string' || sortBy instanceof Buffer) { + args.push(sortBy); + } else { + args.push(sortBy.BY); + if (sortBy.DIRECTION) { + args.push(sortBy.DIRECTION); + } + } +} diff --git a/packages/search/lib/commands/AGGREGATE_WITHCURSOR.spec.ts 
b/packages/search/lib/commands/AGGREGATE_WITHCURSOR.spec.ts new file mode 100644 index 00000000000..0e89346c49f --- /dev/null +++ b/packages/search/lib/commands/AGGREGATE_WITHCURSOR.spec.ts @@ -0,0 +1,49 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import AGGREGATE_WITHCURSOR from './AGGREGATE_WITHCURSOR'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; +import { DEFAULT_DIALECT } from '../dialect/default'; + +describe('AGGREGATE WITHCURSOR', () => { + describe('transformArguments', () => { + it('without options', () => { + assert.deepEqual( + parseArgs(AGGREGATE_WITHCURSOR, 'index', '*'), + ['FT.AGGREGATE', 'index', '*', 'DIALECT', DEFAULT_DIALECT, 'WITHCURSOR'] + ); + }); + + it('with COUNT', () => { + assert.deepEqual( + parseArgs(AGGREGATE_WITHCURSOR, 'index', '*', { + COUNT: 1 + }), + ['FT.AGGREGATE', 'index', '*', 'DIALECT', DEFAULT_DIALECT, 'WITHCURSOR', 'COUNT', '1'] + ); + }); + + it('with MAXIDLE', () => { + assert.deepEqual( + parseArgs(AGGREGATE_WITHCURSOR, 'index', '*', { + MAXIDLE: 1 + }), + ['FT.AGGREGATE', 'index', '*', 'DIALECT', DEFAULT_DIALECT, 'WITHCURSOR', 'MAXIDLE', '1'] + ); + }); + }); + + testUtils.testWithClient('client.ft.aggregateWithCursor', async client => { + await client.ft.create('index', { + field: 'NUMERIC' + }); + + assert.deepEqual( + await client.ft.aggregateWithCursor('index', '*'), + { + total: 0, + results: [], + cursor: 0 + } + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/search/lib/commands/AGGREGATE_WITHCURSOR.ts b/packages/search/lib/commands/AGGREGATE_WITHCURSOR.ts new file mode 100644 index 00000000000..e1b0e42f9fe --- /dev/null +++ b/packages/search/lib/commands/AGGREGATE_WITHCURSOR.ts @@ -0,0 +1,54 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, Command, ReplyUnion, NumberReply } from '@redis/client/dist/lib/RESP/types'; +import AGGREGATE, { AggregateRawReply, 
AggregateReply, FtAggregateOptions } from './AGGREGATE'; + +export interface FtAggregateWithCursorOptions extends FtAggregateOptions { + COUNT?: number; + MAXIDLE?: number; +} + + +type AggregateWithCursorRawReply = [ + result: AggregateRawReply, + cursor: NumberReply +]; + +export interface AggregateWithCursorReply extends AggregateReply { + cursor: NumberReply; +} + +export default { + IS_READ_ONLY: AGGREGATE.IS_READ_ONLY, + /** + * Performs an aggregation with a cursor for retrieving large result sets. + * @param parser - The command parser + * @param index - Name of the index to query + * @param query - The aggregation query + * @param options - Optional parameters: + * - All options supported by FT.AGGREGATE + * - COUNT: Number of results to return per cursor fetch + * - MAXIDLE: Maximum idle time for cursor in milliseconds + */ + parseCommand(parser: CommandParser, index: RedisArgument, query: RedisArgument, options?: FtAggregateWithCursorOptions) { + AGGREGATE.parseCommand(parser, index, query, options); + parser.push('WITHCURSOR'); + + if (options?.COUNT !== undefined) { + parser.push('COUNT', options.COUNT.toString()); + } + + if(options?.MAXIDLE !== undefined) { + parser.push('MAXIDLE', options.MAXIDLE.toString()); + } + }, + transformReply: { + 2: (reply: AggregateWithCursorRawReply): AggregateWithCursorReply => { + return { + ...AGGREGATE.transformReply[2](reply[0]), + cursor: reply[1] + }; + }, + 3: undefined as unknown as () => ReplyUnion + }, + unstableResp3: true +} as const satisfies Command; diff --git a/packages/search/lib/commands/ALIASADD.spec.ts b/packages/search/lib/commands/ALIASADD.spec.ts new file mode 100644 index 00000000000..b8332aed6a6 --- /dev/null +++ b/packages/search/lib/commands/ALIASADD.spec.ts @@ -0,0 +1,25 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ALIASADD from './ALIASADD'; +import { SCHEMA_FIELD_TYPE } from './CREATE'; +import { parseArgs } from 
'@redis/client/lib/commands/generic-transformers'; + +describe('FT.ALIASADD', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(ALIASADD, 'alias', 'index'), + ['FT.ALIASADD', 'alias', 'index'] + ); + }); + + testUtils.testWithClient('client.ft.aliasAdd', async client => { + const [, reply] = await Promise.all([ + client.ft.create('index', { + field: SCHEMA_FIELD_TYPE.TEXT + }), + client.ft.aliasAdd('alias', 'index') + ]); + + assert.equal(reply, 'OK'); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/search/lib/commands/ALIASADD.ts b/packages/search/lib/commands/ALIASADD.ts new file mode 100644 index 00000000000..7d3a03498e6 --- /dev/null +++ b/packages/search/lib/commands/ALIASADD.ts @@ -0,0 +1,17 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, SimpleStringReply, Command } from '@redis/client/dist/lib/RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Adds an alias to a RediSearch index. 
+ * @param parser - The command parser + * @param alias - The alias to add + * @param index - The index name to alias + */ + parseCommand(parser: CommandParser, alias: RedisArgument, index: RedisArgument) { + parser.push('FT.ALIASADD', alias, index); + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/search/lib/commands/ALIASDEL.spec.ts b/packages/search/lib/commands/ALIASDEL.spec.ts new file mode 100644 index 00000000000..19c2473f8cd --- /dev/null +++ b/packages/search/lib/commands/ALIASDEL.spec.ts @@ -0,0 +1,26 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ALIASDEL from './ALIASDEL'; +import { SCHEMA_FIELD_TYPE } from './CREATE'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('FT.ALIASDEL', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(ALIASDEL, 'alias'), + ['FT.ALIASDEL', 'alias'] + ); + }); + + testUtils.testWithClient('client.ft.aliasAdd', async client => { + const [, , reply] = await Promise.all([ + client.ft.create('index', { + field: SCHEMA_FIELD_TYPE.TEXT + }), + client.ft.aliasAdd('alias', 'index'), + client.ft.aliasDel('alias') + ]); + + assert.equal(reply, 'OK'); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/search/lib/commands/ALIASDEL.ts b/packages/search/lib/commands/ALIASDEL.ts new file mode 100644 index 00000000000..3058be13997 --- /dev/null +++ b/packages/search/lib/commands/ALIASDEL.ts @@ -0,0 +1,16 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, SimpleStringReply, Command } from '@redis/client/dist/lib/RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Removes an existing alias from a RediSearch index. 
+ * @param parser - The command parser + * @param alias - The alias to remove + */ + parseCommand(parser: CommandParser, alias: RedisArgument) { + parser.push('FT.ALIASDEL', alias); + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/search/lib/commands/ALIASUPDATE.spec.ts b/packages/search/lib/commands/ALIASUPDATE.spec.ts new file mode 100644 index 00000000000..f23af30229c --- /dev/null +++ b/packages/search/lib/commands/ALIASUPDATE.spec.ts @@ -0,0 +1,25 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ALIASUPDATE from './ALIASUPDATE'; +import { SCHEMA_FIELD_TYPE } from './CREATE'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('FT.ALIASUPDATE', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(ALIASUPDATE, 'alias', 'index'), + ['FT.ALIASUPDATE', 'alias', 'index'] + ); + }); + + testUtils.testWithClient('client.ft.aliasUpdate', async client => { + const [, reply] = await Promise.all([ + client.ft.create('index', { + field: SCHEMA_FIELD_TYPE.TEXT + }), + client.ft.aliasUpdate('alias', 'index') + ]); + + assert.equal(reply, 'OK'); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/search/lib/commands/ALIASUPDATE.ts b/packages/search/lib/commands/ALIASUPDATE.ts new file mode 100644 index 00000000000..35879ea79cb --- /dev/null +++ b/packages/search/lib/commands/ALIASUPDATE.ts @@ -0,0 +1,17 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { SimpleStringReply, Command, RedisArgument } from '@redis/client/dist/lib/RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Updates the index pointed to by an existing alias. 
+ * @param parser - The command parser + * @param alias - The existing alias to update + * @param index - The new index name that the alias should point to + */ + parseCommand(parser: CommandParser, alias: RedisArgument, index: RedisArgument) { + parser.push('FT.ALIASUPDATE', alias, index); + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/search/lib/commands/ALTER.spec.ts b/packages/search/lib/commands/ALTER.spec.ts new file mode 100644 index 00000000000..c34f7e045d5 --- /dev/null +++ b/packages/search/lib/commands/ALTER.spec.ts @@ -0,0 +1,36 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ALTER from './ALTER'; +import { SCHEMA_FIELD_TYPE } from './CREATE'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('FT.ALTER', () => { + describe('transformArguments', () => { + it('with NOINDEX', () => { + assert.deepEqual( + parseArgs(ALTER, 'index', { + field: { + type: SCHEMA_FIELD_TYPE.TEXT, + NOINDEX: true, + SORTABLE: 'UNF', + AS: 'text' + } + }), + ['FT.ALTER', 'index', 'SCHEMA', 'ADD', 'field', 'AS', 'text', 'TEXT', 'SORTABLE', 'UNF', 'NOINDEX'] + ); + }); + }); + + testUtils.testWithClient('client.ft.create', async client => { + const [, reply] = await Promise.all([ + client.ft.create('index', { + title: SCHEMA_FIELD_TYPE.TEXT + }), + client.ft.alter('index', { + body: SCHEMA_FIELD_TYPE.TEXT + }) + ]); + + assert.equal(reply, 'OK'); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/search/lib/commands/ALTER.ts b/packages/search/lib/commands/ALTER.ts new file mode 100644 index 00000000000..05c1b799eb1 --- /dev/null +++ b/packages/search/lib/commands/ALTER.ts @@ -0,0 +1,19 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, SimpleStringReply, Command } from '@redis/client/dist/lib/RESP/types'; +import { RediSearchSchema, parseSchema } 
from './CREATE'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Alters an existing RediSearch index schema by adding new fields. + * @param parser - The command parser + * @param index - The index to alter + * @param schema - The schema definition containing new fields to add + */ + parseCommand(parser: CommandParser, index: RedisArgument, schema: RediSearchSchema) { + parser.push('FT.ALTER', index, 'SCHEMA', 'ADD'); + parseSchema(parser, schema); + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/search/lib/commands/CONFIG_GET.spec.ts b/packages/search/lib/commands/CONFIG_GET.spec.ts new file mode 100644 index 00000000000..598a2a9ac41 --- /dev/null +++ b/packages/search/lib/commands/CONFIG_GET.spec.ts @@ -0,0 +1,26 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import CONFIG_GET from './CONFIG_GET'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('FT.CONFIG GET', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(CONFIG_GET, 'TIMEOUT'), + ['FT.CONFIG', 'GET', 'TIMEOUT'] + ); + }); + + testUtils.testWithClient('client.ft.configGet', async client => { + assert.deepEqual( + await client.ft.configGet('TIMEOUT'), + Object.create(null, { + TIMEOUT: { + value: '500', + configurable: true, + enumerable: true + } + }) + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/search/lib/commands/CONFIG_GET.ts b/packages/search/lib/commands/CONFIG_GET.ts new file mode 100644 index 00000000000..8073805c533 --- /dev/null +++ b/packages/search/lib/commands/CONFIG_GET.ts @@ -0,0 +1,24 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { ArrayReply, TuplesReply, BlobStringReply, NullReply, UnwrapReply, Command } from '@redis/client/dist/lib/RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: 
true, + /** + * Gets a RediSearch configuration option value. + * @param parser - The command parser + * @param option - The name of the configuration option to retrieve + */ + parseCommand(parser: CommandParser, option: string) { + parser.push('FT.CONFIG', 'GET', option); + }, + transformReply(reply: UnwrapReply>>) { + const transformedReply: Record = Object.create(null); + for (const item of reply) { + const [key, value] = item as unknown as UnwrapReply; + transformedReply[key.toString()] = value; + } + + return transformedReply; + } +} as const satisfies Command; diff --git a/packages/search/lib/commands/CONFIG_SET.spec.ts b/packages/search/lib/commands/CONFIG_SET.spec.ts new file mode 100644 index 00000000000..c5922a28756 --- /dev/null +++ b/packages/search/lib/commands/CONFIG_SET.spec.ts @@ -0,0 +1,56 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import CONFIG_SET from './CONFIG_SET'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('FT.CONFIG SET', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(CONFIG_SET, 'TIMEOUT', '500'), + ['FT.CONFIG', 'SET', 'TIMEOUT', '500'] + ); + }); + + testUtils.testWithClient('client.ft.configSet', async client => { + assert.deepEqual( + await client.ft.configSet('TIMEOUT', '500'), + 'OK' + ); + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithClientIfVersionWithinRange([[8], 'LATEST'], 'setSearchConfigGloballyTest', async client => { + + const normalizeObject = obj => JSON.parse(JSON.stringify(obj)); + assert.equal(await client.configSet('search-default-dialect', '3'), + 'OK', 'CONFIG SET should return OK'); + + assert.deepEqual( + normalizeObject(await client.configGet('search-default-dialect')), + { 'search-default-dialect': '3' }, + 'CONFIG GET should return 3' + ); + + assert.deepEqual( + normalizeObject(await client.ft.configGet('DEFAULT_DIALECT')), + { 'DEFAULT_DIALECT': '3' }, + 'FT.CONFIG GET 
should return 3' + ); + + const ftConfigSetResult = await client.ft.configSet('DEFAULT_DIALECT', '2'); + assert.equal(normalizeObject(ftConfigSetResult), 'OK', 'FT.CONFIG SET should return OK'); + + assert.deepEqual( + normalizeObject(await client.ft.configGet('DEFAULT_DIALECT')), + { 'DEFAULT_DIALECT': '2' }, + 'FT.CONFIG GET should return 2' + ); + + assert.deepEqual( + normalizeObject(await client.configGet('search-default-dialect')), + { 'search-default-dialect': '2' }, + 'CONFIG GET should return 22' + ); + + }, GLOBAL.SERVERS.OPEN); + +}); diff --git a/packages/search/lib/commands/CONFIG_SET.ts b/packages/search/lib/commands/CONFIG_SET.ts new file mode 100644 index 00000000000..c3c8cc7259b --- /dev/null +++ b/packages/search/lib/commands/CONFIG_SET.ts @@ -0,0 +1,21 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, SimpleStringReply, Command } from '@redis/client/dist/lib/RESP/types'; + +// using `string & {}` to avoid TS widening the type to `string` +// TODO +type FtConfigProperties = 'a' | 'b' | (string & {}) | Buffer; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Sets a RediSearch configuration option value. 
+ * @param parser - The command parser + * @param property - The name of the configuration option to set + * @param value - The value to set for the configuration option + */ + parseCommand(parser: CommandParser, property: FtConfigProperties, value: RedisArgument) { + parser.push('FT.CONFIG', 'SET', property, value); + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/search/lib/commands/CREATE.spec.ts b/packages/search/lib/commands/CREATE.spec.ts new file mode 100644 index 00000000000..268421ef35f --- /dev/null +++ b/packages/search/lib/commands/CREATE.spec.ts @@ -0,0 +1,669 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import CREATE, { SCHEMA_FIELD_TYPE, SCHEMA_TEXT_FIELD_PHONETIC, SCHEMA_VECTOR_FIELD_ALGORITHM, REDISEARCH_LANGUAGE, VAMANA_COMPRESSION_ALGORITHM } from './CREATE'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('FT.CREATE', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(CREATE, 'index', {}), + ['FT.CREATE', 'index', 'SCHEMA'] + ); + }); + + describe('with fields', () => { + describe('TEXT', () => { + it('without options', () => { + assert.deepEqual( + parseArgs(CREATE, 'index', { + field: SCHEMA_FIELD_TYPE.TEXT + }), + ['FT.CREATE', 'index', 'SCHEMA', 'field', 'TEXT'] + ); + }); + + it('with NOSTEM', () => { + assert.deepEqual( + parseArgs(CREATE, 'index', { + field: { + type: SCHEMA_FIELD_TYPE.TEXT, + NOSTEM: true + } + }), + ['FT.CREATE', 'index', 'SCHEMA', 'field', 'TEXT', 'NOSTEM'] + ); + }); + + it('with WEIGHT', () => { + assert.deepEqual( + parseArgs(CREATE, 'index', { + field: { + type: SCHEMA_FIELD_TYPE.TEXT, + WEIGHT: 1 + } + }), + ['FT.CREATE', 'index', 'SCHEMA', 'field', 'TEXT', 'WEIGHT', '1'] + ); + }); + + it('with PHONETIC', () => { + assert.deepEqual( + parseArgs(CREATE, 'index', { + field: { + type: 
SCHEMA_FIELD_TYPE.TEXT, + PHONETIC: SCHEMA_TEXT_FIELD_PHONETIC.DM_EN + } + }), + ['FT.CREATE', 'index', 'SCHEMA', 'field', 'TEXT', 'PHONETIC', SCHEMA_TEXT_FIELD_PHONETIC.DM_EN] + ); + }); + + it('with WITHSUFFIXTRIE', () => { + assert.deepEqual( + parseArgs(CREATE, 'index', { + field: { + type: SCHEMA_FIELD_TYPE.TEXT, + WITHSUFFIXTRIE: true + } + }), + ['FT.CREATE', 'index', 'SCHEMA', 'field', 'TEXT', 'WITHSUFFIXTRIE'] + ); + }); + }); + + it('NUMERIC', () => { + assert.deepEqual( + parseArgs(CREATE, 'index', { + field: SCHEMA_FIELD_TYPE.NUMERIC + }), + ['FT.CREATE', 'index', 'SCHEMA', 'field', 'NUMERIC'] + ); + }); + + it('GEO', () => { + assert.deepEqual( + parseArgs(CREATE, 'index', { + field: SCHEMA_FIELD_TYPE.GEO + }), + ['FT.CREATE', 'index', 'SCHEMA', 'field', 'GEO'] + ); + }); + + describe('TAG', () => { + describe('without options', () => { + it('SCHEMA_FIELD_TYPE.TAG', () => { + assert.deepEqual( + parseArgs(CREATE, 'index', { + field: SCHEMA_FIELD_TYPE.TAG + }), + ['FT.CREATE', 'index', 'SCHEMA', 'field', 'TAG'] + ); + }); + + it('{ type: SCHEMA_FIELD_TYPE.TAG }', () => { + assert.deepEqual( + parseArgs(CREATE, 'index', { + field: { + type: SCHEMA_FIELD_TYPE.TAG + } + }), + ['FT.CREATE', 'index', 'SCHEMA', 'field', 'TAG'] + ); + }); + }); + + it('with SEPARATOR', () => { + assert.deepEqual( + parseArgs(CREATE, 'index', { + field: { + type: SCHEMA_FIELD_TYPE.TAG, + SEPARATOR: 'separator' + } + }), + ['FT.CREATE', 'index', 'SCHEMA', 'field', 'TAG', 'SEPARATOR', 'separator'] + ); + }); + + it('with CASESENSITIVE', () => { + assert.deepEqual( + parseArgs(CREATE, 'index', { + field: { + type: SCHEMA_FIELD_TYPE.TAG, + CASESENSITIVE: true + } + }), + ['FT.CREATE', 'index', 'SCHEMA', 'field', 'TAG', 'CASESENSITIVE'] + ); + }); + + it('with WITHSUFFIXTRIE', () => { + assert.deepEqual( + parseArgs(CREATE, 'index', { + field: { + type: SCHEMA_FIELD_TYPE.TAG, + WITHSUFFIXTRIE: true + } + }), + ['FT.CREATE', 'index', 'SCHEMA', 'field', 'TAG', 'WITHSUFFIXTRIE'] + ); + 
}); + + it('with INDEXEMPTY', () => { + assert.deepEqual( + parseArgs(CREATE, 'index', { + field: { + type: SCHEMA_FIELD_TYPE.TAG, + INDEXEMPTY: true + } + }), + ['FT.CREATE', 'index', 'SCHEMA', 'field', 'TAG', 'INDEXEMPTY'] + ); + }); + }); + + describe('VECTOR', () => { + it('Flat algorithm', () => { + assert.deepEqual( + parseArgs(CREATE, 'index', { + field: { + type: SCHEMA_FIELD_TYPE.VECTOR, + ALGORITHM: SCHEMA_VECTOR_FIELD_ALGORITHM.FLAT, + TYPE: 'FLOAT32', + DIM: 2, + DISTANCE_METRIC: 'L2', + INITIAL_CAP: 1000000, + BLOCK_SIZE: 1000 + } + }), + [ + 'FT.CREATE', 'index', 'SCHEMA', 'field', 'VECTOR', 'FLAT', '10', 'TYPE', + 'FLOAT32', 'DIM', '2', 'DISTANCE_METRIC', 'L2', 'INITIAL_CAP', '1000000', + 'BLOCK_SIZE', '1000' + ] + ); + }); + + it('HNSW algorithm', () => { + assert.deepEqual( + parseArgs(CREATE, 'index', { + field: { + type: SCHEMA_FIELD_TYPE.VECTOR, + ALGORITHM: SCHEMA_VECTOR_FIELD_ALGORITHM.HNSW, + TYPE: 'FLOAT32', + DIM: 2, + DISTANCE_METRIC: 'L2', + INITIAL_CAP: 1000000, + M: 40, + EF_CONSTRUCTION: 250, + EF_RUNTIME: 20 + } + }), + [ + 'FT.CREATE', 'index', 'SCHEMA', 'field', 'VECTOR', 'HNSW', '14', 'TYPE', + 'FLOAT32', 'DIM', '2', 'DISTANCE_METRIC', 'L2', 'INITIAL_CAP', '1000000', + 'M', '40', 'EF_CONSTRUCTION', '250', 'EF_RUNTIME', '20' + ] + ); + }); + + it('VAMANA algorithm', () => { + assert.deepEqual( + parseArgs(CREATE, 'index', { + field: { + type: SCHEMA_FIELD_TYPE.VECTOR, + ALGORITHM: SCHEMA_VECTOR_FIELD_ALGORITHM.VAMANA, + TYPE: "FLOAT32", + COMPRESSION: VAMANA_COMPRESSION_ALGORITHM.LVQ8, + DIM: 1024, + DISTANCE_METRIC: 'COSINE', + CONSTRUCTION_WINDOW_SIZE: 300, + GRAPH_MAX_DEGREE: 128, + SEARCH_WINDOW_SIZE: 20, + EPSILON: 0.02, + TRAINING_THRESHOLD: 20480, + REDUCE: 512, + } + }), + [ + 'FT.CREATE', 'index', 'SCHEMA', 'field', 'VECTOR', 'SVS-VAMANA', '20', 'TYPE', + 'FLOAT32', 'DIM', '1024', 'DISTANCE_METRIC', 'COSINE', 'COMPRESSION', 'LVQ8', + 'CONSTRUCTION_WINDOW_SIZE', '300', 'GRAPH_MAX_DEGREE', '128', 'SEARCH_WINDOW_SIZE', '20', + 
'EPSILON', '0.02', 'TRAINING_THRESHOLD', '20480', 'REDUCE', '512' + ] + ); + }); + }); + + describe('GEOSHAPE', () => { + describe('without options', () => { + it('SCHEMA_FIELD_TYPE.GEOSHAPE', () => { + assert.deepEqual( + parseArgs(CREATE, 'index', { + field: SCHEMA_FIELD_TYPE.GEOSHAPE + }), + ['FT.CREATE', 'index', 'SCHEMA', 'field', 'GEOSHAPE'] + ); + }); + + it('{ type: SCHEMA_FIELD_TYPE.GEOSHAPE }', () => { + assert.deepEqual( + parseArgs(CREATE, 'index', { + field: { + type: SCHEMA_FIELD_TYPE.GEOSHAPE + } + }), + ['FT.CREATE', 'index', 'SCHEMA', 'field', 'GEOSHAPE'] + ); + }); + }); + + it('with COORD_SYSTEM', () => { + assert.deepEqual( + parseArgs(CREATE, 'index', { + field: { + type: SCHEMA_FIELD_TYPE.GEOSHAPE, + COORD_SYSTEM: 'SPHERICAL' + } + }), + ['FT.CREATE', 'index', 'SCHEMA', 'field', 'GEOSHAPE', 'COORD_SYSTEM', 'SPHERICAL'] + ); + }); + }); + + it('with AS', () => { + assert.deepEqual( + parseArgs(CREATE, 'index', { + field: { + type: SCHEMA_FIELD_TYPE.TEXT, + AS: 'as' + } + }), + ['FT.CREATE', 'index', 'SCHEMA', 'field', 'AS', 'as', 'TEXT'] + ); + }); + + describe('with SORTABLE', () => { + it('true', () => { + assert.deepEqual( + parseArgs(CREATE, 'index', { + field: { + type: SCHEMA_FIELD_TYPE.TEXT, + SORTABLE: true + } + }), + ['FT.CREATE', 'index', 'SCHEMA', 'field', 'TEXT', 'SORTABLE'] + ); + }); + + it('UNF', () => { + assert.deepEqual( + parseArgs(CREATE, 'index', { + field: { + type: SCHEMA_FIELD_TYPE.TEXT, + SORTABLE: 'UNF' + } + }), + ['FT.CREATE', 'index', 'SCHEMA', 'field', 'TEXT', 'SORTABLE', 'UNF'] + ); + }); + }); + + it('with NOINDEX', () => { + assert.deepEqual( + parseArgs(CREATE, 'index', { + field: { + type: SCHEMA_FIELD_TYPE.TEXT, + NOINDEX: true + } + }), + ['FT.CREATE', 'index', 'SCHEMA', 'field', 'TEXT', 'NOINDEX'] + ); + }); + + it('with INDEXMISSING', () => { + assert.deepEqual( + parseArgs(CREATE, 'index', { + field: { + type: SCHEMA_FIELD_TYPE.TEXT, + INDEXMISSING: true + } + }), + ['FT.CREATE', 'index', 'SCHEMA', 
'field', 'TEXT', 'INDEXMISSING'] + ); + }); + }); + + it('with ON', () => { + assert.deepEqual( + parseArgs(CREATE, 'index', {}, { + ON: 'HASH' + }), + ['FT.CREATE', 'index', 'ON', 'HASH', 'SCHEMA'] + ); + }); + + describe('with PREFIX', () => { + it('string', () => { + assert.deepEqual( + parseArgs(CREATE, 'index', {}, { + PREFIX: 'prefix' + }), + ['FT.CREATE', 'index', 'PREFIX', '1', 'prefix', 'SCHEMA'] + ); + }); + + it('Array', () => { + assert.deepEqual( + parseArgs(CREATE, 'index', {}, { + PREFIX: ['1', '2'] + }), + ['FT.CREATE', 'index', 'PREFIX', '2', '1', '2', 'SCHEMA'] + ); + }); + }); + + it('with FILTER', () => { + assert.deepEqual( + parseArgs(CREATE, 'index', {}, { + FILTER: '@field != ""' + }), + ['FT.CREATE', 'index', 'FILTER', '@field != ""', 'SCHEMA'] + ); + }); + + it('with LANGUAGE', () => { + assert.deepEqual( + parseArgs(CREATE, 'index', {}, { + LANGUAGE: REDISEARCH_LANGUAGE.ARABIC + }), + ['FT.CREATE', 'index', 'LANGUAGE', REDISEARCH_LANGUAGE.ARABIC, 'SCHEMA'] + ); + }); + + it('with LANGUAGE_FIELD', () => { + assert.deepEqual( + parseArgs(CREATE, 'index', {}, { + LANGUAGE_FIELD: '@field' + }), + ['FT.CREATE', 'index', 'LANGUAGE_FIELD', '@field', 'SCHEMA'] + ); + }); + + it('with SCORE', () => { + assert.deepEqual( + parseArgs(CREATE, 'index', {}, { + SCORE: 1 + }), + ['FT.CREATE', 'index', 'SCORE', '1', 'SCHEMA'] + ); + }); + + it('with SCORE_FIELD', () => { + assert.deepEqual( + parseArgs(CREATE, 'index', {}, { + SCORE_FIELD: '@field' + }), + ['FT.CREATE', 'index', 'SCORE_FIELD', '@field', 'SCHEMA'] + ); + }); + + it('with MAXTEXTFIELDS', () => { + assert.deepEqual( + parseArgs(CREATE, 'index', {}, { + MAXTEXTFIELDS: true + }), + ['FT.CREATE', 'index', 'MAXTEXTFIELDS', 'SCHEMA'] + ); + }); + + it('with TEMPORARY', () => { + assert.deepEqual( + parseArgs(CREATE, 'index', {}, { + TEMPORARY: 1 + }), + ['FT.CREATE', 'index', 'TEMPORARY', '1', 'SCHEMA'] + ); + }); + + it('with NOOFFSETS', () => { + assert.deepEqual( + parseArgs(CREATE, 'index', 
{}, { + NOOFFSETS: true + }), + ['FT.CREATE', 'index', 'NOOFFSETS', 'SCHEMA'] + ); + }); + + it('with NOHL', () => { + assert.deepEqual( + parseArgs(CREATE, 'index', {}, { + NOHL: true + }), + ['FT.CREATE', 'index', 'NOHL', 'SCHEMA'] + ); + }); + + it('with NOFIELDS', () => { + assert.deepEqual( + parseArgs(CREATE, 'index', {}, { + NOFIELDS: true + }), + ['FT.CREATE', 'index', 'NOFIELDS', 'SCHEMA'] + ); + }); + + it('with NOFREQS', () => { + assert.deepEqual( + parseArgs(CREATE, 'index', {}, { + NOFREQS: true + }), + ['FT.CREATE', 'index', 'NOFREQS', 'SCHEMA'] + ); + }); + + it('with SKIPINITIALSCAN', () => { + assert.deepEqual( + parseArgs(CREATE, 'index', {}, { + SKIPINITIALSCAN: true + }), + ['FT.CREATE', 'index', 'SKIPINITIALSCAN', 'SCHEMA'] + ); + }); + + describe('with STOPWORDS', () => { + it('string', () => { + assert.deepEqual( + parseArgs(CREATE, 'index', {}, { + STOPWORDS: 'stopword' + }), + ['FT.CREATE', 'index', 'STOPWORDS', '1', 'stopword', 'SCHEMA'] + ); + }); + + it('Array', () => { + assert.deepEqual( + parseArgs(CREATE, 'index', {}, { + STOPWORDS: ['1', '2'] + }), + ['FT.CREATE', 'index', 'STOPWORDS', '2', '1', '2', 'SCHEMA'] + ); + }); + }); + }); + + testUtils.testWithClient('client.ft.create', async client => { + assert.equal( + await client.ft.create('index', { + field: SCHEMA_FIELD_TYPE.TEXT + }), + 'OK' + ); + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithClientIfVersionWithinRange([[7], 'LATEST'], 'client.ft.create vector types big floats', async client => { + assert.equal( + await client.ft.create("index_float32", { + field: { + ALGORITHM: "FLAT", + TYPE: "FLOAT32", + DIM: 1, + DISTANCE_METRIC: 'COSINE', + type: 'VECTOR' + }, + }), + "OK" + ); + + assert.equal( + await client.ft.create("index_float64", { + field: { + ALGORITHM: "FLAT", + TYPE: "FLOAT64", + DIM: 1, + DISTANCE_METRIC: 'COSINE', + type: 'VECTOR' + }, + }), + "OK" + ); + }, GLOBAL.SERVERS.OPEN); + + + testUtils.testWithClientIfVersionWithinRange([[8], 'LATEST'], 
'client.ft.create vector types small floats and ints', async client => { + assert.equal( + await client.ft.create("index_float16", { + field: { + ALGORITHM: "FLAT", + TYPE: "FLOAT16", + DIM: 1, + DISTANCE_METRIC: 'COSINE', + type: 'VECTOR' + }, + }), + "OK" + ); + + assert.equal( + await client.ft.create("index_bloat16", { + field: { + ALGORITHM: "FLAT", + TYPE: "BFLOAT16", + DIM: 1, + DISTANCE_METRIC: 'COSINE', + type: 'VECTOR' + }, + }), + "OK" + ); + + assert.equal( + await client.ft.create("index_int8", { + field: { + ALGORITHM: "FLAT", + TYPE: "INT8", + DIM: 1, + DISTANCE_METRIC: 'COSINE', + type: 'VECTOR' + }, + }), + "OK" + ); + + assert.equal( + await client.ft.create("index_uint8", { + field: { + ALGORITHM: "FLAT", + TYPE: "UINT8", + DIM: 1, + DISTANCE_METRIC: 'COSINE', + type: 'VECTOR' + }, + }), + "OK" + ); + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithClientIfVersionWithinRange([[8, 2], 'LATEST'], 'client.ft.create vector svs-vamana', async client => { + assert.equal( + await client.ft.create("index_svs_vamana_min_config", { + field: { + type: SCHEMA_FIELD_TYPE.VECTOR, + ALGORITHM: SCHEMA_VECTOR_FIELD_ALGORITHM.VAMANA, + TYPE: "FLOAT32", + DIM: 768, + DISTANCE_METRIC: 'L2', + }, + }), + "OK" + ); + + assert.equal( + await client.ft.create("index_svs_vamana_no_compression", { + field: { + type: SCHEMA_FIELD_TYPE.VECTOR, + ALGORITHM: SCHEMA_VECTOR_FIELD_ALGORITHM.VAMANA, + TYPE: "FLOAT32", + DIM: 512, + DISTANCE_METRIC: 'L2', + CONSTRUCTION_WINDOW_SIZE: 200, + GRAPH_MAX_DEGREE: 64, + SEARCH_WINDOW_SIZE: 50, + EPSILON: 0.01 + }, + }), + "OK" + ); + + assert.equal( + await client.ft.create("index_svs_vamana_compression", { + field: { + type: SCHEMA_FIELD_TYPE.VECTOR, + ALGORITHM: SCHEMA_VECTOR_FIELD_ALGORITHM.VAMANA, + TYPE: "FLOAT32", + COMPRESSION: VAMANA_COMPRESSION_ALGORITHM.LeanVec4x8, + DIM: 1024, + DISTANCE_METRIC: 'COSINE', + CONSTRUCTION_WINDOW_SIZE: 300, + GRAPH_MAX_DEGREE: 128, + SEARCH_WINDOW_SIZE: 20, + EPSILON: 0.02, + TRAINING_THRESHOLD: 
20480, + REDUCE: 512, + }, + }), + "OK" + ); + + assert.equal( + await client.ft.create("index_svs_vamana_float16", { + field: { + type: SCHEMA_FIELD_TYPE.VECTOR, + ALGORITHM: SCHEMA_VECTOR_FIELD_ALGORITHM.VAMANA, + TYPE: "FLOAT16", + DIM: 128, + DISTANCE_METRIC: 'IP', + }, + }), + "OK" + ); + + await assert.rejects( + client.ft.create("index_svs_vamana_invalid_config", { + field: { + type: SCHEMA_FIELD_TYPE.VECTOR, + ALGORITHM: SCHEMA_VECTOR_FIELD_ALGORITHM.VAMANA, + TYPE: "FLOAT32", + DIM: 2, + DISTANCE_METRIC: 'L2', + CONSTRUCTION_WINDOW_SIZE: 200, + GRAPH_MAX_DEGREE: 64, + SEARCH_WINDOW_SIZE: 50, + EPSILON: 0.01, + // TRAINING_THRESHOLD should error without COMPRESSION + TRAINING_THRESHOLD: 2048 + }, + }), + ) + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/search/lib/commands/CREATE.ts b/packages/search/lib/commands/CREATE.ts new file mode 100644 index 00000000000..3f892df40d2 --- /dev/null +++ b/packages/search/lib/commands/CREATE.ts @@ -0,0 +1,444 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, SimpleStringReply, Command } from '@redis/client/dist/lib/RESP/types'; +import { RedisVariadicArgument, parseOptionalVariadicArgument } from '@redis/client/dist/lib/commands/generic-transformers'; + +export const SCHEMA_FIELD_TYPE = { + TEXT: 'TEXT', + NUMERIC: 'NUMERIC', + GEO: 'GEO', + TAG: 'TAG', + VECTOR: 'VECTOR', + GEOSHAPE: 'GEOSHAPE' +} as const; + +export type SchemaFieldType = typeof SCHEMA_FIELD_TYPE[keyof typeof SCHEMA_FIELD_TYPE]; + +interface SchemaField { + type: T; + AS?: RedisArgument; + INDEXMISSING?: boolean; +} + +interface SchemaCommonField extends SchemaField { + SORTABLE?: boolean | 'UNF' + NOINDEX?: boolean; +} + +export const SCHEMA_TEXT_FIELD_PHONETIC = { + DM_EN: 'dm:en', + DM_FR: 'dm:fr', + FM_PT: 'dm:pt', + DM_ES: 'dm:es' +} as const; + +export type SchemaTextFieldPhonetic = typeof SCHEMA_TEXT_FIELD_PHONETIC[keyof typeof SCHEMA_TEXT_FIELD_PHONETIC]; + +interface SchemaTextField 
extends SchemaCommonField { + NOSTEM?: boolean; + WEIGHT?: number; + PHONETIC?: SchemaTextFieldPhonetic; + WITHSUFFIXTRIE?: boolean; + INDEXEMPTY?: boolean; +} + +interface SchemaNumericField extends SchemaCommonField {} + +interface SchemaGeoField extends SchemaCommonField {} + +interface SchemaTagField extends SchemaCommonField { + SEPARATOR?: RedisArgument; + CASESENSITIVE?: boolean; + WITHSUFFIXTRIE?: boolean; + INDEXEMPTY?: boolean; +} + +export const SCHEMA_VECTOR_FIELD_ALGORITHM = { + FLAT: 'FLAT', + HNSW: 'HNSW', + /** + * available since 8.2 + */ + VAMANA: 'SVS-VAMANA' +} as const; + +export type SchemaVectorFieldAlgorithm = typeof SCHEMA_VECTOR_FIELD_ALGORITHM[keyof typeof SCHEMA_VECTOR_FIELD_ALGORITHM]; + +interface SchemaVectorField extends SchemaField { + ALGORITHM: SchemaVectorFieldAlgorithm; + TYPE: 'FLOAT32' | 'FLOAT64' | 'BFLOAT16' | 'FLOAT16' | 'INT8' | 'UINT8'; + DIM: number; + DISTANCE_METRIC: 'L2' | 'IP' | 'COSINE'; + INITIAL_CAP?: number; +} + +interface SchemaFlatVectorField extends SchemaVectorField { + ALGORITHM: typeof SCHEMA_VECTOR_FIELD_ALGORITHM['FLAT']; + BLOCK_SIZE?: number; +} + +interface SchemaHNSWVectorField extends SchemaVectorField { + ALGORITHM: typeof SCHEMA_VECTOR_FIELD_ALGORITHM['HNSW']; + M?: number; + EF_CONSTRUCTION?: number; + EF_RUNTIME?: number; +} + +export const VAMANA_COMPRESSION_ALGORITHM = { + LVQ4: 'LVQ4', + LVQ8: 'LVQ8', + LVQ4x4: 'LVQ4x4', + LVQ4x8: 'LVQ4x8', + LeanVec4x8: 'LeanVec4x8', + LeanVec8x8: 'LeanVec8x8' +} as const; + +export type VamanaCompressionAlgorithm = + typeof VAMANA_COMPRESSION_ALGORITHM[keyof typeof VAMANA_COMPRESSION_ALGORITHM]; + +interface SchemaVAMANAVectorField extends SchemaVectorField { + ALGORITHM: typeof SCHEMA_VECTOR_FIELD_ALGORITHM['VAMANA']; + TYPE: 'FLOAT16' | 'FLOAT32'; + // VAMANA-specific parameters + COMPRESSION?: VamanaCompressionAlgorithm; + CONSTRUCTION_WINDOW_SIZE?: number; + GRAPH_MAX_DEGREE?: number; + SEARCH_WINDOW_SIZE?: number; + EPSILON?: number; + /** + * 
applicable only with COMPRESSION + */ + TRAINING_THRESHOLD?: number; + /** + * applicable only with LeanVec COMPRESSION + */ + REDUCE?: number; +} + +export const SCHEMA_GEO_SHAPE_COORD_SYSTEM = { + SPHERICAL: 'SPHERICAL', + FLAT: 'FLAT' +} as const; + +export type SchemaGeoShapeFieldCoordSystem = typeof SCHEMA_GEO_SHAPE_COORD_SYSTEM[keyof typeof SCHEMA_GEO_SHAPE_COORD_SYSTEM]; + +interface SchemaGeoShapeField extends SchemaField { + COORD_SYSTEM?: SchemaGeoShapeFieldCoordSystem; +} + +export interface RediSearchSchema { + [field: string]: ( + SchemaTextField | + SchemaNumericField | + SchemaGeoField | + SchemaTagField | + SchemaFlatVectorField | + SchemaHNSWVectorField | + SchemaVAMANAVectorField | + SchemaGeoShapeField | + SchemaFieldType + ); +} + +function parseCommonSchemaFieldOptions(parser: CommandParser, fieldOptions: SchemaCommonField) { + if (fieldOptions.SORTABLE) { + parser.push('SORTABLE'); + + if (fieldOptions.SORTABLE === 'UNF') { + parser.push('UNF'); + } + } + + if (fieldOptions.NOINDEX) { + parser.push('NOINDEX'); + } +} + +export function parseSchema(parser: CommandParser, schema: RediSearchSchema) { + for (const [field, fieldOptions] of Object.entries(schema)) { + parser.push(field); + + if (typeof fieldOptions === 'string') { + parser.push(fieldOptions); + continue; + } + + if (fieldOptions.AS) { + parser.push('AS', fieldOptions.AS); + } + + parser.push(fieldOptions.type); + + if (fieldOptions.INDEXMISSING) { + parser.push('INDEXMISSING'); + } + + switch (fieldOptions.type) { + case SCHEMA_FIELD_TYPE.TEXT: + if (fieldOptions.NOSTEM) { + parser.push('NOSTEM'); + } + + if (fieldOptions.WEIGHT !== undefined) { + parser.push('WEIGHT', fieldOptions.WEIGHT.toString()); + } + + if (fieldOptions.PHONETIC) { + parser.push('PHONETIC', fieldOptions.PHONETIC); + } + + if (fieldOptions.WITHSUFFIXTRIE) { + parser.push('WITHSUFFIXTRIE'); + } + + if (fieldOptions.INDEXEMPTY) { + parser.push('INDEXEMPTY'); + } + + parseCommonSchemaFieldOptions(parser, 
fieldOptions) + break; + + case SCHEMA_FIELD_TYPE.NUMERIC: + case SCHEMA_FIELD_TYPE.GEO: + parseCommonSchemaFieldOptions(parser, fieldOptions) + break; + + case SCHEMA_FIELD_TYPE.TAG: + if (fieldOptions.SEPARATOR) { + parser.push('SEPARATOR', fieldOptions.SEPARATOR); + } + + if (fieldOptions.CASESENSITIVE) { + parser.push('CASESENSITIVE'); + } + + if (fieldOptions.WITHSUFFIXTRIE) { + parser.push('WITHSUFFIXTRIE'); + } + + if (fieldOptions.INDEXEMPTY) { + parser.push('INDEXEMPTY'); + } + + parseCommonSchemaFieldOptions(parser, fieldOptions) + break; + + case SCHEMA_FIELD_TYPE.VECTOR: + parser.push(fieldOptions.ALGORITHM); + + const args: Array = []; + + args.push( + 'TYPE', fieldOptions.TYPE, + 'DIM', fieldOptions.DIM.toString(), + 'DISTANCE_METRIC', fieldOptions.DISTANCE_METRIC + ); + + if (fieldOptions.INITIAL_CAP !== undefined) { + args.push('INITIAL_CAP', fieldOptions.INITIAL_CAP.toString()); + } + + switch (fieldOptions.ALGORITHM) { + case SCHEMA_VECTOR_FIELD_ALGORITHM.FLAT: + if (fieldOptions.BLOCK_SIZE !== undefined) { + args.push('BLOCK_SIZE', fieldOptions.BLOCK_SIZE.toString()); + } + + break; + + case SCHEMA_VECTOR_FIELD_ALGORITHM.HNSW: + if (fieldOptions.M !== undefined) { + args.push('M', fieldOptions.M.toString()); + } + + if (fieldOptions.EF_CONSTRUCTION !== undefined) { + args.push('EF_CONSTRUCTION', fieldOptions.EF_CONSTRUCTION.toString()); + } + + if (fieldOptions.EF_RUNTIME !== undefined) { + args.push('EF_RUNTIME', fieldOptions.EF_RUNTIME.toString()); + } + + break; + + case SCHEMA_VECTOR_FIELD_ALGORITHM['VAMANA']: + if (fieldOptions.COMPRESSION) { + args.push('COMPRESSION', fieldOptions.COMPRESSION); + } + + if (fieldOptions.CONSTRUCTION_WINDOW_SIZE !== undefined) { + args.push('CONSTRUCTION_WINDOW_SIZE', fieldOptions.CONSTRUCTION_WINDOW_SIZE.toString()); + } + + if (fieldOptions.GRAPH_MAX_DEGREE !== undefined) { + args.push('GRAPH_MAX_DEGREE', fieldOptions.GRAPH_MAX_DEGREE.toString()); + } + + if (fieldOptions.SEARCH_WINDOW_SIZE !== undefined) { 
+ args.push('SEARCH_WINDOW_SIZE', fieldOptions.SEARCH_WINDOW_SIZE.toString()); + } + + if (fieldOptions.EPSILON !== undefined) { + args.push('EPSILON', fieldOptions.EPSILON.toString()); + } + + if (fieldOptions.TRAINING_THRESHOLD !== undefined) { + args.push('TRAINING_THRESHOLD', fieldOptions.TRAINING_THRESHOLD.toString()); + } + + if (fieldOptions.REDUCE !== undefined) { + args.push('REDUCE', fieldOptions.REDUCE.toString()); + } + + break; + } + parser.pushVariadicWithLength(args); + + break; + + case SCHEMA_FIELD_TYPE.GEOSHAPE: + if (fieldOptions.COORD_SYSTEM !== undefined) { + parser.push('COORD_SYSTEM', fieldOptions.COORD_SYSTEM); + } + + break; + } + } +} + +export const REDISEARCH_LANGUAGE = { + ARABIC: 'Arabic', + BASQUE: 'Basque', + CATALANA: 'Catalan', + DANISH: 'Danish', + DUTCH: 'Dutch', + ENGLISH: 'English', + FINNISH: 'Finnish', + FRENCH: 'French', + GERMAN: 'German', + GREEK: 'Greek', + HUNGARIAN: 'Hungarian', + INDONESAIN: 'Indonesian', + IRISH: 'Irish', + ITALIAN: 'Italian', + LITHUANIAN: 'Lithuanian', + NEPALI: 'Nepali', + NORWEIGAN: 'Norwegian', + PORTUGUESE: 'Portuguese', + ROMANIAN: 'Romanian', + RUSSIAN: 'Russian', + SPANISH: 'Spanish', + SWEDISH: 'Swedish', + TAMIL: 'Tamil', + TURKISH: 'Turkish', + CHINESE: 'Chinese' +} as const; + +export type RediSearchLanguage = typeof REDISEARCH_LANGUAGE[keyof typeof REDISEARCH_LANGUAGE]; + +export type RediSearchProperty = `${'@' | '$.'}${string}`; + +export interface CreateOptions { + ON?: 'HASH' | 'JSON'; + PREFIX?: RedisVariadicArgument; + FILTER?: RedisArgument; + LANGUAGE?: RediSearchLanguage; + LANGUAGE_FIELD?: RediSearchProperty; + SCORE?: number; + SCORE_FIELD?: RediSearchProperty; + // PAYLOAD_FIELD?: string; + MAXTEXTFIELDS?: boolean; + TEMPORARY?: number; + NOOFFSETS?: boolean; + NOHL?: boolean; + NOFIELDS?: boolean; + NOFREQS?: boolean; + SKIPINITIALSCAN?: boolean; + STOPWORDS?: RedisVariadicArgument; +} + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Creates a 
new search index with the given schema and options. + * @param parser - The command parser + * @param index - Name of the index to create + * @param schema - Index schema defining field names and types (TEXT, NUMERIC, GEO, TAG, VECTOR, GEOSHAPE) + * @param options - Optional parameters: + * - ON: Type of container to index (HASH or JSON) + * - PREFIX: Prefixes for document keys to index + * - FILTER: Expression that filters indexed documents + * - LANGUAGE/LANGUAGE_FIELD: Default language for indexing + * - SCORE/SCORE_FIELD: Document ranking parameters + * - MAXTEXTFIELDS: Index all text fields without specifying them + * - TEMPORARY: Create a temporary index + * - NOOFFSETS/NOHL/NOFIELDS/NOFREQS: Index optimization flags + * - STOPWORDS: Custom stopword list + */ + parseCommand(parser: CommandParser, index: RedisArgument, schema: RediSearchSchema, options?: CreateOptions) { + parser.push('FT.CREATE', index); + + if (options?.ON) { + parser.push('ON', options.ON); + } + + parseOptionalVariadicArgument(parser, 'PREFIX', options?.PREFIX); + + if (options?.FILTER) { + parser.push('FILTER', options.FILTER); + } + + if (options?.LANGUAGE) { + parser.push('LANGUAGE', options.LANGUAGE); + } + + if (options?.LANGUAGE_FIELD) { + parser.push('LANGUAGE_FIELD', options.LANGUAGE_FIELD); + } + + if (options?.SCORE) { + parser.push('SCORE', options.SCORE.toString()); + } + + if (options?.SCORE_FIELD) { + parser.push('SCORE_FIELD', options.SCORE_FIELD); + } + + // if (options?.PAYLOAD_FIELD) { + // parser.push('PAYLOAD_FIELD', options.PAYLOAD_FIELD); + // } + + if (options?.MAXTEXTFIELDS) { + parser.push('MAXTEXTFIELDS'); + } + + if (options?.TEMPORARY) { + parser.push('TEMPORARY', options.TEMPORARY.toString()); + } + + if (options?.NOOFFSETS) { + parser.push('NOOFFSETS'); + } + + if (options?.NOHL) { + parser.push('NOHL'); + } + + if (options?.NOFIELDS) { + parser.push('NOFIELDS'); + } + + if (options?.NOFREQS) { + parser.push('NOFREQS'); + } + + if (options?.SKIPINITIALSCAN) { 
+ parser.push('SKIPINITIALSCAN'); + } + + parseOptionalVariadicArgument(parser, 'STOPWORDS', options?.STOPWORDS); + parser.push('SCHEMA'); + parseSchema(parser, schema); + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/search/lib/commands/CURSOR_DEL.spec.ts b/packages/search/lib/commands/CURSOR_DEL.spec.ts new file mode 100644 index 00000000000..230a5fd0feb --- /dev/null +++ b/packages/search/lib/commands/CURSOR_DEL.spec.ts @@ -0,0 +1,33 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import CURSOR_DEL from './CURSOR_DEL'; +import { SCHEMA_FIELD_TYPE } from './CREATE'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('FT.CURSOR DEL', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(CURSOR_DEL, 'index', 0), + ['FT.CURSOR', 'DEL', 'index', '0'] + ); + }); + + testUtils.testWithClient('client.ft.cursorDel', async client => { + const [, , { cursor }] = await Promise.all([ + client.ft.create('idx', { + field: { + type: SCHEMA_FIELD_TYPE.TEXT + } + }), + client.hSet('key', 'field', 'value'), + client.ft.aggregateWithCursor('idx', '*', { + COUNT: 1 + }) + ]); + + assert.equal( + await client.ft.cursorDel('idx', cursor), + 'OK' + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/search/lib/commands/CURSOR_DEL.ts b/packages/search/lib/commands/CURSOR_DEL.ts new file mode 100644 index 00000000000..39d0dc8af01 --- /dev/null +++ b/packages/search/lib/commands/CURSOR_DEL.ts @@ -0,0 +1,17 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { SimpleStringReply, Command, RedisArgument, NumberReply, UnwrapReply } from '@redis/client/dist/lib/RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Deletes a cursor from an index. 
+ * @param parser - The command parser + * @param index - The index name that contains the cursor + * @param cursorId - The cursor ID to delete + */ + parseCommand(parser: CommandParser, index: RedisArgument, cursorId: UnwrapReply) { + parser.push('FT.CURSOR', 'DEL', index, cursorId.toString()); + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/search/lib/commands/CURSOR_READ.spec.ts b/packages/search/lib/commands/CURSOR_READ.spec.ts new file mode 100644 index 00000000000..42dca0c5756 --- /dev/null +++ b/packages/search/lib/commands/CURSOR_READ.spec.ts @@ -0,0 +1,45 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import CURSOR_READ from './CURSOR_READ'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('FT.CURSOR READ', () => { + describe('transformArguments', () => { + it('without options', () => { + assert.deepEqual( + parseArgs(CURSOR_READ, 'index', '0'), + ['FT.CURSOR', 'READ', 'index', '0'] + ); + }); + + it('with COUNT', () => { + assert.deepEqual( + parseArgs(CURSOR_READ, 'index', '0', { + COUNT: 1 + }), + ['FT.CURSOR', 'READ', 'index', '0', 'COUNT', '1'] + ); + }); + }); + + testUtils.testWithClient('client.ft.cursorRead', async client => { + const [, , { cursor }] = await Promise.all([ + client.ft.create('idx', { + field: 'TEXT' + }), + client.hSet('key', 'field', 'value'), + client.ft.aggregateWithCursor('idx', '*', { + COUNT: 1 + }) + ]); + + assert.deepEqual( + await client.ft.cursorRead('idx', cursor), + { + total: 0, + results: [], + cursor: 0 + } + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/search/lib/commands/CURSOR_READ.ts b/packages/search/lib/commands/CURSOR_READ.ts new file mode 100644 index 00000000000..50ee5eafbd6 --- /dev/null +++ b/packages/search/lib/commands/CURSOR_READ.ts @@ -0,0 +1,29 @@ +import { CommandParser } from 
'@redis/client/dist/lib/client/parser'; +import { RedisArgument, Command, NumberReply, UnwrapReply } from '@redis/client/dist/lib/RESP/types'; +import AGGREGATE_WITHCURSOR from './AGGREGATE_WITHCURSOR'; + +export interface FtCursorReadOptions { + COUNT?: number; +} + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Reads from an existing cursor to get more results from an index. + * @param parser - The command parser + * @param index - The index name that contains the cursor + * @param cursor - The cursor ID to read from + * @param options - Optional parameters: + * - COUNT: Maximum number of results to return + */ + parseCommand(parser: CommandParser, index: RedisArgument, cursor: UnwrapReply, options?: FtCursorReadOptions) { + parser.push('FT.CURSOR', 'READ', index, cursor.toString()); + + if (options?.COUNT !== undefined) { + parser.push('COUNT', options.COUNT.toString()); + } + }, + transformReply: AGGREGATE_WITHCURSOR.transformReply, + unstableResp3: true +} as const satisfies Command; diff --git a/packages/search/lib/commands/DICTADD.spec.ts b/packages/search/lib/commands/DICTADD.spec.ts new file mode 100644 index 00000000000..4707db02dcf --- /dev/null +++ b/packages/search/lib/commands/DICTADD.spec.ts @@ -0,0 +1,29 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import DICTADD from './DICTADD'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('FT.DICTADD', () => { + describe('transformArguments', () => { + it('string', () => { + assert.deepEqual( + parseArgs(DICTADD, 'dictionary', 'term'), + ['FT.DICTADD', 'dictionary', 'term'] + ); + }); + + it('Array', () => { + assert.deepEqual( + parseArgs(DICTADD, 'dictionary', ['1', '2']), + ['FT.DICTADD', 'dictionary', '1', '2'] + ); + }); + }); + + testUtils.testWithClient('client.ft.dictAdd', async client => { + assert.equal( + await client.ft.dictAdd('dictionary', 'term'), + 1 + ); + }, 
GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/search/lib/commands/DICTADD.ts b/packages/search/lib/commands/DICTADD.ts new file mode 100644 index 00000000000..84936ff5f73 --- /dev/null +++ b/packages/search/lib/commands/DICTADD.ts @@ -0,0 +1,19 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, NumberReply, Command } from '@redis/client/dist/lib/RESP/types'; +import { RedisVariadicArgument } from '@redis/client/dist/lib/commands/generic-transformers'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Adds terms to a dictionary. + * @param parser - The command parser + * @param dictionary - Name of the dictionary to add terms to + * @param term - One or more terms to add to the dictionary + */ + parseCommand(parser: CommandParser, dictionary: RedisArgument, term: RedisVariadicArgument) { + parser.push('FT.DICTADD', dictionary); + parser.pushVariadic(term); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/search/lib/commands/DICTDEL.spec.ts b/packages/search/lib/commands/DICTDEL.spec.ts new file mode 100644 index 00000000000..a9f997bdf38 --- /dev/null +++ b/packages/search/lib/commands/DICTDEL.spec.ts @@ -0,0 +1,29 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import DICTDEL from './DICTDEL'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('FT.DICTDEL', () => { + describe('transformArguments', () => { + it('string', () => { + assert.deepEqual( + parseArgs(DICTDEL, 'dictionary', 'term'), + ['FT.DICTDEL', 'dictionary', 'term'] + ); + }); + + it('Array', () => { + assert.deepEqual( + parseArgs(DICTDEL, 'dictionary', ['1', '2']), + ['FT.DICTDEL', 'dictionary', '1', '2'] + ); + }); + }); + + testUtils.testWithClient('client.ft.dictDel', async client => { + assert.equal( + await client.ft.dictDel('dictionary', 'term'), + 0 + ); 
+ }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/search/lib/commands/DICTDEL.ts b/packages/search/lib/commands/DICTDEL.ts new file mode 100644 index 00000000000..c39b03f45ef --- /dev/null +++ b/packages/search/lib/commands/DICTDEL.ts @@ -0,0 +1,19 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, NumberReply, Command } from '@redis/client/dist/lib/RESP/types'; +import { RedisVariadicArgument } from '@redis/client/dist/lib/commands/generic-transformers'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Deletes terms from a dictionary. + * @param parser - The command parser + * @param dictionary - Name of the dictionary to remove terms from + * @param term - One or more terms to delete from the dictionary + */ + parseCommand(parser: CommandParser, dictionary: RedisArgument, term: RedisVariadicArgument) { + parser.push('FT.DICTDEL', dictionary); + parser.pushVariadic(term); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/search/lib/commands/DICTDUMP.spec.ts b/packages/search/lib/commands/DICTDUMP.spec.ts new file mode 100644 index 00000000000..1a3faa9dc9d --- /dev/null +++ b/packages/search/lib/commands/DICTDUMP.spec.ts @@ -0,0 +1,22 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import DICTDUMP from './DICTDUMP'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('FT.DICTDUMP', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(DICTDUMP, 'dictionary'), + ['FT.DICTDUMP', 'dictionary'] + ); + }); + + testUtils.testWithClient('client.ft.dictDump', async client => { + const [, reply] = await Promise.all([ + client.ft.dictAdd('dictionary', 'string'), + client.ft.dictDump('dictionary') + ]); + + assert.deepEqual(reply, ['string']); + }, GLOBAL.SERVERS.OPEN); +}); diff --git 
a/packages/search/lib/commands/DICTDUMP.ts b/packages/search/lib/commands/DICTDUMP.ts new file mode 100644 index 00000000000..1ae40b4edb3 --- /dev/null +++ b/packages/search/lib/commands/DICTDUMP.ts @@ -0,0 +1,19 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, ArrayReply, SetReply, BlobStringReply, Command } from '@redis/client/dist/lib/RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Returns all terms in a dictionary. + * @param parser - The command parser + * @param dictionary - Name of the dictionary to dump + */ + parseCommand(parser: CommandParser, dictionary: RedisArgument) { + parser.push('FT.DICTDUMP', dictionary); + }, + transformReply: { + 2: undefined as unknown as () => ArrayReply, + 3: undefined as unknown as () => SetReply + } +} as const satisfies Command; diff --git a/packages/search/lib/commands/DROPINDEX.spec.ts b/packages/search/lib/commands/DROPINDEX.spec.ts new file mode 100644 index 00000000000..f1f0b0efddb --- /dev/null +++ b/packages/search/lib/commands/DROPINDEX.spec.ts @@ -0,0 +1,34 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import DROPINDEX from './DROPINDEX'; +import { SCHEMA_FIELD_TYPE } from './CREATE'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('FT.DROPINDEX', () => { + describe('transformArguments', () => { + it('without options', () => { + assert.deepEqual( + parseArgs(DROPINDEX, 'index'), + ['FT.DROPINDEX', 'index'] + ); + }); + + it('with DD', () => { + assert.deepEqual( + parseArgs(DROPINDEX, 'index', { DD: true }), + ['FT.DROPINDEX', 'index', 'DD'] + ); + }); + }); + + testUtils.testWithClient('client.ft.dropIndex', async client => { + const [, reply] = await Promise.all([ + client.ft.create('index', { + field: SCHEMA_FIELD_TYPE.TEXT + }), + client.ft.dropIndex('index') + ]); + + assert.equal(reply, 'OK'); + }, 
GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/search/lib/commands/DROPINDEX.ts b/packages/search/lib/commands/DROPINDEX.ts new file mode 100644 index 00000000000..5b6e0dde786 --- /dev/null +++ b/packages/search/lib/commands/DROPINDEX.ts @@ -0,0 +1,29 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, SimpleStringReply, NumberReply, Command } from '@redis/client/dist/lib/RESP/types'; + +export interface FtDropIndexOptions { + DD?: true; +} + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Deletes an index and all associated documents. + * @param parser - The command parser + * @param index - Name of the index to delete + * @param options - Optional parameters: + * - DD: Also delete the indexed documents themselves + */ + parseCommand(parser: CommandParser, index: RedisArgument, options?: FtDropIndexOptions) { + parser.push('FT.DROPINDEX', index); + + if (options?.DD) { + parser.push('DD'); + } + }, + transformReply: { + 2: undefined as unknown as () => SimpleStringReply<'OK'>, + 3: undefined as unknown as () => NumberReply + } +} as const satisfies Command; diff --git a/packages/search/lib/commands/EXPLAIN.spec.ts b/packages/search/lib/commands/EXPLAIN.spec.ts new file mode 100644 index 00000000000..d1691bc7c25 --- /dev/null +++ b/packages/search/lib/commands/EXPLAIN.spec.ts @@ -0,0 +1,48 @@ +import { strict as assert } from 'node:assert'; +import EXPLAIN from './EXPLAIN'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; +import testUtils, { GLOBAL } from '../test-utils'; +import { SCHEMA_FIELD_TYPE } from './CREATE'; +import { DEFAULT_DIALECT } from '../dialect/default'; + +describe('EXPLAIN', () => { + describe('transformArguments', () => { + it('simple', () => { + assert.deepEqual( + parseArgs(EXPLAIN, 'index', '*'), + ['FT.EXPLAIN', 'index', '*', 'DIALECT', DEFAULT_DIALECT] + ); + }); + + it('with PARAMS', () => { + assert.deepEqual( + parseArgs(EXPLAIN, 
'index', '*', { + PARAMS: { + param: 'value' + } + }), + ['FT.EXPLAIN', 'index', '*', 'PARAMS', '2', 'param', 'value', 'DIALECT', DEFAULT_DIALECT] + ); + }); + + it('with DIALECT', () => { + assert.deepEqual( + parseArgs(EXPLAIN, 'index', '*', { + DIALECT: 1 + }), + ['FT.EXPLAIN', 'index', '*', 'DIALECT', '1'] + ); + }); + }); + + testUtils.testWithClient('client.ft.dropIndex', async client => { + const [, reply] = await Promise.all([ + client.ft.create('index', { + field: SCHEMA_FIELD_TYPE.TEXT + }), + client.ft.explain('index', '*') + ]); + + assert.equal(reply, '\n'); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/search/lib/commands/EXPLAIN.ts b/packages/search/lib/commands/EXPLAIN.ts new file mode 100644 index 00000000000..78d09ffeded --- /dev/null +++ b/packages/search/lib/commands/EXPLAIN.ts @@ -0,0 +1,40 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, SimpleStringReply, Command } from '@redis/client/dist/lib/RESP/types'; +import { FtSearchParams, parseParamsArgument } from './SEARCH'; +import { DEFAULT_DIALECT } from '../dialect/default'; + +export interface FtExplainOptions { + PARAMS?: FtSearchParams; + DIALECT?: number; +} + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Returns the execution plan for a complex query. 
+ * @param parser - The command parser + * @param index - Name of the index to explain query against + * @param query - The query string to explain + * @param options - Optional parameters: + * - PARAMS: Named parameters to use in the query + * - DIALECT: Version of query dialect to use (defaults to 1) + */ + parseCommand( + parser: CommandParser, + index: RedisArgument, + query: RedisArgument, + options?: FtExplainOptions + ) { + parser.push('FT.EXPLAIN', index, query); + + parseParamsArgument(parser, options?.PARAMS); + + if (options?.DIALECT) { + parser.push('DIALECT', options.DIALECT.toString()); + } else { + parser.push('DIALECT', DEFAULT_DIALECT); + } + }, + transformReply: undefined as unknown as () => SimpleStringReply +} as const satisfies Command; diff --git a/packages/search/lib/commands/EXPLAINCLI.spec.ts b/packages/search/lib/commands/EXPLAINCLI.spec.ts new file mode 100644 index 00000000000..1812b674094 --- /dev/null +++ b/packages/search/lib/commands/EXPLAINCLI.spec.ts @@ -0,0 +1,20 @@ +import { strict as assert } from 'node:assert'; +import EXPLAINCLI from './EXPLAINCLI'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; +import { DEFAULT_DIALECT } from '../dialect/default'; + +describe('EXPLAINCLI', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(EXPLAINCLI, 'index', '*'), + ['FT.EXPLAINCLI', 'index', '*', 'DIALECT', DEFAULT_DIALECT] + ); + }); + + it('with dialect', () => { + assert.deepEqual( + parseArgs(EXPLAINCLI, 'index', '*', {DIALECT: 1}), + ['FT.EXPLAINCLI', 'index', '*', 'DIALECT', '1'] + ); + }); +}); diff --git a/packages/search/lib/commands/EXPLAINCLI.ts b/packages/search/lib/commands/EXPLAINCLI.ts new file mode 100644 index 00000000000..42e489ce10e --- /dev/null +++ b/packages/search/lib/commands/EXPLAINCLI.ts @@ -0,0 +1,35 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, ArrayReply, BlobStringReply, Command } from 
'@redis/client/dist/lib/RESP/types'; +import { DEFAULT_DIALECT } from '../dialect/default'; + +export interface FtExplainCLIOptions { + DIALECT?: number; +} + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Returns the execution plan for a complex query in a more verbose format than FT.EXPLAIN. + * @param parser - The command parser + * @param index - Name of the index to explain query against + * @param query - The query string to explain + * @param options - Optional parameters: + * - DIALECT: Version of query dialect to use (defaults to 1) + */ + parseCommand( + parser: CommandParser, + index: RedisArgument, + query: RedisArgument, + options?: FtExplainCLIOptions + ) { + parser.push('FT.EXPLAINCLI', index, query); + + if (options?.DIALECT) { + parser.push('DIALECT', options.DIALECT.toString()); + } else { + parser.push('DIALECT', DEFAULT_DIALECT); + } + }, + transformReply: undefined as unknown as () => ArrayReply +} as const satisfies Command; diff --git a/packages/search/lib/commands/INFO.spec.ts b/packages/search/lib/commands/INFO.spec.ts new file mode 100644 index 00000000000..b52e99ab9b0 --- /dev/null +++ b/packages/search/lib/commands/INFO.spec.ts @@ -0,0 +1,216 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import INFO, { InfoReply } from './INFO'; +import { SCHEMA_FIELD_TYPE } from './CREATE'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('INFO', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(INFO, 'index'), + ['FT.INFO', 'index'] + ); + }); + + testUtils.testWithClientIfVersionWithinRange([[8], 'LATEST'], 'client.ft.info', async client => { + + await client.ft.create('index', { + field: SCHEMA_FIELD_TYPE.TEXT + }); + const ret = await client.ft.info('index'); + assert.equal(ret.index_name, 'index'); + + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithClientIfVersionWithinRange([[7, 4, 2], [7, 4, 
2]], 'client.ft.info', async client => { + + await client.ft.create('index', { + field: SCHEMA_FIELD_TYPE.TEXT + }); + const ret = await client.ft.info('index'); + // effectively testing that stopwords_list is not in ret + assert.deepEqual( + ret, + { + index_name: 'index', + index_options: [], + index_definition: Object.create(null, { + default_score: { + value: '1', + configurable: true, + enumerable: true + }, + key_type: { + value: 'HASH', + configurable: true, + enumerable: true + }, + prefixes: { + value: [''], + configurable: true, + enumerable: true + } + }), + attributes: [Object.create(null, { + identifier: { + value: 'field', + configurable: true, + enumerable: true + }, + attribute: { + value: 'field', + configurable: true, + enumerable: true + }, + type: { + value: 'TEXT', + configurable: true, + enumerable: true + }, + WEIGHT: { + value: '1', + configurable: true, + enumerable: true + } + })], + num_docs: 0, + max_doc_id: 0, + num_terms: 0, + num_records: 0, + inverted_sz_mb: 0, + vector_index_sz_mb: 0, + total_inverted_index_blocks: 0, + offset_vectors_sz_mb: 0, + doc_table_size_mb: 0, + sortable_values_size_mb: 0, + key_table_size_mb: 0, + records_per_doc_avg: NaN, + bytes_per_record_avg: NaN, + cleaning: 0, + offsets_per_term_avg: NaN, + offset_bits_per_record_avg: NaN, + geoshapes_sz_mb: 0, + hash_indexing_failures: 0, + indexing: 0, + percent_indexed: 1, + number_of_uses: 1, + tag_overhead_sz_mb: 0, + text_overhead_sz_mb: 0, + total_index_memory_sz_mb: 0, + total_indexing_time: 0, + gc_stats: { + bytes_collected: 0, + total_ms_run: 0, + total_cycles: 0, + average_cycle_time_ms: NaN, + last_run_time_ms: 0, + gc_numeric_trees_missed: 0, + gc_blocks_denied: 0 + }, + cursor_stats: { + global_idle: 0, + global_total: 0, + index_capacity: 128, + index_total: 0 + }, + } + ); + + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithClientIfVersionWithinRange([[7, 2, 0], [7, 2, 0]], 'client.ft.info', async client => { + + await client.ft.create('index', { + 
field: SCHEMA_FIELD_TYPE.TEXT + }); + const ret = await client.ft.info('index'); + // effectively testing that stopwords_list is not in ret + assert.deepEqual( + ret, + { + index_name: 'index', + index_options: [], + index_definition: Object.create(null, { + default_score: { + value: '1', + configurable: true, + enumerable: true + }, + key_type: { + value: 'HASH', + configurable: true, + enumerable: true + }, + prefixes: { + value: [''], + configurable: true, + enumerable: true + } + }), + attributes: [Object.create(null, { + identifier: { + value: 'field', + configurable: true, + enumerable: true + }, + attribute: { + value: 'field', + configurable: true, + enumerable: true + }, + type: { + value: 'TEXT', + configurable: true, + enumerable: true + }, + WEIGHT: { + value: '1', + configurable: true, + enumerable: true + } + })], + num_docs: "0", + max_doc_id: "0", + num_terms: "0", + num_records: "0", + inverted_sz_mb: 0, + vector_index_sz_mb: 0, + total_inverted_index_blocks: "0", + offset_vectors_sz_mb: 0, + doc_table_size_mb: 0, + sortable_values_size_mb: 0, + key_table_size_mb: 0, + records_per_doc_avg: NaN, + bytes_per_record_avg: NaN, + cleaning: 0, + offsets_per_term_avg: NaN, + offset_bits_per_record_avg: NaN, + geoshapes_sz_mb: 0, + hash_indexing_failures: "0", + indexing: "0", + percent_indexed: 1, + number_of_uses: 1, + tag_overhead_sz_mb: 0, + text_overhead_sz_mb: 0, + total_index_memory_sz_mb: 0, + total_indexing_time: 0, + gc_stats: { + bytes_collected: 0, + total_ms_run: 0, + total_cycles: 0, + average_cycle_time_ms: NaN, + last_run_time_ms: 0, + gc_numeric_trees_missed: 0, + gc_blocks_denied: 0 + }, + cursor_stats: { + global_idle: 0, + global_total: 0, + index_capacity: 128, + index_total: 0 + }, + } + ); + + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/search/lib/commands/INFO.ts b/packages/search/lib/commands/INFO.ts new file mode 100644 index 00000000000..03cf21edfd8 --- /dev/null +++ b/packages/search/lib/commands/INFO.ts @@ -0,0 +1,169 
@@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument } from "@redis/client"; +import { ArrayReply, BlobStringReply, Command, DoubleReply, MapReply, NullReply, NumberReply, ReplyUnion, SimpleStringReply, TypeMapping } from "@redis/client/dist/lib/RESP/types"; +import { createTransformTuplesReplyFunc, transformDoubleReply } from "@redis/client/dist/lib/commands/generic-transformers"; +import { TuplesReply } from '@redis/client/dist/lib/RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Returns information and statistics about an index. + * @param parser - The command parser + * @param index - Name of the index to get information about + */ + parseCommand(parser: CommandParser, index: RedisArgument) { + parser.push('FT.INFO', index); + }, + transformReply: { + 2: transformV2Reply, + 3: undefined as unknown as () => ReplyUnion + }, + unstableResp3: true +} as const satisfies Command; + +export interface InfoReply { + index_name: SimpleStringReply; + index_options: ArrayReply; + index_definition: MapReply; + attributes: Array>; + num_docs: NumberReply + max_doc_id: NumberReply; + num_terms: NumberReply; + num_records: NumberReply; + inverted_sz_mb: DoubleReply; + vector_index_sz_mb: DoubleReply; + total_inverted_index_blocks: NumberReply; + offset_vectors_sz_mb: DoubleReply; + doc_table_size_mb: DoubleReply; + sortable_values_size_mb: DoubleReply; + key_table_size_mb: DoubleReply; + tag_overhead_sz_mb: DoubleReply; + text_overhead_sz_mb: DoubleReply; + total_index_memory_sz_mb: DoubleReply; + geoshapes_sz_mb: DoubleReply; + records_per_doc_avg: DoubleReply; + bytes_per_record_avg: DoubleReply; + offsets_per_term_avg: DoubleReply; + offset_bits_per_record_avg: DoubleReply; + hash_indexing_failures: NumberReply; + total_indexing_time: DoubleReply; + indexing: NumberReply; + percent_indexed: DoubleReply; + number_of_uses: NumberReply; + cleaning: NumberReply; + gc_stats: { + 
bytes_collected: DoubleReply; + total_ms_run: DoubleReply; + total_cycles: DoubleReply; + average_cycle_time_ms: DoubleReply; + last_run_time_ms: DoubleReply; + gc_numeric_trees_missed: DoubleReply; + gc_blocks_denied: DoubleReply; + }; + cursor_stats: { + global_idle: NumberReply; + global_total: NumberReply; + index_capacity: NumberReply; + index_total: NumberReply; + }; + stopwords_list?: ArrayReply | TuplesReply<[NullReply]>; +} + +function transformV2Reply(reply: Array, preserve?: any, typeMapping?: TypeMapping): InfoReply { + const myTransformFunc = createTransformTuplesReplyFunc(preserve, typeMapping); + + const ret = {} as unknown as InfoReply; + + for (let i=0; i < reply.length; i += 2) { + const key = reply[i].toString() as keyof InfoReply; + + switch (key) { + case 'index_name': + case 'index_options': + case 'num_docs': + case 'max_doc_id': + case 'num_terms': + case 'num_records': + case 'total_inverted_index_blocks': + case 'hash_indexing_failures': + case 'indexing': + case 'number_of_uses': + case 'cleaning': + case 'stopwords_list': + ret[key] = reply[i+1]; + break; + case 'inverted_sz_mb': + case 'vector_index_sz_mb': + case 'offset_vectors_sz_mb': + case 'doc_table_size_mb': + case 'sortable_values_size_mb': + case 'key_table_size_mb': + case 'text_overhead_sz_mb': + case 'tag_overhead_sz_mb': + case 'total_index_memory_sz_mb': + case 'geoshapes_sz_mb': + case 'records_per_doc_avg': + case 'bytes_per_record_avg': + case 'offsets_per_term_avg': + case 'offset_bits_per_record_avg': + case 'total_indexing_time': + case 'percent_indexed': + ret[key] = transformDoubleReply[2](reply[i+1], undefined, typeMapping) as DoubleReply; + break; + case 'index_definition': + ret[key] = myTransformFunc(reply[i+1]); + break; + case 'attributes': + ret[key] = (reply[i+1] as Array>).map(attribute => myTransformFunc(attribute)); + break; + case 'gc_stats': { + const innerRet = {} as unknown as InfoReply['gc_stats']; + + const array = reply[i+1]; + + for (let i=0; i < 
array.length; i += 2) { + const innerKey = array[i].toString() as keyof InfoReply['gc_stats']; + + switch (innerKey) { + case 'bytes_collected': + case 'total_ms_run': + case 'total_cycles': + case 'average_cycle_time_ms': + case 'last_run_time_ms': + case 'gc_numeric_trees_missed': + case 'gc_blocks_denied': + innerRet[innerKey] = transformDoubleReply[2](array[i+1], undefined, typeMapping) as DoubleReply; + break; + } + } + + ret[key] = innerRet; + break; + } + case 'cursor_stats': { + const innerRet = {} as unknown as InfoReply['cursor_stats']; + + const array = reply[i+1]; + + for (let i=0; i < array.length; i += 2) { + const innerKey = array[i].toString() as keyof InfoReply['cursor_stats']; + + switch (innerKey) { + case 'global_idle': + case 'global_total': + case 'index_capacity': + case 'index_total': + innerRet[innerKey] = array[i+1]; + break; + } + } + + ret[key] = innerRet; + break; + } + } + } + + return ret; +} diff --git a/packages/search/lib/commands/PROFILE_AGGREGATE.spec.ts b/packages/search/lib/commands/PROFILE_AGGREGATE.spec.ts new file mode 100644 index 00000000000..82783fbaba9 --- /dev/null +++ b/packages/search/lib/commands/PROFILE_AGGREGATE.spec.ts @@ -0,0 +1,118 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import { FT_AGGREGATE_STEPS } from './AGGREGATE'; +import PROFILE_AGGREGATE from './PROFILE_AGGREGATE'; +import { SCHEMA_FIELD_TYPE } from './CREATE'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; +import { DEFAULT_DIALECT } from '../dialect/default'; + +describe('PROFILE AGGREGATE', () => { + describe('transformArguments', () => { + it('without options', () => { + assert.deepEqual( + parseArgs(PROFILE_AGGREGATE, 'index', 'query'), + ['FT.PROFILE', 'index', 'AGGREGATE', 'QUERY', 'query', 'DIALECT', DEFAULT_DIALECT] + ); + }); + + it('with options', () => { + assert.deepEqual( + parseArgs(PROFILE_AGGREGATE, 'index', 'query', { + LIMITED: true, + 
VERBATIM: true, + STEPS: [{ + type: FT_AGGREGATE_STEPS.SORTBY, + BY: '@by' + }] + }), + ['FT.PROFILE', 'index', 'AGGREGATE', 'LIMITED', 'QUERY', 'query', + 'VERBATIM', 'SORTBY', '1', '@by', 'DIALECT', DEFAULT_DIALECT] + ); + }); + }); + + testUtils.testWithClientIfVersionWithinRange([[8], 'LATEST'], 'client.ft.search', async client => { + await Promise.all([ + client.ft.create('index', { + field: SCHEMA_FIELD_TYPE.NUMERIC + }), + client.hSet('1', 'field', '1'), + client.hSet('2', 'field', '2') + ]); + + + const normalizeObject = obj => JSON.parse(JSON.stringify(obj)); + const res = await client.ft.profileAggregate('index', '*'); + + const normalizedRes = normalizeObject(res); + // TODO uncomment after https://redis.io/docs/latest/commands/ft.aggregate/#return + // starts returning valid values + // assert.equal(normalizedRes.results.total, 2); + + assert.ok(normalizedRes.profile[0] === 'Shards'); + assert.ok(Array.isArray(normalizedRes.profile[1])); + assert.ok(normalizedRes.profile[2] === 'Coordinator'); + assert.ok(Array.isArray(normalizedRes.profile[3])); + + const shardProfile = normalizedRes.profile[1][0]; + assert.ok(shardProfile.includes('Total profile time')); + assert.ok(shardProfile.includes('Parsing time')); + assert.ok(shardProfile.includes('Pipeline creation time')); + assert.ok(shardProfile.includes('Warning')); + assert.ok(shardProfile.includes('Iterators profile')); + + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithClientIfVersionWithinRange([[7, 2, 0], [7, 4, 0]], 'client.ft.search', async client => { + await Promise.all([ + client.ft.create('index', { + field: SCHEMA_FIELD_TYPE.NUMERIC + }), + client.hSet('1', 'field', '1'), + client.hSet('2', 'field', '2') + ]); + + const normalizeObject = obj => JSON.parse(JSON.stringify(obj)); + const res = await client.ft.profileAggregate('index', '*'); + const normalizedRes = normalizeObject(res); + + // TODO uncomment after https://redis.io/docs/latest/commands/ft.aggregate/#return + // starts returning 
valid values + // assert.equal(normalizedRes.results.total, 2); + + assert.ok(Array.isArray(normalizedRes.profile)); + assert.equal(normalizedRes.profile[0][0], 'Total profile time'); + assert.equal(normalizedRes.profile[1][0], 'Parsing time'); + assert.equal(normalizedRes.profile[2][0], 'Pipeline creation time'); + assert.equal(normalizedRes.profile[3][0], 'Warning'); + assert.equal(normalizedRes.profile[4][0], 'Iterators profile'); + assert.equal(normalizedRes.profile[5][0], 'Result processors profile'); + + const iteratorsProfile = normalizedRes.profile[4][1]; + assert.equal(iteratorsProfile[0], 'Type'); + assert.equal(iteratorsProfile[1], 'WILDCARD'); + assert.equal(iteratorsProfile[2], 'Time'); + assert.equal(iteratorsProfile[4], 'Counter'); + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithClientIfVersionWithinRange([[8], 'LATEST'], '[RESP3] client.ft.search', async client => { + await Promise.all([ + client.ft.create('index', { + field: SCHEMA_FIELD_TYPE.NUMERIC + }), + client.hSet('1', 'field', '1'), + client.hSet('2', 'field', '2') + ]); + + + const normalizeObject = obj => JSON.parse(JSON.stringify(obj)); + const res = await client.ft.profileAggregate('index', '*'); + + // TODO uncomment after https://redis.io/docs/latest/commands/ft.aggregate/#return + // starts returning valid values + // assert.equal(res.Results.total_results, 2); + + const normalizedRes = normalizeObject(res); + assert.ok(normalizedRes.Profile.Shards); + }, GLOBAL.SERVERS.OPEN_3); +}); diff --git a/packages/search/lib/commands/PROFILE_AGGREGATE.ts b/packages/search/lib/commands/PROFILE_AGGREGATE.ts new file mode 100644 index 00000000000..99aca95a698 --- /dev/null +++ b/packages/search/lib/commands/PROFILE_AGGREGATE.ts @@ -0,0 +1,44 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { Command, ReplyUnion, UnwrapReply } from '@redis/client/dist/lib/RESP/types'; +import AGGREGATE, { AggregateRawReply, FtAggregateOptions, parseAggregateOptions } from 
'./AGGREGATE'; +import { ProfileOptions, ProfileRawReplyResp2, ProfileReplyResp2, } from './PROFILE_SEARCH'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Profiles the execution of an aggregation query for performance analysis. + * @param parser - The command parser + * @param index - Name of the index to profile query against + * @param query - The aggregation query to profile + * @param options - Optional parameters: + * - LIMITED: Collect limited timing information only + * - All options supported by FT.AGGREGATE command + */ + parseCommand( + parser: CommandParser, + index: string, + query: string, + options?: ProfileOptions & FtAggregateOptions + ) { + parser.push('FT.PROFILE', index, 'AGGREGATE'); + + if (options?.LIMITED) { + parser.push('LIMITED'); + } + + parser.push('QUERY', query); + + parseAggregateOptions(parser, options) + }, + transformReply: { + 2: (reply: UnwrapReply>): ProfileReplyResp2 => { + return { + results: AGGREGATE.transformReply[2](reply[0]), + profile: reply[1] + } + }, + 3: (reply: ReplyUnion): ReplyUnion => reply + }, + unstableResp3: true +} as const satisfies Command; diff --git a/packages/search/lib/commands/PROFILE_SEARCH.spec.ts b/packages/search/lib/commands/PROFILE_SEARCH.spec.ts new file mode 100644 index 00000000000..419b879d00a --- /dev/null +++ b/packages/search/lib/commands/PROFILE_SEARCH.spec.ts @@ -0,0 +1,95 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import PROFILE_SEARCH from './PROFILE_SEARCH'; +import { SCHEMA_FIELD_TYPE } from './CREATE'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; +import { DEFAULT_DIALECT } from '../dialect/default'; + +describe('PROFILE SEARCH', () => { + describe('transformArguments', () => { + it('without options', () => { + assert.deepEqual( + parseArgs(PROFILE_SEARCH, 'index', 'query'), + ['FT.PROFILE', 'index', 'SEARCH', 'QUERY', 'query', 'DIALECT', DEFAULT_DIALECT] 
+ ); + }); + + it('with options', () => { + assert.deepEqual( + parseArgs(PROFILE_SEARCH, 'index', 'query', { + LIMITED: true, + VERBATIM: true, + INKEYS: 'key' + }), + ['FT.PROFILE', 'index', 'SEARCH', 'LIMITED', 'QUERY', 'query', + 'VERBATIM', 'INKEYS', '1', 'key', 'DIALECT', DEFAULT_DIALECT] + ); + }); + }); + + testUtils.testWithClientIfVersionWithinRange([[8], 'LATEST'], 'client.ft.search', async client => { + await Promise.all([ + client.ft.create('index', { + field: SCHEMA_FIELD_TYPE.NUMERIC + }), + client.hSet('1', 'field', '1') + ]); + + const normalizeObject = obj => JSON.parse(JSON.stringify(obj)); + + const res = await client.ft.profileSearch('index', '*'); + + const normalizedRes = normalizeObject(res); + assert.equal(normalizedRes.results.total, 1); + + assert.ok(normalizedRes.profile[0] === 'Shards'); + assert.ok(Array.isArray(normalizedRes.profile[1])); + assert.ok(normalizedRes.profile[2] === 'Coordinator'); + assert.ok(Array.isArray(normalizedRes.profile[3])); + + const shardProfile = normalizedRes.profile[1][0]; + assert.ok(shardProfile.includes('Total profile time')); + assert.ok(shardProfile.includes('Parsing time')); + assert.ok(shardProfile.includes('Pipeline creation time')); + assert.ok(shardProfile.includes('Warning')); + assert.ok(shardProfile.includes('Iterators profile')); + ; + + }, GLOBAL.SERVERS.OPEN); + + + + + + testUtils.testWithClientIfVersionWithinRange([[7, 2, 0], [7, 4, 0]], 'client.ft.search', async client => { + await Promise.all([ + client.ft.create('index', { + field: SCHEMA_FIELD_TYPE.NUMERIC + }), + client.hSet('1', 'field', '1') + ]); + + const normalizeObject = obj => JSON.parse(JSON.stringify(obj)); + + const res = await client.ft.profileSearch('index', '*'); + + const normalizedRes = normalizeObject(res); + assert.equal(normalizedRes.results.total, 1); + + assert.ok(Array.isArray(normalizedRes.profile)); + assert.equal(normalizedRes.profile[0][0], 'Total profile time'); + assert.equal(normalizedRes.profile[1][0], 
'Parsing time'); + assert.equal(normalizedRes.profile[2][0], 'Pipeline creation time'); + assert.equal(normalizedRes.profile[3][0], 'Warning'); + assert.equal(normalizedRes.profile[4][0], 'Iterators profile'); + assert.equal(normalizedRes.profile[5][0], 'Result processors profile'); + + const iteratorsProfile = normalizedRes.profile[4][1]; + assert.equal(iteratorsProfile[0], 'Type'); + assert.equal(iteratorsProfile[1], 'WILDCARD'); + assert.equal(iteratorsProfile[2], 'Time'); + assert.equal(iteratorsProfile[4], 'Counter'); + + }, GLOBAL.SERVERS.OPEN); + +}); diff --git a/packages/search/lib/commands/PROFILE_SEARCH.ts b/packages/search/lib/commands/PROFILE_SEARCH.ts new file mode 100644 index 00000000000..cdbb12fcdd8 --- /dev/null +++ b/packages/search/lib/commands/PROFILE_SEARCH.ts @@ -0,0 +1,60 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { ArrayReply, Command, RedisArgument, ReplyUnion, TuplesReply, UnwrapReply } from '@redis/client/dist/lib/RESP/types'; +import { AggregateReply } from './AGGREGATE'; +import SEARCH, { FtSearchOptions, SearchRawReply, SearchReply, parseSearchOptions } from './SEARCH'; + +export type ProfileRawReplyResp2 = TuplesReply<[ + T, + ArrayReply +]>; + +type ProfileSearchResponseResp2 = ProfileRawReplyResp2; + +export interface ProfileReplyResp2 { + results: SearchReply | AggregateReply; + profile: ReplyUnion; +} + +export interface ProfileOptions { + LIMITED?: true; +} + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Profiles the execution of a search query for performance analysis. 
+ * @param parser - The command parser + * @param index - Name of the index to profile query against + * @param query - The search query to profile + * @param options - Optional parameters: + * - LIMITED: Collect limited timing information only + * - All options supported by FT.SEARCH command + */ + parseCommand( + parser: CommandParser, + index: RedisArgument, + query: RedisArgument, + options?: ProfileOptions & FtSearchOptions + ) { + parser.push('FT.PROFILE', index, 'SEARCH'); + + if (options?.LIMITED) { + parser.push('LIMITED'); + } + + parser.push('QUERY', query); + + parseSearchOptions(parser, options); + }, + transformReply: { + 2: (reply: UnwrapReply): ProfileReplyResp2 => { + return { + results: SEARCH.transformReply[2](reply[0]), + profile: reply[1] + }; + }, + 3: (reply: ReplyUnion): ReplyUnion => reply + }, + unstableResp3: true +} as const satisfies Command; diff --git a/packages/search/lib/commands/SEARCH.spec.ts b/packages/search/lib/commands/SEARCH.spec.ts new file mode 100644 index 00000000000..97e1a9a9885 --- /dev/null +++ b/packages/search/lib/commands/SEARCH.spec.ts @@ -0,0 +1,409 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import SEARCH from './SEARCH'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; +import { DEFAULT_DIALECT } from '../dialect/default'; + + +describe('FT.SEARCH', () => { + describe('transformArguments', () => { + it('without options', () => { + assert.deepEqual( + parseArgs(SEARCH, 'index', 'query'), + ['FT.SEARCH', 'index', 'query', 'DIALECT', DEFAULT_DIALECT] + ); + }); + + it('with VERBATIM', () => { + assert.deepEqual( + parseArgs(SEARCH, 'index', 'query', { + VERBATIM: true + }), + ['FT.SEARCH', 'index', 'query', 'VERBATIM', 'DIALECT', DEFAULT_DIALECT] + ); + }); + + it('with NOSTOPWORDS', () => { + assert.deepEqual( + parseArgs(SEARCH, 'index', 'query', { + NOSTOPWORDS: true + }), + ['FT.SEARCH', 'index', 'query', 'NOSTOPWORDS', 
'DIALECT', DEFAULT_DIALECT] + ); + }); + + it('with INKEYS', () => { + assert.deepEqual( + parseArgs(SEARCH, 'index', 'query', { + INKEYS: 'key' + }), + ['FT.SEARCH', 'index', 'query', 'INKEYS', '1', 'key', 'DIALECT', DEFAULT_DIALECT] + ); + }); + + it('with INFIELDS', () => { + assert.deepEqual( + parseArgs(SEARCH, 'index', 'query', { + INFIELDS: 'field' + }), + ['FT.SEARCH', 'index', 'query', 'INFIELDS', '1', 'field', 'DIALECT', DEFAULT_DIALECT] + ); + }); + + it('with RETURN', () => { + assert.deepEqual( + parseArgs(SEARCH, 'index', 'query', { + RETURN: 'return' + }), + ['FT.SEARCH', 'index', 'query', 'RETURN', '1', 'return', 'DIALECT', DEFAULT_DIALECT] + ); + }); + + describe('with SUMMARIZE', () => { + it('true', () => { + assert.deepEqual( + parseArgs(SEARCH, 'index', 'query', { + SUMMARIZE: true + }), + ['FT.SEARCH', 'index', 'query', 'SUMMARIZE', 'DIALECT', DEFAULT_DIALECT] + ); + }); + + describe('with FIELDS', () => { + it('string', () => { + assert.deepEqual( + parseArgs(SEARCH, 'index', 'query', { + SUMMARIZE: { + FIELDS: '@field' + } + }), + ['FT.SEARCH', 'index', 'query', 'SUMMARIZE', 'FIELDS', '1', '@field', 'DIALECT', DEFAULT_DIALECT] + ); + }); + + it('Array', () => { + assert.deepEqual( + parseArgs(SEARCH, 'index', 'query', { + SUMMARIZE: { + FIELDS: ['@1', '@2'] + } + }), + ['FT.SEARCH', 'index', 'query', 'SUMMARIZE', 'FIELDS', '2', '@1', '@2', 'DIALECT', DEFAULT_DIALECT] + ); + }); + }); + + it('with FRAGS', () => { + assert.deepEqual( + parseArgs(SEARCH, 'index', 'query', { + SUMMARIZE: { + FRAGS: 1 + } + }), + ['FT.SEARCH', 'index', 'query', 'SUMMARIZE', 'FRAGS', '1', 'DIALECT', DEFAULT_DIALECT] + ); + }); + + it('with LEN', () => { + assert.deepEqual( + parseArgs(SEARCH, 'index', 'query', { + SUMMARIZE: { + LEN: 1 + } + }), + ['FT.SEARCH', 'index', 'query', 'SUMMARIZE', 'LEN', '1', 'DIALECT', DEFAULT_DIALECT] + ); + }); + + it('with SEPARATOR', () => { + assert.deepEqual( + parseArgs(SEARCH, 'index', 'query', { + SUMMARIZE: { + SEPARATOR: 
'separator' + } + }), + ['FT.SEARCH', 'index', 'query', 'SUMMARIZE', 'SEPARATOR', 'separator', 'DIALECT', DEFAULT_DIALECT] + ); + }); + }); + + describe('with HIGHLIGHT', () => { + it('true', () => { + assert.deepEqual( + parseArgs(SEARCH, 'index', 'query', { + HIGHLIGHT: true + }), + ['FT.SEARCH', 'index', 'query', 'HIGHLIGHT', 'DIALECT', DEFAULT_DIALECT] + ); + }); + + describe('with FIELDS', () => { + it('string', () => { + assert.deepEqual( + parseArgs(SEARCH, 'index', 'query', { + HIGHLIGHT: { + FIELDS: ['@field'] + } + }), + ['FT.SEARCH', 'index', 'query', 'HIGHLIGHT', 'FIELDS', '1', '@field', 'DIALECT', DEFAULT_DIALECT] + ); + }); + + it('Array', () => { + assert.deepEqual( + parseArgs(SEARCH, 'index', 'query', { + HIGHLIGHT: { + FIELDS: ['@1', '@2'] + } + }), + ['FT.SEARCH', 'index', 'query', 'HIGHLIGHT', 'FIELDS', '2', '@1', '@2', 'DIALECT', DEFAULT_DIALECT] + ); + }); + }); + + it('with TAGS', () => { + assert.deepEqual( + parseArgs(SEARCH, 'index', 'query', { + HIGHLIGHT: { + TAGS: { + open: 'open', + close: 'close' + } + } + }), + ['FT.SEARCH', 'index', 'query', 'HIGHLIGHT', 'TAGS', 'open', 'close', 'DIALECT', DEFAULT_DIALECT] + ); + }); + }); + + it('with SLOP', () => { + assert.deepEqual( + parseArgs(SEARCH, 'index', 'query', { + SLOP: 1 + }), + ['FT.SEARCH', 'index', 'query', 'SLOP', '1', 'DIALECT', DEFAULT_DIALECT] + ); + }); + + it('with TIMEOUT', () => { + assert.deepEqual( + parseArgs(SEARCH, 'index', 'query', { + TIMEOUT: 1 + }), + ['FT.SEARCH', 'index', 'query', 'TIMEOUT', '1', 'DIALECT', DEFAULT_DIALECT] + ); + }); + + it('with INORDER', () => { + assert.deepEqual( + parseArgs(SEARCH, 'index', 'query', { + INORDER: true + }), + ['FT.SEARCH', 'index', 'query', 'INORDER', 'DIALECT', DEFAULT_DIALECT] + ); + }); + + it('with LANGUAGE', () => { + assert.deepEqual( + parseArgs(SEARCH, 'index', 'query', { + LANGUAGE: 'Arabic' + }), + ['FT.SEARCH', 'index', 'query', 'LANGUAGE', 'Arabic', 'DIALECT', DEFAULT_DIALECT] + ); + }); + + it('with EXPANDER', 
() => { + assert.deepEqual( + parseArgs(SEARCH, 'index', 'query', { + EXPANDER: 'expender' + }), + ['FT.SEARCH', 'index', 'query', 'EXPANDER', 'expender', 'DIALECT', DEFAULT_DIALECT] + ); + }); + + it('with SCORER', () => { + assert.deepEqual( + parseArgs(SEARCH, 'index', 'query', { + SCORER: 'scorer' + }), + ['FT.SEARCH', 'index', 'query', 'SCORER', 'scorer', 'DIALECT', DEFAULT_DIALECT] + ); + }); + + it('with SORTBY', () => { + assert.deepEqual( + parseArgs(SEARCH, 'index', 'query', { + SORTBY: '@by' + }), + ['FT.SEARCH', 'index', 'query', 'SORTBY', '@by', 'DIALECT', DEFAULT_DIALECT] + ); + }); + + it('with LIMIT', () => { + assert.deepEqual( + parseArgs(SEARCH, 'index', 'query', { + LIMIT: { + from: 0, + size: 1 + } + }), + ['FT.SEARCH', 'index', 'query', 'LIMIT', '0', '1', 'DIALECT', DEFAULT_DIALECT] + ); + }); + + it('with PARAMS', () => { + assert.deepEqual( + parseArgs(SEARCH, 'index', 'query', { + PARAMS: { + string: 'string', + buffer: Buffer.from('buffer'), + number: 1 + } + }), + ['FT.SEARCH', 'index', 'query', 'PARAMS', '6', 'string', 'string', 'buffer', Buffer.from('buffer'), 'number', '1', 'DIALECT', DEFAULT_DIALECT] + ); + }); + + it('with DIALECT', () => { + assert.deepEqual( + parseArgs(SEARCH, 'index', 'query', { + DIALECT: 1 + }), + ['FT.SEARCH', 'index', 'query', 'DIALECT', '1'] + ); + }); + }); + + describe('client.ft.search', () => { + testUtils.testWithClient('without optional options', async client => { + await Promise.all([ + client.ft.create('index', { + field: 'TEXT' + }), + client.hSet('1', 'field', '1') + ]); + + assert.deepEqual( + await client.ft.search('index', '*'), + { + total: 1, + documents: [{ + id: '1', + value: Object.create(null, { + field: { + value: '1', + configurable: true, + enumerable: true + } + }) + }] + } + ); + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithClient('RETURN []', async client => { + await Promise.all([ + client.ft.create('index', { + field: 'TEXT' + }), + client.hSet('1', 'field', '1'), + 
client.hSet('2', 'field', '2') + ]); + + assert.deepEqual( + await client.ft.search('index', '*', { + RETURN: [] + }), + { + total: 2, + documents: [{ + id: '1', + value: Object.create(null) + }, { + id: '2', + value: Object.create(null) + }] + } + ); + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithClient('properly parse content/nocontent scenarios', async client => { + + const indexName = 'foo'; + await client.ft.create( + indexName, + { + itemOrder: { + type: 'NUMERIC', + SORTABLE: true, + }, + name: { + type: 'TEXT', + }, + }, + { + ON: 'HASH', + PREFIX: 'item:', + } + ); + + await client.hSet("item:1", { + itemOrder: 1, + name: "First item", + }); + + await client.hSet("item:2", { + itemOrder: 2, + name: "Second item", + }); + + await client.hSet("item:3", { + itemOrder: 3, + name: "Third item", + }); + + // Search with SORTBY and LIMIT + let result = await client.ft.search(indexName, "@itemOrder:[0 10]", { + SORTBY: { + BY: "itemOrder", + DIRECTION: "ASC", + }, + LIMIT: { + from: 0, + size: 1, // only get first result + }, + }); + + assert.equal(result.total, 3, "Result's `total` value reflects the total scanned documents"); + assert.equal(result.documents.length, 1); + let doc = result.documents[0]; + assert.equal(doc.id, 'item:1'); + assert.equal(doc.value.itemOrder, '1'); + assert.equal(doc.value.name, 'First item'); + + await client.del("item:3"); + + // Search again after removing item:3 + result = await client.ft.search(indexName, "@itemOrder:[0 10]", { + SORTBY: { + BY: "itemOrder", + DIRECTION: "ASC", + }, + LIMIT: { + from: 0, + size: 1, // only get first result + }, + }); + + assert.equal(result.total, 2, "Result's `total` value reflects the total scanned documents"); + assert.equal(result.documents.length, 1); + doc = result.documents[0]; + assert.equal(doc.id, 'item:1'); + assert.equal(doc.value.itemOrder, '1'); + assert.equal(doc.value.name, 'First item'); + + + }, GLOBAL.SERVERS.OPEN); + + }); +}); diff --git 
a/packages/search/lib/commands/SEARCH.ts b/packages/search/lib/commands/SEARCH.ts new file mode 100644 index 00000000000..03779a446cc --- /dev/null +++ b/packages/search/lib/commands/SEARCH.ts @@ -0,0 +1,246 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, Command, ReplyUnion } from '@redis/client/dist/lib/RESP/types'; +import { RedisVariadicArgument, parseOptionalVariadicArgument } from '@redis/client/dist/lib/commands/generic-transformers'; +import { RediSearchLanguage } from './CREATE'; +import { DEFAULT_DIALECT } from '../dialect/default'; + +export type FtSearchParams = Record; + +export function parseParamsArgument(parser: CommandParser, params?: FtSearchParams) { + if (params) { + parser.push('PARAMS'); + + const args: Array = []; + for (const key in params) { + if (!Object.hasOwn(params, key)) continue; + + const value = params[key]; + args.push( + key, + typeof value === 'number' ? value.toString() : value + ); + } + + parser.pushVariadicWithLength(args); + } +} + +export interface FtSearchOptions { + VERBATIM?: boolean; + NOSTOPWORDS?: boolean; + INKEYS?: RedisVariadicArgument; + INFIELDS?: RedisVariadicArgument; + RETURN?: RedisVariadicArgument; + SUMMARIZE?: boolean | { + FIELDS?: RedisArgument | Array; + FRAGS?: number; + LEN?: number; + SEPARATOR?: RedisArgument; + }; + HIGHLIGHT?: boolean | { + FIELDS?: RedisArgument | Array; + TAGS?: { + open: RedisArgument; + close: RedisArgument; + }; + }; + SLOP?: number; + TIMEOUT?: number; + INORDER?: boolean; + LANGUAGE?: RediSearchLanguage; + EXPANDER?: RedisArgument; + SCORER?: RedisArgument; + SORTBY?: RedisArgument | { + BY: RedisArgument; + DIRECTION?: 'ASC' | 'DESC'; + }; + LIMIT?: { + from: number | RedisArgument; + size: number | RedisArgument; + }; + PARAMS?: FtSearchParams; + DIALECT?: number; +} + +export function parseSearchOptions(parser: CommandParser, options?: FtSearchOptions) { + if (options?.VERBATIM) { + parser.push('VERBATIM'); + } + + if 
(options?.NOSTOPWORDS) { + parser.push('NOSTOPWORDS'); + } + + parseOptionalVariadicArgument(parser, 'INKEYS', options?.INKEYS); + parseOptionalVariadicArgument(parser, 'INFIELDS', options?.INFIELDS); + parseOptionalVariadicArgument(parser, 'RETURN', options?.RETURN); + + if (options?.SUMMARIZE) { + parser.push('SUMMARIZE'); + + if (typeof options.SUMMARIZE === 'object') { + parseOptionalVariadicArgument(parser, 'FIELDS', options.SUMMARIZE.FIELDS); + + if (options.SUMMARIZE.FRAGS !== undefined) { + parser.push('FRAGS', options.SUMMARIZE.FRAGS.toString()); + } + + if (options.SUMMARIZE.LEN !== undefined) { + parser.push('LEN', options.SUMMARIZE.LEN.toString()); + } + + if (options.SUMMARIZE.SEPARATOR !== undefined) { + parser.push('SEPARATOR', options.SUMMARIZE.SEPARATOR); + } + } + } + + if (options?.HIGHLIGHT) { + parser.push('HIGHLIGHT'); + + if (typeof options.HIGHLIGHT === 'object') { + parseOptionalVariadicArgument(parser, 'FIELDS', options.HIGHLIGHT.FIELDS); + + if (options.HIGHLIGHT.TAGS) { + parser.push('TAGS', options.HIGHLIGHT.TAGS.open, options.HIGHLIGHT.TAGS.close); + } + } + } + + if (options?.SLOP !== undefined) { + parser.push('SLOP', options.SLOP.toString()); + } + + if (options?.TIMEOUT !== undefined) { + parser.push('TIMEOUT', options.TIMEOUT.toString()); + } + + if (options?.INORDER) { + parser.push('INORDER'); + } + + if (options?.LANGUAGE) { + parser.push('LANGUAGE', options.LANGUAGE); + } + + if (options?.EXPANDER) { + parser.push('EXPANDER', options.EXPANDER); + } + + if (options?.SCORER) { + parser.push('SCORER', options.SCORER); + } + + if (options?.SORTBY) { + parser.push('SORTBY'); + + if (typeof options.SORTBY === 'string' || options.SORTBY instanceof Buffer) { + parser.push(options.SORTBY); + } else { + parser.push(options.SORTBY.BY); + + if (options.SORTBY.DIRECTION) { + parser.push(options.SORTBY.DIRECTION); + } + } + } + + if (options?.LIMIT) { + parser.push('LIMIT', options.LIMIT.from.toString(), options.LIMIT.size.toString()); + } 
+ + parseParamsArgument(parser, options?.PARAMS); + + if (options?.DIALECT) { + parser.push('DIALECT', options.DIALECT.toString()); + } else { + parser.push('DIALECT', DEFAULT_DIALECT); + } +} + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Searches a RediSearch index with the given query. + * @param parser - The command parser + * @param index - The index name to search + * @param query - The text query to search. For syntax, see https://redis.io/docs/stack/search/reference/query_syntax + * @param options - Optional search parameters including: + * - VERBATIM: do not try to use stemming for query expansion + * - NOSTOPWORDS: do not filter stopwords from the query + * - INKEYS/INFIELDS: restrict the search to specific keys/fields + * - RETURN: limit which fields are returned + * - SUMMARIZE/HIGHLIGHT: create search result highlights + * - LIMIT: pagination control + * - SORTBY: sort results by a specific field + * - PARAMS: bind parameters to the query + */ + parseCommand(parser: CommandParser, index: RedisArgument, query: RedisArgument, options?: FtSearchOptions) { + parser.push('FT.SEARCH', index, query); + + parseSearchOptions(parser, options); + }, + transformReply: { + 2: (reply: SearchRawReply): SearchReply => { + // if reply[2] is array, then we have content/documents. Otherwise, only ids + const withoutDocuments = reply.length > 2 && !Array.isArray(reply[2]); + + const documents = []; + let i = 1; + while (i < reply.length) { + documents.push({ + id: reply[i++], + value: withoutDocuments ? 
Object.create(null) : documentValue(reply[i++]) + }); + } + + return { + total: reply[0], + documents + }; + }, + 3: undefined as unknown as () => ReplyUnion + }, + unstableResp3: true +} as const satisfies Command; + +export type SearchRawReply = Array; + +interface SearchDocumentValue { + [key: string]: string | number | null | Array | SearchDocumentValue; +} + +export interface SearchReply { + total: number; + documents: Array<{ + id: string; + value: SearchDocumentValue; + }>; +} + +function documentValue(tuples: any) { + const message = Object.create(null); + + if(!tuples) { + return message; + } + + let i = 0; + while (i < tuples.length) { + const key = tuples[i++], + value = tuples[i++]; + if (key === '$') { // might be a JSON reply + try { + Object.assign(message, JSON.parse(value)); + continue; + } catch { + // set as a regular property if not a valid JSON + } + } + + message[key] = value; + } + + return message; +} diff --git a/packages/search/lib/commands/SEARCH_NOCONTENT.spec.ts b/packages/search/lib/commands/SEARCH_NOCONTENT.spec.ts new file mode 100644 index 00000000000..cd37409b5bb --- /dev/null +++ b/packages/search/lib/commands/SEARCH_NOCONTENT.spec.ts @@ -0,0 +1,36 @@ +import { strict as assert } from 'assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import SEARCH_NOCONTENT from './SEARCH_NOCONTENT'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; +import { DEFAULT_DIALECT } from '../dialect/default'; + +describe('FT.SEARCH NOCONTENT', () => { + describe('transformArguments', () => { + it('without options', () => { + assert.deepEqual( + parseArgs(SEARCH_NOCONTENT, 'index', 'query'), + ['FT.SEARCH', 'index', 'query', 'DIALECT', DEFAULT_DIALECT, 'NOCONTENT'] + ); + }); + }); + + describe('client.ft.searchNoContent', () => { + testUtils.testWithClient('returns total and keys', async client => { + await Promise.all([ + client.ft.create('index', { + field: 'TEXT' + }), + client.hSet('1', 'field', 'field1'), + 
client.hSet('2', 'field', 'field2') + ]); + + assert.deepEqual( + await client.ft.searchNoContent('index', '*'), + { + total: 2, + documents: ['1', '2'] + } + ); + }, GLOBAL.SERVERS.OPEN); + }); +}); diff --git a/packages/search/lib/commands/SEARCH_NOCONTENT.ts b/packages/search/lib/commands/SEARCH_NOCONTENT.ts new file mode 100644 index 00000000000..2fcfd2b4166 --- /dev/null +++ b/packages/search/lib/commands/SEARCH_NOCONTENT.ts @@ -0,0 +1,34 @@ +import { Command, ReplyUnion } from '@redis/client/dist/lib/RESP/types'; +import SEARCH, { SearchRawReply } from './SEARCH'; + +export default { + NOT_KEYED_COMMAND: SEARCH.NOT_KEYED_COMMAND, + IS_READ_ONLY: SEARCH.IS_READ_ONLY, + /** + * Performs a search query but returns only document ids without their contents. + * @param args - Same parameters as FT.SEARCH: + * - parser: The command parser + * - index: Name of the index to search + * - query: The text query to search + * - options: Optional search parameters + */ + parseCommand(...args: Parameters) { + SEARCH.parseCommand(...args); + args[0].push('NOCONTENT'); + }, + transformReply: { + 2: (reply: SearchRawReply): SearchNoContentReply => { + return { + total: reply[0], + documents: reply.slice(1) + } + }, + 3: undefined as unknown as () => ReplyUnion + }, + unstableResp3: true +} as const satisfies Command; + +export interface SearchNoContentReply { + total: number; + documents: Array; +}; \ No newline at end of file diff --git a/packages/search/lib/commands/SPELLCHECK.spec.ts b/packages/search/lib/commands/SPELLCHECK.spec.ts new file mode 100644 index 00000000000..482deed6a45 --- /dev/null +++ b/packages/search/lib/commands/SPELLCHECK.spec.ts @@ -0,0 +1,81 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import SPELLCHECK from './SPELLCHECK'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; +import { DEFAULT_DIALECT } from '../dialect/default'; + +describe('FT.SPELLCHECK', () => { + 
describe('transformArguments', () => { + it('without options', () => { + assert.deepEqual( + parseArgs(SPELLCHECK, 'index', 'query'), + ['FT.SPELLCHECK', 'index', 'query', 'DIALECT', DEFAULT_DIALECT] + ); + }); + + it('with DISTANCE', () => { + assert.deepEqual( + parseArgs(SPELLCHECK, 'index', 'query', { + DISTANCE: 2 + }), + ['FT.SPELLCHECK', 'index', 'query', 'DISTANCE', '2', 'DIALECT', DEFAULT_DIALECT] + ); + }); + + describe('with TERMS', () => { + it('single', () => { + assert.deepEqual( + parseArgs(SPELLCHECK, 'index', 'query', { + TERMS: { + mode: 'INCLUDE', + dictionary: 'dictionary' + } + }), + ['FT.SPELLCHECK', 'index', 'query', 'TERMS', 'INCLUDE', 'dictionary', 'DIALECT', DEFAULT_DIALECT] + ); + }); + + it('multiple', () => { + assert.deepEqual( + parseArgs(SPELLCHECK, 'index', 'query', { + TERMS: [{ + mode: 'INCLUDE', + dictionary: 'include' + }, { + mode: 'EXCLUDE', + dictionary: 'exclude' + }] + }), + ['FT.SPELLCHECK', 'index', 'query', 'TERMS', 'INCLUDE', 'include', 'TERMS', 'EXCLUDE', 'exclude', 'DIALECT', DEFAULT_DIALECT] + ); + }); + }); + + it('with DIALECT', () => { + assert.deepEqual( + parseArgs(SPELLCHECK, 'index', 'query', { + DIALECT: 1 + }), + ['FT.SPELLCHECK', 'index', 'query', 'DIALECT', '1'] + ); + }); + }); + + testUtils.testWithClient('client.ft.spellCheck', async client => { + const [,, reply] = await Promise.all([ + client.ft.create('index', { + field: 'TEXT' + }), + client.hSet('key', 'field', 'query'), + client.ft.spellCheck('index', 'quer') + ]); + + assert.deepEqual(reply, [{ + term: 'quer', + suggestions: [{ + score: 1, + suggestion: 'query' + }] + }]); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/search/lib/commands/SPELLCHECK.ts b/packages/search/lib/commands/SPELLCHECK.ts new file mode 100644 index 00000000000..d6d84b19543 --- /dev/null +++ b/packages/search/lib/commands/SPELLCHECK.ts @@ -0,0 +1,83 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, Command, ReplyUnion 
} from '@redis/client/dist/lib/RESP/types'; +import { DEFAULT_DIALECT } from '../dialect/default'; + +export interface Terms { + mode: 'INCLUDE' | 'EXCLUDE'; + dictionary: RedisArgument; +} + +export interface FtSpellCheckOptions { + DISTANCE?: number; + TERMS?: Terms | Array; + DIALECT?: number; +} + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Performs spelling correction on a search query. + * @param parser - The command parser + * @param index - Name of the index to use for spelling corrections + * @param query - The search query to check for spelling + * @param options - Optional parameters: + * - DISTANCE: Maximum Levenshtein distance for spelling suggestions + * - TERMS: Custom dictionary terms to include/exclude + * - DIALECT: Version of query dialect to use (defaults to 1) + */ + parseCommand(parser: CommandParser, index: RedisArgument, query: RedisArgument, options?: FtSpellCheckOptions) { + parser.push('FT.SPELLCHECK', index, query); + + if (options?.DISTANCE) { + parser.push('DISTANCE', options.DISTANCE.toString()); + } + + if (options?.TERMS) { + if (Array.isArray(options.TERMS)) { + for (const term of options.TERMS) { + parseTerms(parser, term); + } + } else { + parseTerms(parser, options.TERMS); + } + } + + if (options?.DIALECT) { + parser.push('DIALECT', options.DIALECT.toString()); + } else { + parser.push('DIALECT', DEFAULT_DIALECT); + } + }, + transformReply: { + 2: (rawReply: SpellCheckRawReply): SpellCheckReply => { + return rawReply.map(([, term, suggestions]) => ({ + term, + suggestions: suggestions.map(([score, suggestion]) => ({ + score: Number(score), + suggestion + })) + })); + }, + 3: undefined as unknown as () => ReplyUnion, + }, + unstableResp3: true +} as const satisfies Command; + +function parseTerms(parser: CommandParser, { mode, dictionary }: Terms) { + parser.push('TERMS', mode, dictionary); +} + +type SpellCheckRawReply = Array<[ + _: string, + term: string, + suggestions: Array<[score: string, 
suggestion: string]> +]>; + +type SpellCheckReply = Array<{ + term: string, + suggestions: Array<{ + score: number, + suggestion: string + }> +}>; diff --git a/packages/search/lib/commands/SUGADD.spec.ts b/packages/search/lib/commands/SUGADD.spec.ts new file mode 100644 index 00000000000..2e0ce92edbc --- /dev/null +++ b/packages/search/lib/commands/SUGADD.spec.ts @@ -0,0 +1,36 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import SUGADD from './SUGADD'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('FT.SUGADD', () => { + describe('transformArguments', () => { + it('without options', () => { + assert.deepEqual( + parseArgs(SUGADD, 'key', 'string', 1), + ['FT.SUGADD', 'key', 'string', '1'] + ); + }); + + it('with INCR', () => { + assert.deepEqual( + parseArgs(SUGADD, 'key', 'string', 1, { INCR: true }), + ['FT.SUGADD', 'key', 'string', '1', 'INCR'] + ); + }); + + it('with PAYLOAD', () => { + assert.deepEqual( + parseArgs(SUGADD, 'key', 'string', 1, { PAYLOAD: 'payload' }), + ['FT.SUGADD', 'key', 'string', '1', 'PAYLOAD', 'payload'] + ); + }); + }); + + testUtils.testWithClient('client.ft.sugAdd', async client => { + assert.equal( + await client.ft.sugAdd('key', 'string', 1), + 1 + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/search/lib/commands/SUGADD.ts b/packages/search/lib/commands/SUGADD.ts new file mode 100644 index 00000000000..3fa592a2733 --- /dev/null +++ b/packages/search/lib/commands/SUGADD.ts @@ -0,0 +1,35 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, NumberReply, Command } from '@redis/client/dist/lib/RESP/types'; + +export interface FtSugAddOptions { + INCR?: boolean; + PAYLOAD?: RedisArgument; +} + +export default { + IS_READ_ONLY: true, + /** + * Adds a suggestion string to an auto-complete suggestion dictionary. 
+ * @param parser - The command parser + * @param key - The suggestion dictionary key + * @param string - The suggestion string to add + * @param score - The suggestion score used for sorting + * @param options - Optional parameters: + * - INCR: If true, increment the existing entry's score + * - PAYLOAD: Optional payload to associate with the suggestion + */ + parseCommand(parser: CommandParser, key: RedisArgument, string: RedisArgument, score: number, options?: FtSugAddOptions) { + parser.push('FT.SUGADD'); + parser.pushKey(key); + parser.push(string, score.toString()); + + if (options?.INCR) { + parser.push('INCR'); + } + + if (options?.PAYLOAD) { + parser.push('PAYLOAD', options.PAYLOAD); + } + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/search/lib/commands/SUGDEL.spec.ts b/packages/search/lib/commands/SUGDEL.spec.ts new file mode 100644 index 00000000000..21677f14213 --- /dev/null +++ b/packages/search/lib/commands/SUGDEL.spec.ts @@ -0,0 +1,20 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import SUGDEL from './SUGDEL'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('FT.SUGDEL', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(SUGDEL, 'key', 'string'), + ['FT.SUGDEL', 'key', 'string'] + ); + }); + + testUtils.testWithClient('client.ft.sugDel', async client => { + assert.equal( + await client.ft.sugDel('key', 'string'), + 0 + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/search/lib/commands/SUGDEL.ts b/packages/search/lib/commands/SUGDEL.ts new file mode 100644 index 00000000000..852b33f5c52 --- /dev/null +++ b/packages/search/lib/commands/SUGDEL.ts @@ -0,0 +1,18 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, NumberReply, Command } from '@redis/client/dist/lib/RESP/types'; + +export default { + 
IS_READ_ONLY: true, + /** + * Deletes a string from a suggestion dictionary. + * @param parser - The command parser + * @param key - The suggestion dictionary key + * @param string - The suggestion string to delete + */ + parseCommand(parser: CommandParser, key: RedisArgument, string: RedisArgument) { + parser.push('FT.SUGDEL'); + parser.pushKey(key); + parser.push(string); + }, + transformReply: undefined as unknown as () => NumberReply<0 | 1> +} as const satisfies Command; diff --git a/packages/search/lib/commands/SUGGET.spec.ts b/packages/search/lib/commands/SUGGET.spec.ts new file mode 100644 index 00000000000..b82ea547782 --- /dev/null +++ b/packages/search/lib/commands/SUGGET.spec.ts @@ -0,0 +1,57 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import SUGGET from './SUGGET'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('FT.SUGGET', () => { + describe('transformArguments', () => { + it('without options', () => { + assert.deepEqual( + parseArgs(SUGGET, 'key', 'prefix'), + ['FT.SUGGET', 'key', 'prefix'] + ); + }); + + it('with FUZZY', () => { + assert.deepEqual( + parseArgs(SUGGET, 'key', 'prefix', { FUZZY: true }), + ['FT.SUGGET', 'key', 'prefix', 'FUZZY'] + ); + }); + + it('with MAX', () => { + assert.deepEqual( + parseArgs(SUGGET, 'key', 'prefix', { MAX: 10 }), + ['FT.SUGGET', 'key', 'prefix', 'MAX', '10'] + ); + }); + }); + + describe('client.ft.sugGet', () => { + + testUtils.testWithClientIfVersionWithinRange([[8], 'LATEST'], 'null', async client => { + assert.deepStrictEqual( + await client.ft.sugGet('key', 'prefix'), + [] + ); + }, GLOBAL.SERVERS.OPEN); + + + + testUtils.testWithClientIfVersionWithinRange([[6, 2, 0], [7, 4, 0]], 'null', async client => { + assert.deepStrictEqual( + await client.ft.sugGet('key', 'prefix'), + null + ); + }, GLOBAL.SERVERS.OPEN) + + testUtils.testWithClient('with suggestions', async client => { + const [, reply] = await 
Promise.all([ + client.ft.sugAdd('key', 'string', 1), + client.ft.sugGet('key', 's') + ]); + + assert.deepEqual(reply, ['string']); + }, GLOBAL.SERVERS.OPEN); + }); +}); diff --git a/packages/search/lib/commands/SUGGET.ts b/packages/search/lib/commands/SUGGET.ts new file mode 100644 index 00000000000..6c463a020e2 --- /dev/null +++ b/packages/search/lib/commands/SUGGET.ts @@ -0,0 +1,34 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { NullReply, ArrayReply, BlobStringReply, Command, RedisArgument } from '@redis/client/dist/lib/RESP/types'; + +export interface FtSugGetOptions { + FUZZY?: boolean; + MAX?: number; +} + +export default { + IS_READ_ONLY: true, + /** + * Gets completion suggestions for a prefix from a suggestion dictionary. + * @param parser - The command parser + * @param key - The suggestion dictionary key + * @param prefix - The prefix to get completion suggestions for + * @param options - Optional parameters: + * - FUZZY: Enable fuzzy prefix matching + * - MAX: Maximum number of results to return + */ + parseCommand(parser: CommandParser, key: RedisArgument, prefix: RedisArgument, options?: FtSugGetOptions) { + parser.push('FT.SUGGET'); + parser.pushKey(key); + parser.push(prefix); + + if (options?.FUZZY) { + parser.push('FUZZY'); + } + + if (options?.MAX !== undefined) { + parser.push('MAX', options.MAX.toString()); + } + }, + transformReply: undefined as unknown as () => NullReply | ArrayReply +} as const satisfies Command; diff --git a/packages/search/lib/commands/SUGGET_WITHPAYLOADS.spec.ts b/packages/search/lib/commands/SUGGET_WITHPAYLOADS.spec.ts new file mode 100644 index 00000000000..c01b87e2892 --- /dev/null +++ b/packages/search/lib/commands/SUGGET_WITHPAYLOADS.spec.ts @@ -0,0 +1,43 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import SUGGET_WITHPAYLOADS from './SUGGET_WITHPAYLOADS'; +import { parseArgs } from 
'@redis/client/lib/commands/generic-transformers'; + +describe('FT.SUGGET WITHPAYLOADS', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(SUGGET_WITHPAYLOADS, 'key', 'prefix'), + ['FT.SUGGET', 'key', 'prefix', 'WITHPAYLOADS'] + ); + }); + + testUtils.testWithClientIfVersionWithinRange([[8], 'LATEST'], 'null', async client => { + assert.deepStrictEqual( + await client.ft.sugGetWithPayloads('key', 'prefix'), + [] + ); + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithClientIfVersionWithinRange([[6], [7, 4, 0]], 'null', async client => { + assert.deepStrictEqual( + await client.ft.sugGetWithPayloads('key', 'prefix'), + null + ); + }, GLOBAL.SERVERS.OPEN); + + describe('with suggestions', () => { + testUtils.testWithClient('with suggestions', async client => { + const [, reply] = await Promise.all([ + client.ft.sugAdd('key', 'string', 1, { + PAYLOAD: 'payload' + }), + client.ft.sugGetWithPayloads('key', 'string') + ]); + + assert.deepEqual(reply, [{ + suggestion: 'string', + payload: 'payload' + }]); + }, GLOBAL.SERVERS.OPEN); + }); +}); diff --git a/packages/search/lib/commands/SUGGET_WITHPAYLOADS.ts b/packages/search/lib/commands/SUGGET_WITHPAYLOADS.ts new file mode 100644 index 00000000000..a83279be0ff --- /dev/null +++ b/packages/search/lib/commands/SUGGET_WITHPAYLOADS.ts @@ -0,0 +1,37 @@ +import { NullReply, ArrayReply, BlobStringReply, UnwrapReply, Command } from '@redis/client/dist/lib/RESP/types'; +import { isNullReply } from '@redis/client/dist/lib/commands/generic-transformers'; +import SUGGET from './SUGGET'; + +export default { + IS_READ_ONLY: SUGGET.IS_READ_ONLY, + /** + * Gets completion suggestions with their payloads from a suggestion dictionary. 
+ * @param args - Same parameters as FT.SUGGET: + * - parser: The command parser + * - key: The suggestion dictionary key + * - prefix: The prefix to get completion suggestions for + * - options: Optional parameters for fuzzy matching and max results + */ + parseCommand(...args: Parameters) { + SUGGET.parseCommand(...args); + args[0].push('WITHPAYLOADS'); + }, + transformReply(reply: NullReply | UnwrapReply>) { + if (isNullReply(reply)) return null; + + const transformedReply: Array<{ + suggestion: BlobStringReply; + payload: BlobStringReply; + }> = new Array(reply.length / 2); + let replyIndex = 0, + arrIndex = 0; + while (replyIndex < reply.length) { + transformedReply[arrIndex++] = { + suggestion: reply[replyIndex++], + payload: reply[replyIndex++] + }; + } + + return transformedReply; + } +} as const satisfies Command; diff --git a/packages/search/lib/commands/SUGGET_WITHSCORES.spec.ts b/packages/search/lib/commands/SUGGET_WITHSCORES.spec.ts new file mode 100644 index 00000000000..50db89ffe99 --- /dev/null +++ b/packages/search/lib/commands/SUGGET_WITHSCORES.spec.ts @@ -0,0 +1,35 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import SUGGET_WITHSCORES from './SUGGET_WITHSCORES'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('FT.SUGGET WITHSCORES', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(SUGGET_WITHSCORES, 'key', 'prefix'), + ['FT.SUGGET', 'key', 'prefix', 'WITHSCORES'] + ); + }); + + describe('client.ft.sugGetWithScores', () => { + + testUtils.testWithClientIfVersionWithinRange([[8],'LATEST'], 'null', async client => { + assert.deepStrictEqual( + await client.ft.sugGetWithScores('key', 'prefix'), + [] + ); + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithClientIfVersionWithinRange([[8],'LATEST'],'with suggestions', async client => { + const [, reply] = await Promise.all([ + client.ft.sugAdd('key', 'string', 1), + 
client.ft.sugGetWithScores('key', 's') + ]); + + assert.ok(Array.isArray(reply)); + assert.equal(reply.length, 1); + assert.equal(reply[0].suggestion, 'string'); + assert.equal(typeof reply[0].score, 'number'); + }, GLOBAL.SERVERS.OPEN); + }); +}); diff --git a/packages/search/lib/commands/SUGGET_WITHSCORES.ts b/packages/search/lib/commands/SUGGET_WITHSCORES.ts new file mode 100644 index 00000000000..5c0a3fba2a3 --- /dev/null +++ b/packages/search/lib/commands/SUGGET_WITHSCORES.ts @@ -0,0 +1,56 @@ +import { NullReply, ArrayReply, BlobStringReply, DoubleReply, UnwrapReply, Command, TypeMapping } from '@redis/client/dist/lib/RESP/types'; +import { isNullReply, transformDoubleReply } from '@redis/client/dist/lib/commands/generic-transformers'; +import SUGGET from './SUGGET'; + +type SuggestScore = { + suggestion: BlobStringReply; + score: DoubleReply; +} + +export default { + IS_READ_ONLY: SUGGET.IS_READ_ONLY, + /** + * Gets completion suggestions with their scores from a suggestion dictionary. 
+ * @param args - Same parameters as FT.SUGGET: + * - parser: The command parser + * - key: The suggestion dictionary key + * - prefix: The prefix to get completion suggestions for + * - options: Optional parameters for fuzzy matching and max results + */ + parseCommand(...args: Parameters) { + SUGGET.parseCommand(...args); + args[0].push('WITHSCORES'); + }, + transformReply: { + 2: (reply: NullReply | UnwrapReply>, preserve?: any, typeMapping?: TypeMapping) => { + if (isNullReply(reply)) return null; + + const transformedReply: Array = new Array(reply.length / 2); + let replyIndex = 0, + arrIndex = 0; + while (replyIndex < reply.length) { + transformedReply[arrIndex++] = { + suggestion: reply[replyIndex++], + score: transformDoubleReply[2](reply[replyIndex++], preserve, typeMapping) + }; + } + + return transformedReply; + }, + 3: (reply: UnwrapReply>) => { + if (isNullReply(reply)) return null; + + const transformedReply: Array = new Array(reply.length / 2); + let replyIndex = 0, + arrIndex = 0; + while (replyIndex < reply.length) { + transformedReply[arrIndex++] = { + suggestion: reply[replyIndex++] as BlobStringReply, + score: reply[replyIndex++] as DoubleReply + }; + } + + return transformedReply; + } + } +} as const satisfies Command; diff --git a/packages/search/lib/commands/SUGGET_WITHSCORES_WITHPAYLOADS.spec.ts b/packages/search/lib/commands/SUGGET_WITHSCORES_WITHPAYLOADS.spec.ts new file mode 100644 index 00000000000..96eb473159f --- /dev/null +++ b/packages/search/lib/commands/SUGGET_WITHSCORES_WITHPAYLOADS.spec.ts @@ -0,0 +1,37 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import SUGGET_WITHSCORES_WITHPAYLOADS from './SUGGET_WITHSCORES_WITHPAYLOADS'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('FT.SUGGET WITHSCORES WITHPAYLOADS', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(SUGGET_WITHSCORES_WITHPAYLOADS, 'key', 
'prefix'), + ['FT.SUGGET', 'key', 'prefix', 'WITHSCORES', 'WITHPAYLOADS'] + ); + }); + + describe('client.ft.sugGetWithScoresWithPayloads', () => { + testUtils.testWithClientIfVersionWithinRange([[8], 'LATEST'], 'null', async client => { + assert.deepStrictEqual( + await client.ft.sugGetWithScoresWithPayloads('key', 'prefix'), + [] + ); + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithClientIfVersionWithinRange([[8], 'LATEST'], 'with suggestions', async client => { + const [, reply] = await Promise.all([ + client.ft.sugAdd('key', 'string', 1, { + PAYLOAD: 'payload' + }), + client.ft.sugGetWithScoresWithPayloads('key', 'string') + ]); + + assert.ok(Array.isArray(reply)); + assert.equal(reply.length, 1); + assert.equal(reply[0].suggestion, 'string'); + assert.equal(typeof reply[0].score, 'number'); + assert.equal(reply[0].payload, 'payload'); + }, GLOBAL.SERVERS.OPEN); + }); +}); diff --git a/packages/search/lib/commands/SUGGET_WITHSCORES_WITHPAYLOADS.ts b/packages/search/lib/commands/SUGGET_WITHSCORES_WITHPAYLOADS.ts new file mode 100644 index 00000000000..b7aa38df3fe --- /dev/null +++ b/packages/search/lib/commands/SUGGET_WITHSCORES_WITHPAYLOADS.ts @@ -0,0 +1,62 @@ +import { NullReply, ArrayReply, BlobStringReply, DoubleReply, UnwrapReply, Command, TypeMapping } from '@redis/client/dist/lib/RESP/types'; +import { isNullReply, transformDoubleReply } from '@redis/client/dist/lib/commands/generic-transformers'; +import SUGGET from './SUGGET'; + +type SuggestScoreWithPayload = { + suggestion: BlobStringReply; + score: DoubleReply; + payload: BlobStringReply; +} + +export default { + IS_READ_ONLY: SUGGET.IS_READ_ONLY, + /** + * Gets completion suggestions with their scores and payloads from a suggestion dictionary. 
+ * @param args - Same parameters as FT.SUGGET: + * - parser: The command parser + * - key: The suggestion dictionary key + * - prefix: The prefix to get completion suggestions for + * - options: Optional parameters for fuzzy matching and max results + */ + parseCommand(...args: Parameters) { + SUGGET.parseCommand(...args); + args[0].push( + 'WITHSCORES', + 'WITHPAYLOADS' + ); + }, + transformReply: { + 2: (reply: NullReply | UnwrapReply>, preserve?: any, typeMapping?: TypeMapping) => { + if (isNullReply(reply)) return null; + + const transformedReply: Array = new Array(reply.length / 3); + let replyIndex = 0, + arrIndex = 0; + while (replyIndex < reply.length) { + transformedReply[arrIndex++] = { + suggestion: reply[replyIndex++], + score: transformDoubleReply[2](reply[replyIndex++], preserve, typeMapping), + payload: reply[replyIndex++] + }; + } + + return transformedReply; + }, + 3: (reply: NullReply | UnwrapReply>) => { + if (isNullReply(reply)) return null; + + const transformedReply: Array = new Array(reply.length / 3); + let replyIndex = 0, + arrIndex = 0; + while (replyIndex < reply.length) { + transformedReply[arrIndex++] = { + suggestion: reply[replyIndex++] as BlobStringReply, + score: reply[replyIndex++] as DoubleReply, + payload: reply[replyIndex++] as BlobStringReply + }; + } + + return transformedReply; + } + } +} as const satisfies Command; diff --git a/packages/search/lib/commands/SUGLEN.spec.ts b/packages/search/lib/commands/SUGLEN.spec.ts new file mode 100644 index 00000000000..d738f09042e --- /dev/null +++ b/packages/search/lib/commands/SUGLEN.spec.ts @@ -0,0 +1,20 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import SUGLEN from './SUGLEN'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('FT.SUGLEN', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(SUGLEN, 'key'), + ['FT.SUGLEN', 'key'] + ); + }); + + 
testUtils.testWithClient('client.ft.sugLen', async client => { + assert.equal( + await client.ft.sugLen('key'), + 0 + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/search/lib/commands/SUGLEN.ts b/packages/search/lib/commands/SUGLEN.ts new file mode 100644 index 00000000000..ecc4f4a6fc0 --- /dev/null +++ b/packages/search/lib/commands/SUGLEN.ts @@ -0,0 +1,15 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, NumberReply, Command } from '@redis/client/dist/lib/RESP/types'; + +export default { + IS_READ_ONLY: true, + /** + * Gets the size of a suggestion dictionary. + * @param parser - The command parser + * @param key - The suggestion dictionary key + */ + parseCommand(parser: CommandParser, key: RedisArgument) { + parser.push('FT.SUGLEN', key); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/search/lib/commands/SYNDUMP.spec.ts b/packages/search/lib/commands/SYNDUMP.spec.ts new file mode 100644 index 00000000000..88bf50cfb54 --- /dev/null +++ b/packages/search/lib/commands/SYNDUMP.spec.ts @@ -0,0 +1,25 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import SYNDUMP from './SYNDUMP'; +import { SCHEMA_FIELD_TYPE } from './CREATE'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('FT.SYNDUMP', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(SYNDUMP, 'index'), + ['FT.SYNDUMP', 'index'] + ); + }); + + testUtils.testWithClient('client.ft.synDump', async client => { + const [, reply] = await Promise.all([ + client.ft.create('index', { + field: SCHEMA_FIELD_TYPE.TEXT + }), + client.ft.synDump('index') + ]); + + assert.deepEqual(reply, {}); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/search/lib/commands/SYNDUMP.ts b/packages/search/lib/commands/SYNDUMP.ts new file mode 100644 index 00000000000..da3e77b4223 --- 
/dev/null +++ b/packages/search/lib/commands/SYNDUMP.ts @@ -0,0 +1,28 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, MapReply, BlobStringReply, ArrayReply, UnwrapReply, Command } from '@redis/client/dist/lib/RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Dumps the contents of a synonym group. + * @param parser - The command parser + * @param index - Name of the index that contains the synonym group + */ + parseCommand(parser: CommandParser, index: RedisArgument) { + parser.push('FT.SYNDUMP', index); + }, + transformReply: { + 2: (reply: UnwrapReply>>) => { + const result: Record> = {}; + let i = 0; + while (i < reply.length) { + const key = (reply[i++] as unknown as UnwrapReply).toString(), + value = reply[i++] as unknown as ArrayReply; + result[key] = value; + } + return result; + }, + 3: undefined as unknown as () => MapReply> + } +} as const satisfies Command; diff --git a/packages/search/lib/commands/SYNUPDATE.spec.ts b/packages/search/lib/commands/SYNUPDATE.spec.ts new file mode 100644 index 00000000000..f93e0599151 --- /dev/null +++ b/packages/search/lib/commands/SYNUPDATE.spec.ts @@ -0,0 +1,43 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import SYNUPDATE from './SYNUPDATE'; +import { SCHEMA_FIELD_TYPE } from './CREATE'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('FT.SYNUPDATE', () => { + describe('transformArguments', () => { + it('single term', () => { + assert.deepEqual( + parseArgs(SYNUPDATE, 'index', 'groupId', 'term'), + ['FT.SYNUPDATE', 'index', 'groupId', 'term'] + ); + }); + + it('multiple terms', () => { + assert.deepEqual( + parseArgs(SYNUPDATE, 'index', 'groupId', ['1', '2']), + ['FT.SYNUPDATE', 'index', 'groupId', '1', '2'] + ); + }); + + it('with SKIPINITIALSCAN', () => { + assert.deepEqual( + parseArgs(SYNUPDATE, 'index', 'groupId', 'term', { + 
SKIPINITIALSCAN: true + }), + ['FT.SYNUPDATE', 'index', 'groupId', 'SKIPINITIALSCAN', 'term'] + ); + }); + }); + + testUtils.testWithClient('client.ft.synUpdate', async client => { + const [, reply] = await Promise.all([ + client.ft.create('index', { + field: SCHEMA_FIELD_TYPE.TEXT + }), + client.ft.synUpdate('index', 'groupId', 'term') + ]); + + assert.equal(reply, 'OK'); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/search/lib/commands/SYNUPDATE.ts b/packages/search/lib/commands/SYNUPDATE.ts new file mode 100644 index 00000000000..0fed14f894a --- /dev/null +++ b/packages/search/lib/commands/SYNUPDATE.ts @@ -0,0 +1,37 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { SimpleStringReply, Command, RedisArgument } from '@redis/client/dist/lib/RESP/types'; +import { RedisVariadicArgument } from '@redis/client/dist/lib/commands/generic-transformers'; + +export interface FtSynUpdateOptions { + SKIPINITIALSCAN?: boolean; +} + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Updates a synonym group with new terms. 
+ * @param parser - The command parser + * @param index - Name of the index that contains the synonym group + * @param groupId - ID of the synonym group to update + * @param terms - One or more synonym terms to add to the group + * @param options - Optional parameters: + * - SKIPINITIALSCAN: Skip the initial scan for existing documents + */ + parseCommand( + parser: CommandParser, + index: RedisArgument, + groupId: RedisArgument, + terms: RedisVariadicArgument, + options?: FtSynUpdateOptions + ) { + parser.push('FT.SYNUPDATE', index, groupId); + + if (options?.SKIPINITIALSCAN) { + parser.push('SKIPINITIALSCAN'); + } + + parser.pushVariadic(terms); + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/search/lib/commands/TAGVALS.spec.ts b/packages/search/lib/commands/TAGVALS.spec.ts new file mode 100644 index 00000000000..f0d83c9f7ad --- /dev/null +++ b/packages/search/lib/commands/TAGVALS.spec.ts @@ -0,0 +1,25 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import TAGVALS from './TAGVALS'; +import { SCHEMA_FIELD_TYPE } from './CREATE'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('FT.TAGVALS', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(TAGVALS, 'index', '@field'), + ['FT.TAGVALS', 'index', '@field'] + ); + }); + + testUtils.testWithClient('client.ft.tagVals', async client => { + const [, reply] = await Promise.all([ + client.ft.create('index', { + field: SCHEMA_FIELD_TYPE.TAG + }), + client.ft.tagVals('index', 'field') + ]); + + assert.deepEqual(reply, []); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/search/lib/commands/TAGVALS.ts b/packages/search/lib/commands/TAGVALS.ts new file mode 100644 index 00000000000..1c307945f2b --- /dev/null +++ b/packages/search/lib/commands/TAGVALS.ts @@ -0,0 +1,20 @@ +import { CommandParser } from 
'@redis/client/dist/lib/client/parser'; +import { RedisArgument, ArrayReply, SetReply, BlobStringReply, Command } from '@redis/client/dist/lib/RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Returns the distinct values in a TAG field. + * @param parser - The command parser + * @param index - Name of the index + * @param fieldName - Name of the TAG field to get values from + */ + parseCommand(parser: CommandParser, index: RedisArgument, fieldName: RedisArgument) { + parser.push('FT.TAGVALS', index, fieldName); + }, + transformReply: { + 2: undefined as unknown as () => ArrayReply, + 3: undefined as unknown as () => SetReply + } +} as const satisfies Command; diff --git a/packages/search/lib/commands/_LIST.spec.ts b/packages/search/lib/commands/_LIST.spec.ts new file mode 100644 index 00000000000..dfe32f2e29d --- /dev/null +++ b/packages/search/lib/commands/_LIST.spec.ts @@ -0,0 +1,20 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import _LIST from './_LIST'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('_LIST', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(_LIST), + ['FT._LIST'] + ); + }); + + testUtils.testWithClient('client.ft._list', async client => { + assert.deepEqual( + await client.ft._list(), + [] + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/search/lib/commands/_LIST.ts b/packages/search/lib/commands/_LIST.ts new file mode 100644 index 00000000000..1b30e044e61 --- /dev/null +++ b/packages/search/lib/commands/_LIST.ts @@ -0,0 +1,18 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { ArrayReply, SetReply, BlobStringReply, Command } from '@redis/client/dist/lib/RESP/types'; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Lists all existing indexes in the database. 
+ * @param parser - The command parser + */ + parseCommand(parser: CommandParser) { + parser.push('FT._LIST'); + }, + transformReply: { + 2: undefined as unknown as () => ArrayReply, + 3: undefined as unknown as () => SetReply + } +} as const satisfies Command; diff --git a/packages/search/lib/commands/index.spec.ts b/packages/search/lib/commands/index.spec.ts new file mode 100644 index 00000000000..04808932c59 --- /dev/null +++ b/packages/search/lib/commands/index.spec.ts @@ -0,0 +1,48 @@ +import { strict as assert } from 'node:assert'; + +/* import { pushArgumentsWithLength, pushSortByArguments } from '.'; + +describe('pushSortByArguments', () => { + describe('single', () => { + it('string', () => { + assert.deepEqual( + pushSortByArguments([], 'SORTBY', '@property'), + ['SORTBY', '1', '@property'] + ); + }); + + it('.BY', () => { + assert.deepEqual( + pushSortByArguments([], 'SORTBY', { BY: '@property' }), + ['SORTBY', '1', '@property'] + ); + }); + + it('with DIRECTION', () => { + assert.deepEqual( + pushSortByArguments([], 'SORTBY', { + BY: '@property', + DIRECTION: 'ASC' + }), + ['SORTBY', '2', '@property', 'ASC'] + ); + }); + }); + + it('multiple', () => { + assert.deepEqual( + pushSortByArguments([], 'SORTBY', ['@1', '@2']), + ['SORTBY', '2', '@1', '@2'] + ); + }); +}); + +it('pushArgumentsWithLength', () => { + assert.deepEqual( + pushArgumentsWithLength(['a'], args => { + args.push('b', 'c'); + }), + ['a', '2', 'b', 'c'] + ); +}); +*/ \ No newline at end of file diff --git a/packages/search/lib/commands/index.ts b/packages/search/lib/commands/index.ts new file mode 100644 index 00000000000..7aa3f061bf7 --- /dev/null +++ b/packages/search/lib/commands/index.ts @@ -0,0 +1,117 @@ +import _LIST from './_LIST'; +import ALTER from './ALTER'; +import AGGREGATE_WITHCURSOR from './AGGREGATE_WITHCURSOR'; +import AGGREGATE from './AGGREGATE'; +import ALIASADD from './ALIASADD'; +import ALIASDEL from './ALIASDEL'; +import ALIASUPDATE from './ALIASUPDATE'; +import 
CONFIG_GET from './CONFIG_GET'; +import CONFIG_SET from './CONFIG_SET'; +import CREATE from './CREATE'; +import CURSOR_DEL from './CURSOR_DEL'; +import CURSOR_READ from './CURSOR_READ'; +import DICTADD from './DICTADD'; +import DICTDEL from './DICTDEL'; +import DICTDUMP from './DICTDUMP'; +import DROPINDEX from './DROPINDEX'; +import EXPLAIN from './EXPLAIN'; +import EXPLAINCLI from './EXPLAINCLI'; +import INFO from './INFO'; +import PROFILESEARCH from './PROFILE_SEARCH'; +import PROFILEAGGREGATE from './PROFILE_AGGREGATE'; +import SEARCH_NOCONTENT from './SEARCH_NOCONTENT'; +import SEARCH from './SEARCH'; +import SPELLCHECK from './SPELLCHECK'; +import SUGADD from './SUGADD'; +import SUGDEL from './SUGDEL'; +import SUGGET_WITHPAYLOADS from './SUGGET_WITHPAYLOADS'; +import SUGGET_WITHSCORES_WITHPAYLOADS from './SUGGET_WITHSCORES_WITHPAYLOADS'; +import SUGGET_WITHSCORES from './SUGGET_WITHSCORES'; +import SUGGET from './SUGGET'; +import SUGLEN from './SUGLEN'; +import SYNDUMP from './SYNDUMP'; +import SYNUPDATE from './SYNUPDATE'; +import TAGVALS from './TAGVALS'; + +export default { + _LIST, + _list: _LIST, + ALTER, + alter: ALTER, + AGGREGATE_WITHCURSOR, + aggregateWithCursor: AGGREGATE_WITHCURSOR, + AGGREGATE, + aggregate: AGGREGATE, + ALIASADD, + aliasAdd: ALIASADD, + ALIASDEL, + aliasDel: ALIASDEL, + ALIASUPDATE, + aliasUpdate: ALIASUPDATE, + /** + * @deprecated Redis >=8 uses the standard CONFIG command + */ + CONFIG_GET, + /** + * @deprecated Redis >=8 uses the standard CONFIG command + */ + configGet: CONFIG_GET, + /** + * @deprecated Redis >=8 uses the standard CONFIG command + */ + CONFIG_SET, + /** + * @deprecated Redis >=8 uses the standard CONFIG command + */ + configSet: CONFIG_SET, + CREATE, + create: CREATE, + CURSOR_DEL, + cursorDel: CURSOR_DEL, + CURSOR_READ, + cursorRead: CURSOR_READ, + DICTADD, + dictAdd: DICTADD, + DICTDEL, + dictDel: DICTDEL, + DICTDUMP, + dictDump: DICTDUMP, + DROPINDEX, + dropIndex: DROPINDEX, + EXPLAIN, + explain: EXPLAIN, + 
EXPLAINCLI, + explainCli: EXPLAINCLI, + INFO, + info: INFO, + PROFILESEARCH, + profileSearch: PROFILESEARCH, + PROFILEAGGREGATE, + profileAggregate: PROFILEAGGREGATE, + SEARCH_NOCONTENT, + searchNoContent: SEARCH_NOCONTENT, + SEARCH, + search: SEARCH, + SPELLCHECK, + spellCheck: SPELLCHECK, + SUGADD, + sugAdd: SUGADD, + SUGDEL, + sugDel: SUGDEL, + SUGGET_WITHPAYLOADS, + sugGetWithPayloads: SUGGET_WITHPAYLOADS, + SUGGET_WITHSCORES_WITHPAYLOADS, + sugGetWithScoresWithPayloads: SUGGET_WITHSCORES_WITHPAYLOADS, + SUGGET_WITHSCORES, + sugGetWithScores: SUGGET_WITHSCORES, + SUGGET, + sugGet: SUGGET, + SUGLEN, + sugLen: SUGLEN, + SYNDUMP, + synDump: SYNDUMP, + SYNUPDATE, + synUpdate: SYNUPDATE, + TAGVALS, + tagVals: TAGVALS +}; diff --git a/packages/search/lib/dialect/default.ts b/packages/search/lib/dialect/default.ts new file mode 100644 index 00000000000..54cde05d119 --- /dev/null +++ b/packages/search/lib/dialect/default.ts @@ -0,0 +1 @@ +export const DEFAULT_DIALECT = '2'; diff --git a/packages/search/lib/index.ts b/packages/search/lib/index.ts new file mode 100644 index 00000000000..9bcfb91b956 --- /dev/null +++ b/packages/search/lib/index.ts @@ -0,0 +1,21 @@ +export { default } from './commands' + +export { SearchReply } from './commands/SEARCH' +export { RediSearchSchema } from './commands/CREATE' +export { + REDISEARCH_LANGUAGE, + RediSearchLanguage, + SCHEMA_FIELD_TYPE, + SchemaFieldType, + SCHEMA_TEXT_FIELD_PHONETIC, + SchemaTextFieldPhonetic, + SCHEMA_VECTOR_FIELD_ALGORITHM, + SchemaVectorFieldAlgorithm +} from './commands/CREATE' +export { + FT_AGGREGATE_GROUP_BY_REDUCERS, + FtAggregateGroupByReducer, + FT_AGGREGATE_STEPS, + FtAggregateStep +} from './commands/AGGREGATE' +export { FtSearchOptions } from './commands/SEARCH' diff --git a/packages/search/lib/test-utils.ts b/packages/search/lib/test-utils.ts new file mode 100644 index 00000000000..035ae29dd01 --- /dev/null +++ b/packages/search/lib/test-utils.ts @@ -0,0 +1,32 @@ +import TestUtils from 
'@redis/test-utils'; +import RediSearch from '.'; +import { RespVersions } from '@redis/client'; + +export default TestUtils.createFromConfig({ + dockerImageName: 'redislabs/client-libs-test', + dockerImageVersionArgument: 'redis-version', + defaultDockerVersion: '8.4-M01-pre' +}); + +export const GLOBAL = { + SERVERS: { + OPEN: { + serverArguments: [], + clientOptions: { + modules: { + ft: RediSearch + } + } + }, + OPEN_3: { + serverArguments: [], + clientOptions: { + RESP: 3 as RespVersions, + unstableResp3:true, + modules: { + ft: RediSearch + } + } + } + } +}; diff --git a/packages/search/package.json b/packages/search/package.json new file mode 100644 index 00000000000..d39d0a70077 --- /dev/null +++ b/packages/search/package.json @@ -0,0 +1,37 @@ +{ + "name": "@redis/search", + "version": "5.9.0-beta.2", + "license": "MIT", + "main": "./dist/lib/index.js", + "types": "./dist/lib/index.d.ts", + "files": [ + "dist/", + "!dist/tsconfig.tsbuildinfo" + ], + "scripts": { + "test": "nyc -r text-summary -r lcov mocha -r tsx './lib/**/*.spec.ts'", + "test-sourcemap": "mocha -r ts-node/register/transpile-only './lib/**/*.spec.ts'", + "release": "release-it" + }, + "peerDependencies": { + "@redis/client": "^5.9.0-beta.2" + }, + "devDependencies": { + "@redis/test-utils": "*" + }, + "engines": { + "node": ">= 18" + }, + "repository": { + "type": "git", + "url": "git://github.com/redis/node-redis.git" + }, + "bugs": { + "url": "https://github.com/redis/node-redis/issues" + }, + "homepage": "https://github.com/redis/node-redis/tree/master/packages/search", + "keywords": [ + "redis", + "RediSearch" + ] +} diff --git a/packages/search/tsconfig.json b/packages/search/tsconfig.json new file mode 100644 index 00000000000..f37b7dc1152 --- /dev/null +++ b/packages/search/tsconfig.json @@ -0,0 +1,20 @@ +{ + "extends": "../../tsconfig.base.json", + "compilerOptions": { + "outDir": "./dist" + }, + "include": [ + "./lib/**/*.ts" + ], + 
"exclude": [ + "./lib/test-utils.ts", + "./lib/**/*.spec.ts" + ], + "typedocOptions": { + "entryPoints": [ + "./lib" + ], + "entryPointStrategy": "expand", + "out": "../../documentation/search" + } +} diff --git a/packages/test-utils/docker/Dockerfile b/packages/test-utils/docker/Dockerfile new file mode 100644 index 00000000000..23fc0b3a517 --- /dev/null +++ b/packages/test-utils/docker/Dockerfile @@ -0,0 +1,9 @@ +ARG IMAGE +FROM ${IMAGE} + +ARG REDIS_ARGUMENTS +ENV REDIS_ARGUMENTS=${REDIS_ARGUMENTS} + +COPY ./entrypoint.sh / + +ENTRYPOINT ["/entrypoint.sh"] diff --git a/packages/test-utils/docker/entrypoint.sh b/packages/test-utils/docker/entrypoint.sh new file mode 100755 index 00000000000..d4006f55622 --- /dev/null +++ b/packages/test-utils/docker/entrypoint.sh @@ -0,0 +1,3 @@ +#!/bin/bash + +redis-server $REDIS_ARGUMENTS diff --git a/packages/test-utils/lib/cae-client-testing.ts b/packages/test-utils/lib/cae-client-testing.ts new file mode 100644 index 00000000000..92b846dd37e --- /dev/null +++ b/packages/test-utils/lib/cae-client-testing.ts @@ -0,0 +1,30 @@ +import { readFile } from 'node:fs/promises'; + +interface RawRedisEndpoint { + username?: string; + password?: string; + tls: boolean; + endpoints: string[]; +} + +export type RedisEndpointsConfig = Record; + +export function loadFromJson(jsonString: string): RedisEndpointsConfig { + try { + return JSON.parse(jsonString) as RedisEndpointsConfig; + } catch (error) { + throw new Error(`Invalid JSON configuration: ${error}`); + } +} + +export async function loadFromFile(path: string): Promise { + try { + const configFile = await readFile(path, 'utf-8'); + return loadFromJson(configFile); + } catch (error) { + if (error instanceof Error && 'code' in error && error.code === 'ENOENT') { + throw new Error(`Config file not found at path: ${path}`); + } + throw error; + } +} \ No newline at end of file diff --git a/packages/test-utils/lib/dockers.ts b/packages/test-utils/lib/dockers.ts new file mode 100644 index 
00000000000..47257964f6a --- /dev/null +++ b/packages/test-utils/lib/dockers.ts @@ -0,0 +1,447 @@ +import { RedisClusterClientOptions } from '@redis/client/dist/lib/cluster'; +import { createConnection } from 'node:net'; +import { once } from 'node:events'; +import { createClient } from '@redis/client/index'; +import { setTimeout } from 'node:timers/promises'; +// import { ClusterSlotsReply } from '@redis/client/dist/lib/commands/CLUSTER_SLOTS'; +import { execFile as execFileCallback } from 'node:child_process'; +import { promisify } from 'node:util'; +import * as fs from 'node:fs'; +import * as os from 'node:os'; +import * as path from 'node:path'; + +const execAsync = promisify(execFileCallback); + +interface ErrorWithCode extends Error { + code: string; +} + +async function isPortAvailable(port: number): Promise { + try { + const socket = createConnection({ port }); + await once(socket, 'connect'); + socket.end(); + } catch (err) { + if (err instanceof Error && (err as ErrorWithCode).code === 'ECONNREFUSED') { + return true; + } + } + + return false; +} + +const portIterator = (async function* (): AsyncIterableIterator { + for (let i = 6379; i < 65535; i++) { + if (await isPortAvailable(i)) { + yield i; + } + } + + throw new Error('All ports are in use'); +})(); + +interface RedisServerDockerConfig { + image: string; + version: string; +} + +interface SentinelConfig { + mode: "sentinel"; + mounts: Array; + port: number; +} + +interface ServerConfig { + mode: "server"; +} + +export type RedisServerDockerOptions = RedisServerDockerConfig & (SentinelConfig | ServerConfig) + +export interface RedisServerDocker { + port: number; + dockerId: string; +} + +export async function spawnRedisServerDocker( +options: RedisServerDockerOptions, serverArguments: Array): Promise { + let port; + if (options.mode == "sentinel") { + port = options.port; + } else { + port = (await portIterator.next()).value; + } + + const portStr = port.toString(); + + const dockerArgs = [ + 'run', 
+ '--init', + '-e', `PORT=${portStr}` + ]; + + if (options.mode == "sentinel") { + options.mounts.forEach(mount => { + dockerArgs.push('-v', mount); + }); + } + + dockerArgs.push( + '-d', + '--network', 'host', + `${options.image}:${options.version}` + ); + + if (serverArguments.length > 0) { + for (let i = 0; i < serverArguments.length; i++) { + dockerArgs.push(serverArguments[i]) + } + } + + console.log(`[Docker] Spawning Redis container - Image: ${options.image}:${options.version}, Port: ${port}, Mode: ${options.mode}`); + + const { stdout, stderr } = await execAsync('docker', dockerArgs); + + if (!stdout) { + throw new Error(`docker run error - ${stderr}`); + } + + while (await isPortAvailable(port)) { + await setTimeout(50); + } + + return { + port, + dockerId: stdout.trim() + }; +} +const RUNNING_SERVERS = new Map, ReturnType>(); + +export function spawnRedisServer(dockerConfig: RedisServerDockerOptions, serverArguments: Array): Promise { + const runningServer = RUNNING_SERVERS.get(serverArguments); + if (runningServer) { + return runningServer; + } + + const dockerPromise = spawnRedisServerDocker(dockerConfig, serverArguments); + RUNNING_SERVERS.set(serverArguments, dockerPromise); + return dockerPromise; +} + +async function dockerRemove(dockerId: string): Promise { + const { stderr } = await execAsync('docker', ['rm', '-f', dockerId]); + if (stderr) { + throw new Error(`docker rm error - ${stderr}`); + } +} + +after(() => { + return Promise.all( + [...RUNNING_SERVERS.values()].map(async dockerPromise => + await dockerRemove((await dockerPromise).dockerId) + ) + ); +}); + +export type RedisClusterDockersConfig = RedisServerDockerOptions & { + numberOfMasters?: number; + numberOfReplicas?: number; +} + +async function spawnRedisClusterNodeDockers( + dockersConfig: RedisClusterDockersConfig, + serverArguments: Array, + fromSlot: number, + toSlot: number, + clientConfig?: Partial +) { + const range: Array = []; + for (let i = fromSlot; i < toSlot; i++) { + 
range.push(i); + } + + const master = await spawnRedisClusterNodeDocker( + dockersConfig, + serverArguments, + clientConfig + ); + + await master.client.clusterAddSlots(range); + + if (!dockersConfig.numberOfReplicas) return [master]; + + const replicasPromises: Array> = []; + for (let i = 0; i < (dockersConfig.numberOfReplicas ?? 0); i++) { + replicasPromises.push( + spawnRedisClusterNodeDocker(dockersConfig, [ + ...serverArguments, + '--cluster-enabled', + 'yes', + '--cluster-node-timeout', + '5000' + ], clientConfig).then(async replica => { + + const requirePassIndex = serverArguments.findIndex((x) => x === '--requirepass'); + if (requirePassIndex !== -1) { + const password = serverArguments[requirePassIndex + 1]; + await replica.client.configSet({ 'masterauth': password }) + } + await replica.client.clusterMeet('127.0.0.1', master.docker.port); + + while ((await replica.client.clusterSlots()).length === 0) { + await setTimeout(25); + } + + await replica.client.clusterReplicate( + await master.client.clusterMyId() + ); + + return replica; + }) + ); + } + + return [ + master, + ...await Promise.all(replicasPromises) + ]; +} + +async function spawnRedisClusterNodeDocker( + dockersConfig: RedisServerDockerOptions, + serverArguments: Array, + clientConfig?: Partial +) { + const docker = await spawnRedisServerDocker(dockersConfig, [ + ...serverArguments, + '--cluster-enabled', + 'yes', + '--cluster-node-timeout', + '5000' + ]), + client = createClient({ + socket: { + port: docker.port + }, + ...clientConfig + }); + + await client.connect(); + + return { + docker, + client + }; +} + +const SLOTS = 16384; + +async function spawnRedisClusterDockers( + dockersConfig: RedisClusterDockersConfig, + serverArguments: Array, + clientConfig?: Partial +): Promise> { + const numberOfMasters = dockersConfig.numberOfMasters ?? 
2, + slotsPerNode = Math.floor(SLOTS / numberOfMasters), + spawnPromises: Array> = []; + for (let i = 0; i < numberOfMasters; i++) { + const fromSlot = i * slotsPerNode, + toSlot = i === numberOfMasters - 1 ? SLOTS : fromSlot + slotsPerNode; + spawnPromises.push( + spawnRedisClusterNodeDockers( + dockersConfig, + serverArguments, + fromSlot, + toSlot, + clientConfig + ) + ); + } + + const nodes = (await Promise.all(spawnPromises)).flat(), + meetPromises: Array> = []; + for (let i = 1; i < nodes.length; i++) { + meetPromises.push( + nodes[i].client.clusterMeet('127.0.0.1', nodes[0].docker.port) + ); + } + + await Promise.all(meetPromises); + + await Promise.all( + nodes.map(async ({ client }) => { + while ( + totalNodes(await client.clusterSlots()) !== nodes.length || + !(await client.sendCommand(['CLUSTER', 'INFO'])).startsWith('cluster_state:ok') // TODO + ) { + await setTimeout(50); + } + + client.destroy(); + }) + ); + + return nodes.map(({ docker }) => docker); +} + +// TODO: type ClusterSlotsReply +function totalNodes(slots: any) { + let total = slots.length; + for (const slot of slots) { + total += slot.replicas.length; + } + + return total; +} + +const RUNNING_CLUSTERS = new Map, ReturnType>(); + +export function spawnRedisCluster( + dockersConfig: RedisClusterDockersConfig, + serverArguments: Array, + clientConfig?: Partial): Promise> { + + const runningCluster = RUNNING_CLUSTERS.get(serverArguments); + if (runningCluster) { + return runningCluster; + } + + const dockersPromise = spawnRedisClusterDockers(dockersConfig, serverArguments, clientConfig); + + RUNNING_CLUSTERS.set(serverArguments, dockersPromise); + return dockersPromise; +} + +after(() => { + return Promise.all( + [...RUNNING_CLUSTERS.values()].map(async dockersPromise => { + return Promise.all( + (await dockersPromise).map(({ dockerId }) => dockerRemove(dockerId)) + ); + }) + ); +}); + + +const RUNNING_NODES = new Map, Array>(); +const RUNNING_SENTINELS = new Map, Array>(); + +export async 
function spawnRedisSentinel( + dockerConfigs: RedisServerDockerOptions, + serverArguments: Array, +): Promise> { + const runningNodes = RUNNING_SENTINELS.get(serverArguments); + if (runningNodes) { + return runningNodes; + } + + const passIndex = serverArguments.indexOf('--requirepass')+1; + let password: string | undefined = undefined; + if (passIndex != 0) { + password = serverArguments[passIndex]; + } + + const master = await spawnRedisServerDocker(dockerConfigs, serverArguments); + const redisNodes: Array = [master]; + const replicaPromises: Array> = []; + + const replicasCount = 2; + for (let i = 0; i < replicasCount; i++) { + replicaPromises.push((async () => { + const replica = await spawnRedisServerDocker(dockerConfigs, serverArguments); + const client = createClient({ + socket: { + port: replica.port + }, + password: password + }); + + await client.connect(); + await client.replicaOf("127.0.0.1", master.port); + await client.close(); + + return replica; + })()); + } + + const replicas = await Promise.all(replicaPromises); + redisNodes.push(...replicas); + RUNNING_NODES.set(serverArguments, redisNodes); + + const sentinelPromises: Array> = []; + const sentinelCount = 3; + + const appPrefix = 'sentinel-config-dir'; + const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), appPrefix)); + + for (let i = 0; i < sentinelCount; i++) { + sentinelPromises.push( + spawnSentinelNode( + dockerConfigs, + serverArguments, + master.port, + "mymaster", + path.join(tmpDir, i.toString()), + password, + ), + ) + } + + const sentinelNodes = await Promise.all(sentinelPromises); + RUNNING_SENTINELS.set(serverArguments, sentinelNodes); + + if (tmpDir) { + fs.rmSync(tmpDir, { recursive: true }); + } + + return sentinelNodes; +} + +after(() => { + return Promise.all( + [...RUNNING_NODES.values(), ...RUNNING_SENTINELS.values()].map(async dockersPromise => { + return Promise.all( + dockersPromise.map(({ dockerId }) => dockerRemove(dockerId)) + ); + }) + ); +}); + + +export async 
function spawnSentinelNode( + dockerConfigs: RedisServerDockerOptions, + serverArguments: Array, + masterPort: number, + sentinelName: string, + tmpDir: string, + password?: string, +) { + const port = (await portIterator.next()).value; + + let sentinelConfig = `port ${port} +sentinel monitor ${sentinelName} 127.0.0.1 ${masterPort} 2 +sentinel down-after-milliseconds ${sentinelName} 500 +sentinel failover-timeout ${sentinelName} 1000 +`; + if (password !== undefined) { + sentinelConfig += `requirepass ${password}\n`; + sentinelConfig += `sentinel auth-pass ${sentinelName} ${password}\n`; + } + + const dir = fs.mkdtempSync(tmpDir); + fs.writeFile(`${dir}/redis.conf`, sentinelConfig, err => { + if (err) { + console.error("failed to create temporary config file", err); + } + }); + + return await spawnRedisServerDocker( + { + image: dockerConfigs.image, + version: dockerConfigs.version, + mode: "sentinel", + mounts: [`${dir}/redis.conf:/redis/config/node-sentinel-1/redis.conf`], + port: port, + }, + serverArguments, + ); +} \ No newline at end of file diff --git a/packages/test-utils/lib/index.spec.ts b/packages/test-utils/lib/index.spec.ts new file mode 100644 index 00000000000..0f1e7552284 --- /dev/null +++ b/packages/test-utils/lib/index.spec.ts @@ -0,0 +1,106 @@ +import { strict as assert } from 'node:assert'; +import TestUtils from './index'; + +describe('TestUtils', () => { + describe('parseVersionNumber', () => { + it('should handle special versions', () => { + assert.deepStrictEqual(TestUtils.parseVersionNumber('latest'), [Infinity]); + assert.deepStrictEqual(TestUtils.parseVersionNumber('edge'), [Infinity]); + }); + + it('should parse simple version numbers', () => { + assert.deepStrictEqual(TestUtils.parseVersionNumber('7.4.0'), [7, 4, 0]); + }); + + it('should handle versions with multiple dashes and prefixes', () => { + assert.deepStrictEqual(TestUtils.parseVersionNumber('rs-7.4.0-v2'), [7, 4, 0]); + 
assert.deepStrictEqual(TestUtils.parseVersionNumber('rs-7.4.0'), [7, 4, 0]); + assert.deepStrictEqual(TestUtils.parseVersionNumber('7.4.0-v2'), [7, 4, 0]); + }); + + it('should handle various version number formats', () => { + assert.deepStrictEqual(TestUtils.parseVersionNumber('10.5'), [10, 5]); + assert.deepStrictEqual(TestUtils.parseVersionNumber('8.0.0'), [8, 0, 0]); + assert.deepStrictEqual(TestUtils.parseVersionNumber('rs-6.2.4-v1'), [6, 2, 4]); + }); + + it('should throw TypeError for invalid version strings', () => { + ['', 'invalid', 'rs-', 'v2', 'rs-invalid-v2'].forEach(version => { + assert.throws( + () => TestUtils.parseVersionNumber(version), + TypeError, + `Expected TypeError for version string: ${version}` + ); + }); + }); + }); +}); + + + +describe('Version Comparison', () => { + it('should correctly compare versions', () => { + const tests: [Array, Array, -1 | 0 | 1][] = [ + [[1, 0, 0], [1, 0, 0], 0], + [[2, 0, 0], [1, 9, 9], 1], + [[1, 9, 9], [2, 0, 0], -1], + [[1, 2, 3], [1, 2], 1], + [[1, 2], [1, 2, 3], -1], + [[1, 2, 0], [1, 2, 1], -1], + [[1], [1, 0, 0], 0], + [[2], [1, 9, 9], 1], + ]; + + tests.forEach(([a, b, expected]) => { + + assert.equal( + TestUtils.compareVersions(a, b), + expected, + `Failed comparing ${a.join('.')} with ${b.join('.')}: expected ${expected}` + ); + }); + }); + + it('should correctly compare versions', () => { + const tests: [Array, Array, -1 | 0 | 1][] = [ + [[1, 0, 0], [1, 0, 0], 0], + [[2, 0, 0], [1, 9, 9], 1], + [[1, 9, 9], [2, 0, 0], -1], + [[1, 2, 3], [1, 2], 1], + [[1, 2], [1, 2, 3], -1], + [[1, 2, 0], [1, 2, 1], -1], + [[1], [1, 0, 0], 0], + [[2], [1, 9, 9], 1], + ]; + + tests.forEach(([a, b, expected]) => { + + assert.equal( + TestUtils.compareVersions(a, b), + expected, + `Failed comparing ${a.join('.')} with ${b.join('.')}: expected ${expected}` + ); + }); + }) + it('isVersionInRange should work correctly', () => { + const tests: [Array, Array, Array, boolean][] = [ + [[7, 0, 0], [7, 0, 0], [7, 0, 0], true], 
+ [[7, 0, 1], [7, 0, 0], [7, 0, 2], true], + [[7, 0, 0], [7, 0, 1], [7, 0, 2], false], + [[7, 0, 3], [7, 0, 1], [7, 0, 2], false], + [[7], [6, 0, 0], [8, 0, 0], true], + [[7, 1, 1], [7, 1, 0], [7, 1, 2], true], + [[6, 0, 0], [7, 0, 0], [8, 0, 0], false], + [[9, 0, 0], [7, 0, 0], [8, 0, 0], false] + ]; + + tests.forEach(([version, min, max, expected]) => { + const testUtils = new TestUtils({ string: version.join('.'), numbers: version }, "test") + assert.equal( + testUtils.isVersionInRange(min, max), + expected, + `Failed checking if ${version.join('.')} is between ${min.join('.')} and ${max.join('.')}: expected ${expected}` + ); + }); + }) +}); diff --git a/packages/test-utils/lib/index.ts b/packages/test-utils/lib/index.ts new file mode 100644 index 00000000000..65a5af5ab55 --- /dev/null +++ b/packages/test-utils/lib/index.ts @@ -0,0 +1,635 @@ +import { + RedisModules, + RedisFunctions, + RedisScripts, + RespVersions, + TypeMapping, + // CommandPolicies, + createClient, + createSentinel, + RedisClientOptions, + RedisClientType, + RedisSentinelOptions, + RedisSentinelType, + RedisPoolOptions, + RedisClientPoolType, + createClientPool, + createCluster, + RedisClusterOptions, + RedisClusterType +} from '@redis/client/index'; +import { RedisNode } from '@redis/client/lib/sentinel/types' +import { spawnRedisServer, spawnRedisCluster, spawnRedisSentinel, RedisServerDockerOptions, RedisServerDocker, spawnSentinelNode, spawnRedisServerDocker } from './dockers'; +import yargs from 'yargs'; +import { hideBin } from 'yargs/helpers'; + +import * as fs from 'node:fs'; +import * as os from 'node:os'; +import * as path from 'node:path'; +import { RedisProxy, getFreePortNumber } from './redis-proxy'; + +interface TestUtilsConfig { + /** + * The name of the Docker image to use for spawning Redis test instances. + * This should be a valid Docker image name that contains a Redis server. 
+ * + * @example 'redislabs/client-libs-test' + */ + dockerImageName: string; + + /** + * The command-line argument name used to specify the Redis version. + * This argument can be passed when running tests / GH actions. + * + * @example + * If set to 'redis-version', you can run tests with: + * ```bash + * npm test -- --redis-version="6.2" + * ``` + */ + dockerImageVersionArgument: string; + + /** + * The default Redis version to use if no version is specified via command-line arguments. + * Can be a specific version number (e.g., '6.2'), 'latest', or 'edge'. + * If not provided, defaults to 'latest'. + * + * @optional + * @default 'latest' + */ + defaultDockerVersion?: string; +} +interface CommonTestOptions { + serverArguments: Array; + minimumDockerVersion?: Array; + skipTest?: boolean; +} + +interface ClientTestOptions< + M extends RedisModules, + F extends RedisFunctions, + S extends RedisScripts, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping +> extends CommonTestOptions { + clientOptions?: Partial>; + disableClientSetup?: boolean; +} + +interface SentinelTestOptions< + M extends RedisModules, + F extends RedisFunctions, + S extends RedisScripts, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping +> extends CommonTestOptions { + sentinelOptions?: Partial>; + clientOptions?: Partial>; + scripts?: S; + functions?: F; + modules?: M; + disableClientSetup?: boolean; + replicaPoolSize?: number; + masterPoolSize?: number; + reserveClient?: boolean; +} + +interface ClientPoolTestOptions< + M extends RedisModules, + F extends RedisFunctions, + S extends RedisScripts, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping +> extends CommonTestOptions { + clientOptions?: Partial>; + poolOptions?: RedisPoolOptions; +} + +interface ClusterTestOptions< + M extends RedisModules, + F extends RedisFunctions, + S extends RedisScripts, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping + // POLICIES extends CommandPolicies +> 
extends CommonTestOptions { + clusterConfiguration?: Partial>; + numberOfMasters?: number; + numberOfReplicas?: number; + disableClusterSetup?: boolean; +} + +interface AllTestOptions< + M extends RedisModules, + F extends RedisFunctions, + S extends RedisScripts, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping + // POLICIES extends CommandPolicies +> { + client: ClientTestOptions; + cluster: ClusterTestOptions; +} + +interface Version { + string: string; + numbers: Array; +} + +export default class TestUtils { + static parseVersionNumber(version: string): Array { + if (version === 'latest' || version === 'edge') return [Infinity]; + + + // Match complete version number patterns + const versionMatch = version.match(/(^|\-)\d+(\.\d+)*($|\-)/); + if (!versionMatch) { + throw new TypeError(`${version} is not a valid redis version`); + } + + // Extract just the numbers and dots between first and last dash (or start/end) + const versionNumbers = versionMatch[0].replace(/^\-|\-$/g, ''); + + return versionNumbers.split('.').map(x => { + const value = Number(x); + if (Number.isNaN(value)) { + throw new TypeError(`${version} is not a valid redis version`); + } + return value; + }); + } + static #getVersion(argumentName: string, defaultVersion = 'latest'): Version { + return yargs(hideBin(process.argv)) + .option(argumentName, { + type: 'string', + default: defaultVersion + }) + .coerce(argumentName, (version: string) => { + return { + string: version, + numbers: TestUtils.parseVersionNumber(version) + }; + }) + .demandOption(argumentName) + .parseSync()[argumentName]; + } + + readonly #VERSION_NUMBERS: Array; + readonly #DOCKER_IMAGE: RedisServerDockerOptions; + + constructor({ string, numbers }: Version, dockerImageName: string) { + this.#VERSION_NUMBERS = numbers; + this.#DOCKER_IMAGE = { + image: dockerImageName, + version: string, + mode: "server" + }; + } + + /** + * Creates a new TestUtils instance from a configuration object. 
+ * + * @param config - Configuration object containing Docker image and version settings + * @param config.dockerImageName - The name of the Docker image to use for tests + * @param config.dockerImageVersionArgument - The command-line argument name for specifying Redis version + * @param config.defaultDockerVersion - Optional default Redis version if not specified via arguments + * @returns A new TestUtils instance configured with the provided settings + */ + public static createFromConfig(config: TestUtilsConfig) { + return new TestUtils( + TestUtils.#getVersion(config.dockerImageVersionArgument, + config.defaultDockerVersion), config.dockerImageName); + } + + isVersionGreaterThan(minimumVersion: Array | undefined): boolean { + if (minimumVersion === undefined) return true; + return TestUtils.compareVersions(this.#VERSION_NUMBERS, minimumVersion) >= 0; + } + + isVersionGreaterThanHook(minimumVersion: Array | undefined): void { + const isVersionGreaterThanHook = this.isVersionGreaterThan.bind(this); + const versionNumber = this.#VERSION_NUMBERS.join('.'); + const minimumVersionString = minimumVersion?.join('.'); + before(function () { + if (!isVersionGreaterThanHook(minimumVersion)) { + console.warn(`TestUtils: Version ${versionNumber} is less than minimum version ${minimumVersionString}, skipping test`); + return this.skip(); + } + }); + } + + isVersionInRange(minVersion: Array, maxVersion: Array): boolean { + return TestUtils.compareVersions(this.#VERSION_NUMBERS, minVersion) >= 0 && + TestUtils.compareVersions(this.#VERSION_NUMBERS, maxVersion) <= 0 + } + + /** + * Compares two semantic version arrays and returns: + * -1 if version a is less than version b + * 0 if version a equals version b + * 1 if version a is greater than version b + * + * @param a First version array + * @param b Second version array + * @returns -1 | 0 | 1 + */ + static compareVersions(a: Array, b: Array): -1 | 0 | 1 { + const maxLength = Math.max(a.length, b.length); + + const paddedA = 
[...a, ...Array(maxLength - a.length).fill(0)]; + const paddedB = [...b, ...Array(maxLength - b.length).fill(0)]; + + for (let i = 0; i < maxLength; i++) { + if (paddedA[i] > paddedB[i]) return 1; + if (paddedA[i] < paddedB[i]) return -1; + } + + return 0; + } + + testWithClient< + M extends RedisModules = {}, + F extends RedisFunctions = {}, + S extends RedisScripts = {}, + RESP extends RespVersions = 2, + TYPE_MAPPING extends TypeMapping = {} + >( + title: string, + fn: (client: RedisClientType) => unknown, + options: ClientTestOptions + ): void { + let dockerPromise: ReturnType; + if (this.isVersionGreaterThan(options.minimumDockerVersion)) { + const dockerImage = this.#DOCKER_IMAGE; + before(function () { + this.timeout(30000); + + dockerPromise = spawnRedisServer(dockerImage, options.serverArguments); + return dockerPromise; + }); + } + + it(title, async function () { + if (options.skipTest) return this.skip(); + if (!dockerPromise) return this.skip(); + + const client = createClient({ + ...options.clientOptions, + socket: { + ...options.clientOptions?.socket, + port: (await dockerPromise).port + } + }); + + if (options.disableClientSetup) { + return fn(client); + } + + await client.connect(); + + try { + await client.flushAll(); + await fn(client); + } finally { + if (client.isOpen) { + await client.flushAll(); + client.destroy(); + } + } + }); + } + testWithProxiedClient( + title: string, + fn: (proxiedClient: RedisClientType, proxy: RedisProxy) => unknown, + options: ClientTestOptions + ) { + + this.testWithClient(title, async (client) => { + const freePort = await getFreePortNumber() + const socketOptions = client?.options?.socket; + const proxy = new RedisProxy({ + listenHost: '127.0.0.1', + listenPort: freePort, + //@ts-ignore + targetPort: socketOptions.port, + //@ts-ignore + targetHost: socketOptions.host, + enableLogging: true + }); + + + await proxy.start(); + const proxyClient = client.duplicate({ + socket: { + port: proxy.config.listenPort, + host: 
proxy.config.listenHost + }, + }); + + await proxyClient.connect(); + + try { + await fn(proxyClient, proxy); + } finally { + await proxyClient.destroy(); + await proxy.stop() + } + }, options); + } + testWithClientSentinel< + M extends RedisModules = {}, + F extends RedisFunctions = {}, + S extends RedisScripts = {}, + RESP extends RespVersions = 2, + TYPE_MAPPING extends TypeMapping = {} + >( + title: string, + fn: (sentinel: RedisSentinelType) => unknown, + options: SentinelTestOptions + ): void { + let dockerPromises: ReturnType; + + const passIndex = options.serverArguments.indexOf('--requirepass')+1; + let password: string | undefined = undefined; + if (passIndex != 0) { + password = options.serverArguments[passIndex]; + } + + if (this.isVersionGreaterThan(options.minimumDockerVersion)) { + const dockerImage = this.#DOCKER_IMAGE; + before(function () { + this.timeout(30000); + dockerPromises = spawnRedisSentinel(dockerImage, options.serverArguments); + return dockerPromises; + }); + } + + it(title, async function () { + this.timeout(30000); + if (options.skipTest) return this.skip(); + if (!dockerPromises) return this.skip(); + + + const promises = await dockerPromises; + const rootNodes: Array = promises.map(promise => ({ + host: "127.0.0.1", + port: promise.port + })); + + + const sentinel = createSentinel({ + name: 'mymaster', + sentinelRootNodes: rootNodes, + nodeClientOptions: { + commandOptions: options.clientOptions?.commandOptions, + password: password || undefined, + }, + sentinelClientOptions: { + password: password || undefined, + }, + replicaPoolSize: options?.replicaPoolSize || 0, + scripts: options?.scripts || {}, + modules: options?.modules || {}, + functions: options?.functions || {}, + masterPoolSize: options?.masterPoolSize || undefined, + reserveClient: options?.reserveClient || false, + ...options?.sentinelOptions + }) as RedisSentinelType; + + if (options.disableClientSetup) { + return fn(sentinel); + } + + await sentinel.connect(); + + 
try { + await sentinel.flushAll(); + await fn(sentinel); + } finally { + if (sentinel.isOpen) { + await sentinel.flushAll(); + sentinel.destroy(); + } + } + }); + } + + testWithClientIfVersionWithinRange< + M extends RedisModules = {}, + F extends RedisFunctions = {}, + S extends RedisScripts = {}, + RESP extends RespVersions = 2, + TYPE_MAPPING extends TypeMapping = {} + >( + range: ([minVersion: Array, maxVersion: Array] | [minVersion: Array, 'LATEST']), + title: string, + fn: (client: RedisClientType) => unknown, + options: ClientTestOptions + ): void { + + if (this.isVersionInRange(range[0], range[1] === 'LATEST' ? [Infinity, Infinity, Infinity] : range[1])) { + return this.testWithClient(`${title} [${range[0].join('.')}] - [${(range[1] === 'LATEST') ? range[1] : range[1].join(".")}] `, fn, options) + } else { + console.warn(`Skipping test ${title} because server version ${this.#VERSION_NUMBERS.join('.')} is not within range ${range[0].join(".")} - ${range[1] !== 'LATEST' ? range[1].join(".") : 'LATEST'}`) + } + } + + testWithClienSentineltIfVersionWithinRange< + M extends RedisModules = {}, + F extends RedisFunctions = {}, + S extends RedisScripts = {}, + RESP extends RespVersions = 2, + TYPE_MAPPING extends TypeMapping = {} + >( + range: ([minVersion: Array, maxVersion: Array] | [minVersion: Array, 'LATEST']), + title: string, + fn: (sentinel: RedisSentinelType) => unknown, + options: SentinelTestOptions + ): void { + + if (this.isVersionInRange(range[0], range[1] === 'LATEST' ? [Infinity, Infinity, Infinity] : range[1])) { + return this.testWithClientSentinel(`${title} [${range[0].join('.')}] - [${(range[1] === 'LATEST') ? range[1] : range[1].join(".")}] `, fn, options) + } else { + console.warn(`Skipping test ${title} because server version ${this.#VERSION_NUMBERS.join('.')} is not within range ${range[0].join(".")} - ${range[1] !== 'LATEST' ? 
range[1].join(".") : 'LATEST'}`) + } + } + + testWithClientPool< + M extends RedisModules = {}, + F extends RedisFunctions = {}, + S extends RedisScripts = {}, + RESP extends RespVersions = 2, + TYPE_MAPPING extends TypeMapping = {} + >( + title: string, + fn: (client: RedisClientPoolType) => unknown, + options: ClientPoolTestOptions + ): void { + let dockerPromise: ReturnType; + if (this.isVersionGreaterThan(options.minimumDockerVersion)) { + const dockerImage = this.#DOCKER_IMAGE; + before(function () { + this.timeout(30000); + + dockerPromise = spawnRedisServer(dockerImage, options.serverArguments); + return dockerPromise; + }); + } + + it(title, async function () { + if (options.skipTest) return this.skip(); + if (!dockerPromise) return this.skip(); + + const pool = createClientPool({ + ...options.clientOptions, + socket: { + ...options.clientOptions?.socket, + port: (await dockerPromise).port + } + }, options.poolOptions); + + await pool.connect(); + + try { + await pool.flushAll(); + await fn(pool); + } finally { + await pool.flushAll(); + pool.close(); + } + }); + } + + static async #clusterFlushAll< + M extends RedisModules, + F extends RedisFunctions, + S extends RedisScripts, + RESP extends RespVersions, + TYPE_MAPPING extends TypeMapping + // POLICIES extends CommandPolicies + >(cluster: RedisClusterType): Promise { + return Promise.all( + cluster.masters.map(async master => { + if (master.client) { + await (await cluster.nodeClient(master)).flushAll(); + } + }) + ); + } + + testWithCluster< + M extends RedisModules = {}, + F extends RedisFunctions = {}, + S extends RedisScripts = {}, + RESP extends RespVersions = 2, + TYPE_MAPPING extends TypeMapping = {} + // POLICIES extends CommandPolicies = {} + >( + title: string, + fn: (cluster: RedisClusterType) => unknown, + options: ClusterTestOptions + ): void { + let dockersPromise: ReturnType; + if (this.isVersionGreaterThan(options.minimumDockerVersion)) { + const dockerImage = this.#DOCKER_IMAGE; + 
before(function () { + this.timeout(30000); + + dockersPromise = spawnRedisCluster({ + ...dockerImage, + numberOfMasters: options.numberOfMasters, + numberOfReplicas: options.numberOfReplicas + }, options.serverArguments, + options.clusterConfiguration?.defaults); + return dockersPromise; + }); + } + + it(title, async function () { + if (!dockersPromise) return this.skip(); + + const dockers = await dockersPromise, + cluster = createCluster({ + rootNodes: dockers.map(({ port }) => ({ + socket: { + port + } + })), + minimizeConnections: options.clusterConfiguration?.minimizeConnections ?? true, + ...options.clusterConfiguration + }); + + if(options.disableClusterSetup) { + return fn(cluster); + } + + await cluster.connect(); + + try { + await TestUtils.#clusterFlushAll(cluster); + await fn(cluster); + } finally { + await TestUtils.#clusterFlushAll(cluster); + cluster.destroy(); + } + }); + } + + testAll< + M extends RedisModules = {}, + F extends RedisFunctions = {}, + S extends RedisScripts = {}, + RESP extends RespVersions = 2, + TYPE_MAPPING extends TypeMapping = {} + // POLICIES extends CommandPolicies = {} + >( + title: string, + fn: (client: RedisClientType | RedisClusterType) => unknown, + options: AllTestOptions + ) { + this.testWithClient(`client.${title}`, fn, options.client); + this.testWithCluster(`cluster.${title}`, fn, options.cluster); + } + + + spawnRedisServer< + M extends RedisModules = {}, + F extends RedisFunctions = {}, + S extends RedisScripts = {}, + RESP extends RespVersions = 2, + TYPE_MAPPING extends TypeMapping = {} + // POLICIES extends CommandPolicies = {} + >( + options: ClientPoolTestOptions + ): Promise { + return spawnRedisServerDocker(this.#DOCKER_IMAGE, options.serverArguments) + } + + async spawnRedisSentinels< + M extends RedisModules = {}, + F extends RedisFunctions = {}, + S extends RedisScripts = {}, + RESP extends RespVersions = 2, + TYPE_MAPPING extends TypeMapping = {} + // POLICIES extends CommandPolicies = {} + >( + 
options: ClientPoolTestOptions, + masterPort: number, + sentinelName: string, + count: number + ): Promise> { + const sentinels: Array = []; + for (let i = 0; i < count; i++) { + const appPrefix = 'sentinel-config-dir'; + const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), appPrefix)); + + sentinels.push(await spawnSentinelNode(this.#DOCKER_IMAGE, options.serverArguments, masterPort, sentinelName, tmpDir)) + + if (tmpDir) { + fs.rmSync(tmpDir, { recursive: true }); + } + } + + return sentinels + } +} diff --git a/packages/test-utils/lib/redis-proxy-spec.ts b/packages/test-utils/lib/redis-proxy-spec.ts new file mode 100644 index 00000000000..89b3b28c35a --- /dev/null +++ b/packages/test-utils/lib/redis-proxy-spec.ts @@ -0,0 +1,111 @@ +import { strict as assert } from 'node:assert'; +import { Buffer } from 'node:buffer'; +import { testUtils, GLOBAL } from './test-utils'; +import { RedisProxy } from './redis-proxy'; +import type { RedisClientType } from '@redis/client/lib/client/index.js'; + +describe('RedisSocketProxy', function () { + testUtils.testWithClient('basic proxy functionality', async (client: RedisClientType) => { + const socketOptions = client?.options?.socket; + //@ts-ignore + assert(socketOptions?.port, 'Test requires a TCP connection to Redis'); + + const proxyPort = 50000 + Math.floor(Math.random() * 10000); + const proxy = new RedisProxy({ + listenHost: '127.0.0.1', + listenPort: proxyPort, + //@ts-ignore + targetPort: socketOptions.port, + //@ts-ignore + targetHost: socketOptions.host || '127.0.0.1', + enableLogging: true + }); + + const proxyEvents = { + connections: [] as any[], + dataTransfers: [] as any[] + }; + + proxy.on('connection', (connectionInfo) => { + proxyEvents.connections.push(connectionInfo); + }); + + proxy.on('data', (connectionId, direction, data) => { + proxyEvents.dataTransfers.push({ connectionId, direction, dataLength: data.length }); + }); + + try { + await proxy.start(); + + const proxyClient = client.duplicate({ + socket: 
{ + port: proxyPort, + host: '127.0.0.1' + }, + }); + + await proxyClient.connect(); + + const stats = proxy.getStats(); + assert.equal(stats.activeConnections, 1, 'Should have one active connection'); + assert.equal(proxyEvents.connections.length, 1, 'Should have recorded one connection event'); + + const pingResult = await proxyClient.ping(); + assert.equal(pingResult, 'PONG', 'Client should be able to communicate with Redis through the proxy'); + + const clientToServerTransfers = proxyEvents.dataTransfers.filter(t => t.direction === 'client->server'); + const serverToClientTransfers = proxyEvents.dataTransfers.filter(t => t.direction === 'server->client'); + + assert(clientToServerTransfers.length > 0, 'Should have client->server data transfers'); + assert(serverToClientTransfers.length > 0, 'Should have server->client data transfers'); + + const testKey = `test:proxy:${Date.now()}`; + const testValue = 'proxy-test-value'; + + await proxyClient.set(testKey, testValue); + const retrievedValue = await proxyClient.get(testKey); + assert.equal(retrievedValue, testValue, 'Should be able to set and get values through proxy'); + + proxyClient.destroy(); + + + } finally { + await proxy.stop(); + } + }, GLOBAL.SERVERS.OPEN_RESP_3); + + testUtils.testWithProxiedClient('custom message injection via proxy client', + async (proxiedClient: RedisClientType, proxy: RedisProxy) => { + const customMessageTransfers: any[] = []; + + proxy.on('data', (connectionId, direction, data) => { + if (direction === 'server->client') { + customMessageTransfers.push({ connectionId, dataLength: data.length, data }); + } + }); + + + const stats = proxy.getStats(); + assert.equal(stats.activeConnections, 1, 'Should have one active connection'); + + // Send a resp3 push + const customMessage = Buffer.from('>4\r\n$6\r\nMOVING\r\n:1\r\n:2\r\n$6\r\nhost:3\r\n'); + + const sendResults = proxy.sendToAllClients(customMessage); + assert.equal(sendResults.length, 1, 'Should send to one client'); + 
assert.equal(sendResults[0].success, true, 'Custom message send should succeed'); + + + const customMessageFound = customMessageTransfers.find(transfer => + transfer.dataLength === customMessage.length + ); + assert(customMessageFound, 'Should have recorded the custom message transfer'); + + assert.equal(customMessageFound.dataLength, customMessage.length, + 'Custom message length should match'); + + const pingResult = await proxiedClient.ping(); + assert.equal(pingResult, 'PONG', 'Client should be able to communicate with Redis through the proxy'); + + }, GLOBAL.SERVERS.OPEN_RESP_3) +}); diff --git a/packages/test-utils/lib/redis-proxy.ts b/packages/test-utils/lib/redis-proxy.ts new file mode 100644 index 00000000000..217ec528a33 --- /dev/null +++ b/packages/test-utils/lib/redis-proxy.ts @@ -0,0 +1,329 @@ +import * as net from 'net'; +import { EventEmitter } from 'events'; + +interface ProxyConfig { + readonly listenPort: number; + readonly listenHost?: string; + readonly targetHost: string; + readonly targetPort: number; + readonly timeout?: number; + readonly enableLogging?: boolean; +} + +interface ConnectionInfo { + readonly id: string; + readonly clientAddress: string; + readonly clientPort: number; + readonly connectedAt: Date; +} + +interface ActiveConnection extends ConnectionInfo { + readonly clientSocket: net.Socket; + readonly serverSocket: net.Socket; +} + +type SendResult = + | { readonly success: true; readonly connectionId: string } + | { readonly success: false; readonly error: string; readonly connectionId: string }; + +type DataDirection = 'client->server' | 'server->client'; + +interface ProxyStats { + readonly activeConnections: number; + readonly totalConnections: number; + readonly connections: readonly ConnectionInfo[]; +} + +interface ProxyEvents { + /** Emitted when a new client connects */ + 'connection': (connectionInfo: ConnectionInfo) => void; + /** Emitted when a connection is closed */ + 'disconnect': (connectionInfo: ConnectionInfo) 
=> void; + /** Emitted when data is transferred */ + 'data': (connectionId: string, direction: DataDirection, data: Buffer) => void; + /** Emitted when an error occurs */ + 'error': (error: Error, connectionId?: string) => void; + /** Emitted when the proxy server starts */ + 'listening': (host: string, port: number) => void; + /** Emitted when the proxy server stops */ + 'close': () => void; +} + +export class RedisProxy extends EventEmitter { + private readonly server: net.Server; + public readonly config: Required; + private readonly connections: Map; + private isRunning: boolean; + + constructor(config: ProxyConfig) { + super(); + + + this.config = { + listenHost: '127.0.0.1', + timeout: 30000, + enableLogging: false, + ...config + }; + + this.connections = new Map(); + this.isRunning = false; + this.server = this.createServer(); + } + + public async start(): Promise { + return new Promise((resolve, reject) => { + if (this.isRunning) { + reject(new Error('Proxy is already running')); + return; + } + + this.server.listen(this.config.listenPort, this.config.listenHost, () => { + this.isRunning = true; + this.log(`Proxy listening on ${this.config.listenHost}:${this.config.listenPort}`); + this.log(`Forwarding to Redis server at ${this.config.targetHost}:${this.config.targetPort}`); + this.emit('listening', this.config.listenHost, this.config.listenPort); + resolve(); + }); + + this.server.on('error', (error) => { + this.emit('error', error); + reject(error); + }); + }); + } + + public async stop(): Promise { + return new Promise((resolve) => { + if (!this.isRunning) { + resolve(); + return; + } + + Array.from(this.connections.keys()).forEach((connectionId) => { + this.closeConnection(connectionId); + }); + + this.server.close(() => { + this.isRunning = false; + this.log('Proxy server stopped'); + this.emit('close'); + resolve(); + }); + }); + } + + public getStats(): ProxyStats { + const connections = Array.from(this.connections.values()); + + return { + 
activeConnections: connections.length, + totalConnections: connections.length, + connections: connections.map((conn) => ({ + id: conn.id, + clientAddress: conn.clientAddress, + clientPort: conn.clientPort, + connectedAt: conn.connectedAt, + })) + }; + } + + public closeConnection(connectionId: string): boolean { + const connection = this.connections.get(connectionId); + if (!connection) { + return false; + } + + connection.clientSocket.destroy(); + connection.serverSocket.destroy(); + this.connections.delete(connectionId); + this.emit('disconnect', connection); + return true; + } + + public sendToClient(connectionId: string, data: Buffer): SendResult { + const connection = this.connections.get(connectionId); + if (!connection) { + return { + success: false, + error: 'Connection not found', + connectionId + }; + } + + if (connection.clientSocket.destroyed || !connection.clientSocket.writable) { + return { + success: false, + error: 'Client socket is not writable', + connectionId + }; + } + + try { + connection.clientSocket.write(data); + + this.log(`Sent ${data.length} bytes to client ${connectionId}`); + this.emit('data', connectionId, 'server->client', data); + + return { + success: true, + connectionId + }; + } catch (error) { + const errorMessage = error instanceof Error ? 
error.message : 'Unknown error'; + this.log(`Failed to send data to client ${connectionId}: ${errorMessage}`); + return { + success: false, + error: errorMessage, + connectionId + }; + } + } + + public sendToAllClients(data: Buffer): readonly SendResult[] { + const connectionIds = Array.from(this.connections.keys()); + const results = connectionIds.map((connectionId) => + this.sendToClient(connectionId, data) + ); + + const successCount = results.filter((result) => result.success).length; + const totalCount = results.length; + + this.log(`Sent ${data.length} bytes to ${successCount}/${totalCount} clients`); + + return results; + } + + public sendToClients(connectionIds: readonly string[], data: Buffer): readonly SendResult[] { + const results = connectionIds.map((connectionId) => + this.sendToClient(connectionId, data) + ); + + const successCount = results.filter((result) => result.success).length; + const totalCount = results.length; + + this.log(`Sent ${data.length} bytes to ${successCount}/${totalCount} specified clients`); + + return results; + } + + public getActiveConnectionIds(): readonly string[] { + return Array.from(this.connections.keys()); + } + + private createServer(): net.Server { + return net.createServer((clientSocket) => { + this.handleClientConnection(clientSocket); + }); + } + + private handleClientConnection(clientSocket: net.Socket): void { + const connectionId = this.generateConnectionId(); + const serverSocket = net.createConnection({ + host: this.config.targetHost, + port: this.config.targetPort + }); + + const connectionInfo: ActiveConnection = { + id: connectionId, + clientAddress: clientSocket.remoteAddress || 'unknown', + clientPort: clientSocket.remotePort || 0, + connectedAt: new Date(), + clientSocket, + serverSocket + }; + + this.connections.set(connectionId, connectionInfo); + this.log(`New connection ${connectionId} from ${connectionInfo.clientAddress}:${connectionInfo.clientPort}`); + + 
clientSocket.setTimeout(this.config.timeout); + + serverSocket.on('connect', () => { + this.log(`Connected to Redis server for connection ${connectionId}`); + this.emit('connection', connectionInfo); + }); + + clientSocket.on('data', (data) => { + this.emit('data', connectionId, 'client->server', data); + serverSocket.write(data); + }); + + serverSocket.on('data', (data) => { + this.emit('data', connectionId, 'server->client', data); + clientSocket.write(data); + }); + + clientSocket.on('close', () => { + this.log(`Client disconnected for connection ${connectionId}`); + serverSocket.destroy(); + this.cleanupConnection(connectionId); + }); + + serverSocket.on('close', () => { + this.log(`Server disconnected for connection ${connectionId}`); + clientSocket.destroy(); + this.cleanupConnection(connectionId); + }); + + clientSocket.on('error', (error) => { + this.log(`Client error for connection ${connectionId}: ${error.message}`); + this.emit('error', error, connectionId); + serverSocket.destroy(); + this.cleanupConnection(connectionId); + }); + + serverSocket.on('error', (error) => { + this.log(`Server error for connection ${connectionId}: ${error.message}`); + this.emit('error', error, connectionId); + clientSocket.destroy(); + this.cleanupConnection(connectionId); + }); + + clientSocket.on('timeout', () => { + this.log(`Connection ${connectionId} timed out`); + clientSocket.destroy(); + serverSocket.destroy(); + this.cleanupConnection(connectionId); + }); + } + + private cleanupConnection(connectionId: string): void { + const connection = this.connections.get(connectionId); + if (connection) { + this.connections.delete(connectionId); + this.emit('disconnect', connection); + } + } + + private generateConnectionId(): string { + return `conn_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`; + } + + private log(message: string): void { + if (this.config.enableLogging) { + console.log(`[RedisProxy] ${new Date().toISOString()} - ${message}`); + } + } +} +import { 
createServer } from 'net'; + +export function getFreePortNumber(): Promise { + return new Promise((resolve, reject) => { + const server = createServer(); + + server.listen(0, () => { + const address = server.address(); + server.close(() => { + if (address && typeof address === 'object') { + resolve(address.port); + } + }); + }); + + server.on('error', reject); + }); +} + +export { RedisProxy as RedisTransparentProxy }; +export type { ProxyConfig, ConnectionInfo, ProxyEvents, SendResult, DataDirection, ProxyStats }; + diff --git a/packages/test-utils/lib/test-utils.ts b/packages/test-utils/lib/test-utils.ts new file mode 100644 index 00000000000..7a172f6c4de --- /dev/null +++ b/packages/test-utils/lib/test-utils.ts @@ -0,0 +1,25 @@ +import TestUtils from './index' + +export const testUtils = TestUtils.createFromConfig({ + dockerImageName: 'redislabs/client-libs-test', + dockerImageVersionArgument: 'redis-version', + defaultDockerVersion: '8.4-M01-pre' +}); + + + +export const DEBUG_MODE_ARGS = testUtils.isVersionGreaterThan([7]) ? 
+ ['--enable-debug-command', 'yes'] : + []; + +export const GLOBAL = { + SERVERS: { + + OPEN_RESP_3: { + serverArguments: [...DEBUG_MODE_ARGS], + clientOptions: { + RESP: 3, + } + }, + } +} diff --git a/packages/test-utils/package.json b/packages/test-utils/package.json new file mode 100644 index 00000000000..f7373f6add1 --- /dev/null +++ b/packages/test-utils/package.json @@ -0,0 +1,16 @@ +{ + "name": "@redis/test-utils", + "private": true, + "main": "./dist/lib/index.js", + "types": "./dist/lib/index.d.ts", + "scripts": { + "test": "nyc -r text-summary -r lcov mocha -r tsx './lib/**/*.spec.ts'" + }, + "peerDependencies": { + "@redis/client": "*" + }, + "devDependencies": { + "@types/yargs": "^17.0.32", + "yargs": "^17.7.2" + } +} diff --git a/packages/test-utils/tsconfig.json b/packages/test-utils/tsconfig.json new file mode 100644 index 00000000000..6bb104668fc --- /dev/null +++ b/packages/test-utils/tsconfig.json @@ -0,0 +1,12 @@ +{ + "extends": "../../tsconfig.base.json", + "compilerOptions": { + "outDir": "./dist" + }, + "include": [ + "./lib/**/*.ts" + ], + "references": [{ + "path": "../client" + }] +} diff --git a/packages/time-series/.nycrc.json b/packages/time-series/.nycrc.json new file mode 100644 index 00000000000..367a89ad32c --- /dev/null +++ b/packages/time-series/.nycrc.json @@ -0,0 +1,4 @@ +{ + "extends": "@istanbuljs/nyc-config-typescript", + "exclude": ["dist", "**/*.spec.ts", "lib/test-utils.ts"] +} diff --git a/packages/time-series/.release-it.json b/packages/time-series/.release-it.json new file mode 100644 index 00000000000..0ffec5a0c70 --- /dev/null +++ b/packages/time-series/.release-it.json @@ -0,0 +1,22 @@ +{ + "npm": { + "publish": true, + "publishArgs": ["--access", "public"] + }, + "git": { + "tagName": "time-series@${version}", + "tagMatch": "time-series@*", + "commitMessage": "Release ${tagName}", + "tagAnnotation": "Release ${tagName}", + "commitArgs": "--all" + }, + "plugins": { + "@release-it/bumper": { + "out": { + "file": 
"package.json", + "path": ["peerDependencies.@redis/client"], + "versionPrefix": "^" + } + } + } +} diff --git a/packages/time-series/README.md b/packages/time-series/README.md new file mode 100644 index 00000000000..ff42bfb6b3d --- /dev/null +++ b/packages/time-series/README.md @@ -0,0 +1,143 @@ +# @redis/time-series + +This package provides support for the [RedisTimeSeries](https://redis.io/docs/data-types/timeseries/) module, which adds a time series data structure to Redis. + +Should be used with [`redis`/`@redis/client`](https://github.com/redis/node-redis). + +:warning: To use these extra commands, your Redis server must have the RedisTimeSeries module installed. + +## Usage + +For a complete example, see [`time-series.js`](https://github.com/redis/node-redis/blob/master/examples/time-series.js) in the Node Redis examples folder. + +### Creating Time Series data structure in Redis + +The [`TS.CREATE`](https://oss.redis.com/redistimeseries/commands/#tscreate) command creates a new time series. + +Here, we'll create a new time series "`temperature`": + +```javascript + +import { createClient } from 'redis'; +import { TimeSeriesDuplicatePolicies, TimeSeriesEncoding, TimeSeriesAggregationType } from '@redis/time-series'; + +... +const created = await client.ts.create('temperature', { + RETENTION: 86400000, // 1 day in milliseconds + ENCODING: TimeSeriesEncoding.UNCOMPRESSED, // No compression - When not specified, the option is set to COMPRESSED + DUPLICATE_POLICY: TimeSeriesDuplicatePolicies.BLOCK, // No duplicates - When not specified: set to the global DUPLICATE_POLICY configuration of the database (which by default, is BLOCK). 
+}); + +if (created === 'OK') { + console.log('Created timeseries.'); +} else { + console.log('Error creating timeseries :('); + process.exit(1); +} +``` + +### Adding new value to a Time Series data structure in Redis + +With RedisTimeSeries, we can add a single value to time series data structure using the [`TS.ADD`](https://redis.io/commands/ts.add/) command and if we would like to add multiple values we can use the [`TS.MADD`](https://redis.io/commands/ts.madd/) command. + +```javascript + +let value = Math.floor(Math.random() * 1000) + 1; // Random data point value +let currentTimestamp = 1640995200000; // Jan 1 2022 00:00:00 +let num = 0; + +while (num < 10000) { + // Add a new value to the timeseries, providing our own timestamp: + // https://redis.io/commands/ts.add/ + await client.ts.add('temperature', currentTimestamp, value); + console.log(`Added timestamp ${currentTimestamp}, value ${value}.`); + + num += 1; + value = Math.floor(Math.random() * 1000) + 1; // Get another random value + currentTimestamp += 1000; // Move on one second. 
+} + +// Add multiple values to the timeseries in round trip to the server: +// https://redis.io/commands/ts.madd/ +const response = await client.ts.mAdd([{ + key: 'temperature', + timestamp: currentTimestamp + 60000, + value: Math.floor(Math.random() * 1000) + 1 +}, { + key: 'temperature', + timestamp: currentTimestamp + 120000, + value: Math.floor(Math.random() * 1000) + 1 +}]); +``` + +### Retrieving Time Series data from Redis + +With RedisTimeSeries, we can retrieve the time series data using the [`TS.RANGE`](https://redis.io/commands/ts.range/) command by passing the criteria as follows: + +```javascript +// Query the timeseries with TS.RANGE: +// https://redis.io/commands/ts.range/ +const fromTimestamp = 1640995200000; // Jan 1 2022 00:00:00 +const toTimestamp = 1640995260000; // Jan 1 2022 00:01:00 +const rangeResponse = await client.ts.range('temperature', fromTimestamp, toTimestamp, { + // Group into 10 second averages. + AGGREGATION: { + type: TimeSeriesAggregationType.AVERAGE, + timeBucket: 10000 + } +}); + +console.log('RANGE RESPONSE:'); +// rangeResponse looks like: +// [ +// { timestamp: 1640995200000, value: 356.8 }, +// { timestamp: 1640995210000, value: 534.8 }, +// { timestamp: 1640995220000, value: 481.3 }, +// { timestamp: 1640995230000, value: 437 }, +// { timestamp: 1640995240000, value: 507.3 }, +// { timestamp: 1640995250000, value: 581.2 }, +// { timestamp: 1640995260000, value: 600 } +// ] +``` + +### Altering Time Series data Stored in Redis + +RedisTimeSeries includes commands that can update values in a time series data structure. 
+ +Using the [`TS.ALTER`](https://redis.io/commands/ts.alter/) command, we can update time series retention like this: + +```javascript +// https://redis.io/commands/ts.alter/ +const alterResponse = await client.ts.alter('temperature', { + RETENTION: 0 // Keep the entries forever +}); +``` + +### Retrieving Information about the timeseries Stored in Redis + +RedisTimeSeries also includes commands that can help to view the information on the state of a time series. + +Using the [`TS.INFO`](https://redis.io/commands/ts.info/) command, we can view timeseries information like this: + +```javascript +// Get some information about the state of the timeseries. +// https://redis.io/commands/ts.info/ +const tsInfo = await client.ts.info('temperature'); + +// tsInfo looks like this: +// { +// totalSamples: 1440, +// memoryUsage: 28904, +// firstTimestamp: 1641508920000, +// lastTimestamp: 1641595320000, +// retentionTime: 86400000, +// chunkCount: 7, +// chunkSize: 4096, +// chunkType: 'uncompressed', +// duplicatePolicy: 'block', +// labels: [], +// sourceKey: null, +// rules: [] +// } +``` + diff --git a/packages/time-series/lib/commands/ADD.spec.ts b/packages/time-series/lib/commands/ADD.spec.ts new file mode 100644 index 00000000000..d66c85441a1 --- /dev/null +++ b/packages/time-series/lib/commands/ADD.spec.ts @@ -0,0 +1,94 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import ADD from './ADD'; +import { TIME_SERIES_ENCODING, TIME_SERIES_DUPLICATE_POLICIES } from './helpers'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('TS.ADD', () => { + describe('transformArguments', () => { + it('without options', () => { + assert.deepEqual( + parseArgs(ADD, 'key', '*', 1), + ['TS.ADD', 'key', '*', '1'] + ); + }); + + it('with RETENTION', () => { + assert.deepEqual( + parseArgs(ADD, 'key', '*', 1, { + RETENTION: 1 + }), + ['TS.ADD', 'key', '*', '1', 'RETENTION', '1'] + ); + }); + + 
it('with ENCODING', () => { + assert.deepEqual( + parseArgs(ADD, 'key', '*', 1, { + ENCODING: TIME_SERIES_ENCODING.UNCOMPRESSED + }), + ['TS.ADD', 'key', '*', '1', 'ENCODING', 'UNCOMPRESSED'] + ); + }); + + it('with CHUNK_SIZE', () => { + assert.deepEqual( + parseArgs(ADD, 'key', '*', 1, { + CHUNK_SIZE: 1 + }), + ['TS.ADD', 'key', '*', '1', 'CHUNK_SIZE', '1'] + ); + }); + + it('with ON_DUPLICATE', () => { + assert.deepEqual( + parseArgs(ADD, 'key', '*', 1, { + ON_DUPLICATE: TIME_SERIES_DUPLICATE_POLICIES.BLOCK + }), + ['TS.ADD', 'key', '*', '1', 'ON_DUPLICATE', 'BLOCK'] + ); + }); + + it('with LABELS', () => { + assert.deepEqual( + parseArgs(ADD, 'key', '*', 1, { + LABELS: { label: 'value' } + }), + ['TS.ADD', 'key', '*', '1', 'LABELS', 'label', 'value'] + ); + }); + + it ('with IGNORE', () => { + assert.deepEqual( + parseArgs(ADD, 'key', '*', 1, { + IGNORE: { + maxTimeDiff: 1, + maxValDiff: 1 + } + }), + ['TS.ADD', 'key', '*', '1', 'IGNORE', '1', '1'] + ) + }); + + it('with RETENTION, ENCODING, CHUNK_SIZE, ON_DUPLICATE, LABELS, IGNORE', () => { + assert.deepEqual( + parseArgs(ADD, 'key', '*', 1, { + RETENTION: 1, + ENCODING: TIME_SERIES_ENCODING.UNCOMPRESSED, + CHUNK_SIZE: 1, + ON_DUPLICATE: TIME_SERIES_DUPLICATE_POLICIES.BLOCK, + LABELS: { label: 'value' }, + IGNORE: { maxTimeDiff: 1, maxValDiff: 1} + }), + ['TS.ADD', 'key', '*', '1', 'RETENTION', '1', 'ENCODING', 'UNCOMPRESSED', 'CHUNK_SIZE', '1', 'ON_DUPLICATE', 'BLOCK', 'LABELS', 'label', 'value', 'IGNORE', '1', '1'] + ); + }); + }); + + testUtils.testWithClient('client.ts.add', async client => { + assert.equal( + await client.ts.add('key', 0, 1), + 0 + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/time-series/lib/commands/ADD.ts b/packages/time-series/lib/commands/ADD.ts new file mode 100644 index 00000000000..78a9247c41e --- /dev/null +++ b/packages/time-series/lib/commands/ADD.ts @@ -0,0 +1,66 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, 
NumberReply, Command } from '@redis/client/dist/lib/RESP/types'; +import { + transformTimestampArgument, + parseRetentionArgument, + TimeSeriesEncoding, + parseEncodingArgument, + parseChunkSizeArgument, + TimeSeriesDuplicatePolicies, + Labels, + parseLabelsArgument, + Timestamp, + parseIgnoreArgument +} from './helpers'; + +export interface TsIgnoreOptions { + maxTimeDiff: number; + maxValDiff: number; +} + +export interface TsAddOptions { + RETENTION?: number; + ENCODING?: TimeSeriesEncoding; + CHUNK_SIZE?: number; + ON_DUPLICATE?: TimeSeriesDuplicatePolicies; + LABELS?: Labels; + IGNORE?: TsIgnoreOptions; +} + +export default { + IS_READ_ONLY: false, + /** + * Creates or appends a sample to a time series + * @param parser - The command parser + * @param key - The key name for the time series + * @param timestamp - The timestamp of the sample + * @param value - The value of the sample + * @param options - Optional configuration parameters + */ + parseCommand( + parser: CommandParser, + key: RedisArgument, + timestamp: Timestamp, + value: number, + options?: TsAddOptions + ) { + parser.push('TS.ADD'); + parser.pushKey(key); + parser.push(transformTimestampArgument(timestamp), value.toString()); + + parseRetentionArgument(parser, options?.RETENTION); + + parseEncodingArgument(parser, options?.ENCODING); + + parseChunkSizeArgument(parser, options?.CHUNK_SIZE); + + if (options?.ON_DUPLICATE) { + parser.push('ON_DUPLICATE', options.ON_DUPLICATE); + } + + parseLabelsArgument(parser, options?.LABELS); + + parseIgnoreArgument(parser, options?.IGNORE); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/time-series/lib/commands/ALTER.spec.ts b/packages/time-series/lib/commands/ALTER.spec.ts new file mode 100644 index 00000000000..46b94c5863a --- /dev/null +++ b/packages/time-series/lib/commands/ALTER.spec.ts @@ -0,0 +1,86 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from 
'../test-utils'; +import ALTER from './ALTER'; +import { TIME_SERIES_DUPLICATE_POLICIES } from './helpers'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('TS.ALTER', () => { + describe('transformArguments', () => { + it('without options', () => { + assert.deepEqual( + parseArgs(ALTER, 'key'), + ['TS.ALTER', 'key'] + ); + }); + + it('with RETENTION', () => { + assert.deepEqual( + parseArgs(ALTER, 'key', { + RETENTION: 1 + }), + ['TS.ALTER', 'key', 'RETENTION', '1'] + ); + }); + + it('with CHUNK_SIZE', () => { + assert.deepEqual( + parseArgs(ALTER, 'key', { + CHUNK_SIZE: 1 + }), + ['TS.ALTER', 'key', 'CHUNK_SIZE', '1'] + ); + }); + + it('with DUPLICATE_POLICY', () => { + assert.deepEqual( + parseArgs(ALTER, 'key', { + DUPLICATE_POLICY: TIME_SERIES_DUPLICATE_POLICIES.BLOCK + }), + ['TS.ALTER', 'key', 'DUPLICATE_POLICY', 'BLOCK'] + ); + }); + + it('with LABELS', () => { + assert.deepEqual( + parseArgs(ALTER, 'key', { + LABELS: { label: 'value' } + }), + ['TS.ALTER', 'key', 'LABELS', 'label', 'value'] + ); + }); + + it('with IGNORE with MAX_TIME_DIFF', () => { + assert.deepEqual( + parseArgs(ALTER, 'key', { + IGNORE: { + maxTimeDiff: 1, + maxValDiff: 1 + } + }), + ['TS.ALTER', 'key', 'IGNORE', '1', '1'] + ) + }); + + it('with RETENTION, CHUNK_SIZE, DUPLICATE_POLICY, LABELS, IGNORE', () => { + assert.deepEqual( + parseArgs(ALTER, 'key', { + RETENTION: 1, + CHUNK_SIZE: 1, + DUPLICATE_POLICY: TIME_SERIES_DUPLICATE_POLICIES.BLOCK, + LABELS: { label: 'value' }, + IGNORE: { maxTimeDiff: 1, maxValDiff: 1} + }), + ['TS.ALTER', 'key', 'RETENTION', '1', 'CHUNK_SIZE', '1', 'DUPLICATE_POLICY', 'BLOCK', 'LABELS', 'label', 'value', 'IGNORE', '1', '1'] + ); + }); + }); + + testUtils.testWithClient('client.ts.alter', async client => { + const [, reply] = await Promise.all([ + client.ts.create('key'), + client.ts.alter('key') + ]); + + assert.equal(reply, 'OK'); + }, GLOBAL.SERVERS.OPEN); +}); diff --git 
a/packages/time-series/lib/commands/ALTER.ts b/packages/time-series/lib/commands/ALTER.ts new file mode 100644 index 00000000000..613539b4861 --- /dev/null +++ b/packages/time-series/lib/commands/ALTER.ts @@ -0,0 +1,32 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, SimpleStringReply, Command } from '@redis/client/dist/lib/RESP/types'; +import { TsCreateOptions } from './CREATE'; +import { parseRetentionArgument, parseChunkSizeArgument, parseDuplicatePolicy, parseLabelsArgument, parseIgnoreArgument } from './helpers'; + + +export type TsAlterOptions = Pick; + +export default { + IS_READ_ONLY: false, + /** + * Alters the configuration of an existing time series + * @param parser - The command parser + * @param key - The key name for the time series + * @param options - Configuration parameters to alter + */ + parseCommand(parser: CommandParser, key: RedisArgument, options?: TsAlterOptions) { + parser.push('TS.ALTER'); + parser.pushKey(key); + + parseRetentionArgument(parser, options?.RETENTION); + + parseChunkSizeArgument(parser, options?.CHUNK_SIZE); + + parseDuplicatePolicy(parser, options?.DUPLICATE_POLICY); + + parseLabelsArgument(parser, options?.LABELS); + + parseIgnoreArgument(parser, options?.IGNORE); + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/time-series/lib/commands/CREATE.spec.ts b/packages/time-series/lib/commands/CREATE.spec.ts new file mode 100644 index 00000000000..4fbfabb6858 --- /dev/null +++ b/packages/time-series/lib/commands/CREATE.spec.ts @@ -0,0 +1,94 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import CREATE from './CREATE'; +import { TIME_SERIES_ENCODING, TIME_SERIES_DUPLICATE_POLICIES } from './helpers'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('TS.CREATE', () => { + describe('transformArguments', 
() => { + it('without options', () => { + assert.deepEqual( + parseArgs(CREATE, 'key'), + ['TS.CREATE', 'key'] + ); + }); + + it('with RETENTION', () => { + assert.deepEqual( + parseArgs(CREATE, 'key', { + RETENTION: 1 + }), + ['TS.CREATE', 'key', 'RETENTION', '1'] + ); + }); + + it('with ENCODING', () => { + assert.deepEqual( + parseArgs(CREATE, 'key', { + ENCODING: TIME_SERIES_ENCODING.UNCOMPRESSED + }), + ['TS.CREATE', 'key', 'ENCODING', 'UNCOMPRESSED'] + ); + }); + + it('with CHUNK_SIZE', () => { + assert.deepEqual( + parseArgs(CREATE, 'key', { + CHUNK_SIZE: 1 + }), + ['TS.CREATE', 'key', 'CHUNK_SIZE', '1'] + ); + }); + + it('with DUPLICATE_POLICY', () => { + assert.deepEqual( + parseArgs(CREATE, 'key', { + DUPLICATE_POLICY: TIME_SERIES_DUPLICATE_POLICIES.BLOCK + }), + ['TS.CREATE', 'key', 'DUPLICATE_POLICY', 'BLOCK'] + ); + }); + + it('with LABELS', () => { + assert.deepEqual( + parseArgs(CREATE, 'key', { + LABELS: { label: 'value' } + }), + ['TS.CREATE', 'key', 'LABELS', 'label', 'value'] + ); + }); + + it('with IGNORE with MAX_TIME_DIFF', () => { + assert.deepEqual( + parseArgs(CREATE, 'key', { + IGNORE: { + maxTimeDiff: 1, + maxValDiff: 1 + } + }), + ['TS.CREATE', 'key', 'IGNORE', '1', '1'] + ) + }); + + it('with RETENTION, ENCODING, CHUNK_SIZE, DUPLICATE_POLICY, LABELS, IGNORE', () => { + assert.deepEqual( + parseArgs(CREATE, 'key', { + RETENTION: 1, + ENCODING: TIME_SERIES_ENCODING.UNCOMPRESSED, + CHUNK_SIZE: 1, + DUPLICATE_POLICY: TIME_SERIES_DUPLICATE_POLICIES.BLOCK, + LABELS: { label: 'value' }, + IGNORE: { maxTimeDiff: 1, maxValDiff: 1} + }), + ['TS.CREATE', 'key', 'RETENTION', '1', 'ENCODING', 'UNCOMPRESSED', 'CHUNK_SIZE', '1', 'DUPLICATE_POLICY', 'BLOCK', 'LABELS', 'label', 'value', 'IGNORE', '1', '1'] + ); + }); + }); + + testUtils.testWithClient('client.ts.create', async client => { + assert.equal( + await client.ts.create('key'), + 'OK' + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/time-series/lib/commands/CREATE.ts 
b/packages/time-series/lib/commands/CREATE.ts new file mode 100644 index 00000000000..e8a6b03773f --- /dev/null +++ b/packages/time-series/lib/commands/CREATE.ts @@ -0,0 +1,50 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, SimpleStringReply, Command } from '@redis/client/dist/lib/RESP/types'; +import { + parseRetentionArgument, + TimeSeriesEncoding, + parseEncodingArgument, + parseChunkSizeArgument, + TimeSeriesDuplicatePolicies, + parseDuplicatePolicy, + Labels, + parseLabelsArgument, + parseIgnoreArgument +} from './helpers'; +import { TsIgnoreOptions } from './ADD'; + +export interface TsCreateOptions { + RETENTION?: number; + ENCODING?: TimeSeriesEncoding; + CHUNK_SIZE?: number; + DUPLICATE_POLICY?: TimeSeriesDuplicatePolicies; + LABELS?: Labels; + IGNORE?: TsIgnoreOptions; +} + +export default { + IS_READ_ONLY: false, + /** + * Creates a new time series + * @param parser - The command parser + * @param key - The key name for the new time series + * @param options - Optional configuration parameters + */ + parseCommand(parser: CommandParser, key: RedisArgument, options?: TsCreateOptions) { + parser.push('TS.CREATE'); + parser.pushKey(key); + + parseRetentionArgument(parser, options?.RETENTION); + + parseEncodingArgument(parser, options?.ENCODING); + + parseChunkSizeArgument(parser, options?.CHUNK_SIZE); + + parseDuplicatePolicy(parser, options?.DUPLICATE_POLICY); + + parseLabelsArgument(parser, options?.LABELS); + + parseIgnoreArgument(parser, options?.IGNORE); + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/time-series/lib/commands/CREATERULE.spec.ts b/packages/time-series/lib/commands/CREATERULE.spec.ts new file mode 100644 index 00000000000..da26bf458e2 --- /dev/null +++ b/packages/time-series/lib/commands/CREATERULE.spec.ts @@ -0,0 +1,32 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from 
'../test-utils'; +import CREATERULE, { TIME_SERIES_AGGREGATION_TYPE } from './CREATERULE'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('TS.CREATERULE', () => { + describe('transformArguments', () => { + it('without options', () => { + assert.deepEqual( + parseArgs(CREATERULE, 'source', 'destination', TIME_SERIES_AGGREGATION_TYPE.AVG, 1), + ['TS.CREATERULE', 'source', 'destination', 'AGGREGATION', 'AVG', '1'] + ); + }); + + it('with alignTimestamp', () => { + assert.deepEqual( + parseArgs(CREATERULE, 'source', 'destination', TIME_SERIES_AGGREGATION_TYPE.AVG, 1, 1), + ['TS.CREATERULE', 'source', 'destination', 'AGGREGATION', 'AVG', '1', '1'] + ); + }); + }); + + testUtils.testWithClient('client.ts.createRule', async client => { + const [, , reply] = await Promise.all([ + client.ts.create('source'), + client.ts.create('destination'), + client.ts.createRule('source', 'destination', TIME_SERIES_AGGREGATION_TYPE.AVG, 1) + ]); + + assert.equal(reply, 'OK'); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/time-series/lib/commands/CREATERULE.ts b/packages/time-series/lib/commands/CREATERULE.ts new file mode 100644 index 00000000000..71d08009ac2 --- /dev/null +++ b/packages/time-series/lib/commands/CREATERULE.ts @@ -0,0 +1,50 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, SimpleStringReply, Command } from '@redis/client/dist/lib/RESP/types'; + +export const TIME_SERIES_AGGREGATION_TYPE = { + AVG: 'AVG', + FIRST: 'FIRST', + LAST: 'LAST', + MIN: 'MIN', + MAX: 'MAX', + SUM: 'SUM', + RANGE: 'RANGE', + COUNT: 'COUNT', + STD_P: 'STD.P', + STD_S: 'STD.S', + VAR_P: 'VAR.P', + VAR_S: 'VAR.S', + TWA: 'TWA' +} as const; + +export type TimeSeriesAggregationType = typeof TIME_SERIES_AGGREGATION_TYPE[keyof typeof TIME_SERIES_AGGREGATION_TYPE]; + +export default { + IS_READ_ONLY: false, + /** + * Creates a compaction rule from source time series to destination time series + * @param 
parser - The command parser + * @param sourceKey - The source time series key + * @param destinationKey - The destination time series key + * @param aggregationType - The aggregation type to use + * @param bucketDuration - The duration of each bucket in milliseconds + * @param alignTimestamp - Optional timestamp for alignment + */ + parseCommand( + parser: CommandParser, + sourceKey: RedisArgument, + destinationKey: RedisArgument, + aggregationType: TimeSeriesAggregationType, + bucketDuration: number, + alignTimestamp?: number + ) { + parser.push('TS.CREATERULE'); + parser.pushKeys([sourceKey, destinationKey]); + parser.push('AGGREGATION', aggregationType, bucketDuration.toString()); + + if (alignTimestamp !== undefined) { + parser.push(alignTimestamp.toString()); + } + }, + transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/time-series/lib/commands/DECRBY.spec.ts b/packages/time-series/lib/commands/DECRBY.spec.ts new file mode 100644 index 00000000000..b272ed1614d --- /dev/null +++ b/packages/time-series/lib/commands/DECRBY.spec.ts @@ -0,0 +1,93 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import DECRBY from './DECRBY'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('TS.DECRBY', () => { + describe('transformArguments', () => { + it('without options', () => { + assert.deepEqual( + parseArgs(DECRBY, 'key', 1), + ['TS.DECRBY', 'key', '1'] + ); + }); + + it('with TIMESTAMP', () => { + assert.deepEqual( + parseArgs(DECRBY, 'key', 1, { + TIMESTAMP: '*' + }), + ['TS.DECRBY', 'key', '1', 'TIMESTAMP', '*'] + ); + }); + + it('with RETENTION', () => { + assert.deepEqual( + parseArgs(DECRBY, 'key', 1, { + RETENTION: 1 + }), + ['TS.DECRBY', 'key', '1', 'RETENTION', '1'] + ); + }); + + it('with UNCOMPRESSED', () => { + assert.deepEqual( + parseArgs(DECRBY, 'key', 1, { + UNCOMPRESSED: true + }), + 
['TS.DECRBY', 'key', '1', 'UNCOMPRESSED'] + ); + }); + + it('with CHUNK_SIZE', () => { + assert.deepEqual( + parseArgs(DECRBY, 'key', 1, { + CHUNK_SIZE: 100 + }), + ['TS.DECRBY', 'key', '1', 'CHUNK_SIZE', '100'] + ); + }); + + it('with LABELS', () => { + assert.deepEqual( + parseArgs(DECRBY, 'key', 1, { + LABELS: { label: 'value' } + }), + ['TS.DECRBY', 'key', '1', 'LABELS', 'label', 'value'] + ); + }); + + it ('with IGNORE', () => { + assert.deepEqual( + parseArgs(DECRBY, 'key', 1, { + IGNORE: { + maxTimeDiff: 1, + maxValDiff: 1 + } + }), + ['TS.DECRBY', 'key', '1', 'IGNORE', '1', '1'] + ) + }); + + it('with TIMESTAMP, RETENTION, UNCOMPRESSED, CHUNK_SIZE and LABELS', () => { + assert.deepEqual( + parseArgs(DECRBY, 'key', 1, { + TIMESTAMP: '*', + RETENTION: 1, + UNCOMPRESSED: true, + CHUNK_SIZE: 2, + LABELS: { label: 'value' }, + IGNORE: { maxTimeDiff: 1, maxValDiff: 1 } + }), + ['TS.DECRBY', 'key', '1', 'TIMESTAMP', '*', 'RETENTION', '1', 'UNCOMPRESSED', 'CHUNK_SIZE', '2', 'LABELS', 'label', 'value', 'IGNORE', '1', '1'] + ); + }); + }); + + testUtils.testWithClient('client.ts.decrBy', async client => { + assert.equal( + typeof await client.ts.decrBy('key', 1), + 'number' + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/time-series/lib/commands/DECRBY.ts b/packages/time-series/lib/commands/DECRBY.ts new file mode 100644 index 00000000000..61b9d94e082 --- /dev/null +++ b/packages/time-series/lib/commands/DECRBY.ts @@ -0,0 +1,17 @@ +import { Command } from '@redis/client/dist/lib/RESP/types'; +import INCRBY, { parseIncrByArguments } from './INCRBY'; + +export default { + IS_READ_ONLY: INCRBY.IS_READ_ONLY, + /** + * Decreases the value of a time series by a given amount + * @param args - Arguments passed to the parseIncrByArguments function + */ + parseCommand(...args: Parameters) { + const parser = args[0]; + + parser.push('TS.DECRBY'); + parseIncrByArguments(...args); + }, + transformReply: INCRBY.transformReply +} as const satisfies Command; diff --git 
a/packages/time-series/lib/commands/DEL.spec.ts b/packages/time-series/lib/commands/DEL.spec.ts new file mode 100644 index 00000000000..07d29ca095e --- /dev/null +++ b/packages/time-series/lib/commands/DEL.spec.ts @@ -0,0 +1,22 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import DEL from './DEL'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('TS.DEL', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(DEL, 'key', '-', '+'), + ['TS.DEL', 'key', '-', '+'] + ); + }); + + testUtils.testWithClient('client.ts.del', async client => { + const [, reply] = await Promise.all([ + client.ts.create('key'), + client.ts.del('key', '-', '+') + ]); + + assert.equal(reply, 0); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/time-series/lib/commands/DEL.ts b/packages/time-series/lib/commands/DEL.ts new file mode 100644 index 00000000000..1e0e01164fd --- /dev/null +++ b/packages/time-series/lib/commands/DEL.ts @@ -0,0 +1,20 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { Timestamp, transformTimestampArgument } from './helpers'; +import { RedisArgument, NumberReply, Command, } from '@redis/client/dist/lib/RESP/types'; + +export default { + IS_READ_ONLY: false, + /** + * Deletes samples between two timestamps from a time series + * @param parser - The command parser + * @param key - The key name of the time series + * @param fromTimestamp - Start timestamp to delete from + * @param toTimestamp - End timestamp to delete until + */ + parseCommand(parser: CommandParser, key: RedisArgument, fromTimestamp: Timestamp, toTimestamp: Timestamp) { + parser.push('TS.DEL'); + parser.pushKey(key); + parser.push(transformTimestampArgument(fromTimestamp), transformTimestampArgument(toTimestamp)); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git 
a/packages/time-series/lib/commands/DELETERULE.spec.ts b/packages/time-series/lib/commands/DELETERULE.spec.ts new file mode 100644 index 00000000000..d7a19a8eaa1 --- /dev/null +++ b/packages/time-series/lib/commands/DELETERULE.spec.ts @@ -0,0 +1,25 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import DELETERULE from './DELETERULE'; +import { TIME_SERIES_AGGREGATION_TYPE } from './CREATERULE'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('TS.DELETERULE', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(DELETERULE, 'source', 'destination'), + ['TS.DELETERULE', 'source', 'destination'] + ); + }); + + testUtils.testWithClient('client.ts.deleteRule', async client => { + const [, , , reply] = await Promise.all([ + client.ts.create('source'), + client.ts.create('destination'), + client.ts.createRule('source', 'destination', TIME_SERIES_AGGREGATION_TYPE.AVG, 1), + client.ts.deleteRule('source', 'destination') + ]); + + assert.equal(reply, 'OK'); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/time-series/lib/commands/DELETERULE.ts b/packages/time-series/lib/commands/DELETERULE.ts new file mode 100644 index 00000000000..8897f666b7b --- /dev/null +++ b/packages/time-series/lib/commands/DELETERULE.ts @@ -0,0 +1,17 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, SimpleStringReply, Command } from '@redis/client/dist/lib/RESP/types'; + +export default { + IS_READ_ONLY: false, + /** + * Deletes a compaction rule between source and destination time series + * @param parser - The command parser + * @param sourceKey - The source time series key + * @param destinationKey - The destination time series key + */ + parseCommand(parser: CommandParser, sourceKey: RedisArgument, destinationKey: RedisArgument) { + parser.push('TS.DELETERULE'); + parser.pushKeys([sourceKey, destinationKey]); + }, + 
transformReply: undefined as unknown as () => SimpleStringReply<'OK'> +} as const satisfies Command; diff --git a/packages/time-series/lib/commands/GET.spec.ts b/packages/time-series/lib/commands/GET.spec.ts new file mode 100644 index 00000000000..836a1b638af --- /dev/null +++ b/packages/time-series/lib/commands/GET.spec.ts @@ -0,0 +1,47 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import GET from './GET'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('TS.GET', () => { + describe('transformArguments', () => { + it('without options', () => { + assert.deepEqual( + parseArgs(GET, 'key'), + ['TS.GET', 'key'] + ); + }); + + it('with LATEST', () => { + assert.deepEqual( + parseArgs(GET, 'key', { + LATEST: true + }), + ['TS.GET', 'key', 'LATEST'] + ); + }); + }); + + describe('client.ts.get', () => { + testUtils.testWithClient('null', async client => { + const [, reply] = await Promise.all([ + client.ts.create('key'), + client.ts.get('key') + ]); + + assert.equal(reply, null); + }, GLOBAL.SERVERS.OPEN); + + testUtils.testWithClient('with sample', async client => { + const [, reply] = await Promise.all([ + client.ts.add('key', 0, 1), + client.ts.get('key') + ]); + + assert.deepEqual(reply, { + timestamp: 0, + value: 1 + }); + }, GLOBAL.SERVERS.OPEN); + }); +}); diff --git a/packages/time-series/lib/commands/GET.ts b/packages/time-series/lib/commands/GET.ts new file mode 100644 index 00000000000..9462705c02c --- /dev/null +++ b/packages/time-series/lib/commands/GET.ts @@ -0,0 +1,40 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, TuplesReply, NumberReply, DoubleReply, UnwrapReply, Resp2Reply, Command } from '@redis/client/dist/lib/RESP/types'; + +export interface TsGetOptions { + LATEST?: boolean; +} + +export type TsGetReply = TuplesReply<[]> | TuplesReply<[NumberReply, DoubleReply]>; + +export default { + IS_READ_ONLY: 
true, + /** + * Gets the last sample of a time series + * @param parser - The command parser + * @param key - The key name of the time series + * @param options - Optional parameters for the command + */ + parseCommand(parser: CommandParser, key: RedisArgument, options?: TsGetOptions) { + parser.push('TS.GET'); + parser.pushKey(key); + + if (options?.LATEST) { + parser.push('LATEST'); + } + }, + transformReply: { + 2(reply: UnwrapReply>) { + return reply.length === 0 ? null : { + timestamp: reply[0], + value: Number(reply[1]) + }; + }, + 3(reply: UnwrapReply) { + return reply.length === 0 ? null : { + timestamp: reply[0], + value: reply[1] + }; + } + } +} as const satisfies Command; diff --git a/packages/time-series/lib/commands/INCRBY.spec.ts b/packages/time-series/lib/commands/INCRBY.spec.ts new file mode 100644 index 00000000000..5d005952b30 --- /dev/null +++ b/packages/time-series/lib/commands/INCRBY.spec.ts @@ -0,0 +1,103 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import INCRBY from './INCRBY'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('TS.INCRBY', () => { + describe('transformArguments', () => { + it('without options', () => { + assert.deepEqual( + parseArgs(INCRBY, 'key', 1), + ['TS.INCRBY', 'key', '1'] + ); + }); + + it('with TIMESTAMP', () => { + assert.deepEqual( + parseArgs(INCRBY, 'key', 1, { + TIMESTAMP: '*' + }), + ['TS.INCRBY', 'key', '1', 'TIMESTAMP', '*'] + ); + }); + + it('with RETENTION', () => { + assert.deepEqual( + parseArgs(INCRBY, 'key', 1, { + RETENTION: 1 + }), + ['TS.INCRBY', 'key', '1', 'RETENTION', '1'] + ); + }); + + it('with UNCOMPRESSED', () => { + assert.deepEqual( + parseArgs(INCRBY, 'key', 1, { + UNCOMPRESSED: true + }), + ['TS.INCRBY', 'key', '1', 'UNCOMPRESSED'] + ); + }); + + it('without UNCOMPRESSED', () => { + assert.deepEqual( + parseArgs(INCRBY, 'key', 1, { + UNCOMPRESSED: false + }), + ['TS.INCRBY', 'key', '1'] + 
); + }); + + it('with CHUNK_SIZE', () => { + assert.deepEqual( + parseArgs(INCRBY, 'key', 1, { + CHUNK_SIZE: 1 + }), + ['TS.INCRBY', 'key', '1', 'CHUNK_SIZE', '1'] + ); + }); + + it('with LABELS', () => { + assert.deepEqual( + parseArgs(INCRBY, 'key', 1, { + LABELS: { label: 'value' } + }), + ['TS.INCRBY', 'key', '1', 'LABELS', 'label', 'value'] + ); + }); + + it ('with IGNORE', () => { + assert.deepEqual( + parseArgs(INCRBY, 'key', 1, { + IGNORE: { + maxTimeDiff: 1, + maxValDiff: 1 + } + }), + ['TS.INCRBY', 'key', '1', 'IGNORE', '1', '1'] + ) + }); + + it('with TIMESTAMP, RETENTION, UNCOMPRESSED, CHUNK_SIZE and LABELS', () => { + assert.deepEqual( + parseArgs(INCRBY, 'key', 1, { + TIMESTAMP: '*', + RETENTION: 1, + UNCOMPRESSED: true, + CHUNK_SIZE: 1, + LABELS: { label: 'value' }, + IGNORE: { maxTimeDiff: 1, maxValDiff: 1 } + }), + ['TS.INCRBY', 'key', '1', 'TIMESTAMP', '*', 'RETENTION', '1', 'UNCOMPRESSED', + 'CHUNK_SIZE', '1', 'LABELS', 'label', 'value', 'IGNORE', '1', '1'] + ); + }); + }); + + testUtils.testWithClient('client.ts.incrBy', async client => { + assert.equal( + typeof await client.ts.incrBy('key', 1), + 'number' + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/time-series/lib/commands/INCRBY.ts b/packages/time-series/lib/commands/INCRBY.ts new file mode 100644 index 00000000000..41f11b0d7f5 --- /dev/null +++ b/packages/time-series/lib/commands/INCRBY.ts @@ -0,0 +1,61 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, NumberReply, Command } from '@redis/client/dist/lib/RESP/types'; +import { Timestamp, transformTimestampArgument, parseRetentionArgument, parseChunkSizeArgument, Labels, parseLabelsArgument, parseIgnoreArgument } from './helpers'; +import { TsIgnoreOptions } from './ADD'; + +export interface TsIncrByOptions { + TIMESTAMP?: Timestamp; + RETENTION?: number; + UNCOMPRESSED?: boolean; + CHUNK_SIZE?: number; + LABELS?: Labels; + IGNORE?: TsIgnoreOptions; +} + +/** + * Parses arguments 
for incrementing a time series value + * @param parser - The command parser + * @param key - The key name of the time series + * @param value - The value to increment by + * @param options - Optional parameters for the command + */ +export function parseIncrByArguments( + parser: CommandParser, + key: RedisArgument, + value: number, + options?: TsIncrByOptions +) { + parser.pushKey(key); + parser.push(value.toString()); + + if (options?.TIMESTAMP !== undefined && options?.TIMESTAMP !== null) { + parser.push('TIMESTAMP', transformTimestampArgument(options.TIMESTAMP)); + } + + parseRetentionArgument(parser, options?.RETENTION); + + if (options?.UNCOMPRESSED) { + parser.push('UNCOMPRESSED'); + } + + parseChunkSizeArgument(parser, options?.CHUNK_SIZE); + + parseLabelsArgument(parser, options?.LABELS); + + parseIgnoreArgument(parser, options?.IGNORE); +} + +export default { + IS_READ_ONLY: false, + /** + * Increases the value of a time series by a given amount + * @param args - Arguments passed to the {@link parseIncrByArguments} function + */ + parseCommand(...args: Parameters) { + const parser = args[0]; + + parser.push('TS.INCRBY'); + parseIncrByArguments(...args); + }, + transformReply: undefined as unknown as () => NumberReply +} as const satisfies Command; diff --git a/packages/time-series/lib/commands/INFO.spec.ts b/packages/time-series/lib/commands/INFO.spec.ts new file mode 100644 index 00000000000..994cb281915 --- /dev/null +++ b/packages/time-series/lib/commands/INFO.spec.ts @@ -0,0 +1,55 @@ +import { strict as assert } from 'node:assert'; +import { TIME_SERIES_DUPLICATE_POLICIES } from './helpers'; +import testUtils, { GLOBAL } from '../test-utils'; +import INFO, { InfoReply } from './INFO'; +import { TIME_SERIES_AGGREGATION_TYPE } from './CREATERULE'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('TS.INFO', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(INFO, 'key'), + ['TS.INFO', 
'key'] + ); + }); + + testUtils.testWithClient('client.ts.info', async client => { + await Promise.all([ + client.ts.create('key', { + LABELS: { id: '1' }, + DUPLICATE_POLICY: TIME_SERIES_DUPLICATE_POLICIES.LAST + }), + client.ts.create('key2'), + client.ts.createRule('key', 'key2', TIME_SERIES_AGGREGATION_TYPE.COUNT, 5), + client.ts.add('key', 1, 10) + ]); + + assertInfo(await client.ts.info('key') as any); + }, GLOBAL.SERVERS.OPEN); +}); + +export function assertInfo(info: InfoReply): void { + assert.equal(typeof info.totalSamples, 'number'); + assert.equal(typeof info.memoryUsage, 'number'); + assert.equal(typeof info.firstTimestamp, 'number'); + assert.equal(typeof info.lastTimestamp, 'number'); + assert.equal(typeof info.retentionTime, 'number'); + assert.equal(typeof info.chunkCount, 'number'); + assert.equal(typeof info.chunkSize, 'number'); + assert.equal(typeof info.chunkType, 'string'); + assert.equal(typeof info.duplicatePolicy, 'string'); + assert.ok(Array.isArray(info.labels)); + for (const label of info.labels) { + assert.equal(typeof label, 'object'); + assert.equal(typeof label.name, 'string'); + assert.equal(typeof label.value, 'string'); + } + assert.ok(Array.isArray(info.rules)); + for (const rule of info.rules) { + assert.equal(typeof rule, 'object'); + assert.equal(typeof rule.aggregationType, 'string'); + assert.equal(typeof rule.key, 'string'); + assert.equal(typeof rule.timeBucket, 'number'); + } + assert.ok(info.sourceKey === null || typeof info.sourceKey === 'string'); +} diff --git a/packages/time-series/lib/commands/INFO.ts b/packages/time-series/lib/commands/INFO.ts new file mode 100644 index 00000000000..2e908a9d32d --- /dev/null +++ b/packages/time-series/lib/commands/INFO.ts @@ -0,0 +1,134 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { ArrayReply, BlobStringReply, Command, DoubleReply, NumberReply, ReplyUnion, SimpleStringReply, TypeMapping } from "@redis/client/dist/lib/RESP/types"; +import { 
TimeSeriesDuplicatePolicies } from "./helpers"; +import { TimeSeriesAggregationType } from "./CREATERULE"; +import { transformDoubleReply } from '@redis/client/dist/lib/commands/generic-transformers'; + +export type InfoRawReplyTypes = SimpleStringReply | + NumberReply | + TimeSeriesDuplicatePolicies | null | + Array<[name: BlobStringReply, value: BlobStringReply]> | + BlobStringReply | + Array<[key: BlobStringReply, timeBucket: NumberReply, aggregationType: TimeSeriesAggregationType]> | + DoubleReply + +export type InfoRawReply = Array; + +export type InfoRawReplyOld = [ + 'totalSamples', + NumberReply, + 'memoryUsage', + NumberReply, + 'firstTimestamp', + NumberReply, + 'lastTimestamp', + NumberReply, + 'retentionTime', + NumberReply, + 'chunkCount', + NumberReply, + 'chunkSize', + NumberReply, + 'chunkType', + SimpleStringReply, + 'duplicatePolicy', + TimeSeriesDuplicatePolicies | null, + 'labels', + ArrayReply<[name: BlobStringReply, value: BlobStringReply]>, + 'sourceKey', + BlobStringReply | null, + 'rules', + ArrayReply<[key: BlobStringReply, timeBucket: NumberReply, aggregationType: TimeSeriesAggregationType]>, + 'ignoreMaxTimeDiff', + NumberReply, + 'ignoreMaxValDiff', + DoubleReply, +]; + +export interface InfoReply { + totalSamples: NumberReply; + memoryUsage: NumberReply; + firstTimestamp: NumberReply; + lastTimestamp: NumberReply; + retentionTime: NumberReply; + chunkCount: NumberReply; + chunkSize: NumberReply; + chunkType: SimpleStringReply; + duplicatePolicy: TimeSeriesDuplicatePolicies | null; + labels: Array<{ + name: BlobStringReply; + value: BlobStringReply; + }>; + sourceKey: BlobStringReply | null; + rules: Array<{ + key: BlobStringReply; + timeBucket: NumberReply; + aggregationType: TimeSeriesAggregationType + }>; + /** Added in 7.4 */ + ignoreMaxTimeDiff: NumberReply; + /** Added in 7.4 */ + ignoreMaxValDiff: DoubleReply; +} + +export default { + IS_READ_ONLY: true, + /** + * Gets information about a time series + * @param parser - The 
command parser + * @param key - The key name of the time series + */ + parseCommand(parser: CommandParser, key: string) { + parser.push('TS.INFO'); + parser.pushKey(key); + }, + transformReply: { + 2: (reply: InfoRawReply, _, typeMapping?: TypeMapping): InfoReply => { + const ret = {} as any; + + for (let i=0; i < reply.length; i += 2) { + const key = (reply[i] as any).toString(); + + switch (key) { + case 'totalSamples': + case 'memoryUsage': + case 'firstTimestamp': + case 'lastTimestamp': + case 'retentionTime': + case 'chunkCount': + case 'chunkSize': + case 'chunkType': + case 'duplicatePolicy': + case 'sourceKey': + case 'ignoreMaxTimeDiff': + ret[key] = reply[i+1]; + break; + case 'labels': + ret[key] = (reply[i+1] as Array<[name: BlobStringReply, value: BlobStringReply]>).map( + ([name, value]) => ({ + name, + value + }) + ); + break; + case 'rules': + ret[key] = (reply[i+1] as Array<[key: BlobStringReply, timeBucket: NumberReply, aggregationType: TimeSeriesAggregationType]>).map( + ([key, timeBucket, aggregationType]) => ({ + key, + timeBucket, + aggregationType + }) + ); + break; + case 'ignoreMaxValDiff': + ret[key] = transformDoubleReply[2](reply[27] as unknown as BlobStringReply, undefined, typeMapping); + break; + } + } + + return ret; + }, + 3: undefined as unknown as () => ReplyUnion + }, + unstableResp3: true + } as const satisfies Command; diff --git a/packages/time-series/lib/commands/INFO_DEBUG.spec.ts b/packages/time-series/lib/commands/INFO_DEBUG.spec.ts new file mode 100644 index 00000000000..ff9d6aa3c72 --- /dev/null +++ b/packages/time-series/lib/commands/INFO_DEBUG.spec.ts @@ -0,0 +1,41 @@ +import { strict as assert } from 'node:assert'; +import { TIME_SERIES_DUPLICATE_POLICIES } from './helpers'; +import testUtils, { GLOBAL } from '../test-utils'; +import { assertInfo } from './INFO.spec'; +import INFO_DEBUG from './INFO_DEBUG'; +import { TIME_SERIES_AGGREGATION_TYPE } from './CREATERULE'; +import { parseArgs } from 
'@redis/client/lib/commands/generic-transformers'; + +describe('TS.INFO_DEBUG', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(INFO_DEBUG, 'key'), + ['TS.INFO', 'key', 'DEBUG'] + ); + }); + + testUtils.testWithClient('client.ts.infoDebug', async client => { + await Promise.all([ + client.ts.create('key', { + LABELS: { id: '1' }, + DUPLICATE_POLICY: TIME_SERIES_DUPLICATE_POLICIES.LAST + }), + client.ts.create('key2'), + client.ts.createRule('key', 'key2', TIME_SERIES_AGGREGATION_TYPE.COUNT, 5), + client.ts.add('key', 1, 10) + ]); + + const infoDebug = await client.ts.infoDebug('key'); + assertInfo(infoDebug as any); + assert.equal(typeof infoDebug.keySelfName, 'string'); + assert.ok(Array.isArray(infoDebug.chunks)); + for (const chunk of infoDebug.chunks) { + assert.equal(typeof chunk, 'object'); + assert.equal(typeof chunk.startTimestamp, 'number'); + assert.equal(typeof chunk.endTimestamp, 'number'); + assert.equal(typeof chunk.samples, 'number'); + assert.equal(typeof chunk.size, 'number'); + assert.equal(typeof chunk.bytesPerSample, 'string'); + } + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/time-series/lib/commands/INFO_DEBUG.ts b/packages/time-series/lib/commands/INFO_DEBUG.ts new file mode 100644 index 00000000000..bbdee4924ff --- /dev/null +++ b/packages/time-series/lib/commands/INFO_DEBUG.ts @@ -0,0 +1,82 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { BlobStringReply, Command, NumberReply, SimpleStringReply, TypeMapping, ReplyUnion } from "@redis/client/dist/lib/RESP/types"; +import INFO, { InfoRawReply, InfoRawReplyTypes, InfoReply } from "./INFO"; + +type chunkType = Array<[ + 'startTimestamp', + NumberReply, + 'endTimestamp', + NumberReply, + 'samples', + NumberReply, + 'size', + NumberReply, + 'bytesPerSample', + SimpleStringReply +]>; + +type InfoDebugRawReply = [ + ...InfoRawReply, + 'keySelfName', + BlobStringReply, + 'Chunks', + chunkType +]; + +export type 
InfoDebugRawReplyType = InfoRawReplyTypes | chunkType + +export interface InfoDebugReply extends InfoReply { + keySelfName: BlobStringReply, + chunks: Array<{ + startTimestamp: NumberReply; + endTimestamp: NumberReply; + samples: NumberReply; + size: NumberReply; + bytesPerSample: SimpleStringReply; + }>; +} + +export default { + IS_READ_ONLY: INFO.IS_READ_ONLY, + /** + * Gets debug information about a time series + * @param parser - The command parser + * @param key - The key name of the time series + */ + parseCommand(parser: CommandParser, key: string) { + INFO.parseCommand(parser, key); + parser.push('DEBUG'); + }, + transformReply: { + 2: (reply: InfoDebugRawReply, _, typeMapping?: TypeMapping): InfoDebugReply => { + const ret = INFO.transformReply[2](reply as unknown as InfoRawReply, _, typeMapping) as any; + + for (let i=0; i < reply.length; i += 2) { + const key = (reply[i] as any).toString(); + + switch (key) { + case 'keySelfName': { + ret[key] = reply[i+1]; + break; + } + case 'Chunks': { + ret['chunks'] = (reply[i+1] as chunkType).map( + chunk => ({ + startTimestamp: chunk[1], + endTimestamp: chunk[3], + samples: chunk[5], + size: chunk[7], + bytesPerSample: chunk[9] + }) + ); + break; + } + } + } + + return ret; + }, + 3: undefined as unknown as () => ReplyUnion + }, + unstableResp3: true +} as const satisfies Command; diff --git a/packages/time-series/lib/commands/MADD.spec.ts b/packages/time-series/lib/commands/MADD.spec.ts new file mode 100644 index 00000000000..8bf8e27fdb3 --- /dev/null +++ b/packages/time-series/lib/commands/MADD.spec.ts @@ -0,0 +1,42 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import MADD from './MADD'; +import { SimpleError } from '@redis/client/lib/errors'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('TS.MADD', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(MADD, [{ + key: '1', + timestamp: 0, + 
value: 0 + }, { + key: '2', + timestamp: 1, + value: 1 + }]), + ['TS.MADD', '1', '0', '0', '2', '1', '1'] + ); + }); + + testUtils.testWithClient('client.ts.mAdd', async client => { + const [, reply] = await Promise.all([ + client.ts.create('key'), + client.ts.mAdd([{ + key: 'key', + timestamp: 0, + value: 1 + }, { + key: 'key', + timestamp: 0, + value: 1 + }]) + ]); + + assert.ok(Array.isArray(reply)); + assert.equal(reply.length, 2); + assert.equal(reply[0], 0); + assert.ok(reply[1] instanceof SimpleError); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/time-series/lib/commands/MADD.ts b/packages/time-series/lib/commands/MADD.ts new file mode 100644 index 00000000000..d0b36ea373f --- /dev/null +++ b/packages/time-series/lib/commands/MADD.ts @@ -0,0 +1,27 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { Timestamp, transformTimestampArgument } from './helpers'; +import { ArrayReply, NumberReply, SimpleErrorReply, Command } from '@redis/client/dist/lib/RESP/types'; + +export interface TsMAddSample { + key: string; + timestamp: Timestamp; + value: number; +} + +export default { + IS_READ_ONLY: false, + /** + * Adds multiple samples to multiple time series + * @param parser - The command parser + * @param toAdd - Array of samples to add to different time series + */ + parseCommand(parser: CommandParser, toAdd: Array) { + parser.push('TS.MADD'); + + for (const { key, timestamp, value } of toAdd) { + parser.pushKey(key); + parser.push(transformTimestampArgument(timestamp), value.toString()); + } + }, + transformReply: undefined as unknown as () => ArrayReply +} as const satisfies Command; diff --git a/packages/time-series/lib/commands/MGET.spec.ts b/packages/time-series/lib/commands/MGET.spec.ts new file mode 100644 index 00000000000..ba2e571be49 --- /dev/null +++ b/packages/time-series/lib/commands/MGET.spec.ts @@ -0,0 +1,46 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; 
+import MGET from './MGET'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('TS.MGET', () => { + describe('transformArguments', () => { + it('without options', () => { + assert.deepEqual( + parseArgs(MGET, 'label=value'), + ['TS.MGET', 'FILTER', 'label=value'] + ); + }); + + it('with LATEST', () => { + assert.deepEqual( + parseArgs(MGET, 'label=value', { + LATEST: true + }), + ['TS.MGET', 'LATEST', 'FILTER', 'label=value'] + ); + }); + }); + + testUtils.testWithClient('client.ts.mGet', async client => { + const [, reply] = await Promise.all([ + client.ts.add('key', 0, 0, { + LABELS: { label: 'value' } + }), + client.ts.mGet('label=value') + ]); + + assert.deepStrictEqual(reply, Object.create(null, { + key: { + configurable: true, + enumerable: true, + value: { + sample: { + timestamp: 0, + value: 0 + } + } + } + })); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/time-series/lib/commands/MGET.ts b/packages/time-series/lib/commands/MGET.ts new file mode 100644 index 00000000000..023c0bda2d4 --- /dev/null +++ b/packages/time-series/lib/commands/MGET.ts @@ -0,0 +1,77 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { Command, BlobStringReply, ArrayReply, Resp2Reply, MapReply, TuplesReply, TypeMapping } from '@redis/client/dist/lib/RESP/types'; +import { resp2MapToValue, resp3MapToValue, SampleRawReply, transformSampleReply } from './helpers'; +import { RedisVariadicArgument } from '@redis/client/dist/lib/commands/generic-transformers'; + +export interface TsMGetOptions { + LATEST?: boolean; +} + +/** + * Adds LATEST argument to command if specified + * @param parser - The command parser + * @param latest - Whether to include the LATEST argument + */ +export function parseLatestArgument(parser: CommandParser, latest?: boolean) { + if (latest) { + parser.push('LATEST'); + } +} + +/** + * Adds FILTER argument to command + * @param parser - The command parser + * @param filter - Filter to 
match time series keys + */ +export function parseFilterArgument(parser: CommandParser, filter: RedisVariadicArgument) { + parser.push('FILTER'); + parser.pushVariadic(filter); +} + +export type MGetRawReply2 = ArrayReply< + TuplesReply<[ + key: BlobStringReply, + labels: never, + sample: Resp2Reply + ]> +>; + +export type MGetRawReply3 = MapReply< + BlobStringReply, + TuplesReply<[ + labels: never, + sample: SampleRawReply + ]> +>; + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Gets the last samples matching a specific filter from multiple time series + * @param parser - The command parser + * @param filter - Filter to match time series keys + * @param options - Optional parameters for the command + */ + parseCommand(parser: CommandParser, filter: RedisVariadicArgument, options?: TsMGetOptions) { + parser.push('TS.MGET'); + parseLatestArgument(parser, options?.LATEST); + parseFilterArgument(parser, filter); + }, + transformReply: { + 2(reply: MGetRawReply2, _, typeMapping?: TypeMapping) { + return resp2MapToValue(reply, ([,, sample]) => { + return { + sample: transformSampleReply[2](sample) + }; + }, typeMapping); + }, + 3(reply: MGetRawReply3) { + return resp3MapToValue(reply, ([, sample]) => { + return { + sample: transformSampleReply[3](sample) + }; + }); + } + } +} as const satisfies Command; diff --git a/packages/time-series/lib/commands/MGET_SELECTED_LABELS.spec.ts b/packages/time-series/lib/commands/MGET_SELECTED_LABELS.spec.ts new file mode 100644 index 00000000000..d79c463fc7d --- /dev/null +++ b/packages/time-series/lib/commands/MGET_SELECTED_LABELS.spec.ts @@ -0,0 +1,47 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import MGET_SELECTED_LABELS from './MGET_SELECTED_LABELS'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('TS.MGET_SELECTED_LABELS', () => { + it('transformArguments', () => { + assert.deepEqual( + 
parseArgs(MGET_SELECTED_LABELS, 'label=value', 'label'), + ['TS.MGET', 'SELECTED_LABELS', 'label', 'FILTER', 'label=value'] + ); + }); + + testUtils.testWithClient('client.ts.mGetSelectedLabels', async client => { + const [, reply] = await Promise.all([ + client.ts.add('key', 0, 0, { + LABELS: { label: 'value' } + }), + client.ts.mGetSelectedLabels('label=value', ['label', 'NX']) + ]); + + assert.deepStrictEqual(reply, Object.create(null, { + key: { + configurable: true, + enumerable: true, + value: { + labels: Object.create(null, { + label: { + configurable: true, + enumerable: true, + value: 'value' + }, + NX: { + configurable: true, + enumerable: true, + value: null + } + }), + sample: { + timestamp: 0, + value: 0 + } + } + } + })); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/time-series/lib/commands/MGET_SELECTED_LABELS.ts b/packages/time-series/lib/commands/MGET_SELECTED_LABELS.ts new file mode 100644 index 00000000000..a13fcbeaa56 --- /dev/null +++ b/packages/time-series/lib/commands/MGET_SELECTED_LABELS.ts @@ -0,0 +1,24 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { Command, BlobStringReply, NullReply } from '@redis/client/dist/lib/RESP/types'; +import { RedisVariadicArgument } from '@redis/client/dist/lib/commands/generic-transformers'; +import { TsMGetOptions, parseLatestArgument, parseFilterArgument } from './MGET'; +import { parseSelectedLabelsArguments } from './helpers'; +import { createTransformMGetLabelsReply } from './MGET_WITHLABELS'; + +export default { + IS_READ_ONLY: true, + /** + * Gets the last samples matching a specific filter with selected labels + * @param parser - The command parser + * @param filter - Filter to match time series keys + * @param selectedLabels - Labels to include in the output + * @param options - Optional parameters for the command + */ + parseCommand(parser: CommandParser, filter: RedisVariadicArgument, selectedLabels: RedisVariadicArgument, options?: TsMGetOptions) { + 
parser.push('TS.MGET'); + parseLatestArgument(parser, options?.LATEST); + parseSelectedLabelsArguments(parser, selectedLabels); + parseFilterArgument(parser, filter); + }, + transformReply: createTransformMGetLabelsReply(), +} as const satisfies Command; diff --git a/packages/time-series/lib/commands/MGET_WITHLABELS.spec.ts b/packages/time-series/lib/commands/MGET_WITHLABELS.spec.ts new file mode 100644 index 00000000000..33fc5308444 --- /dev/null +++ b/packages/time-series/lib/commands/MGET_WITHLABELS.spec.ts @@ -0,0 +1,42 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import MGET_WITHLABELS from './MGET_WITHLABELS'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('TS.MGET_WITHLABELS', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(MGET_WITHLABELS, 'label=value'), + ['TS.MGET', 'WITHLABELS', 'FILTER', 'label=value'] + ); + }); + + testUtils.testWithClient('client.ts.mGetWithLabels', async client => { + const [, reply] = await Promise.all([ + client.ts.add('key', 0, 0, { + LABELS: { label: 'value' } + }), + client.ts.mGetWithLabels('label=value') + ]); + + assert.deepStrictEqual(reply, Object.create(null, { + key: { + configurable: true, + enumerable: true, + value: { + labels: Object.create(null, { + label: { + configurable: true, + enumerable: true, + value: 'value' + } + }), + sample: { + timestamp: 0, + value: 0 + } + } + } + })); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/time-series/lib/commands/MGET_WITHLABELS.ts b/packages/time-series/lib/commands/MGET_WITHLABELS.ts new file mode 100644 index 00000000000..aa9b5687eec --- /dev/null +++ b/packages/time-series/lib/commands/MGET_WITHLABELS.ts @@ -0,0 +1,68 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { Command, BlobStringReply, ArrayReply, Resp2Reply, MapReply, TuplesReply, TypeMapping } from '@redis/client/dist/lib/RESP/types'; +import { 
RedisVariadicArgument } from '@redis/client/dist/lib/commands/generic-transformers'; +import { TsMGetOptions, parseLatestArgument, parseFilterArgument } from './MGET'; +import { RawLabelValue, resp2MapToValue, resp3MapToValue, SampleRawReply, transformRESP2Labels, transformSampleReply } from './helpers'; + +export interface TsMGetWithLabelsOptions extends TsMGetOptions { + SELECTED_LABELS?: RedisVariadicArgument; +} + +export type MGetLabelsRawReply2 = ArrayReply< + TuplesReply<[ + key: BlobStringReply, + labels: ArrayReply< + TuplesReply<[ + label: BlobStringReply, + value: T + ]> + >, + sample: Resp2Reply + ]> +>; + +export type MGetLabelsRawReply3 = MapReply< + BlobStringReply, + TuplesReply<[ + labels: MapReply, + sample: SampleRawReply + ]> +>; + +export function createTransformMGetLabelsReply() { + return { + 2(reply: MGetLabelsRawReply2, _, typeMapping?: TypeMapping) { + return resp2MapToValue(reply, ([, labels, sample]) => { + return { + labels: transformRESP2Labels(labels), + sample: transformSampleReply[2](sample) + }; + }, typeMapping); + }, + 3(reply: MGetLabelsRawReply3) { + return resp3MapToValue(reply, ([labels, sample]) => { + return { + labels, + sample: transformSampleReply[3](sample) + }; + }); + } + } satisfies Command['transformReply']; +} + +export default { + IS_READ_ONLY: true, + /** + * Gets the last samples matching a specific filter with labels + * @param parser - The command parser + * @param filter - Filter to match time series keys + * @param options - Optional parameters for the command + */ + parseCommand(parser: CommandParser, filter: RedisVariadicArgument, options?: TsMGetWithLabelsOptions) { + parser.push('TS.MGET'); + parseLatestArgument(parser, options?.LATEST); + parser.push('WITHLABELS'); + parseFilterArgument(parser, filter); + }, + transformReply: createTransformMGetLabelsReply(), +} as const satisfies Command; diff --git a/packages/time-series/lib/commands/MRANGE.spec.ts b/packages/time-series/lib/commands/MRANGE.spec.ts 
new file mode 100644 index 00000000000..94c8e72983a --- /dev/null +++ b/packages/time-series/lib/commands/MRANGE.spec.ts @@ -0,0 +1,63 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import MRANGE from './MRANGE'; +import { TIME_SERIES_AGGREGATION_TYPE } from './CREATERULE'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('TS.MRANGE', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(MRANGE, '-', '+', 'label=value', { + LATEST: true, + FILTER_BY_TS: [0], + FILTER_BY_VALUE: { + min: 0, + max: 1 + }, + COUNT: 1, + ALIGN: '-', + AGGREGATION: { + type: TIME_SERIES_AGGREGATION_TYPE.AVG, + timeBucket: 1 + } + }), + [ + 'TS.MRANGE', '-', '+', + 'LATEST', + 'FILTER_BY_TS', '0', + 'FILTER_BY_VALUE', '0', '1', + 'COUNT', '1', + 'ALIGN', '-', + 'AGGREGATION', 'AVG', '1', + 'FILTER', 'label=value' + ] + ); + }); + + testUtils.testWithClient('client.ts.mRange', async client => { + const [, reply] = await Promise.all([ + client.ts.add('key', 0, 0, { + LABELS: { + label: 'value' + } + }), + client.ts.mRange('-', '+', 'label=value', { + COUNT: 1 + }) + ]); + + assert.deepStrictEqual( + reply, + Object.create(null, { + key: { + configurable: true, + enumerable: true, + value: [{ + timestamp: 0, + value: 0 + }] + } + }) + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/time-series/lib/commands/MRANGE.ts b/packages/time-series/lib/commands/MRANGE.ts new file mode 100644 index 00000000000..8b9ec66e6e3 --- /dev/null +++ b/packages/time-series/lib/commands/MRANGE.ts @@ -0,0 +1,73 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { Command, ArrayReply, BlobStringReply, Resp2Reply, MapReply, TuplesReply, TypeMapping, RedisArgument } from '@redis/client/dist/lib/RESP/types'; +import { RedisVariadicArgument } from '@redis/client/dist/lib/commands/generic-transformers'; +import { resp2MapToValue, resp3MapToValue, SampleRawReply, 
Timestamp, transformSamplesReply } from './helpers'; +import { TsRangeOptions, parseRangeArguments } from './RANGE'; +import { parseFilterArgument } from './MGET'; + +export type TsMRangeRawReply2 = ArrayReply< + TuplesReply<[ + key: BlobStringReply, + labels: never, // empty array without WITHLABELS or SELECTED_LABELS + samples: ArrayReply> + ]> +>; + +export type TsMRangeRawReply3 = MapReply< + BlobStringReply, + TuplesReply<[ + labels: never, // empty hash without WITHLABELS or SELECTED_LABELS + metadata: never, // ?! + samples: ArrayReply + ]> +>; + +/** + * Creates a function that parses arguments for multi-range commands + * @param command - The command name to use (TS.MRANGE or TS.MREVRANGE) + */ +export function createTransformMRangeArguments(command: RedisArgument) { + return ( + parser: CommandParser, + fromTimestamp: Timestamp, + toTimestamp: Timestamp, + filter: RedisVariadicArgument, + options?: TsRangeOptions + ) => { + parser.push(command); + parseRangeArguments( + parser, + fromTimestamp, + toTimestamp, + options + ); + + parseFilterArgument(parser, filter); + }; +} + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Gets samples for time series matching a specific filter within a time range + * @param parser - The command parser + * @param fromTimestamp - Start timestamp for range + * @param toTimestamp - End timestamp for range + * @param filter - Filter to match time series keys + * @param options - Optional parameters for the command + */ + parseCommand: createTransformMRangeArguments('TS.MRANGE'), + transformReply: { + 2(reply: TsMRangeRawReply2, _?: any, typeMapping?: TypeMapping) { + return resp2MapToValue(reply, ([_key, _labels, samples]) => { + return transformSamplesReply[2](samples); + }, typeMapping); + }, + 3(reply: TsMRangeRawReply3) { + return resp3MapToValue(reply, ([_labels, _metadata, samples]) => { + return transformSamplesReply[3](samples); + }); + } + }, +} as const satisfies Command; diff --git 
a/packages/time-series/lib/commands/MRANGE_GROUPBY.spec.ts b/packages/time-series/lib/commands/MRANGE_GROUPBY.spec.ts new file mode 100644 index 00000000000..bcdde20fe98 --- /dev/null +++ b/packages/time-series/lib/commands/MRANGE_GROUPBY.spec.ts @@ -0,0 +1,67 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import MRANGE_GROUPBY, { TIME_SERIES_REDUCERS } from './MRANGE_GROUPBY'; +import { TIME_SERIES_AGGREGATION_TYPE } from './CREATERULE'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('TS.MRANGE_GROUPBY', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(MRANGE_GROUPBY, '-', '+', 'label=value', { + REDUCE: TIME_SERIES_REDUCERS.AVG, + label: 'label' + }, { + LATEST: true, + FILTER_BY_TS: [0], + FILTER_BY_VALUE: { + min: 0, + max: 1 + }, + COUNT: 1, + ALIGN: '-', + AGGREGATION: { + type: TIME_SERIES_AGGREGATION_TYPE.AVG, + timeBucket: 1 + } + }), + [ + 'TS.MRANGE', '-', '+', + 'LATEST', + 'FILTER_BY_TS', '0', + 'FILTER_BY_VALUE', '0', '1', + 'COUNT', '1', + 'ALIGN', '-', 'AGGREGATION', 'AVG', '1', + 'FILTER', 'label=value', + 'GROUPBY', 'label', 'REDUCE', 'AVG' + ] + ); + }); + + testUtils.testWithClient('client.ts.mRangeGroupBy', async client => { + const [, reply] = await Promise.all([ + client.ts.add('key', 0, 0, { + LABELS: { label: 'value' } + }), + client.ts.mRangeGroupBy('-', '+', 'label=value', { + REDUCE: TIME_SERIES_REDUCERS.AVG, + label: 'label' + }) + ]); + + assert.deepStrictEqual( + reply, + Object.create(null, { + 'label=value': { + configurable: true, + enumerable: true, + value: { + samples: [{ + timestamp: 0, + value: 0 + }] + } + } + }) + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/time-series/lib/commands/MRANGE_GROUPBY.ts b/packages/time-series/lib/commands/MRANGE_GROUPBY.ts new file mode 100644 index 00000000000..dc049276127 --- /dev/null +++ b/packages/time-series/lib/commands/MRANGE_GROUPBY.ts @@ -0,0 
+1,125 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { Command, ArrayReply, BlobStringReply, Resp2Reply, MapReply, TuplesReply, TypeMapping, RedisArgument, TuplesToMapReply, UnwrapReply } from '@redis/client/dist/lib/RESP/types'; +import { RedisVariadicArgument } from '@redis/client/dist/lib/commands/generic-transformers'; +import { resp2MapToValue, resp3MapToValue, SampleRawReply, Timestamp, transformSamplesReply } from './helpers'; +import { TsRangeOptions, parseRangeArguments } from './RANGE'; +import { parseFilterArgument } from './MGET'; + +export const TIME_SERIES_REDUCERS = { + AVG: 'AVG', + SUM: 'SUM', + MIN: 'MIN', + MAX: 'MAX', + RANGE: 'RANGE', + COUNT: 'COUNT', + STD_P: 'STD.P', + STD_S: 'STD.S', + VAR_P: 'VAR.P', + VAR_S: 'VAR.S' +} as const; + +export type TimeSeriesReducer = typeof TIME_SERIES_REDUCERS[keyof typeof TIME_SERIES_REDUCERS]; + +export interface TsMRangeGroupBy { + label: RedisArgument; + REDUCE: TimeSeriesReducer; +} + +/** + * Adds GROUPBY arguments to command + * @param parser - The command parser + * @param groupBy - Group by parameters + */ +export function parseGroupByArguments(parser: CommandParser, groupBy: TsMRangeGroupBy) { + parser.push('GROUPBY', groupBy.label, 'REDUCE', groupBy.REDUCE); +} + +export type TsMRangeGroupByRawReply2 = ArrayReply< + TuplesReply<[ + key: BlobStringReply, + labels: never, // empty array without WITHLABELS or SELECTED_LABELS + samples: ArrayReply> + ]> +>; + +export type TsMRangeGroupByRawMetadataReply3 = TuplesToMapReply<[ + [BlobStringReply<'sources'>, ArrayReply] +]>; + +export type TsMRangeGroupByRawReply3 = MapReply< + BlobStringReply, + TuplesReply<[ + labels: never, // empty hash without WITHLABELS or SELECTED_LABELS + metadata1: never, // ?! 
+ metadata2: TsMRangeGroupByRawMetadataReply3, + samples: ArrayReply + ]> +>; + +/** + * Creates a function that parses arguments for multi-range commands with grouping + * @param command - The command name to use (TS.MRANGE or TS.MREVRANGE) + */ +export function createTransformMRangeGroupByArguments(command: RedisArgument) { + return ( + parser: CommandParser, + fromTimestamp: Timestamp, + toTimestamp: Timestamp, + filter: RedisVariadicArgument, + groupBy: TsMRangeGroupBy, + options?: TsRangeOptions + ) => { + parser.push(command); + parseRangeArguments(parser, fromTimestamp, toTimestamp, options) + + parseFilterArgument(parser, filter); + + parseGroupByArguments(parser, groupBy); + }; +} + +/** + * Extracts source keys from RESP3 metadata reply + * @param raw - Raw metadata from RESP3 reply + */ +export function extractResp3MRangeSources(raw: TsMRangeGroupByRawMetadataReply3) { + const unwrappedMetadata2 = raw as unknown as UnwrapReply; + if (unwrappedMetadata2 instanceof Map) { + return unwrappedMetadata2.get('sources')!; + } else if (unwrappedMetadata2 instanceof Array) { + return unwrappedMetadata2[1]; + } else { + return unwrappedMetadata2.sources; + } +} + +export default { + IS_READ_ONLY: true, + /** + * Gets samples for time series matching a filter within a time range with grouping + * @param parser - The command parser + * @param fromTimestamp - Start timestamp for range + * @param toTimestamp - End timestamp for range + * @param filter - Filter to match time series keys + * @param groupBy - Group by parameters + * @param options - Optional parameters for the command + */ + parseCommand: createTransformMRangeGroupByArguments('TS.MRANGE'), + transformReply: { + 2(reply: TsMRangeGroupByRawReply2, _?: any, typeMapping?: TypeMapping) { + return resp2MapToValue(reply, ([_key, _labels, samples]) => { + return { + samples: transformSamplesReply[2](samples) + }; + }, typeMapping); + }, + 3(reply: TsMRangeGroupByRawReply3) { + return resp3MapToValue(reply, 
([_labels, _metadata1, metadata2, samples]) => { + return { + sources: extractResp3MRangeSources(metadata2), + samples: transformSamplesReply[3](samples) + }; + }); + } + }, +} as const satisfies Command; diff --git a/packages/time-series/lib/commands/MRANGE_SELECTED_LABELS.spec.ts b/packages/time-series/lib/commands/MRANGE_SELECTED_LABELS.spec.ts new file mode 100644 index 00000000000..92680dea375 --- /dev/null +++ b/packages/time-series/lib/commands/MRANGE_SELECTED_LABELS.spec.ts @@ -0,0 +1,73 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import MRANGE_SELECTED_LABELS from './MRANGE_SELECTED_LABELS'; +import { TIME_SERIES_AGGREGATION_TYPE } from './CREATERULE'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('TS.MRANGE_SELECTED_LABELS', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(MRANGE_SELECTED_LABELS, '-', '+', 'label', 'label=value', { + FILTER_BY_TS: [0], + FILTER_BY_VALUE: { + min: 0, + max: 1 + }, + COUNT: 1, + ALIGN: '-', + AGGREGATION: { + type: TIME_SERIES_AGGREGATION_TYPE.AVG, + timeBucket: 1 + } + }), + [ + 'TS.MRANGE', '-', '+', + 'FILTER_BY_TS', '0', + 'FILTER_BY_VALUE', '0', '1', + 'COUNT', '1', + 'ALIGN', '-', 'AGGREGATION', 'AVG', '1', + 'SELECTED_LABELS', 'label', + 'FILTER', 'label=value' + ] + ); + }); + + testUtils.testWithClient('client.ts.mRangeSelectedLabels', async client => { + const [, reply] = await Promise.all([ + client.ts.add('key', 0, 0, { + LABELS: { label: 'value' } + }), + client.ts.mRangeSelectedLabels('-', '+', ['label', 'NX'], 'label=value', { + COUNT: 1 + }) + ]); + + assert.deepStrictEqual( + reply, + Object.create(null, { + key: { + configurable: true, + enumerable: true, + value: { + labels: Object.create(null, { + label: { + configurable: true, + enumerable: true, + value: 'value' + }, + NX: { + configurable: true, + enumerable: true, + value: null + } + }), + samples: [{ + timestamp: 0, + 
value: 0 + }] + } + } + }) + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/time-series/lib/commands/MRANGE_SELECTED_LABELS.ts b/packages/time-series/lib/commands/MRANGE_SELECTED_LABELS.ts new file mode 100644 index 00000000000..c9b737fd290 --- /dev/null +++ b/packages/time-series/lib/commands/MRANGE_SELECTED_LABELS.ts @@ -0,0 +1,85 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { Command, ArrayReply, BlobStringReply, Resp2Reply, MapReply, TuplesReply, TypeMapping, NullReply, RedisArgument } from '@redis/client/dist/lib/RESP/types'; +import { RedisVariadicArgument } from '@redis/client/dist/lib/commands/generic-transformers'; +import { parseSelectedLabelsArguments, resp2MapToValue, resp3MapToValue, SampleRawReply, Timestamp, transformRESP2Labels, transformSamplesReply } from './helpers'; +import { TsRangeOptions, parseRangeArguments } from './RANGE'; +import { parseFilterArgument } from './MGET'; + +export type TsMRangeSelectedLabelsRawReply2 = ArrayReply< + TuplesReply<[ + key: BlobStringReply, + labels: ArrayReply>, + samples: ArrayReply> + ]> +>; + +export type TsMRangeSelectedLabelsRawReply3 = MapReply< + BlobStringReply, + TuplesReply<[ + labels: MapReply, + metadata: never, // ?! 
+ samples: ArrayReply + ]> +>; + +/** + * Creates a function that parses arguments for multi-range commands with selected labels + * @param command - The command name to use (TS.MRANGE or TS.MREVRANGE) + */ +export function createTransformMRangeSelectedLabelsArguments(command: RedisArgument) { + return ( + parser: CommandParser, + fromTimestamp: Timestamp, + toTimestamp: Timestamp, + selectedLabels: RedisVariadicArgument, + filter: RedisVariadicArgument, + options?: TsRangeOptions + ) => { + parser.push(command); + parseRangeArguments( + parser, + fromTimestamp, + toTimestamp, + options + ); + + parseSelectedLabelsArguments(parser, selectedLabels); + + parseFilterArgument(parser, filter); + }; +} + +export default { + IS_READ_ONLY: true, + /** + * Gets samples for time series matching a filter with selected labels + * @param parser - The command parser + * @param fromTimestamp - Start timestamp for range + * @param toTimestamp - End timestamp for range + * @param selectedLabels - Labels to include in the output + * @param filter - Filter to match time series keys + * @param options - Optional parameters for the command + */ + parseCommand: createTransformMRangeSelectedLabelsArguments('TS.MRANGE'), + transformReply: { + 2(reply: TsMRangeSelectedLabelsRawReply2, _?: any, typeMapping?: TypeMapping) { + return resp2MapToValue(reply, ([_key, labels, samples]) => { + return { + labels: transformRESP2Labels(labels, typeMapping), + samples: transformSamplesReply[2](samples) + }; + }, typeMapping); + }, + 3(reply: TsMRangeSelectedLabelsRawReply3) { + return resp3MapToValue(reply, ([_key, labels, samples]) => { + return { + labels, + samples: transformSamplesReply[3](samples) + }; + }); + } + }, +} as const satisfies Command; diff --git a/packages/time-series/lib/commands/MRANGE_SELECTED_LABELS_GROUPBY.spec.ts b/packages/time-series/lib/commands/MRANGE_SELECTED_LABELS_GROUPBY.spec.ts new file mode 100644 index 00000000000..4e5b2b47094 --- /dev/null +++ 
b/packages/time-series/lib/commands/MRANGE_SELECTED_LABELS_GROUPBY.spec.ts @@ -0,0 +1,81 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import MRANGE_SELECTED_LABELS_GROUPBY from './MRANGE_SELECTED_LABELS_GROUPBY'; +import { TIME_SERIES_REDUCERS } from './MRANGE_GROUPBY'; +import { TIME_SERIES_AGGREGATION_TYPE } from './CREATERULE'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('TS.MRANGE_SELECTED_LABELS_GROUPBY', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(MRANGE_SELECTED_LABELS_GROUPBY, '-', '+', 'label', 'label=value', { + REDUCE: TIME_SERIES_REDUCERS.AVG, + label: 'label' + }, { + LATEST: true, + FILTER_BY_TS: [0], + FILTER_BY_VALUE: { + min: 0, + max: 1 + }, + COUNT: 1, + ALIGN: '-', + AGGREGATION: { + type: TIME_SERIES_AGGREGATION_TYPE.AVG, + timeBucket: 1 + } + }), + [ + 'TS.MRANGE', '-', '+', + 'LATEST', + 'FILTER_BY_TS', '0', + 'FILTER_BY_VALUE', '0', '1', + 'COUNT', '1', + 'ALIGN', '-', 'AGGREGATION', 'AVG', '1', + 'SELECTED_LABELS', 'label', + 'FILTER', 'label=value', + 'GROUPBY', 'label', 'REDUCE', 'AVG' + ] + ); + }); + + testUtils.testWithClient('client.ts.mRangeSelectedLabelsGroupBy', async client => { + const [, reply] = await Promise.all([ + client.ts.add('key', 0, 0, { + LABELS: { label: 'value' } + }), + client.ts.mRangeSelectedLabelsGroupBy('-', '+', ['label', 'NX'], 'label=value', { + REDUCE: TIME_SERIES_REDUCERS.AVG, + label: 'label' + }) + ]); + + assert.deepStrictEqual( + reply, + Object.create(null, { + 'label=value': { + configurable: true, + enumerable: true, + value: { + labels: Object.create(null, { + label: { + configurable: true, + enumerable: true, + value: 'value' + }, + NX: { + configurable: true, + enumerable: true, + value: null + } + }), + samples: [{ + timestamp: 0, + value: 0 + }] + } + } + }) + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git 
a/packages/time-series/lib/commands/MRANGE_SELECTED_LABELS_GROUPBY.ts b/packages/time-series/lib/commands/MRANGE_SELECTED_LABELS_GROUPBY.ts new file mode 100644 index 00000000000..d2f94b82bb3 --- /dev/null +++ b/packages/time-series/lib/commands/MRANGE_SELECTED_LABELS_GROUPBY.ts @@ -0,0 +1,77 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { Command, ArrayReply, BlobStringReply, MapReply, TuplesReply, RedisArgument, NullReply } from '@redis/client/dist/lib/RESP/types'; +import { RedisVariadicArgument } from '@redis/client/dist/lib/commands/generic-transformers'; +import { parseSelectedLabelsArguments, resp3MapToValue, SampleRawReply, Timestamp, transformSamplesReply } from './helpers'; +import { TsRangeOptions, parseRangeArguments } from './RANGE'; +import { extractResp3MRangeSources, parseGroupByArguments, TsMRangeGroupBy, TsMRangeGroupByRawMetadataReply3 } from './MRANGE_GROUPBY'; +import { parseFilterArgument } from './MGET'; +import MRANGE_SELECTED_LABELS from './MRANGE_SELECTED_LABELS'; + +export type TsMRangeWithLabelsGroupByRawReply3 = MapReply< + BlobStringReply, + TuplesReply<[ + labels: MapReply, + metadata: never, // ?! 
+ metadata2: TsMRangeGroupByRawMetadataReply3, + samples: ArrayReply + ]> +>; + +/** + * Creates a function that parses arguments for multi-range commands with selected labels and grouping + * @param command - The command name to use (TS.MRANGE or TS.MREVRANGE) + */ +export function createMRangeSelectedLabelsGroupByTransformArguments( + command: RedisArgument +) { + return ( + parser: CommandParser, + fromTimestamp: Timestamp, + toTimestamp: Timestamp, + selectedLabels: RedisVariadicArgument, + filter: RedisVariadicArgument, + groupBy: TsMRangeGroupBy, + options?: TsRangeOptions + ) => { + parser.push(command); + parseRangeArguments( + parser, + fromTimestamp, + toTimestamp, + options + ); + + parseSelectedLabelsArguments(parser, selectedLabels); + + parseFilterArgument(parser, filter); + + parseGroupByArguments(parser, groupBy); + }; +} + +export default { + IS_READ_ONLY: true, + /** + * Gets samples for time series matching a filter with selected labels and grouping + * @param parser - The command parser + * @param fromTimestamp - Start timestamp for range + * @param toTimestamp - End timestamp for range + * @param selectedLabels - Labels to include in the output + * @param filter - Filter to match time series keys + * @param groupBy - Group by parameters + * @param options - Optional parameters for the command + */ + parseCommand: createMRangeSelectedLabelsGroupByTransformArguments('TS.MRANGE'), + transformReply: { + 2: MRANGE_SELECTED_LABELS.transformReply[2], + 3(reply: TsMRangeWithLabelsGroupByRawReply3) { + return resp3MapToValue(reply, ([labels, _metadata, metadata2, samples]) => { + return { + labels, + sources: extractResp3MRangeSources(metadata2), + samples: transformSamplesReply[3](samples) + }; + }); + } + }, +} as const satisfies Command; diff --git a/packages/time-series/lib/commands/MRANGE_WITHLABELS.spec.ts b/packages/time-series/lib/commands/MRANGE_WITHLABELS.spec.ts new file mode 100644 index 00000000000..eab2e1fadbe --- /dev/null +++ 
b/packages/time-series/lib/commands/MRANGE_WITHLABELS.spec.ts @@ -0,0 +1,69 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import MRANGE_WITHLABELS from './MRANGE_WITHLABELS'; +import { TIME_SERIES_AGGREGATION_TYPE } from './CREATERULE'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('TS.MRANGE_WITHLABELS', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(MRANGE_WITHLABELS, '-', '+', 'label=value', { + LATEST: true, + FILTER_BY_TS: [0], + FILTER_BY_VALUE: { + min: 0, + max: 1 + }, + COUNT: 1, + ALIGN: '-', + AGGREGATION: { + type: TIME_SERIES_AGGREGATION_TYPE.AVG, + timeBucket: 1 + } + }), + [ + 'TS.MRANGE', '-', '+', + 'LATEST', + 'FILTER_BY_TS', '0', + 'FILTER_BY_VALUE', '0', '1', + 'COUNT', '1', + 'ALIGN', '-', + 'AGGREGATION', 'AVG', '1', + 'WITHLABELS', + 'FILTER', 'label=value' + ] + ); + }); + + testUtils.testWithClient('client.ts.mRangeWithLabels', async client => { + const [, reply] = await Promise.all([ + client.ts.add('key', 0, 0, { + LABELS: { label: 'value' } + }), + client.ts.mRangeWithLabels('-', '+', 'label=value') + ]); + + assert.deepStrictEqual( + reply, + Object.create(null, { + key: { + configurable: true, + enumerable: true, + value: { + labels: Object.create(null, { + label: { + configurable: true, + enumerable: true, + value: 'value' + } + }), + samples: [{ + timestamp: 0, + value: 0 + }] + } + } + }) + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/time-series/lib/commands/MRANGE_WITHLABELS.ts b/packages/time-series/lib/commands/MRANGE_WITHLABELS.ts new file mode 100644 index 00000000000..01a3634cf4c --- /dev/null +++ b/packages/time-series/lib/commands/MRANGE_WITHLABELS.ts @@ -0,0 +1,93 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { Command, UnwrapReply, ArrayReply, BlobStringReply, Resp2Reply, MapReply, TuplesReply, TypeMapping, RedisArgument } from 
'@redis/client/dist/lib/RESP/types'; +import { RedisVariadicArgument } from '@redis/client/dist/lib/commands/generic-transformers'; +import { resp2MapToValue, resp3MapToValue, SampleRawReply, Timestamp, transformSamplesReply } from './helpers'; +import { TsRangeOptions, parseRangeArguments } from './RANGE'; +import { parseFilterArgument } from './MGET'; + +export type TsMRangeWithLabelsRawReply2 = ArrayReply< + TuplesReply<[ + key: BlobStringReply, + labels: ArrayReply>, + samples: ArrayReply> + ]> +>; + +export type TsMRangeWithLabelsRawReply3 = MapReply< + BlobStringReply, + TuplesReply<[ + labels: MapReply, + metadata: never, // ?! + samples: ArrayReply + ]> +>; + +/** + * Creates a function that parses arguments for multi-range commands with labels + * @param command - The command name to use (TS.MRANGE or TS.MREVRANGE) + */ +export function createTransformMRangeWithLabelsArguments(command: RedisArgument) { + return ( + parser: CommandParser, + fromTimestamp: Timestamp, + toTimestamp: Timestamp, + filter: RedisVariadicArgument, + options?: TsRangeOptions + ) => { + parser.push(command); + parseRangeArguments( + parser, + fromTimestamp, + toTimestamp, + options + ); + + parser.push('WITHLABELS'); + + parseFilterArgument(parser, filter); + }; +} + +export default { + NOT_KEYED_COMMAND: true, + IS_READ_ONLY: true, + /** + * Gets samples for time series matching a filter with labels + * @param parser - The command parser + * @param fromTimestamp - Start timestamp for range + * @param toTimestamp - End timestamp for range + * @param filter - Filter to match time series keys + * @param options - Optional parameters for the command + */ + parseCommand: createTransformMRangeWithLabelsArguments('TS.MRANGE'), + transformReply: { + 2(reply: TsMRangeWithLabelsRawReply2, _?: any, typeMapping?: TypeMapping) { + return resp2MapToValue(reply, ([_key, labels, samples]) => { + const unwrappedLabels = labels as unknown as UnwrapReply; + // TODO: use Map type mapping for labels + 
const labelsObject: Record = Object.create(null); + for (const tuple of unwrappedLabels) { + const [key, value] = tuple as unknown as UnwrapReply; + const unwrappedKey = key as unknown as UnwrapReply; + labelsObject[unwrappedKey.toString()] = value; + } + + return { + labels: labelsObject, + samples: transformSamplesReply[2](samples) + }; + }, typeMapping); + }, + 3(reply: TsMRangeWithLabelsRawReply3) { + return resp3MapToValue(reply, ([labels, _metadata, samples]) => { + return { + labels, + samples: transformSamplesReply[3](samples) + }; + }); + } + }, +} as const satisfies Command; diff --git a/packages/time-series/lib/commands/MRANGE_WITHLABELS_GROUPBY.spec.ts b/packages/time-series/lib/commands/MRANGE_WITHLABELS_GROUPBY.spec.ts new file mode 100644 index 00000000000..4a8b8fe707f --- /dev/null +++ b/packages/time-series/lib/commands/MRANGE_WITHLABELS_GROUPBY.spec.ts @@ -0,0 +1,78 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import MRANGE_WITHLABELS_GROUPBY from './MRANGE_WITHLABELS_GROUPBY'; +import { TIME_SERIES_REDUCERS } from './MRANGE_GROUPBY'; +import { TIME_SERIES_AGGREGATION_TYPE } from './CREATERULE'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('TS.MRANGE_WITHLABELS_GROUPBY', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(MRANGE_WITHLABELS_GROUPBY, '-', '+', 'label=value', { + label: 'label', + REDUCE: TIME_SERIES_REDUCERS.AVG + }, { + LATEST: true, + FILTER_BY_TS: [0], + FILTER_BY_VALUE: { + min: 0, + max: 1 + }, + COUNT: 1, + ALIGN: '-', + AGGREGATION: { + type: TIME_SERIES_AGGREGATION_TYPE.AVG, + timeBucket: 1 + } + }), + [ + 'TS.MRANGE', '-', '+', + 'LATEST', + 'FILTER_BY_TS', '0', + 'FILTER_BY_VALUE', '0', '1', + 'COUNT', '1', + 'ALIGN', '-', + 'AGGREGATION', 'AVG', '1', + 'WITHLABELS', + 'FILTER', 'label=value', + 'GROUPBY', 'label', 'REDUCE', 'AVG' + ] + ); + }); + + 
testUtils.testWithClient('client.ts.mRangeWithLabelsGroupBy', async client => { + const [, reply] = await Promise.all([ + client.ts.add('key', 0, 0, { + LABELS: { label: 'value' } + }), + client.ts.mRangeWithLabelsGroupBy('-', '+', 'label=value', { + label: 'label', + REDUCE: TIME_SERIES_REDUCERS.AVG + }) + ]); + + assert.deepStrictEqual( + reply, + Object.create(null, { + 'label=value': { + configurable: true, + enumerable: true, + value: { + labels: Object.create(null, { + label: { + configurable: true, + enumerable: true, + value: 'value' + } + }), + sources: ['key'], + samples: [{ + timestamp: 0, + value: 0 + }] + } + } + }) + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/time-series/lib/commands/MRANGE_WITHLABELS_GROUPBY.ts b/packages/time-series/lib/commands/MRANGE_WITHLABELS_GROUPBY.ts new file mode 100644 index 00000000000..08c70000f70 --- /dev/null +++ b/packages/time-series/lib/commands/MRANGE_WITHLABELS_GROUPBY.ts @@ -0,0 +1,88 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { Command, ArrayReply, BlobStringReply, Resp2Reply, MapReply, TuplesReply, TypeMapping, RedisArgument } from '@redis/client/dist/lib/RESP/types'; +import { RedisVariadicArgument } from '@redis/client/dist/lib/commands/generic-transformers'; +import { resp2MapToValue, resp3MapToValue, SampleRawReply, Timestamp, transformRESP2LabelsWithSources, transformSamplesReply } from './helpers'; +import { TsRangeOptions, parseRangeArguments } from './RANGE'; +import { extractResp3MRangeSources, parseGroupByArguments, TsMRangeGroupBy, TsMRangeGroupByRawMetadataReply3 } from './MRANGE_GROUPBY'; +import { parseFilterArgument } from './MGET'; + +export type TsMRangeWithLabelsGroupByRawReply2 = ArrayReply< + TuplesReply<[ + key: BlobStringReply, + labels: ArrayReply>, + samples: ArrayReply> + ]> +>; + +export type TsMRangeWithLabelsGroupByRawReply3 = MapReply< + BlobStringReply, + TuplesReply<[ + labels: MapReply, + metadata: never, // ?! 
+ metadata2: TsMRangeGroupByRawMetadataReply3, + samples: ArrayReply + ]> +>; + +export function createMRangeWithLabelsGroupByTransformArguments(command: RedisArgument) { + return ( + parser: CommandParser, + fromTimestamp: Timestamp, + toTimestamp: Timestamp, + filter: RedisVariadicArgument, + groupBy: TsMRangeGroupBy, + options?: TsRangeOptions + ) => { + parser.push(command); + parseRangeArguments( + parser, + fromTimestamp, + toTimestamp, + options + ); + + parser.push('WITHLABELS'); + + parseFilterArgument(parser, filter); + + parseGroupByArguments(parser, groupBy); + }; +} + +export default { + IS_READ_ONLY: true, + /** + * Gets samples for time series matching a filter with labels and grouping + * @param parser - The command parser + * @param fromTimestamp - Start timestamp for range + * @param toTimestamp - End timestamp for range + * @param filter - Filter to match time series keys + * @param groupBy - Group by parameters + * @param options - Optional parameters for the command + */ + parseCommand: createMRangeWithLabelsGroupByTransformArguments('TS.MRANGE'), + transformReply: { + 2(reply: TsMRangeWithLabelsGroupByRawReply2, _?: any, typeMapping?: TypeMapping) { + return resp2MapToValue(reply, ([_key, labels, samples]) => { + const transformed = transformRESP2LabelsWithSources(labels); + return { + labels: transformed.labels, + sources: transformed.sources, + samples: transformSamplesReply[2](samples) + }; + }, typeMapping); + }, + 3(reply: TsMRangeWithLabelsGroupByRawReply3) { + return resp3MapToValue(reply, ([labels, _metadata, metadata2, samples]) => { + return { + labels, + sources: extractResp3MRangeSources(metadata2), + samples: transformSamplesReply[3](samples) + }; + }); + } + }, +} as const satisfies Command; diff --git a/packages/time-series/lib/commands/MREVRANGE.spec.ts b/packages/time-series/lib/commands/MREVRANGE.spec.ts new file mode 100644 index 00000000000..09051103f8b --- /dev/null +++ b/packages/time-series/lib/commands/MREVRANGE.spec.ts 
@@ -0,0 +1,63 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import MREVRANGE from './MREVRANGE'; +import { TIME_SERIES_AGGREGATION_TYPE } from './CREATERULE'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('TS.MREVRANGE', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(MREVRANGE, '-', '+', 'label=value', { + LATEST: true, + FILTER_BY_TS: [0], + FILTER_BY_VALUE: { + min: 0, + max: 1 + }, + COUNT: 1, + ALIGN: '-', + AGGREGATION: { + type: TIME_SERIES_AGGREGATION_TYPE.AVG, + timeBucket: 1 + } + }), + [ + 'TS.MREVRANGE', '-', '+', + 'LATEST', + 'FILTER_BY_TS', '0', + 'FILTER_BY_VALUE', '0', '1', + 'COUNT', '1', + 'ALIGN', '-', + 'AGGREGATION', 'AVG', '1', + 'FILTER', 'label=value' + ] + ); + }); + + testUtils.testWithClient('client.ts.mRevRange', async client => { + const [, reply] = await Promise.all([ + client.ts.add('key', 0, 0, { + LABELS: { + label: 'value' + } + }), + client.ts.mRevRange('-', '+', 'label=value', { + COUNT: 1 + }) + ]); + + assert.deepStrictEqual( + reply, + Object.create(null, { + key: { + configurable: true, + enumerable: true, + value: [{ + timestamp: 0, + value: 0 + }] + } + }) + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/time-series/lib/commands/MREVRANGE.ts b/packages/time-series/lib/commands/MREVRANGE.ts new file mode 100644 index 00000000000..54bd5ca9cca --- /dev/null +++ b/packages/time-series/lib/commands/MREVRANGE.ts @@ -0,0 +1,17 @@ +import { Command } from '@redis/client/dist/lib/RESP/types'; +import MRANGE, { createTransformMRangeArguments } from './MRANGE'; + +export default { + NOT_KEYED_COMMAND: MRANGE.NOT_KEYED_COMMAND, + IS_READ_ONLY: MRANGE.IS_READ_ONLY, + /** + * Gets samples for time series matching a specific filter within a time range (in reverse order) + * @param parser - The command parser + * @param fromTimestamp - Start timestamp for range + * @param toTimestamp - End timestamp 
for range + * @param filter - Filter to match time series keys + * @param options - Optional parameters for the command + */ + parseCommand: createTransformMRangeArguments('TS.MREVRANGE'), + transformReply: MRANGE.transformReply, +} as const satisfies Command; diff --git a/packages/time-series/lib/commands/MREVRANGE_GROUPBY.spec.ts b/packages/time-series/lib/commands/MREVRANGE_GROUPBY.spec.ts new file mode 100644 index 00000000000..d32d675ad0a --- /dev/null +++ b/packages/time-series/lib/commands/MREVRANGE_GROUPBY.spec.ts @@ -0,0 +1,68 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import MREVRANGE_GROUPBY from './MREVRANGE_GROUPBY'; +import { TIME_SERIES_REDUCERS } from './MRANGE_GROUPBY'; +import { TIME_SERIES_AGGREGATION_TYPE } from './CREATERULE'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('TS.MREVRANGE_GROUPBY', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(MREVRANGE_GROUPBY, '-', '+', 'label=value', { + REDUCE: TIME_SERIES_REDUCERS.AVG, + label: 'label' + }, { + LATEST: true, + FILTER_BY_TS: [0], + FILTER_BY_VALUE: { + min: 0, + max: 1 + }, + COUNT: 1, + ALIGN: '-', + AGGREGATION: { + type: TIME_SERIES_AGGREGATION_TYPE.AVG, + timeBucket: 1 + } + }), + [ + 'TS.MREVRANGE', '-', '+', + 'LATEST', + 'FILTER_BY_TS', '0', + 'FILTER_BY_VALUE', '0', '1', + 'COUNT', '1', + 'ALIGN', '-', 'AGGREGATION', 'AVG', '1', + 'FILTER', 'label=value', + 'GROUPBY', 'label', 'REDUCE', 'AVG' + ] + ); + }); + + testUtils.testWithClient('client.ts.mRevRangeGroupBy', async client => { + const [, reply] = await Promise.all([ + client.ts.add('key', 0, 0, { + LABELS: { label: 'value' } + }), + client.ts.mRevRangeGroupBy('-', '+', 'label=value', { + REDUCE: TIME_SERIES_REDUCERS.AVG, + label: 'label' + }) + ]); + + assert.deepStrictEqual( + reply, + Object.create(null, { + 'label=value': { + configurable: true, + enumerable: true, + value: { + samples: [{ + 
timestamp: 0, + value: 0 + }] + } + } + }) + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/time-series/lib/commands/MREVRANGE_GROUPBY.ts b/packages/time-series/lib/commands/MREVRANGE_GROUPBY.ts new file mode 100644 index 00000000000..329d9cceb8e --- /dev/null +++ b/packages/time-series/lib/commands/MREVRANGE_GROUPBY.ts @@ -0,0 +1,17 @@ +import { Command } from '@redis/client/dist/lib/RESP/types'; +import MRANGE_GROUPBY, { createTransformMRangeGroupByArguments } from './MRANGE_GROUPBY'; + +export default { + IS_READ_ONLY: MRANGE_GROUPBY.IS_READ_ONLY, + /** + * Gets samples for time series matching a filter within a time range with grouping (in reverse order) + * @param parser - The command parser + * @param fromTimestamp - Start timestamp for range + * @param toTimestamp - End timestamp for range + * @param filter - Filter to match time series keys + * @param groupBy - Group by parameters + * @param options - Optional parameters for the command + */ + parseCommand: createTransformMRangeGroupByArguments('TS.MREVRANGE'), + transformReply: MRANGE_GROUPBY.transformReply, +} as const satisfies Command; diff --git a/packages/time-series/lib/commands/MREVRANGE_SELECTED_LABELS.spec.ts b/packages/time-series/lib/commands/MREVRANGE_SELECTED_LABELS.spec.ts new file mode 100644 index 00000000000..f68e34727c2 --- /dev/null +++ b/packages/time-series/lib/commands/MREVRANGE_SELECTED_LABELS.spec.ts @@ -0,0 +1,74 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import MREVRANGE_SELECTED_LABELS from './MREVRANGE_SELECTED_LABELS'; +import { TIME_SERIES_AGGREGATION_TYPE } from './CREATERULE'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('TS.MREVRANGE_SELECTED_LABELS', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(MREVRANGE_SELECTED_LABELS, '-', '+', 'label', 'label=value', { + FILTER_BY_TS: [0], + FILTER_BY_VALUE: { + min: 0, + max: 1 + }, + 
COUNT: 1, + ALIGN: '-', + AGGREGATION: { + type: TIME_SERIES_AGGREGATION_TYPE.AVG, + timeBucket: 1 + } + }), + [ + 'TS.MREVRANGE', '-', '+', + 'FILTER_BY_TS', '0', + 'FILTER_BY_VALUE', '0', '1', + 'COUNT', '1', + 'ALIGN', '-', 'AGGREGATION', 'AVG', '1', + 'SELECTED_LABELS', 'label', + 'FILTER', 'label=value' + ] + ); + }); + + testUtils.testWithClient('client.ts.mRevRangeSelectedLabels', async client => { + const [, reply] = await Promise.all([ + client.ts.add('key', 0, 0, { + LABELS: { label: 'value' } + }), + client.ts.mRevRangeSelectedLabels('-', '+', ['label', 'NX'], 'label=value', { + COUNT: 1 + }) + ]); + + assert.deepStrictEqual( + reply, + Object.create(null, { + key: { + configurable: true, + enumerable: true, + value: { + labels: Object.create(null, { + label: { + configurable: true, + enumerable: true, + value: 'value' + }, + NX: { + configurable: true, + enumerable: true, + value: null + } + }), + samples: [{ + timestamp: 0, + value: 0 + }] + } + + } + }) + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/time-series/lib/commands/MREVRANGE_SELECTED_LABELS.ts b/packages/time-series/lib/commands/MREVRANGE_SELECTED_LABELS.ts new file mode 100644 index 00000000000..15dc9d87daa --- /dev/null +++ b/packages/time-series/lib/commands/MREVRANGE_SELECTED_LABELS.ts @@ -0,0 +1,17 @@ +import { Command } from '@redis/client/dist/lib/RESP/types'; +import MRANGE_SELECTED_LABELS, { createTransformMRangeSelectedLabelsArguments } from './MRANGE_SELECTED_LABELS'; + +export default { + IS_READ_ONLY: MRANGE_SELECTED_LABELS.IS_READ_ONLY, + /** + * Gets samples for time series matching a filter with selected labels (in reverse order) + * @param parser - The command parser + * @param fromTimestamp - Start timestamp for range + * @param toTimestamp - End timestamp for range + * @param selectedLabels - Labels to include in the output + * @param filter - Filter to match time series keys + * @param options - Optional parameters for the command + */ + parseCommand: 
createTransformMRangeSelectedLabelsArguments('TS.MREVRANGE'), + transformReply: MRANGE_SELECTED_LABELS.transformReply, +} as const satisfies Command; diff --git a/packages/time-series/lib/commands/MREVRANGE_SELECTED_LABELS_GROUPBY.spec.ts b/packages/time-series/lib/commands/MREVRANGE_SELECTED_LABELS_GROUPBY.spec.ts new file mode 100644 index 00000000000..444bb2f3d24 --- /dev/null +++ b/packages/time-series/lib/commands/MREVRANGE_SELECTED_LABELS_GROUPBY.spec.ts @@ -0,0 +1,81 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import MREVRANGE_SELECTED_LABELS_GROUPBY from './MREVRANGE_SELECTED_LABELS_GROUPBY'; +import { TIME_SERIES_REDUCERS } from './MRANGE_GROUPBY'; +import { TIME_SERIES_AGGREGATION_TYPE } from './CREATERULE'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('TS.MREVRANGE_SELECTED_LABELS_GROUPBY', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(MREVRANGE_SELECTED_LABELS_GROUPBY, '-', '+', 'label', 'label=value', { + REDUCE: TIME_SERIES_REDUCERS.AVG, + label: 'label' + }, { + LATEST: true, + FILTER_BY_TS: [0], + FILTER_BY_VALUE: { + min: 0, + max: 1 + }, + COUNT: 1, + ALIGN: '-', + AGGREGATION: { + type: TIME_SERIES_AGGREGATION_TYPE.AVG, + timeBucket: 1 + } + }), + [ + 'TS.MREVRANGE', '-', '+', + 'LATEST', + 'FILTER_BY_TS', '0', + 'FILTER_BY_VALUE', '0', '1', + 'COUNT', '1', + 'ALIGN', '-', 'AGGREGATION', 'AVG', '1', + 'SELECTED_LABELS', 'label', + 'FILTER', 'label=value', + 'GROUPBY', 'label', 'REDUCE', 'AVG' + ] + ); + }); + + testUtils.testWithClient('client.ts.mRevRangeSelectedLabelsGroupBy', async client => { + const [, reply] = await Promise.all([ + client.ts.add('key', 0, 0, { + LABELS: { label: 'value' } + }), + client.ts.mRevRangeSelectedLabelsGroupBy('-', '+', ['label', 'NX'], 'label=value', { + REDUCE: TIME_SERIES_REDUCERS.AVG, + label: 'label' + }) + ]); + + assert.deepStrictEqual( + reply, + Object.create(null, { + 
'label=value': { + configurable: true, + enumerable: true, + value: { + labels: Object.create(null, { + label: { + configurable: true, + enumerable: true, + value: 'value' + }, + NX: { + configurable: true, + enumerable: true, + value: null + } + }), + samples: [{ + timestamp: 0, + value: 0 + }] + } + } + }) + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/time-series/lib/commands/MREVRANGE_SELECTED_LABELS_GROUPBY.ts b/packages/time-series/lib/commands/MREVRANGE_SELECTED_LABELS_GROUPBY.ts new file mode 100644 index 00000000000..c044a9ca064 --- /dev/null +++ b/packages/time-series/lib/commands/MREVRANGE_SELECTED_LABELS_GROUPBY.ts @@ -0,0 +1,18 @@ +import { Command } from '@redis/client/dist/lib/RESP/types'; +import MRANGE_SELECTED_LABELS_GROUPBY, { createMRangeSelectedLabelsGroupByTransformArguments } from './MRANGE_SELECTED_LABELS_GROUPBY'; + +export default { + IS_READ_ONLY: MRANGE_SELECTED_LABELS_GROUPBY.IS_READ_ONLY, + /** + * Gets samples for time series matching a filter with selected labels and grouping (in reverse order) + * @param parser - The command parser + * @param fromTimestamp - Start timestamp for range + * @param toTimestamp - End timestamp for range + * @param selectedLabels - Labels to include in the output + * @param filter - Filter to match time series keys + * @param groupBy - Group by parameters + * @param options - Optional parameters for the command + */ + parseCommand: createMRangeSelectedLabelsGroupByTransformArguments('TS.MREVRANGE'), + transformReply: MRANGE_SELECTED_LABELS_GROUPBY.transformReply, +} as const satisfies Command; diff --git a/packages/time-series/lib/commands/MREVRANGE_WITHLABELS.spec.ts b/packages/time-series/lib/commands/MREVRANGE_WITHLABELS.spec.ts new file mode 100644 index 00000000000..da43a715f2e --- /dev/null +++ b/packages/time-series/lib/commands/MREVRANGE_WITHLABELS.spec.ts @@ -0,0 +1,69 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import 
MREVRANGE_WITHLABELS from './MREVRANGE_WITHLABELS'; +import { TIME_SERIES_AGGREGATION_TYPE } from './CREATERULE'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('TS.MREVRANGE_WITHLABELS', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(MREVRANGE_WITHLABELS, '-', '+', 'label=value', { + LATEST: true, + FILTER_BY_TS: [0], + FILTER_BY_VALUE: { + min: 0, + max: 1 + }, + COUNT: 1, + ALIGN: '-', + AGGREGATION: { + type: TIME_SERIES_AGGREGATION_TYPE.AVG, + timeBucket: 1 + } + }), + [ + 'TS.MREVRANGE', '-', '+', + 'LATEST', + 'FILTER_BY_TS', '0', + 'FILTER_BY_VALUE', '0', '1', + 'COUNT', '1', + 'ALIGN', '-', + 'AGGREGATION', 'AVG', '1', + 'WITHLABELS', + 'FILTER', 'label=value' + ] + ); + }); + + testUtils.testWithClient('client.ts.mRevRangeWithLabels', async client => { + const [, reply] = await Promise.all([ + client.ts.add('key', 0, 0, { + LABELS: { label: 'value' } + }), + client.ts.mRevRangeWithLabels('-', '+', 'label=value') + ]); + + assert.deepStrictEqual( + reply, + Object.create(null, { + key: { + configurable: true, + enumerable: true, + value: { + labels: Object.create(null, { + label: { + configurable: true, + enumerable: true, + value: 'value' + } + }), + samples: [{ + timestamp: 0, + value: 0 + }] + } + } + }) + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/time-series/lib/commands/MREVRANGE_WITHLABELS.ts b/packages/time-series/lib/commands/MREVRANGE_WITHLABELS.ts new file mode 100644 index 00000000000..0a05ab2c985 --- /dev/null +++ b/packages/time-series/lib/commands/MREVRANGE_WITHLABELS.ts @@ -0,0 +1,17 @@ +import { Command } from '@redis/client/dist/lib/RESP/types'; +import MRANGE_WITHLABELS, { createTransformMRangeWithLabelsArguments } from './MRANGE_WITHLABELS'; + +export default { + NOT_KEYED_COMMAND: MRANGE_WITHLABELS.NOT_KEYED_COMMAND, + IS_READ_ONLY: MRANGE_WITHLABELS.IS_READ_ONLY, + /** + * Gets samples for time series matching a filter with labels (in reverse order) + 
* @param parser - The command parser + * @param fromTimestamp - Start timestamp for range + * @param toTimestamp - End timestamp for range + * @param filter - Filter to match time series keys + * @param options - Optional parameters for the command + */ + parseCommand: createTransformMRangeWithLabelsArguments('TS.MREVRANGE'), + transformReply: MRANGE_WITHLABELS.transformReply, +} as const satisfies Command; diff --git a/packages/time-series/lib/commands/MREVRANGE_WITHLABELS_GROUPBY.spec.ts b/packages/time-series/lib/commands/MREVRANGE_WITHLABELS_GROUPBY.spec.ts new file mode 100644 index 00000000000..f4e6df9f0c6 --- /dev/null +++ b/packages/time-series/lib/commands/MREVRANGE_WITHLABELS_GROUPBY.spec.ts @@ -0,0 +1,78 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import MREVRANGE_WITHLABELS_GROUPBY from './MREVRANGE_WITHLABELS_GROUPBY'; +import { TIME_SERIES_REDUCERS } from './MRANGE_GROUPBY'; +import { TIME_SERIES_AGGREGATION_TYPE } from './CREATERULE'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('TS.MREVRANGE_WITHLABELS_GROUPBY', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(MREVRANGE_WITHLABELS_GROUPBY, '-', '+', 'label=value', { + label: 'label', + REDUCE: TIME_SERIES_REDUCERS.AVG + }, { + LATEST: true, + FILTER_BY_TS: [0], + FILTER_BY_VALUE: { + min: 0, + max: 1 + }, + COUNT: 1, + ALIGN: '-', + AGGREGATION: { + type: TIME_SERIES_AGGREGATION_TYPE.AVG, + timeBucket: 1 + } + }), + [ + 'TS.MREVRANGE', '-', '+', + 'LATEST', + 'FILTER_BY_TS', '0', + 'FILTER_BY_VALUE', '0', '1', + 'COUNT', '1', + 'ALIGN', '-', + 'AGGREGATION', 'AVG', '1', + 'WITHLABELS', + 'FILTER', 'label=value', + 'GROUPBY', 'label', 'REDUCE', 'AVG' + ] + ); + }); + + testUtils.testWithClient('client.ts.mRevRangeWithLabelsGroupBy', async client => { + const [, reply] = await Promise.all([ + client.ts.add('key', 0, 0, { + LABELS: { label: 'value' } + }), + 
client.ts.mRevRangeWithLabelsGroupBy('-', '+', 'label=value', { + label: 'label', + REDUCE: TIME_SERIES_REDUCERS.AVG + }) + ]); + + assert.deepStrictEqual( + reply, + Object.create(null, { + 'label=value': { + configurable: true, + enumerable: true, + value: { + labels: Object.create(null, { + label: { + configurable: true, + enumerable: true, + value: 'value' + } + }), + sources: ['key'], + samples: [{ + timestamp: 0, + value: 0 + }] + } + } + }) + ); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/time-series/lib/commands/MREVRANGE_WITHLABELS_GROUPBY.ts b/packages/time-series/lib/commands/MREVRANGE_WITHLABELS_GROUPBY.ts new file mode 100644 index 00000000000..e5c62898951 --- /dev/null +++ b/packages/time-series/lib/commands/MREVRANGE_WITHLABELS_GROUPBY.ts @@ -0,0 +1,17 @@ +import { Command } from '@redis/client/dist/lib/RESP/types'; +import MRANGE_WITHLABELS_GROUPBY, { createMRangeWithLabelsGroupByTransformArguments } from './MRANGE_WITHLABELS_GROUPBY'; + +export default { + IS_READ_ONLY: MRANGE_WITHLABELS_GROUPBY.IS_READ_ONLY, + /** + * Gets samples for time series matching a filter with labels and grouping (in reverse order) + * @param parser - The command parser + * @param fromTimestamp - Start timestamp for range + * @param toTimestamp - End timestamp for range + * @param filter - Filter to match time series keys + * @param groupBy - Group by parameters + * @param options - Optional parameters for the command + */ + parseCommand: createMRangeWithLabelsGroupByTransformArguments('TS.MREVRANGE'), + transformReply: MRANGE_WITHLABELS_GROUPBY.transformReply, +} as const satisfies Command; diff --git a/packages/time-series/lib/commands/QUERYINDEX.spec.ts b/packages/time-series/lib/commands/QUERYINDEX.spec.ts new file mode 100644 index 00000000000..2f3f5617fb3 --- /dev/null +++ b/packages/time-series/lib/commands/QUERYINDEX.spec.ts @@ -0,0 +1,35 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import 
QUERYINDEX from './QUERYINDEX';
+import { parseArgs } from '@redis/client/lib/commands/generic-transformers';
+
+describe('TS.QUERYINDEX', () => {
+  describe('transformArguments', () => {
+    it('single filter', () => {
+      assert.deepEqual(
+        parseArgs(QUERYINDEX, '*'),
+        ['TS.QUERYINDEX', '*']
+      );
+    });
+
+    it('multiple filters', () => {
+      assert.deepEqual(
+        parseArgs(QUERYINDEX, ['a=1', 'b=2']),
+        ['TS.QUERYINDEX', 'a=1', 'b=2']
+      );
+    });
+  });
+
+  testUtils.testWithClient('client.ts.queryIndex', async client => {
+    const [, reply] = await Promise.all([
+      client.ts.create('key', {
+        LABELS: {
+          label: 'value'
+        }
+      }),
+      client.ts.queryIndex('label=value')
+    ]);
+
+    assert.deepEqual(reply, ['key']);
+  }, GLOBAL.SERVERS.OPEN);
+});
diff --git a/packages/time-series/lib/commands/QUERYINDEX.ts b/packages/time-series/lib/commands/QUERYINDEX.ts
new file mode 100644
index 00000000000..158a7341c8a
--- /dev/null
+++ b/packages/time-series/lib/commands/QUERYINDEX.ts
@@ -0,0 +1,21 @@
+import { CommandParser } from '@redis/client/dist/lib/client/parser';
+import { ArrayReply, BlobStringReply, SetReply, Command } from '@redis/client/dist/lib/RESP/types';
+import { RedisVariadicArgument } from '@redis/client/dist/lib/commands/generic-transformers';
+
+export default {
+  NOT_KEYED_COMMAND: true,
+  IS_READ_ONLY: true,
+  /**
+   * Queries the index for time series matching a specific filter
+   * @param parser - The command parser
+   * @param filter - Filter to match time series labels
+   */
+  parseCommand(parser: CommandParser, filter: RedisVariadicArgument) {
+    parser.push('TS.QUERYINDEX');
+    parser.pushVariadic(filter);
+  },
+  transformReply: {
+    2: undefined as unknown as () => ArrayReply<BlobStringReply>,
+    3: undefined as unknown as () => SetReply<BlobStringReply>
+  }
+} as const satisfies Command;
diff --git a/packages/time-series/lib/commands/RANGE.spec.ts b/packages/time-series/lib/commands/RANGE.spec.ts
new file mode 100644
index 00000000000..2d20b455fc1
--- /dev/null
+++ 
b/packages/time-series/lib/commands/RANGE.spec.ts @@ -0,0 +1,41 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import RANGE from './RANGE'; +import { TIME_SERIES_AGGREGATION_TYPE } from './CREATERULE'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('TS.RANGE', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(RANGE, 'key', '-', '+', { + FILTER_BY_TS: [0], + FILTER_BY_VALUE: { + min: 1, + max: 2 + }, + COUNT: 1, + ALIGN: '-', + AGGREGATION: { + type: TIME_SERIES_AGGREGATION_TYPE.AVG, + timeBucket: 1 + } + }), + [ + 'TS.RANGE', 'key', '-', '+', 'FILTER_BY_TS', '0', 'FILTER_BY_VALUE', + '1', '2', 'COUNT', '1', 'ALIGN', '-', 'AGGREGATION', 'AVG', '1' + ] + ); + }); + + testUtils.testWithClient('client.ts.range', async client => { + const [, reply] = await Promise.all([ + client.ts.add('key', 1, 2), + client.ts.range('key', '-', '+') + ]); + + assert.deepEqual(reply, [{ + timestamp: 1, + value: 2 + }]); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/time-series/lib/commands/RANGE.ts b/packages/time-series/lib/commands/RANGE.ts new file mode 100644 index 00000000000..03d58d012ca --- /dev/null +++ b/packages/time-series/lib/commands/RANGE.ts @@ -0,0 +1,122 @@ +import { CommandParser } from '@redis/client/dist/lib/client/parser'; +import { RedisArgument, Command } from '@redis/client/dist/lib/RESP/types'; +import { Timestamp, transformTimestampArgument, SamplesRawReply, transformSamplesReply } from './helpers'; +import { TimeSeriesAggregationType } from './CREATERULE'; +import { Resp2Reply } from '@redis/client/dist/lib/RESP/types'; + +export const TIME_SERIES_BUCKET_TIMESTAMP = { + LOW: '-', + MIDDLE: '~', + END: '+' +}; + +export type TimeSeriesBucketTimestamp = typeof TIME_SERIES_BUCKET_TIMESTAMP[keyof typeof TIME_SERIES_BUCKET_TIMESTAMP]; + +export interface TsRangeOptions { + LATEST?: boolean; + FILTER_BY_TS?: Array; + FILTER_BY_VALUE?: 
{ + min: number; + max: number; + }; + COUNT?: number; + ALIGN?: Timestamp; + AGGREGATION?: { + ALIGN?: Timestamp; + type: TimeSeriesAggregationType; + timeBucket: Timestamp; + BUCKETTIMESTAMP?: TimeSeriesBucketTimestamp; + EMPTY?: boolean; + }; +} + +export function parseRangeArguments( + parser: CommandParser, + fromTimestamp: Timestamp, + toTimestamp: Timestamp, + options?: TsRangeOptions +) { + parser.push( + transformTimestampArgument(fromTimestamp), + transformTimestampArgument(toTimestamp) + ); + + if (options?.LATEST) { + parser.push('LATEST'); + } + + if (options?.FILTER_BY_TS) { + parser.push('FILTER_BY_TS'); + for (const timestamp of options.FILTER_BY_TS) { + parser.push(transformTimestampArgument(timestamp)); + } + } + + if (options?.FILTER_BY_VALUE) { + parser.push( + 'FILTER_BY_VALUE', + options.FILTER_BY_VALUE.min.toString(), + options.FILTER_BY_VALUE.max.toString() + ); + } + + if (options?.COUNT !== undefined) { + parser.push('COUNT', options.COUNT.toString()); + } + + if (options?.AGGREGATION) { + if (options?.ALIGN !== undefined) { + parser.push('ALIGN', transformTimestampArgument(options.ALIGN)); + } + + parser.push( + 'AGGREGATION', + options.AGGREGATION.type, + transformTimestampArgument(options.AGGREGATION.timeBucket) + ); + + if (options.AGGREGATION.BUCKETTIMESTAMP) { + parser.push( + 'BUCKETTIMESTAMP', + options.AGGREGATION.BUCKETTIMESTAMP + ); + } + + if (options.AGGREGATION.EMPTY) { + parser.push('EMPTY'); + } + } +} + +export function transformRangeArguments( + parser: CommandParser, + key: RedisArgument, + fromTimestamp: Timestamp, + toTimestamp: Timestamp, + options?: TsRangeOptions +) { + parser.pushKey(key); + parseRangeArguments(parser, fromTimestamp, toTimestamp, options); +} + +export default { + IS_READ_ONLY: true, + /** + * Gets samples from a time series within a time range + * @param args - Arguments passed to the {@link transformRangeArguments} function + */ + parseCommand(...args: Parameters) { + const parser = args[0]; + + 
parser.push('TS.RANGE'); + transformRangeArguments(...args); + }, + transformReply: { + 2(reply: Resp2Reply) { + return transformSamplesReply[2](reply); + }, + 3(reply: SamplesRawReply) { + return transformSamplesReply[3](reply); + } + } +} as const satisfies Command; diff --git a/packages/time-series/lib/commands/REVRANGE.spec.ts b/packages/time-series/lib/commands/REVRANGE.spec.ts new file mode 100644 index 00000000000..a4c6aa2c0db --- /dev/null +++ b/packages/time-series/lib/commands/REVRANGE.spec.ts @@ -0,0 +1,41 @@ +import { strict as assert } from 'node:assert'; +import testUtils, { GLOBAL } from '../test-utils'; +import REVRANGE from './REVRANGE'; +import { TIME_SERIES_AGGREGATION_TYPE } from '../index'; +import { parseArgs } from '@redis/client/lib/commands/generic-transformers'; + +describe('TS.REVRANGE', () => { + it('transformArguments', () => { + assert.deepEqual( + parseArgs(REVRANGE, 'key', '-', '+', { + FILTER_BY_TS: [0], + FILTER_BY_VALUE: { + min: 1, + max: 2 + }, + COUNT: 1, + ALIGN: '-', + AGGREGATION: { + type: TIME_SERIES_AGGREGATION_TYPE.AVG, + timeBucket: 1 + } + }), + [ + 'TS.REVRANGE', 'key', '-', '+', 'FILTER_BY_TS', '0', 'FILTER_BY_VALUE', + '1', '2', 'COUNT', '1', 'ALIGN', '-', 'AGGREGATION', 'AVG', '1' + ] + ); + }); + + testUtils.testWithClient('client.ts.revRange', async client => { + const [, reply] = await Promise.all([ + client.ts.add('key', 1, 2), + client.ts.revRange('key', '-', '+') + ]); + + assert.deepEqual(reply, [{ + timestamp: 1, + value: 2 + }]); + }, GLOBAL.SERVERS.OPEN); +}); diff --git a/packages/time-series/lib/commands/REVRANGE.ts b/packages/time-series/lib/commands/REVRANGE.ts new file mode 100644 index 00000000000..27389b896c3 --- /dev/null +++ b/packages/time-series/lib/commands/REVRANGE.ts @@ -0,0 +1,17 @@ +import { Command } from '@redis/client/dist/lib/RESP/types'; +import RANGE, { transformRangeArguments } from './RANGE'; + +export default { + IS_READ_ONLY: RANGE.IS_READ_ONLY, + /** + * Gets samples from a time 
series within a time range (in reverse order) + * @param args - Arguments passed to the {@link transformRangeArguments} function + */ + parseCommand(...args: Parameters) { + const parser = args[0]; + + parser.push('TS.REVRANGE'); + transformRangeArguments(...args); + }, + transformReply: RANGE.transformReply +} as const satisfies Command; diff --git a/packages/time-series/lib/commands/helpers.ts b/packages/time-series/lib/commands/helpers.ts new file mode 100644 index 00000000000..3e277d0747d --- /dev/null +++ b/packages/time-series/lib/commands/helpers.ts @@ -0,0 +1,306 @@ +import { CommandParser } from "@redis/client/dist/lib/client/parser"; +import { TsIgnoreOptions } from "./ADD"; +import { ArrayReply, BlobStringReply, DoubleReply, MapReply, NullReply, NumberReply, ReplyUnion, Resp2Reply, RespType, TuplesReply, TypeMapping, UnwrapReply } from "@redis/client/dist/lib/RESP/types"; +import { RESP_TYPES } from "@redis/client"; +import { RedisVariadicArgument } from "@redis/client/dist/lib/commands/generic-transformers"; + +export function parseIgnoreArgument(parser: CommandParser, ignore?: TsIgnoreOptions) { + if (ignore !== undefined) { + parser.push('IGNORE', ignore.maxTimeDiff.toString(), ignore.maxValDiff.toString()); + } +} + +export function parseRetentionArgument(parser: CommandParser, retention?: number) { + if (retention !== undefined) { + parser.push('RETENTION', retention.toString()); + } +} + +export const TIME_SERIES_ENCODING = { + COMPRESSED: 'COMPRESSED', + UNCOMPRESSED: 'UNCOMPRESSED' +} as const; + +export type TimeSeriesEncoding = typeof TIME_SERIES_ENCODING[keyof typeof TIME_SERIES_ENCODING]; + +export function parseEncodingArgument(parser: CommandParser, encoding?: TimeSeriesEncoding) { + if (encoding !== undefined) { + parser.push('ENCODING', encoding); + } +} + +export function parseChunkSizeArgument(parser: CommandParser, chunkSize?: number) { + if (chunkSize !== undefined) { + parser.push('CHUNK_SIZE', chunkSize.toString()); + } +} + +export 
const TIME_SERIES_DUPLICATE_POLICIES = { + BLOCK: 'BLOCK', + FIRST: 'FIRST', + LAST: 'LAST', + MIN: 'MIN', + MAX: 'MAX', + SUM: 'SUM' +} as const; + +export type TimeSeriesDuplicatePolicies = typeof TIME_SERIES_DUPLICATE_POLICIES[keyof typeof TIME_SERIES_DUPLICATE_POLICIES]; + +export function parseDuplicatePolicy(parser: CommandParser, duplicatePolicy?: TimeSeriesDuplicatePolicies) { + if (duplicatePolicy !== undefined) { + parser.push('DUPLICATE_POLICY', duplicatePolicy); + } +} + +export type Timestamp = number | Date | string; + +export function transformTimestampArgument(timestamp: Timestamp): string { + if (typeof timestamp === 'string') return timestamp; + + return ( + typeof timestamp === 'number' ? + timestamp : + timestamp.getTime() + ).toString(); +} + +export type Labels = { + [label: string]: string; +}; + +export function parseLabelsArgument(parser: CommandParser, labels?: Labels) { + if (labels) { + parser.push('LABELS'); + + for (const [label, value] of Object.entries(labels)) { + parser.push(label, value); + } + } +} + +export type SampleRawReply = TuplesReply<[timestamp: NumberReply, value: DoubleReply]>; + +export const transformSampleReply = { + 2(reply: Resp2Reply) { + const [ timestamp, value ] = reply as unknown as UnwrapReply; + return { + timestamp, + value: Number(value) // TODO: use double type mapping instead + }; + }, + 3(reply: SampleRawReply) { + const [ timestamp, value ] = reply as unknown as UnwrapReply; + return { + timestamp, + value + }; + } +}; + +export type SamplesRawReply = ArrayReply; + +export const transformSamplesReply = { + 2(reply: Resp2Reply) { + return (reply as unknown as UnwrapReply) + .map(sample => transformSampleReply[2](sample)); + }, + 3(reply: SamplesRawReply) { + return (reply as unknown as UnwrapReply) + .map(sample => transformSampleReply[3](sample)); + } +}; + +// TODO: move to @redis/client? 
+export function resp2MapToValue< + RAW_VALUE extends TuplesReply<[key: BlobStringReply, ...rest: Array]>, + TRANSFORMED +>( + wrappedReply: ArrayReply, + parseFunc: (rawValue: UnwrapReply) => TRANSFORMED, + typeMapping?: TypeMapping +): MapReply { + const reply = wrappedReply as unknown as UnwrapReply; + switch (typeMapping?.[RESP_TYPES.MAP]) { + case Map: { + const ret = new Map(); + for (const wrappedTuple of reply) { + const tuple = wrappedTuple as unknown as UnwrapReply; + const key = tuple[0] as unknown as UnwrapReply; + ret.set(key.toString(), parseFunc(tuple)); + } + return ret as never; + } + case Array: { + for (const wrappedTuple of reply) { + const tuple = wrappedTuple as unknown as UnwrapReply; + (tuple[1] as unknown as TRANSFORMED) = parseFunc(tuple); + } + return reply as never; + } + default: { + const ret: Record = Object.create(null); + for (const wrappedTuple of reply) { + const tuple = wrappedTuple as unknown as UnwrapReply; + const key = tuple[0] as unknown as UnwrapReply; + ret[key.toString()] = parseFunc(tuple); + } + return ret as never; + } + } +} + +export function resp3MapToValue< + RAW_VALUE extends RespType, // TODO: simplify types + TRANSFORMED +>( + wrappedReply: MapReply, + parseFunc: (rawValue: UnwrapReply) => TRANSFORMED +): MapReply { + const reply = wrappedReply as unknown as UnwrapReply; + if (reply instanceof Array) { + for (let i = 1; i < reply.length; i += 2) { + (reply[i] as unknown as TRANSFORMED) = parseFunc(reply[i] as unknown as UnwrapReply); + } + } else if (reply instanceof Map) { + for (const [key, value] of reply.entries()) { + (reply as unknown as Map).set( + key, + parseFunc(value as unknown as UnwrapReply) + ); + } + } else { + for (const [key, value] of Object.entries(reply)) { + (reply[key] as unknown as TRANSFORMED) = parseFunc(value as unknown as UnwrapReply); + } + } + return reply as never; +} + +export function parseSelectedLabelsArguments( + parser: CommandParser, + selectedLabels: RedisVariadicArgument +) 
{ + parser.push('SELECTED_LABELS'); + parser.pushVariadic(selectedLabels); +} + +export type RawLabelValue = BlobStringReply | NullReply; + +export type RawLabels = ArrayReply>; + +export function transformRESP2Labels( + labels: RawLabels, + typeMapping?: TypeMapping +): MapReply { + const unwrappedLabels = labels as unknown as UnwrapReply; + switch (typeMapping?.[RESP_TYPES.MAP]) { + case Map: + const map = new Map(); + for (const tuple of unwrappedLabels) { + const [key, value] = tuple as unknown as UnwrapReply; + const unwrappedKey = key as unknown as UnwrapReply; + map.set(unwrappedKey.toString(), value); + } + return map as never; + + case Array: + return unwrappedLabels.flat() as never; + + case Object: + default: + const labelsObject: Record = Object.create(null); + for (const tuple of unwrappedLabels) { + const [key, value] = tuple as unknown as UnwrapReply; + const unwrappedKey = key as unknown as UnwrapReply; + labelsObject[unwrappedKey.toString()] = value; + } + return labelsObject as never; + } +} + +export function transformRESP2LabelsWithSources( + labels: RawLabels, + typeMapping?: TypeMapping +) { + const unwrappedLabels = labels as unknown as UnwrapReply; + const to = unwrappedLabels.length - 2; // ignore __reducer__ and __source__ + let transformedLabels: MapReply; + switch (typeMapping?.[RESP_TYPES.MAP]) { + case Map: + const map = new Map(); + for (let i = 0; i < to; i++) { + const [key, value] = unwrappedLabels[i] as unknown as UnwrapReply; + const unwrappedKey = key as unknown as UnwrapReply; + map.set(unwrappedKey.toString(), value); + } + transformedLabels = map as never; + break; + + case Array: + transformedLabels = unwrappedLabels.slice(0, to).flat() as never; + break; + + case Object: + default: + const labelsObject: Record = Object.create(null); + for (let i = 0; i < to; i++) { + const [key, value] = unwrappedLabels[i] as unknown as UnwrapReply; + const unwrappedKey = key as unknown as UnwrapReply; + 
labelsObject[unwrappedKey.toString()] = value; + } + transformedLabels = labelsObject as never; + break; + } + + const sourcesTuple = unwrappedLabels[unwrappedLabels.length - 1]; + const unwrappedSourcesTuple = sourcesTuple as unknown as UnwrapReply; + // the __source__ label will never be null + const transformedSources = transformRESP2Sources(unwrappedSourcesTuple[1] as BlobStringReply); + + return { + labels: transformedLabels, + sources: transformedSources + }; +} + +function transformRESP2Sources(sourcesRaw: BlobStringReply) { + // if a label contains "," this function will produce incorrcet results.. + // there is not much we can do about it, and we assume most users won't be using "," in their labels.. + + const unwrappedSources = sourcesRaw as unknown as UnwrapReply; + if (typeof unwrappedSources === 'string') { + return unwrappedSources.split(','); + } + + const indexOfComma = unwrappedSources.indexOf(','); + if (indexOfComma === -1) { + return [unwrappedSources]; + } + + const sourcesArray = [ + unwrappedSources.subarray(0, indexOfComma) + ]; + + let previousComma = indexOfComma + 1; + while (true) { + const indexOf = unwrappedSources.indexOf(',', previousComma); + if (indexOf === -1) { + sourcesArray.push( + unwrappedSources.subarray(previousComma) + ); + break; + } + + const source = unwrappedSources.subarray( + previousComma, + indexOf + ); + sourcesArray.push(source); + previousComma = indexOf + 1; + } + + return sourcesArray; +} \ No newline at end of file diff --git a/packages/time-series/lib/commands/index.spec.ts b/packages/time-series/lib/commands/index.spec.ts new file mode 100644 index 00000000000..b565abea476 --- /dev/null +++ b/packages/time-series/lib/commands/index.spec.ts @@ -0,0 +1,423 @@ +// import { RedisCommandArguments } from '@redis/client/lib/commands'; +// import { strict as assert } from 'node:assert'; +// import { +// transformTimestampArgument, +// pushRetentionArgument, +// TimeSeriesEncoding, +// pushEncodingArgument, +// 
pushChunkSizeArgument, +// pushDuplicatePolicy, +// pushLabelsArgument, +// transformIncrDecrArguments, +// transformSampleReply, +// TimeSeriesAggregationType, +// pushRangeArguments, +// pushMRangeGroupByArguments, +// TimeSeriesReducers, +// pushFilterArgument, +// pushMRangeArguments, +// pushWithLabelsArgument, +// pushMRangeWithLabelsArguments, +// transformRangeReply, +// transformMRangeReply, +// transformMRangeWithLabelsReply, +// TimeSeriesDuplicatePolicies, +// pushLatestArgument, +// TimeSeriesBucketTimestamp +// } from './helpers'; + +// describe('transformTimestampArgument', () => { +// it('number', () => { +// assert.equal( +// transformTimestampArgument(0), +// '0' +// ); +// }); + +// it('Date', () => { +// assert.equal( +// transformTimestampArgument(new Date(0)), +// '0' +// ); +// }); + +// it('string', () => { +// assert.equal( +// transformTimestampArgument('*'), +// '*' +// ); +// }); +// }); + +// function testOptionalArgument(fn: (args: RedisCommandArguments) => unknown): void { +// it('undefined', () => { +// assert.deepEqual( +// fn([]), +// [] +// ); +// }); +// } + +// describe('pushRetentionArgument', () => { +// testOptionalArgument(pushRetentionArgument); + +// it('number', () => { +// assert.deepEqual( +// pushRetentionArgument([], 1), +// ['RETENTION', '1'] +// ); +// }); +// }); + +// describe('pushEncodingArgument', () => { +// testOptionalArgument(pushEncodingArgument); + +// it('UNCOMPRESSED', () => { +// assert.deepEqual( +// pushEncodingArgument([], TimeSeriesEncoding.UNCOMPRESSED), +// ['ENCODING', 'UNCOMPRESSED'] +// ); +// }); +// }); + +// describe('pushChunkSizeArgument', () => { +// testOptionalArgument(pushChunkSizeArgument); + +// it('number', () => { +// assert.deepEqual( +// pushChunkSizeArgument([], 1), +// ['CHUNK_SIZE', '1'] +// ); +// }); +// }); + +// describe('pushDuplicatePolicy', () => { +// testOptionalArgument(pushDuplicatePolicy); + +// it('BLOCK', () => { +// assert.deepEqual( +// pushDuplicatePolicy([], 
TimeSeriesDuplicatePolicies.BLOCK), +// ['DUPLICATE_POLICY', 'BLOCK'] +// ); +// }); +// }); + +// describe('pushLabelsArgument', () => { +// testOptionalArgument(pushLabelsArgument); + +// it("{ label: 'value' }", () => { +// assert.deepEqual( +// pushLabelsArgument([], { label: 'value' }), +// ['LABELS', 'label', 'value'] +// ); +// }); +// }); + +// describe('transformIncrDecrArguments', () => { +// it('without options', () => { +// assert.deepEqual( +// transformIncrDecrArguments('TS.INCRBY', 'key', 1), +// ['TS.INCRBY', 'key', '1'] +// ); +// }); + +// it('with TIMESTAMP', () => { +// assert.deepEqual( +// transformIncrDecrArguments('TS.INCRBY', 'key', 1, { +// TIMESTAMP: '*' +// }), +// ['TS.INCRBY', 'key', '1', 'TIMESTAMP', '*'] +// ); +// }); + +// it('with UNCOMPRESSED', () => { +// assert.deepEqual( +// transformIncrDecrArguments('TS.INCRBY', 'key', 1, { +// UNCOMPRESSED: true +// }), +// ['TS.INCRBY', 'key', '1', 'UNCOMPRESSED'] +// ); +// }); + +// it('with UNCOMPRESSED false', () => { +// assert.deepEqual( +// transformIncrDecrArguments('TS.INCRBY', 'key', 1, { +// UNCOMPRESSED: false +// }), +// ['TS.INCRBY', 'key', '1'] +// ); +// }); +// }); + +// it('transformSampleReply', () => { +// assert.deepEqual( +// transformSampleReply([1, '1.1']), +// { +// timestamp: 1, +// value: 1.1 +// } +// ); +// }); + +// describe('pushRangeArguments', () => { +// it('without options', () => { +// assert.deepEqual( +// pushRangeArguments([], '-', '+'), +// ['-', '+'] +// ); +// }); + +// describe('with FILTER_BY_TS', () => { +// it('string', () => { +// assert.deepEqual( +// pushRangeArguments([], '-', '+', { +// FILTER_BY_TS: ['ts'] +// }), +// ['-', '+', 'FILTER_BY_TS', 'ts'] +// ); +// }); + +// it('Array', () => { +// assert.deepEqual( +// pushRangeArguments([], '-', '+', { +// FILTER_BY_TS: ['1', '2'] +// }), +// ['-', '+', 'FILTER_BY_TS', '1', '2'] +// ); +// }); +// }); + +// it('with FILTER_BY_VALUE', () => { +// assert.deepEqual( +// pushRangeArguments([], 
'-', '+', { +// FILTER_BY_VALUE: { +// min: 1, +// max: 2 +// } +// }), +// ['-', '+', 'FILTER_BY_VALUE', '1', '2'] +// ); +// }); + +// it('with COUNT', () => { +// assert.deepEqual( +// pushRangeArguments([], '-', '+', { +// COUNT: 1 +// }), +// ['-', '+', 'COUNT', '1'] +// ); +// }); + +// it('with ALIGN', () => { +// assert.deepEqual( +// pushRangeArguments([], '-', '+', { +// ALIGN: 1 +// }), +// ['-', '+', 'ALIGN', '1'] +// ); +// }); + +// describe('with AGGREGATION', () => { +// it('without options', () => { +// assert.deepEqual( +// pushRangeArguments([], '-', '+', { +// AGGREGATION: { +// type: TimeSeriesAggregationType.FIRST, +// timeBucket: 1 +// } +// }), +// ['-', '+', 'AGGREGATION', 'FIRST', '1'] +// ); +// }); + +// it('with BUCKETTIMESTAMP', () => { +// assert.deepEqual( +// pushRangeArguments([], '-', '+', { +// AGGREGATION: { +// type: TimeSeriesAggregationType.FIRST, +// timeBucket: 1, +// BUCKETTIMESTAMP: TimeSeriesBucketTimestamp.LOW +// } +// }), +// ['-', '+', 'AGGREGATION', 'FIRST', '1', 'BUCKETTIMESTAMP', '-'] +// ); +// }); + +// it('with BUCKETTIMESTAMP', () => { +// assert.deepEqual( +// pushRangeArguments([], '-', '+', { +// AGGREGATION: { +// type: TimeSeriesAggregationType.FIRST, +// timeBucket: 1, +// EMPTY: true +// } +// }), +// ['-', '+', 'AGGREGATION', 'FIRST', '1', 'EMPTY'] +// ); +// }); +// }); + +// it('with FILTER_BY_TS, FILTER_BY_VALUE, COUNT, ALIGN, AGGREGATION', () => { +// assert.deepEqual( +// pushRangeArguments([], '-', '+', { +// FILTER_BY_TS: ['ts'], +// FILTER_BY_VALUE: { +// min: 1, +// max: 2 +// }, +// COUNT: 1, +// ALIGN: 1, +// AGGREGATION: { +// type: TimeSeriesAggregationType.FIRST, +// timeBucket: 1, +// BUCKETTIMESTAMP: TimeSeriesBucketTimestamp.LOW, +// EMPTY: true +// } +// }), +// ['-', '+', 'FILTER_BY_TS', 'ts', 'FILTER_BY_VALUE', '1', '2', +// 'COUNT', '1', 'ALIGN', '1', 'AGGREGATION', 'FIRST', '1', 'BUCKETTIMESTAMP', '-', 'EMPTY'] +// ); +// }); +// }); + +// describe('pushMRangeGroupByArguments', () 
=> { +// it('undefined', () => { +// assert.deepEqual( +// pushMRangeGroupByArguments([]), +// [] +// ); +// }); + +// it('with GROUPBY', () => { +// assert.deepEqual( +// pushMRangeGroupByArguments([], { +// label: 'label', +// reducer: TimeSeriesReducers.MAXIMUM +// }), +// ['GROUPBY', 'label', 'REDUCE', 'MAX'] +// ); +// }); +// }); + +// describe('pushFilterArgument', () => { +// it('string', () => { +// assert.deepEqual( +// pushFilterArgument([], 'label=value'), +// ['FILTER', 'label=value'] +// ); +// }); + +// it('Array', () => { +// assert.deepEqual( +// pushFilterArgument([], ['1=1', '2=2']), +// ['FILTER', '1=1', '2=2'] +// ); +// }); +// }); + +// describe('pushMRangeArguments', () => { +// it('without options', () => { +// assert.deepEqual( +// pushMRangeArguments([], '-', '+', 'label=value'), +// ['-', '+', 'FILTER', 'label=value'] +// ); +// }); + +// it('with GROUPBY', () => { +// assert.deepEqual( +// pushMRangeArguments([], '-', '+', 'label=value', { +// GROUPBY: { +// label: 'label', +// reducer: TimeSeriesReducers.MAXIMUM +// } +// }), +// ['-', '+', 'FILTER', 'label=value', 'GROUPBY', 'label', 'REDUCE', 'MAX'] +// ); +// }); +// }); + +// it('pushMRangeWithLabelsArguments', () => { +// assert.deepEqual( +// pushMRangeWithLabelsArguments([], '-', '+', 'label=value'), +// ['-', '+', 'WITHLABELS', 'FILTER', 'label=value'] +// ); +// }); + +// it('transformRangeReply', () => { +// assert.deepEqual( +// transformRangeReply([[1, '1.1'], [2, '2.2']]), +// [{ +// timestamp: 1, +// value: 1.1 +// }, { +// timestamp: 2, +// value: 2.2 +// }] +// ); +// }); + +// describe('transformMRangeReply', () => { +// assert.deepEqual( +// transformMRangeReply([[ +// 'key', +// [], +// [[1, '1.1'], [2, '2.2']] +// ]]), +// [{ +// key: 'key', +// samples: [{ +// timestamp: 1, +// value: 1.1 +// }, { +// timestamp: 2, +// value: 2.2 +// }] +// }] +// ); +// }); + +// describe('transformMRangeWithLabelsReply', () => { +// assert.deepEqual( +// 
transformMRangeWithLabelsReply([[ +// 'key', +// [['label', 'value']], +// [[1, '1.1'], [2, '2.2']] +// ]]), +// [{ +// key: 'key', +// labels: { +// label: 'value' +// }, +// samples: [{ +// timestamp: 1, +// value: 1.1 +// }, { +// timestamp: 2, +// value: 2.2 +// }] +// }] +// ); +// }); + +// describe('pushLatestArgument', () => { +// it('undefined', () => { +// assert.deepEqual( +// pushLatestArgument([]), +// [] +// ); +// }); + +// it('false', () => { +// assert.deepEqual( +// pushLatestArgument([], false), +// [] +// ); +// }); + +// it('true', () => { +// assert.deepEqual( +// pushLatestArgument([], true), +// ['LATEST'] +// ); +// }); +// }) diff --git a/packages/time-series/lib/commands/index.ts b/packages/time-series/lib/commands/index.ts new file mode 100644 index 00000000000..43bde4767bf --- /dev/null +++ b/packages/time-series/lib/commands/index.ts @@ -0,0 +1,97 @@ +import ADD from './ADD'; +import ALTER from './ALTER'; +import CREATE from './CREATE'; +import CREATERULE from './CREATERULE'; +import DECRBY from './DECRBY'; +import DEL from './DEL'; +import DELETERULE from './DELETERULE'; +import GET from './GET'; +import INCRBY from './INCRBY'; +import INFO_DEBUG from './INFO_DEBUG'; +import INFO from './INFO'; +import MADD from './MADD'; +import MGET_SELECTED_LABELS from './MGET_SELECTED_LABELS'; +import MGET_WITHLABELS from './MGET_WITHLABELS'; +import MGET from './MGET'; +import MRANGE_GROUPBY from './MRANGE_GROUPBY'; +import MRANGE_SELECTED_LABELS_GROUPBY from './MRANGE_SELECTED_LABELS_GROUPBY'; +import MRANGE_SELECTED_LABELS from './MRANGE_SELECTED_LABELS'; +import MRANGE_WITHLABELS_GROUPBY from './MRANGE_WITHLABELS_GROUPBY'; +import MRANGE_WITHLABELS from './MRANGE_WITHLABELS'; +import MRANGE from './MRANGE'; +import MREVRANGE_GROUPBY from './MREVRANGE_GROUPBY'; +import MREVRANGE_SELECTED_LABELS_GROUPBY from './MREVRANGE_SELECTED_LABELS_GROUPBY'; +import MREVRANGE_SELECTED_LABELS from './MREVRANGE_SELECTED_LABELS'; +import 
MREVRANGE_WITHLABELS_GROUPBY from './MREVRANGE_WITHLABELS_GROUPBY'; +import MREVRANGE_WITHLABELS from './MREVRANGE_WITHLABELS'; +import MREVRANGE from './MREVRANGE'; +import QUERYINDEX from './QUERYINDEX'; +import RANGE from './RANGE'; +import REVRANGE from './REVRANGE'; +import { RedisCommands } from '@redis/client/dist/lib/RESP/types'; + +export * from './helpers'; + +export default { + ADD, + add: ADD, + ALTER, + alter: ALTER, + CREATE, + create: CREATE, + CREATERULE, + createRule: CREATERULE, + DECRBY, + decrBy: DECRBY, + DEL, + del: DEL, + DELETERULE, + deleteRule: DELETERULE, + GET, + get: GET, + INCRBY, + incrBy: INCRBY, + INFO_DEBUG, + infoDebug: INFO_DEBUG, + INFO, + info: INFO, + MADD, + mAdd: MADD, + MGET_SELECTED_LABELS, + mGetSelectedLabels: MGET_SELECTED_LABELS, + MGET_WITHLABELS, + mGetWithLabels: MGET_WITHLABELS, + MGET, + mGet: MGET, + MRANGE_GROUPBY, + mRangeGroupBy: MRANGE_GROUPBY, + MRANGE_SELECTED_LABELS_GROUPBY, + mRangeSelectedLabelsGroupBy: MRANGE_SELECTED_LABELS_GROUPBY, + MRANGE_SELECTED_LABELS, + mRangeSelectedLabels: MRANGE_SELECTED_LABELS, + MRANGE_WITHLABELS_GROUPBY, + mRangeWithLabelsGroupBy: MRANGE_WITHLABELS_GROUPBY, + MRANGE_WITHLABELS, + mRangeWithLabels: MRANGE_WITHLABELS, + MRANGE, + mRange: MRANGE, + MREVRANGE_GROUPBY, + mRevRangeGroupBy: MREVRANGE_GROUPBY, + MREVRANGE_SELECTED_LABELS_GROUPBY, + mRevRangeSelectedLabelsGroupBy: MREVRANGE_SELECTED_LABELS_GROUPBY, + MREVRANGE_SELECTED_LABELS, + mRevRangeSelectedLabels: MREVRANGE_SELECTED_LABELS, + MREVRANGE_WITHLABELS_GROUPBY, + mRevRangeWithLabelsGroupBy: MREVRANGE_WITHLABELS_GROUPBY, + MREVRANGE_WITHLABELS, + mRevRangeWithLabels: MREVRANGE_WITHLABELS, + MREVRANGE, + mRevRange: MREVRANGE, + QUERYINDEX, + queryIndex: QUERYINDEX, + RANGE, + range: RANGE, + REVRANGE, + revRange: REVRANGE +} as const satisfies RedisCommands; + diff --git a/packages/time-series/lib/index.ts b/packages/time-series/lib/index.ts new file mode 100644 index 00000000000..52422bf1b5a --- /dev/null +++ 
b/packages/time-series/lib/index.ts @@ -0,0 +1,8 @@ +export { + default, + TIME_SERIES_ENCODING, TimeSeriesEncoding, + TIME_SERIES_DUPLICATE_POLICIES, TimeSeriesDuplicatePolicies +} from './commands'; +export { TIME_SERIES_AGGREGATION_TYPE, TimeSeriesAggregationType } from './commands/CREATERULE'; +export { TIME_SERIES_BUCKET_TIMESTAMP, TimeSeriesBucketTimestamp } from './commands/RANGE'; +export { TIME_SERIES_REDUCERS, TimeSeriesReducer } from './commands/MRANGE_GROUPBY'; diff --git a/packages/time-series/lib/test-utils.ts b/packages/time-series/lib/test-utils.ts new file mode 100644 index 00000000000..0275da9cf2c --- /dev/null +++ b/packages/time-series/lib/test-utils.ts @@ -0,0 +1,21 @@ +import TestUtils from '@redis/test-utils'; +import TimeSeries from '.'; + +export default TestUtils.createFromConfig({ + dockerImageName: 'redislabs/client-libs-test', + dockerImageVersionArgument: 'redis-version', + defaultDockerVersion: '8.4-M01-pre' +}); + +export const GLOBAL = { + SERVERS: { + OPEN: { + serverArguments: [], + clientOptions: { + modules: { + ts: TimeSeries + } + } + } + } +}; diff --git a/packages/time-series/package.json b/packages/time-series/package.json new file mode 100644 index 00000000000..bd31e6845cb --- /dev/null +++ b/packages/time-series/package.json @@ -0,0 +1,36 @@ +{ + "name": "@redis/time-series", + "version": "5.9.0-beta.2", + "license": "MIT", + "main": "./dist/lib/index.js", + "types": "./dist/lib/index.d.ts", + "files": [ + "dist/", + "!dist/tsconfig.tsbuildinfo" + ], + "scripts": { + "test": "nyc -r text-summary -r lcov mocha -r tsx './lib/**/*.spec.ts'", + "release": "release-it" + }, + "peerDependencies": { + "@redis/client": "^5.9.0-beta.2" + }, + "devDependencies": { + "@redis/test-utils": "*" + }, + "engines": { + "node": ">= 18" + }, + "repository": { + "type": "git", + "url": "git://github.com/redis/node-redis.git" + }, + "bugs": { + "url": "https://github.com/redis/node-redis/issues" + }, + "homepage": 
"https://github.com/redis/node-redis/tree/master/packages/time-series", + "keywords": [ + "redis", + "RedisTimeSeries" + ] +} diff --git a/packages/time-series/tsconfig.json b/packages/time-series/tsconfig.json new file mode 100644 index 00000000000..0c5809dcbe2 --- /dev/null +++ b/packages/time-series/tsconfig.json @@ -0,0 +1,20 @@ +{ + "extends": "../../tsconfig.base.json", + "compilerOptions": { + "outDir": "./dist" + }, + "include": [ + "./lib/**/*.ts" + ], + "exclude": [ + "./lib/test-utils.ts", + "./lib/**/*.spec.ts" + ], + "typedocOptions": { + "entryPoints": [ + "./lib" + ], + "entryPointStrategy": "expand", + "out": "../../documentation/time-series" + } +} diff --git a/test.js b/test.js deleted file mode 100644 index cc7ba888786..00000000000 --- a/test.js +++ /dev/null @@ -1,1416 +0,0 @@ -/*global require console setTimeout process Buffer */ -var redis = require("./index"), - client = redis.createClient(), - client2 = redis.createClient(), - client3 = redis.createClient(), - assert = require("assert"), - util = require("./lib/util"), - test_db_num = 15, // this DB will be flushed and used for testing - tests = {}, - connected = false, - ended = false, - next, cur_start, run_next_test, all_tests, all_start, test_count; - -// Set this to truthy to see the wire protocol and other debugging info -redis.debug_mode = process.argv[2]; - -function buffers_to_strings(arr) { - return arr.map(function (val) { - return val.toString(); - }); -} - -function require_number(expected, label) { - return function (err, results) { - assert.strictEqual(null, err, "result sent back unexpected error: " + err); - assert.strictEqual(expected, results, label + " " + expected + " !== " + results); - assert.strictEqual(typeof results, "number", label); - return true; - }; -} - -function require_number_any(label) { - return function (err, results) { - assert.strictEqual(null, err, "result sent back unexpected error: " + err); - assert.strictEqual(typeof results, 
"number", label + " " + results + " is not a number"); - return true; - }; -} - -function require_number_pos(label) { - return function (err, results) { - assert.strictEqual(null, err, "result sent back unexpected error: " + err); - assert.strictEqual(true, (results > 0), label + " " + results + " is not a positive number"); - return true; - }; -} - -function require_string(str, label) { - return function (err, results) { - assert.strictEqual(null, err, "result sent back unexpected error: " + err); - assert.equal(str, results, label + " " + str + " does not match " + results); - return true; - }; -} - -function require_null(label) { - return function (err, results) { - assert.strictEqual(null, err, "result sent back unexpected error: " + err); - assert.strictEqual(null, results, label + ": " + results + " is not null"); - return true; - }; -} - -function require_error(label) { - return function (err, results) { - assert.notEqual(err, null, label + " err is null, but an error is expected here."); - return true; - }; -} - -function is_empty_array(obj) { - return Array.isArray(obj) && obj.length === 0; -} - -function last(name, fn) { - return function (err, results) { - fn(err, results); - next(name); - }; -} - -next = function next(name) { - console.log(" \x1b[33m" + (Date.now() - cur_start) + "\x1b[0m ms"); - run_next_test(); -}; - -// Tests are run in the order they are defined. So FLUSHDB should be stay first. 
- -tests.FLUSHDB = function () { - var name = "FLUSHDB"; - client.select(test_db_num, require_string("OK", name)); - client2.select(test_db_num, require_string("OK", name)); - client3.select(test_db_num, require_string("OK", name)); - client.mset("flush keys 1", "flush val 1", "flush keys 2", "flush val 2", require_string("OK", name)); - client.FLUSHDB(require_string("OK", name)); - client.dbsize(last(name, require_number(0, name))); -}; - -tests.MULTI_1 = function () { - var name = "MULTI_1", multi1, multi2; - - // Provoke an error at queue time - multi1 = client.multi(); - multi1.mset("multifoo", "10", "multibar", "20", require_string("OK", name)); - multi1.set("foo2", require_error(name)); - multi1.incr("multifoo", require_number(11, name)); - multi1.incr("multibar", require_number(21, name)); - multi1.exec(); - - // Confirm that the previous command, while containing an error, still worked. - multi2 = client.multi(); - multi2.incr("multibar", require_number(22, name)); - multi2.incr("multifoo", require_number(12, name)); - multi2.exec(function (err, replies) { - assert.strictEqual(22, replies[0]); - assert.strictEqual(12, replies[1]); - next(name); - }); -}; - -tests.MULTI_2 = function () { - var name = "MULTI_2"; - - // test nested multi-bulk replies - client.multi([ - ["mget", "multifoo", "multibar", function (err, res) { - assert.strictEqual(2, res.length, name); - assert.strictEqual("12", res[0].toString(), name); - assert.strictEqual("22", res[1].toString(), name); - }], - ["set", "foo2", require_error(name)], - ["incr", "multifoo", require_number(13, name)], - ["incr", "multibar", require_number(23, name)] - ]).exec(function (err, replies) { - assert.strictEqual(2, replies[0].length, name); - assert.strictEqual("12", replies[0][0].toString(), name); - assert.strictEqual("22", replies[0][1].toString(), name); - - assert.strictEqual("13", replies[1].toString()); - assert.strictEqual("23", replies[2].toString()); - next(name); - }); -}; - -tests.MULTI_3 = 
function () { - var name = "MULTI_3"; - - client.sadd("some set", "mem 1"); - client.sadd("some set", "mem 2"); - client.sadd("some set", "mem 3"); - client.sadd("some set", "mem 4"); - - // make sure empty mb reply works - client.del("some missing set"); - client.smembers("some missing set", function (err, reply) { - // make sure empty mb reply works - assert.strictEqual(true, is_empty_array(reply), name); - }); - - // test nested multi-bulk replies with empty mb elements. - client.multi([ - ["smembers", "some set"], - ["del", "some set"], - ["smembers", "some set"] - ]) - .scard("some set") - .exec(function (err, replies) { - assert.strictEqual(true, is_empty_array(replies[2]), name); - next(name); - }); -}; - -tests.MULTI_4 = function () { - var name = "MULTI_4"; - - client.multi() - .mset('some', '10', 'keys', '20') - .incr('some') - .incr('keys') - .mget('some', 'keys') - .exec(function (err, replies) { - assert.strictEqual(null, err); - assert.equal('OK', replies[0]); - assert.equal(11, replies[1]); - assert.equal(21, replies[2]); - assert.equal(11, replies[3][0].toString()); - assert.equal(21, replies[3][1].toString()); - next(name); - }); -}; - -tests.MULTI_5 = function () { - var name = "MULTI_5"; - - // test nested multi-bulk replies with nulls. 
- client.multi([ - ["mget", ["multifoo", "some", "random value", "keys"]], - ["incr", "multifoo"] - ]) - .exec(function (err, replies) { - assert.strictEqual(replies.length, 2, name); - assert.strictEqual(replies[0].length, 4, name); - next(name); - }); -}; - -tests.MULTI_6 = function () { - var name = "MULTI_6"; - - client.multi() - .hmset("multihash", "a", "foo", "b", 1) - .hmset("multihash", { - extra: "fancy", - things: "here" - }) - .hgetall("multihash") - .exec(function (err, replies) { - assert.strictEqual(null, err); - assert.equal("OK", replies[0]); - assert.equal(Object.keys(replies[2]).length, 4); - assert.equal("foo", replies[2].a); - assert.equal("1", replies[2].b); - assert.equal("fancy", replies[2].extra); - assert.equal("here", replies[2].things); - next(name); - }); -}; - -tests.EVAL_1 = function () { - var name = "EVAL_1"; - - if (client.server_info.versions[0] >= 2 && client.server_info.versions[1] >= 9) { - // test {EVAL - Lua integer -> Redis protocol type conversion} - client.eval("return 100.5", 0, require_number(100, name)); - // test {EVAL - Lua string -> Redis protocol type conversion} - client.eval("return 'hello world'", 0, require_string("hello world", name)); - // test {EVAL - Lua true boolean -> Redis protocol type conversion} - client.eval("return true", 0, require_number(1, name)); - // test {EVAL - Lua false boolean -> Redis protocol type conversion} - client.eval("return false", 0, require_null(name)); - // test {EVAL - Lua status code reply -> Redis protocol type conversion} - client.eval("return {ok='fine'}", 0, require_string("fine", name)); - // test {EVAL - Lua error reply -> Redis protocol type conversion} - client.eval("return {err='this is an error'}", 0, require_error(name)); - // test {EVAL - Lua table -> Redis protocol type conversion} - client.eval("return {1,2,3,'ciao',{1,2}}", 0, function (err, res) { - assert.strictEqual(5, res.length, name); - assert.strictEqual(1, res[0], name); - assert.strictEqual(2, res[1], 
name); - assert.strictEqual(3, res[2], name); - assert.strictEqual("ciao", res[3], name); - assert.strictEqual(2, res[4].length, name); - assert.strictEqual(1, res[4][0], name); - assert.strictEqual(2, res[4][1], name); - }); - // test {EVAL - Are the KEYS and ARGS arrays populated correctly?} - client.eval("return {KEYS[1],KEYS[2],ARGV[1],ARGV[2]}", 2, "a", "b", "c", "d", function (err, res) { - assert.strictEqual(4, res.length, name); - assert.strictEqual("a", res[0], name); - assert.strictEqual("b", res[1], name); - assert.strictEqual("c", res[2], name); - assert.strictEqual("d", res[3], name); - }); - // test {EVAL - is Lua able to call Redis API?} - client.set("mykey", "myval"); - client.eval("return redis.call('get','mykey')", 0, require_string("myval", name)); - // test {EVALSHA - Can we call a SHA1 if already defined?} - client.evalsha("9bd632c7d33e571e9f24556ebed26c3479a87129", 0, require_string("myval", name)); - // test {EVALSHA - Do we get an error on non defined SHA1?} - client.evalsha("ffffffffffffffffffffffffffffffffffffffff", 0, require_error(name)); - // test {EVAL - Redis integer -> Lua type conversion} - client.set("x", 0); - client.eval("local foo = redis.call('incr','x')\n" + "return {type(foo),foo}", 0, function (err, res) { - assert.strictEqual(2, res.length, name); - assert.strictEqual("number", res[0], name); - assert.strictEqual(1, res[1], name); - }); - // test {EVAL - Redis bulk -> Lua type conversion} - client.eval("local foo = redis.call('get','mykey'); return {type(foo),foo}", 0, function (err, res) { - assert.strictEqual(2, res.length, name); - assert.strictEqual("string", res[0], name); - assert.strictEqual("myval", res[1], name); - }); - // test {EVAL - Redis multi bulk -> Lua type conversion} - client.del("mylist"); - client.rpush("mylist", "a"); - client.rpush("mylist", "b"); - client.rpush("mylist", "c"); - client.eval("local foo = redis.call('lrange','mylist',0,-1)\n" + "return {type(foo),foo[1],foo[2],foo[3],# foo}", 0, 
function (err, res) { - assert.strictEqual(5, res.length, name); - assert.strictEqual("table", res[0], name); - assert.strictEqual("a", res[1], name); - assert.strictEqual("b", res[2], name); - assert.strictEqual("c", res[3], name); - assert.strictEqual(3, res[4], name); - }); - // test {EVAL - Redis status reply -> Lua type conversion} - client.eval("local foo = redis.call('set','mykey','myval'); return {type(foo),foo['ok']}", 0, function (err, res) { - assert.strictEqual(2, res.length, name); - assert.strictEqual("table", res[0], name); - assert.strictEqual("OK", res[1], name); - }); - // test {EVAL - Redis error reply -> Lua type conversion} - client.set("mykey", "myval"); - client.eval("local foo = redis.call('incr','mykey'); return {type(foo),foo['err']}", 0, function (err, res) { - assert.strictEqual(2, res.length, name); - assert.strictEqual("table", res[0], name); - assert.strictEqual("ERR value is not an integer or out of range", res[1], name); - }); - // test {EVAL - Redis nil bulk reply -> Lua type conversion} - client.del("mykey"); - client.eval("local foo = redis.call('get','mykey'); return {type(foo),foo == false}", 0, function (err, res) { - assert.strictEqual(2, res.length, name); - assert.strictEqual("boolean", res[0], name); - assert.strictEqual(1, res[1], name); - }); - // test {EVAL - Script can't run more than configured time limit} { - client.config("set", "lua-time-limit", 1); - client.eval("local i = 0; while true do i=i+1 end", 0, last("name", require_error(name))); - } else { - console.log("Skipping " + name + " because server version isn't new enough."); - next(name); - } -}; - -tests.WATCH_MULTI = function () { - var name = 'WATCH_MULTI', multi; - - if (client.server_info.versions[0] >= 2 && client.server_info.versions[1] >= 1) { - client.watch(name); - client.incr(name); - multi = client.multi(); - multi.incr(name); - multi.exec(last(name, require_null(name))); - } else { - console.log("Skipping " + name + " because server version isn't 
new enough."); - next(name); - } -}; - -tests.detect_buffers = function () { - var name = "detect_buffers", detect_client = redis.createClient(null, null, {detect_buffers: true}); - - detect_client.on("ready", function () { - // single Buffer or String - detect_client.set("string key 1", "string value"); - detect_client.get("string key 1", require_string("string value", name)); - detect_client.get(new Buffer("string key 1"), function (err, reply) { - assert.strictEqual(null, err, name); - assert.strictEqual(true, Buffer.isBuffer(reply), name); - assert.strictEqual("", reply.inspect(), name); - }); - - detect_client.hmset("hash key 2", "key 1", "val 1", "key 2", "val 2"); - // array of Buffers or Strings - detect_client.hmget("hash key 2", "key 1", "key 2", function (err, reply) { - assert.strictEqual(null, err, name); - assert.strictEqual(true, Array.isArray(reply), name); - assert.strictEqual(2, reply.length, name); - assert.strictEqual("val 1", reply[0], name); - assert.strictEqual("val 2", reply[1], name); - }); - detect_client.hmget(new Buffer("hash key 2"), "key 1", "key 2", function (err, reply) { - assert.strictEqual(null, err, name); - assert.strictEqual(true, Array.isArray(reply)); - assert.strictEqual(2, reply.length, name); - assert.strictEqual(true, Buffer.isBuffer(reply[0])); - assert.strictEqual(true, Buffer.isBuffer(reply[1])); - assert.strictEqual("", reply[0].inspect(), name); - assert.strictEqual("", reply[1].inspect(), name); - }); - - // Object of Buffers or Strings - detect_client.hgetall("hash key 2", function (err, reply) { - assert.strictEqual(null, err, name); - assert.strictEqual("object", typeof reply, name); - assert.strictEqual(2, Object.keys(reply).length, name); - assert.strictEqual("val 1", reply["key 1"], name); - assert.strictEqual("val 2", reply["key 2"], name); - }); - detect_client.hgetall(new Buffer("hash key 2"), function (err, reply) { - assert.strictEqual(null, err, name); - assert.strictEqual("object", typeof reply, name); 
- assert.strictEqual(2, Object.keys(reply).length, name); - assert.strictEqual(true, Buffer.isBuffer(reply["key 1"])); - assert.strictEqual(true, Buffer.isBuffer(reply["key 2"])); - assert.strictEqual("", reply["key 1"].inspect(), name); - assert.strictEqual("", reply["key 2"].inspect(), name); - }); - - detect_client.quit(function (err, res) { - next(name); - }); - }); -}; - -tests.socket_nodelay = function () { - var name = "socket_nodelay", c1, c2, c3, ready_count = 0, quit_count = 0; - - c1 = redis.createClient(null, null, {socket_nodelay: true}); - c2 = redis.createClient(null, null, {socket_nodelay: false}); - c3 = redis.createClient(null, null); - - function quit_check() { - quit_count++; - - if (quit_count === 3) { - next(name); - } - } - - function run() { - assert.strictEqual(true, c1.options.socket_nodelay, name); - assert.strictEqual(false, c2.options.socket_nodelay, name); - assert.strictEqual(true, c3.options.socket_nodelay, name); - - c1.set(["set key 1", "set val"], require_string("OK", name)); - c1.set(["set key 2", "set val"], require_string("OK", name)); - c1.get(["set key 1"], require_string("set val", name)); - c1.get(["set key 2"], require_string("set val", name)); - - c2.set(["set key 3", "set val"], require_string("OK", name)); - c2.set(["set key 4", "set val"], require_string("OK", name)); - c2.get(["set key 3"], require_string("set val", name)); - c2.get(["set key 4"], require_string("set val", name)); - - c3.set(["set key 5", "set val"], require_string("OK", name)); - c3.set(["set key 6", "set val"], require_string("OK", name)); - c3.get(["set key 5"], require_string("set val", name)); - c3.get(["set key 6"], require_string("set val", name)); - - c1.quit(quit_check); - c2.quit(quit_check); - c3.quit(quit_check); - } - - function ready_check() { - ready_count++; - if (ready_count === 3) { - run(); - } - } - - c1.on("ready", ready_check); - c2.on("ready", ready_check); - c3.on("ready", ready_check); -}; - -tests.reconnect = function () { - 
var name = "reconnect"; - - client.set("recon 1", "one"); - client.set("recon 2", "two", function (err, res) { - // Do not do this in normal programs. This is to simulate the server closing on us. - // For orderly shutdown in normal programs, do client.quit() - client.stream.destroy(); - }); - - client.on("reconnecting", function on_recon(params) { - client.on("connect", function on_connect() { - client.select(test_db_num, require_string("OK", name)); - client.get("recon 1", require_string("one", name)); - client.get("recon 1", require_string("one", name)); - client.get("recon 2", require_string("two", name)); - client.get("recon 2", require_string("two", name)); - client.removeListener("connect", on_connect); - client.removeListener("reconnecting", on_recon); - next(name); - }); - }); -}; - -tests.HSET = function () { - var key = "test hash", - field1 = new Buffer("0123456789"), - value1 = new Buffer("abcdefghij"), - field2 = new Buffer(0), - value2 = new Buffer(0), - name = "HSET"; - - client.HSET(key, field1, value1, require_number(1, name)); - client.HGET(key, field1, require_string(value1.toString(), name)); - - // Empty value - client.HSET(key, field1, value2, require_number(0, name)); - client.HGET([key, field1], require_string("", name)); - - // Empty key, empty value - client.HSET([key, field2, value1], require_number(1, name)); - client.HSET(key, field2, value2, last(name, require_number(0, name))); -}; - -tests.HMSET_BUFFER_AND_ARRAY = function () { - // Saving a buffer and an array to the same key should not error - var key = "test hash", - field1 = "buffer", - value1 = new Buffer("abcdefghij"), - field2 = "array", - value2 = ["array contents"], - name = "HSET"; - - client.HMSET(key, field1, value1, field2, value2, last(name, require_string("OK", name))); -}; - -// TODO - add test for HMSET with optional callbacks - -tests.HMGET = function () { - var key1 = "test hash 1", key2 = "test hash 2", name = "HMGET"; - - // redis-like hmset syntax - 
client.HMSET(key1, "0123456789", "abcdefghij", "some manner of key", "a type of value", require_string("OK", name)); - - // fancy hmset syntax - client.HMSET(key2, { - "0123456789": "abcdefghij", - "some manner of key": "a type of value" - }, require_string("OK", name)); - - client.HMGET(key1, "0123456789", "some manner of key", function (err, reply) { - assert.strictEqual("abcdefghij", reply[0].toString(), name); - assert.strictEqual("a type of value", reply[1].toString(), name); - }); - - client.HMGET(key2, "0123456789", "some manner of key", function (err, reply) { - assert.strictEqual("abcdefghij", reply[0].toString(), name); - assert.strictEqual("a type of value", reply[1].toString(), name); - }); - - client.HMGET(key1, ["0123456789"], function (err, reply) { - assert.strictEqual("abcdefghij", reply[0], name); - }); - - client.HMGET(key1, ["0123456789", "some manner of key"], function (err, reply) { - assert.strictEqual("abcdefghij", reply[0], name); - assert.strictEqual("a type of value", reply[1], name); - }); - - client.HMGET(key1, "missing thing", "another missing thing", function (err, reply) { - assert.strictEqual(null, reply[0], name); - assert.strictEqual(null, reply[1], name); - next(name); - }); -}; - -tests.HINCRBY = function () { - var name = "HINCRBY"; - client.hset("hash incr", "value", 10, require_number(1, name)); - client.HINCRBY("hash incr", "value", 1, require_number(11, name)); - client.HINCRBY("hash incr", "value 2", 1, last(name, require_number(1, name))); -}; - -tests.SUBSCRIBE = function () { - var client1 = client, msg_count = 0, name = "SUBSCRIBE"; - - client1.on("subscribe", function (channel, count) { - if (channel === "chan1") { - client2.publish("chan1", "message 1", require_number(1, name)); - client2.publish("chan2", "message 2", require_number(1, name)); - client2.publish("chan1", "message 3", require_number(1, name)); - } - }); - - client1.on("unsubscribe", function (channel, count) { - if (count === 0) { - // make sure this 
connection can go into and out of pub/sub mode - client1.incr("did a thing", last(name, require_number(2, name))); - } - }); - - client1.on("message", function (channel, message) { - msg_count += 1; - assert.strictEqual("message " + msg_count, message.toString()); - if (msg_count === 3) { - client1.unsubscribe("chan1", "chan2"); - } - }); - - client1.set("did a thing", 1, require_string("OK", name)); - client1.subscribe("chan1", "chan2", function (err, results) { - assert.strictEqual(null, err, "result sent back unexpected error: " + err); - assert.strictEqual("chan1", results.toString(), name); - }); -}; - -tests.SUBSCRIBE_QUIT = function () { - var name = "SUBSCRIBE_QUIT"; - client3.on("end", function () { - next(name); - }); - client3.on("subscribe", function (channel, count) { - client3.quit(); - }); - client3.subscribe("chan3"); -}; - -tests.EXISTS = function () { - var name = "EXISTS"; - client.del("foo", "foo2", require_number_any(name)); - client.set("foo", "bar", require_string("OK", name)); - client.EXISTS("foo", require_number(1, name)); - client.EXISTS("foo2", last(name, require_number(0, name))); -}; - -tests.DEL = function () { - var name = "DEL"; - client.DEL("delkey", require_number_any(name)); - client.set("delkey", "delvalue", require_string("OK", name)); - client.DEL("delkey", require_number(1, name)); - client.exists("delkey", require_number(0, name)); - client.DEL("delkey", require_number(0, name)); - client.mset("delkey", "delvalue", "delkey2", "delvalue2", require_string("OK", name)); - client.DEL("delkey", "delkey2", last(name, require_number(2, name))); -}; - -tests.TYPE = function () { - var name = "TYPE"; - client.set(["string key", "should be a string"], require_string("OK", name)); - client.rpush(["list key", "should be a list"], require_number_pos(name)); - client.sadd(["set key", "should be a set"], require_number_any(name)); - client.zadd(["zset key", "10.0", "should be a zset"], require_number_any(name)); - client.hset(["hash key", 
"hashtest", "should be a hash"], require_number_any(0, name)); - - client.TYPE(["string key"], require_string("string", name)); - client.TYPE(["list key"], require_string("list", name)); - client.TYPE(["set key"], require_string("set", name)); - client.TYPE(["zset key"], require_string("zset", name)); - client.TYPE("not here yet", require_string("none", name)); - client.TYPE(["hash key"], last(name, require_string("hash", name))); -}; - -tests.KEYS = function () { - var name = "KEYS"; - client.mset(["test keys 1", "test val 1", "test keys 2", "test val 2"], require_string("OK", name)); - client.KEYS(["test keys*"], function (err, results) { - assert.strictEqual(null, err, "result sent back unexpected error: " + err); - assert.strictEqual(2, results.length, name); - assert.strictEqual("test keys 1", results[0].toString(), name); - assert.strictEqual("test keys 2", results[1].toString(), name); - next(name); - }); -}; - -tests.MULTIBULK_ZERO_LENGTH = function () { - var name = "MULTIBULK_ZERO_LENGTH"; - client.KEYS(['users:*'], function (err, results) { - assert.strictEqual(null, err, 'error on empty multibulk reply'); - assert.strictEqual(true, is_empty_array(results), "not an empty array"); - next(name); - }); -}; - -tests.RANDOMKEY = function () { - var name = "RANDOMKEY"; - client.mset(["test keys 1", "test val 1", "test keys 2", "test val 2"], require_string("OK", name)); - client.RANDOMKEY([], function (err, results) { - assert.strictEqual(null, err, name + " result sent back unexpected error: " + err); - assert.strictEqual(true, /\w+/.test(results), name); - next(name); - }); -}; - -tests.RENAME = function () { - var name = "RENAME"; - client.set(['foo', 'bar'], require_string("OK", name)); - client.RENAME(["foo", "new foo"], require_string("OK", name)); - client.exists(["foo"], require_number(0, name)); - client.exists(["new foo"], last(name, require_number(1, name))); -}; - -tests.RENAMENX = function () { - var name = "RENAMENX"; - client.set(['foo', 'bar'], 
require_string("OK", name)); - client.set(['foo2', 'bar2'], require_string("OK", name)); - client.RENAMENX(["foo", "foo2"], require_number(0, name)); - client.exists(["foo"], require_number(1, name)); - client.exists(["foo2"], require_number(1, name)); - client.del(["foo2"], require_number(1, name)); - client.RENAMENX(["foo", "foo2"], require_number(1, name)); - client.exists(["foo"], require_number(0, name)); - client.exists(["foo2"], last(name, require_number(1, name))); -}; - -tests.DBSIZE = function () { - var name = "DBSIZE"; - client.set(['foo', 'bar'], require_string("OK", name)); - client.DBSIZE([], last(name, require_number_pos("DBSIZE"))); -}; - -tests.GET = function () { - var name = "GET"; - client.set(["get key", "get val"], require_string("OK", name)); - client.GET(["get key"], last(name, require_string("get val", name))); -}; - -tests.SET = function () { - var name = "SET"; - client.SET(["set key", "set val"], require_string("OK", name)); - client.get(["set key"], last(name, require_string("set val", name))); -}; - -tests.GETSET = function () { - var name = "GETSET"; - client.set(["getset key", "getset val"], require_string("OK", name)); - client.GETSET(["getset key", "new getset val"], require_string("getset val", name)); - client.get(["getset key"], last(name, require_string("new getset val", name))); -}; - -tests.MGET = function () { - var name = "MGET"; - client.mset(["mget keys 1", "mget val 1", "mget keys 2", "mget val 2", "mget keys 3", "mget val 3"], require_string("OK", name)); - client.MGET("mget keys 1", "mget keys 2", "mget keys 3", function (err, results) { - assert.strictEqual(null, err, "result sent back unexpected error: " + err); - assert.strictEqual(3, results.length, name); - assert.strictEqual("mget val 1", results[0].toString(), name); - assert.strictEqual("mget val 2", results[1].toString(), name); - assert.strictEqual("mget val 3", results[2].toString(), name); - }); - client.MGET(["mget keys 1", "mget keys 2", "mget keys 3"], 
function (err, results) { - assert.strictEqual(null, err, "result sent back unexpected error: " + err); - assert.strictEqual(3, results.length, name); - assert.strictEqual("mget val 1", results[0].toString(), name); - assert.strictEqual("mget val 2", results[1].toString(), name); - assert.strictEqual("mget val 3", results[2].toString(), name); - }); - client.MGET(["mget keys 1", "some random shit", "mget keys 2", "mget keys 3"], function (err, results) { - assert.strictEqual(null, err, "result sent back unexpected error: " + err); - assert.strictEqual(4, results.length, name); - assert.strictEqual("mget val 1", results[0].toString(), name); - assert.strictEqual(null, results[1], name); - assert.strictEqual("mget val 2", results[2].toString(), name); - assert.strictEqual("mget val 3", results[3].toString(), name); - next(name); - }); -}; - -tests.SETNX = function () { - var name = "SETNX"; - client.set(["setnx key", "setnx value"], require_string("OK", name)); - client.SETNX(["setnx key", "new setnx value"], require_number(0, name)); - client.del(["setnx key"], require_number(1, name)); - client.exists(["setnx key"], require_number(0, name)); - client.SETNX(["setnx key", "new setnx value"], require_number(1, name)); - client.exists(["setnx key"], last(name, require_number(1, name))); -}; - -tests.SETEX = function () { - var name = "SETEX"; - client.SETEX(["setex key", "100", "setex val"], require_string("OK", name)); - client.exists(["setex key"], require_number(1, name)); - client.ttl(["setex key"], last(name, require_number_pos(name))); -}; - -tests.MSETNX = function () { - var name = "MSETNX"; - client.mset(["mset1", "val1", "mset2", "val2", "mset3", "val3"], require_string("OK", name)); - client.MSETNX(["mset3", "val3", "mset4", "val4"], require_number(0, name)); - client.del(["mset3"], require_number(1, name)); - client.MSETNX(["mset3", "val3", "mset4", "val4"], require_number(1, name)); - client.exists(["mset3"], require_number(1, name)); - 
client.exists(["mset4"], last(name, require_number(1, name))); -}; - -tests.HGETALL = function () { - var name = "HGETALL"; - client.hmset(["hosts", "mjr", "1", "another", "23", "home", "1234"], require_string("OK", name)); - client.HGETALL(["hosts"], function (err, obj) { - assert.strictEqual(null, err, name + " result sent back unexpected error: " + err); - assert.strictEqual(3, Object.keys(obj).length, name); - assert.strictEqual("1", obj.mjr.toString(), name); - assert.strictEqual("23", obj.another.toString(), name); - assert.strictEqual("1234", obj.home.toString(), name); - next(name); - }); -}; - -tests.HGETALL_NULL = function () { - var name = "HGETALL_NULL"; - - client.hgetall("missing", function (err, obj) { - assert.strictEqual(null, err); - assert.strictEqual(null, obj); - next(name); - }); -}; - -tests.UTF8 = function () { - var name = "UTF8", - utf8_sample = "ΰ² _ΰ² "; - - client.set(["utf8test", utf8_sample], require_string("OK", name)); - client.get(["utf8test"], function (err, obj) { - assert.strictEqual(null, err); - assert.strictEqual(utf8_sample, obj); - next(name); - }); -}; - -// Set tests were adapted from Brian Hammond's redis-node-client.js, which has a comprehensive test suite - -tests.SADD = function () { - var name = "SADD"; - - client.del('set0'); - client.sadd('set0', 'member0', require_number(1, name)); - client.sadd('set0', 'member0', last(name, require_number(0, name))); -}; - -tests.SADD2 = function () { - var name = "SADD2"; - - client.del("set0"); - client.sadd("set0", ["member0", "member1", "member2"], require_number(3, name)); - client.smembers("set0", function (err, res) { - assert.strictEqual(res.length, 3); - assert.strictEqual(res[0], "member0"); - assert.strictEqual(res[1], "member1"); - assert.strictEqual(res[2], "member2"); - next(name); - }); -}; - -tests.SISMEMBER = function () { - var name = "SISMEMBER"; - - client.del('set0'); - client.sadd('set0', 'member0', require_number(1, name)); - client.sismember('set0', 
'member0', require_number(1, name)); - client.sismember('set0', 'member1', last(name, require_number(0, name))); -}; - -tests.SCARD = function () { - var name = "SCARD"; - - client.del('set0'); - client.sadd('set0', 'member0', require_number(1, name)); - client.scard('set0', require_number(1, name)); - client.sadd('set0', 'member1', require_number(1, name)); - client.scard('set0', last(name, require_number(2, name))); -}; - -tests.SREM = function () { - var name = "SREM"; - - client.del('set0'); - client.sadd('set0', 'member0', require_number(1, name)); - client.srem('set0', 'foobar', require_number(0, name)); - client.srem('set0', 'member0', require_number(1, name)); - client.scard('set0', last(name, require_number(0, name))); -}; - -tests.SPOP = function () { - var name = "SPOP"; - - client.del('zzz'); - client.sadd('zzz', 'member0', require_number(1, name)); - client.scard('zzz', require_number(1, name)); - - client.spop('zzz', function (err, value) { - if (err) { - assert.fail(err); - } - assert.equal(value, 'member0', name); - }); - - client.scard('zzz', last(name, require_number(0, name))); -}; - -tests.SDIFF = function () { - var name = "SDIFF"; - - client.del('foo'); - client.sadd('foo', 'x', require_number(1, name)); - client.sadd('foo', 'a', require_number(1, name)); - client.sadd('foo', 'b', require_number(1, name)); - client.sadd('foo', 'c', require_number(1, name)); - - client.sadd('bar', 'c', require_number(1, name)); - - client.sadd('baz', 'a', require_number(1, name)); - client.sadd('baz', 'd', require_number(1, name)); - - client.sdiff('foo', 'bar', 'baz', function (err, values) { - if (err) { - assert.fail(err, name); - } - values.sort(); - assert.equal(values.length, 2, name); - assert.equal(values[0], 'b', name); - assert.equal(values[1], 'x', name); - next(name); - }); -}; - -tests.SDIFFSTORE = function () { - var name = "SDIFFSTORE"; - - client.del('foo'); - client.del('bar'); - client.del('baz'); - client.del('quux'); - - client.sadd('foo', 
'x', require_number(1, name)); - client.sadd('foo', 'a', require_number(1, name)); - client.sadd('foo', 'b', require_number(1, name)); - client.sadd('foo', 'c', require_number(1, name)); - - client.sadd('bar', 'c', require_number(1, name)); - - client.sadd('baz', 'a', require_number(1, name)); - client.sadd('baz', 'd', require_number(1, name)); - - // NB: SDIFFSTORE returns the number of elements in the dstkey - - client.sdiffstore('quux', 'foo', 'bar', 'baz', require_number(2, name)); - - client.smembers('quux', function (err, values) { - if (err) { - assert.fail(err, name); - } - var members = buffers_to_strings(values).sort(); - - assert.deepEqual(members, [ 'b', 'x' ], name); - next(name); - }); -}; - -tests.SMEMBERS = function () { - var name = "SMEMBERS"; - - client.del('foo'); - client.sadd('foo', 'x', require_number(1, name)); - - client.smembers('foo', function (err, members) { - if (err) { - assert.fail(err, name); - } - assert.deepEqual(buffers_to_strings(members), [ 'x' ], name); - }); - - client.sadd('foo', 'y', require_number(1, name)); - - client.smembers('foo', function (err, values) { - if (err) { - assert.fail(err, name); - } - assert.equal(values.length, 2, name); - var members = buffers_to_strings(values).sort(); - - assert.deepEqual(members, [ 'x', 'y' ], name); - next(name); - }); -}; - -tests.SMOVE = function () { - var name = "SMOVE"; - - client.del('foo'); - client.del('bar'); - - client.sadd('foo', 'x', require_number(1, name)); - client.smove('foo', 'bar', 'x', require_number(1, name)); - client.sismember('foo', 'x', require_number(0, name)); - client.sismember('bar', 'x', require_number(1, name)); - client.smove('foo', 'bar', 'x', last(name, require_number(0, name))); -}; - -tests.SINTER = function () { - var name = "SINTER"; - - client.del('sa'); - client.del('sb'); - client.del('sc'); - - client.sadd('sa', 'a', require_number(1, name)); - client.sadd('sa', 'b', require_number(1, name)); - client.sadd('sa', 'c', require_number(1, 
name)); - - client.sadd('sb', 'b', require_number(1, name)); - client.sadd('sb', 'c', require_number(1, name)); - client.sadd('sb', 'd', require_number(1, name)); - - client.sadd('sc', 'c', require_number(1, name)); - client.sadd('sc', 'd', require_number(1, name)); - client.sadd('sc', 'e', require_number(1, name)); - - client.sinter('sa', 'sb', function (err, intersection) { - if (err) { - assert.fail(err, name); - } - assert.equal(intersection.length, 2, name); - assert.deepEqual(buffers_to_strings(intersection).sort(), [ 'b', 'c' ], name); - }); - - client.sinter('sb', 'sc', function (err, intersection) { - if (err) { - assert.fail(err, name); - } - assert.equal(intersection.length, 2, name); - assert.deepEqual(buffers_to_strings(intersection).sort(), [ 'c', 'd' ], name); - }); - - client.sinter('sa', 'sc', function (err, intersection) { - if (err) { - assert.fail(err, name); - } - assert.equal(intersection.length, 1, name); - assert.equal(intersection[0], 'c', name); - }); - - // 3-way - - client.sinter('sa', 'sb', 'sc', function (err, intersection) { - if (err) { - assert.fail(err, name); - } - assert.equal(intersection.length, 1, name); - assert.equal(intersection[0], 'c', name); - next(name); - }); -}; - -tests.SINTERSTORE = function () { - var name = "SINTERSTORE"; - - client.del('sa'); - client.del('sb'); - client.del('sc'); - client.del('foo'); - - client.sadd('sa', 'a', require_number(1, name)); - client.sadd('sa', 'b', require_number(1, name)); - client.sadd('sa', 'c', require_number(1, name)); - - client.sadd('sb', 'b', require_number(1, name)); - client.sadd('sb', 'c', require_number(1, name)); - client.sadd('sb', 'd', require_number(1, name)); - - client.sadd('sc', 'c', require_number(1, name)); - client.sadd('sc', 'd', require_number(1, name)); - client.sadd('sc', 'e', require_number(1, name)); - - client.sinterstore('foo', 'sa', 'sb', 'sc', require_number(1, name)); - - client.smembers('foo', function (err, members) { - if (err) { - 
assert.fail(err, name); - } - assert.deepEqual(buffers_to_strings(members), [ 'c' ], name); - next(name); - }); -}; - -tests.SUNION = function () { - var name = "SUNION"; - - client.del('sa'); - client.del('sb'); - client.del('sc'); - - client.sadd('sa', 'a', require_number(1, name)); - client.sadd('sa', 'b', require_number(1, name)); - client.sadd('sa', 'c', require_number(1, name)); - - client.sadd('sb', 'b', require_number(1, name)); - client.sadd('sb', 'c', require_number(1, name)); - client.sadd('sb', 'd', require_number(1, name)); - - client.sadd('sc', 'c', require_number(1, name)); - client.sadd('sc', 'd', require_number(1, name)); - client.sadd('sc', 'e', require_number(1, name)); - - client.sunion('sa', 'sb', 'sc', function (err, union) { - if (err) { - assert.fail(err, name); - } - assert.deepEqual(buffers_to_strings(union).sort(), ['a', 'b', 'c', 'd', 'e'], name); - next(name); - }); -}; - -tests.SUNIONSTORE = function () { - var name = "SUNIONSTORE"; - - client.del('sa'); - client.del('sb'); - client.del('sc'); - client.del('foo'); - - client.sadd('sa', 'a', require_number(1, name)); - client.sadd('sa', 'b', require_number(1, name)); - client.sadd('sa', 'c', require_number(1, name)); - - client.sadd('sb', 'b', require_number(1, name)); - client.sadd('sb', 'c', require_number(1, name)); - client.sadd('sb', 'd', require_number(1, name)); - - client.sadd('sc', 'c', require_number(1, name)); - client.sadd('sc', 'd', require_number(1, name)); - client.sadd('sc', 'e', require_number(1, name)); - - client.sunionstore('foo', 'sa', 'sb', 'sc', function (err, cardinality) { - if (err) { - assert.fail(err, name); - } - assert.equal(cardinality, 5, name); - }); - - client.smembers('foo', function (err, members) { - if (err) { - assert.fail(err, name); - } - assert.equal(members.length, 5, name); - assert.deepEqual(buffers_to_strings(members).sort(), ['a', 'b', 'c', 'd', 'e'], name); - next(name); - }); -}; - -// SORT test adapted from Brian Hammond's 
redis-node-client.js, which has a comprehensive test suite - -tests.SORT = function () { - var name = "SORT"; - - client.del('y'); - client.del('x'); - - client.rpush('y', 'd', require_number(1, name)); - client.rpush('y', 'b', require_number(2, name)); - client.rpush('y', 'a', require_number(3, name)); - client.rpush('y', 'c', require_number(4, name)); - - client.rpush('x', '3', require_number(1, name)); - client.rpush('x', '9', require_number(2, name)); - client.rpush('x', '2', require_number(3, name)); - client.rpush('x', '4', require_number(4, name)); - - client.set('w3', '4', require_string("OK", name)); - client.set('w9', '5', require_string("OK", name)); - client.set('w2', '12', require_string("OK", name)); - client.set('w4', '6', require_string("OK", name)); - - client.set('o2', 'buz', require_string("OK", name)); - client.set('o3', 'foo', require_string("OK", name)); - client.set('o4', 'baz', require_string("OK", name)); - client.set('o9', 'bar', require_string("OK", name)); - - client.set('p2', 'qux', require_string("OK", name)); - client.set('p3', 'bux', require_string("OK", name)); - client.set('p4', 'lux', require_string("OK", name)); - client.set('p9', 'tux', require_string("OK", name)); - - // Now the data has been setup, we can test. - - // But first, test basic sorting. - - // y = [ d b a c ] - // sort y ascending = [ a b c d ] - // sort y descending = [ d c b a ] - - client.sort('y', 'asc', 'alpha', function (err, sorted) { - if (err) { - assert.fail(err, name); - } - assert.deepEqual(buffers_to_strings(sorted), ['a', 'b', 'c', 'd'], name); - }); - - client.sort('y', 'desc', 'alpha', function (err, sorted) { - if (err) { - assert.fail(err, name); - } - assert.deepEqual(buffers_to_strings(sorted), ['d', 'c', 'b', 'a'], name); - }); - - // Now try sorting numbers in a list. 
- // x = [ 3, 9, 2, 4 ] - - client.sort('x', 'asc', function (err, sorted) { - if (err) { - assert.fail(err, name); - } - assert.deepEqual(buffers_to_strings(sorted), [2, 3, 4, 9], name); - }); - - client.sort('x', 'desc', function (err, sorted) { - if (err) { - assert.fail(err, name); - } - assert.deepEqual(buffers_to_strings(sorted), [9, 4, 3, 2], name); - }); - - // Try sorting with a 'by' pattern. - - client.sort('x', 'by', 'w*', 'asc', function (err, sorted) { - if (err) { - assert.fail(err, name); - } - assert.deepEqual(buffers_to_strings(sorted), [3, 9, 4, 2], name); - }); - - // Try sorting with a 'by' pattern and 1 'get' pattern. - - client.sort('x', 'by', 'w*', 'asc', 'get', 'o*', function (err, sorted) { - if (err) { - assert.fail(err, name); - } - assert.deepEqual(buffers_to_strings(sorted), ['foo', 'bar', 'baz', 'buz'], name); - }); - - // Try sorting with a 'by' pattern and 2 'get' patterns. - - client.sort('x', 'by', 'w*', 'asc', 'get', 'o*', 'get', 'p*', function (err, sorted) { - if (err) { - assert.fail(err, name); - } - assert.deepEqual(buffers_to_strings(sorted), ['foo', 'bux', 'bar', 'tux', 'baz', 'lux', 'buz', 'qux'], name); - }); - - // Try sorting with a 'by' pattern and 2 'get' patterns. - // Instead of getting back the sorted set/list, store the values to a list. - // Then check that the values are there in the expected order. 
- - client.sort('x', 'by', 'w*', 'asc', 'get', 'o*', 'get', 'p*', 'store', 'bacon', function (err) { - if (err) { - assert.fail(err, name); - } - }); - - client.lrange('bacon', 0, -1, function (err, values) { - if (err) { - assert.fail(err, name); - } - assert.deepEqual(buffers_to_strings(values), ['foo', 'bux', 'bar', 'tux', 'baz', 'lux', 'buz', 'qux'], name); - next(name); - }); - - // TODO - sort by hash value -}; - -tests.MONITOR = function () { - var name = "MONITOR", responses = [], monitor_client; - - monitor_client = redis.createClient(); - monitor_client.monitor(function (err, res) { - client.mget("some", "keys", "foo", "bar"); - client.set("json", JSON.stringify({ - foo: "123", - bar: "sdflkdfsjk", - another: false - })); - }); - monitor_client.on("monitor", function (time, args) { - responses.push(args); - if (responses.length === 3) { - assert.strictEqual(1, responses[0].length); - assert.strictEqual("monitor", responses[0][0]); - assert.strictEqual(5, responses[1].length); - assert.strictEqual("mget", responses[1][0]); - assert.strictEqual("some", responses[1][1]); - assert.strictEqual("keys", responses[1][2]); - assert.strictEqual("foo", responses[1][3]); - assert.strictEqual("bar", responses[1][4]); - assert.strictEqual(3, responses[2].length); - assert.strictEqual("set", responses[2][0]); - assert.strictEqual("json", responses[2][1]); - assert.strictEqual('{"foo":"123","bar":"sdflkdfsjk","another":false}', responses[2][2]); - monitor_client.quit(function (err, res) { - next(name); - }); - } - }); -}; - -tests.BLPOP = function () { - var name = "BLPOP"; - - client.rpush("blocking list", "initial value", function (err, res) { - client2.BLPOP("blocking list", 0, function (err, res) { - assert.strictEqual("blocking list", res[0].toString()); - assert.strictEqual("initial value", res[1].toString()); - - client.rpush("blocking list", "wait for this value"); - }); - client2.BLPOP("blocking list", 0, function (err, res) { - assert.strictEqual("blocking 
list", res[0].toString()); - assert.strictEqual("wait for this value", res[1].toString()); - next(name); - }); - }); -}; - -tests.BLPOP_TIMEOUT = function () { - var name = "BLPOP_TIMEOUT"; - - // try to BLPOP the list again, which should be empty. This should timeout and return null. - client2.BLPOP("blocking list", 1, function (err, res) { - if (err) { - throw err; - } - - assert.strictEqual(res, null); - next(name); - }); -}; - -tests.EXPIRE = function () { - var name = "EXPIRE"; - client.set(['expiry key', 'bar'], require_string("OK", name)); - client.EXPIRE(["expiry key", "1"], require_number_pos(name)); - setTimeout(function () { - client.exists(["expiry key"], last(name, require_number(0, name))); - }, 2000); -}; - -tests.TTL = function () { - var name = "TTL"; - client.set(["ttl key", "ttl val"], require_string("OK", name)); - client.expire(["ttl key", "100"], require_number_pos(name)); - setTimeout(function () { - client.TTL(["ttl key"], last(name, require_number_pos(0, name))); - }, 500); -}; - -tests.OPTIONAL_CALLBACK = function () { - var name = "OPTIONAL_CALLBACK"; - client.del("op_cb1"); - client.set("op_cb1", "x"); - client.get("op_cb1", last(name, require_string("x", name))); -}; - -tests.OPTIONAL_CALLBACK_UNDEFINED = function () { - var name = "OPTIONAL_CALLBACK_UNDEFINED"; - client.del("op_cb2"); - client.set("op_cb2", "y", undefined); - client.get("op_cb2", last(name, require_string("y", name))); -}; - -// TODO - need a better way to test auth, maybe auto-config a local Redis server or something. -// Yes, this is the real password. Please be nice, thanks. 
-tests.auth = function () { - var name = "AUTH", client4, ready_count = 0; - - client4 = redis.createClient(9006, "filefish.redistogo.com"); - client4.auth("664b1b6aaf134e1ec281945a8de702a9", function (err, res) { - assert.strictEqual(null, err, name); - assert.strictEqual("OK", res.toString(), name); - }); - - // test auth, then kill the connection so it'll auto-reconnect and auto-re-auth - client4.on("ready", function () { - ready_count++; - if (ready_count === 1) { - client4.stream.destroy(); - } else { - client4.quit(function (err, res) { - next(name); - }); - } - }); -}; - -all_tests = Object.keys(tests); -all_start = new Date(); -test_count = 0; - -run_next_test = function run_next_test() { - var test_name = all_tests.shift(); - if (typeof tests[test_name] === "function") { - util.print('- \x1b[1m' + test_name.toLowerCase() + '\x1b[0m:'); - cur_start = new Date(); - test_count += 1; - tests[test_name](); - } else { - console.log('\n completed \x1b[32m%d\x1b[0m tests in \x1b[33m%d\x1b[0m ms\n', test_count, new Date() - all_start); - client.quit(); - client2.quit(); - } -}; - -client.once("ready", function start_tests() { - console.log("Connected to " + client.host + ":" + client.port + ", Redis server version " + client.server_info.redis_version + "\n"); - console.log("Using reply parser " + client.reply_parser.name); - - run_next_test(); - - connected = true; -}); - -client.on('end', function () { - ended = true; -}); - -// Exit immediately on connection failure, which triggers "exit", below, which fails the test -client.on("error", function (err) { - console.error("client: " + err.stack); - process.exit(); -}); -client2.on("error", function (err) { - console.error("client2: " + err.stack); - process.exit(); -}); -client3.on("error", function (err) { - console.error("client3: " + err.stack); - process.exit(); -}); -client.on("reconnecting", function (params) { - console.log("reconnecting: " + util.inspect(params)); -}); - -process.on('uncaughtException', 
function (err) { - console.error("Uncaught exception: " + err.stack); - process.exit(1); -}); - -process.on('exit', function (code) { - assert.equal(true, connected); - assert.equal(true, ended); -}); diff --git a/tests/buffer_bench.js b/tests/buffer_bench.js deleted file mode 100644 index a504fbc0876..00000000000 --- a/tests/buffer_bench.js +++ /dev/null @@ -1,89 +0,0 @@ -var source = new Buffer(100), - dest = new Buffer(100), i, j, k, tmp, count = 1000000, bytes = 100; - -for (i = 99 ; i >= 0 ; i--) { - source[i] = 120; -} - -var str = "This is a nice String.", - buf = new Buffer("This is a lovely Buffer."); - -var start = new Date(); -for (i = count * 100; i > 0 ; i--) { - if (Buffer.isBuffer(str)) {} -} -var end = new Date(); -console.log("Buffer.isBuffer(str) " + (end - start) + " ms"); - -var start = new Date(); -for (i = count * 100; i > 0 ; i--) { - if (Buffer.isBuffer(buf)) {} -} -var end = new Date(); -console.log("Buffer.isBuffer(buf) " + (end - start) + " ms"); - -var start = new Date(); -for (i = count * 100; i > 0 ; i--) { - if (str instanceof Buffer) {} -} -var end = new Date(); -console.log("str instanceof Buffer " + (end - start) + " ms"); - -var start = new Date(); -for (i = count * 100; i > 0 ; i--) { - if (buf instanceof Buffer) {} -} -var end = new Date(); -console.log("buf instanceof Buffer " + (end - start) + " ms"); - -for (i = bytes ; i > 0 ; i --) { - var start = new Date(); - for (j = count ; j > 0; j--) { - tmp = source.toString("ascii", 0, bytes); - } - var end = new Date(); - console.log("toString() " + i + " bytes " + (end - start) + " ms"); -} - -for (i = bytes ; i > 0 ; i --) { - var start = new Date(); - for (j = count ; j > 0; j--) { - tmp = ""; - for (k = 0; k <= i ; k++) { - tmp += String.fromCharCode(source[k]); - } - } - var end = new Date(); - console.log("manual string " + i + " bytes " + (end - start) + " ms"); -} - -for (i = bytes ; i > 0 ; i--) { - var start = new Date(); - for (j = count ; j > 0 ; j--) { - for (k = i ; k 
> 0 ; k--) { - dest[k] = source[k]; - } - } - var end = new Date(); - console.log("Manual copy " + i + " bytes " + (end - start) + " ms"); -} - -for (i = bytes ; i > 0 ; i--) { - var start = new Date(); - for (j = count ; j > 0 ; j--) { - for (k = i ; k > 0 ; k--) { - dest[k] = 120; - } - } - var end = new Date(); - console.log("Direct assignment " + i + " bytes " + (end - start) + " ms"); -} - -for (i = bytes ; i > 0 ; i--) { - var start = new Date(); - for (j = count ; j > 0 ; j--) { - source.copy(dest, 0, 0, i); - } - var end = new Date(); - console.log("Buffer.copy() " + i + " bytes " + (end - start) + " ms"); -} diff --git a/tests/hiredis_parser.js b/tests/hiredis_parser.js deleted file mode 100644 index f1515b110b6..00000000000 --- a/tests/hiredis_parser.js +++ /dev/null @@ -1,38 +0,0 @@ -var Parser = require('../lib/parser/hiredis').Parser; -var assert = require('assert'); - -/* -This test makes sure that exceptions thrown inside of "reply" event handlers -are not trapped and mistakenly emitted as parse errors. 
-*/ -(function testExecuteDoesNotCatchReplyCallbackExceptions() { - var parser = new Parser(); - var replies = [{}]; - - parser.reader = { - feed: function() {}, - get: function() { - return replies.shift(); - } - }; - - var emittedError = false; - var caughtException = false; - - parser - .on('error', function() { - emittedError = true; - }) - .on('reply', function() { - throw new Error('bad'); - }); - - try { - parser.execute(); - } catch (err) { - caughtException = true; - } - - assert.equal(caughtException, true); - assert.equal(emittedError, false); -})(); diff --git a/tests/reconnect_test.js b/tests/reconnect_test.js deleted file mode 100644 index 7abdd516651..00000000000 --- a/tests/reconnect_test.js +++ /dev/null @@ -1,29 +0,0 @@ -var redis = require("../index").createClient(null, null, { -// max_attempts: 4 -}); - -redis.on("error", function (err) { - console.log("Redis says: " + err); -}); - -redis.on("ready", function () { - console.log("Redis ready."); -}); - -redis.on("reconnecting", function (arg) { - console.log("Redis reconnecting: " + JSON.stringify(arg)); -}); -redis.on("connect", function () { - console.log("Redis connected."); -}); - -setInterval(function () { - var now = Date.now(); - redis.set("now", now, function (err, res) { - if (err) { - console.log(now + " Redis reply error: " + err); - } else { - console.log(now + " Redis reply: " + res); - } - }); -}, 100); diff --git a/tests/stress/codec.js b/tests/stress/codec.js deleted file mode 100644 index 7d764f60728..00000000000 --- a/tests/stress/codec.js +++ /dev/null @@ -1,16 +0,0 @@ -var json = { - encode: JSON.stringify, - decode: JSON.parse -}; - -var MsgPack = require('node-msgpack'); -msgpack = { - encode: MsgPack.pack, - decode: function(str) { return MsgPack.unpack(new Buffer(str)); } -}; - -bison = require('bison'); - -module.exports = json; -//module.exports = msgpack; -//module.exports = bison; diff --git a/tests/stress/pubsub/pub.js b/tests/stress/pubsub/pub.js deleted file mode 
100644 index 0acde7a6eba..00000000000 --- a/tests/stress/pubsub/pub.js +++ /dev/null @@ -1,38 +0,0 @@ -'use strict'; - -var freemem = require('os').freemem; -var profiler = require('v8-profiler'); -var codec = require('../codec'); - -var sent = 0; - -var pub = require('redis').createClient(null, null, { - //command_queue_high_water: 5, - //command_queue_low_water: 1 -}) -.on('ready', function() { - this.emit('drain'); -}) -.on('drain', function() { - process.nextTick(exec); -}); - -var payload = '1'; for (var i = 0; i < 12; ++i) payload += payload; -console.log('Message payload length', payload.length); - -function exec() { - pub.publish('timeline', codec.encode({ foo: payload })); - ++sent; - if (!pub.should_buffer) { - process.nextTick(exec); - } -} - -profiler.takeSnapshot('s_0'); - -exec(); - -setInterval(function() { - profiler.takeSnapshot('s_' + sent); - console.error('sent', sent, 'free', freemem(), 'cmdqlen', pub.command_queue.length, 'offqlen', pub.offline_queue.length); -}, 2000); diff --git a/tests/stress/pubsub/run b/tests/stress/pubsub/run deleted file mode 100755 index bd9ac392539..00000000000 --- a/tests/stress/pubsub/run +++ /dev/null @@ -1,10 +0,0 @@ -#!/bin/sh -node server.js & -node server.js & -node server.js & -node server.js & -node server.js & -node server.js & -node server.js & -node server.js & -node --debug pub.js diff --git a/tests/stress/pubsub/server.js b/tests/stress/pubsub/server.js deleted file mode 100644 index 035e6b74406..00000000000 --- a/tests/stress/pubsub/server.js +++ /dev/null @@ -1,23 +0,0 @@ -'use strict'; - -var freemem = require('os').freemem; -var codec = require('../codec'); - -var id = Math.random(); -var recv = 0; - -var sub = require('redis').createClient() - .on('ready', function() { - this.subscribe('timeline'); - }) - .on('message', function(channel, message) { - var self = this; - if (message) { - message = codec.decode(message); - ++recv; - } - }); - -setInterval(function() { - console.error('id', id, 
'received', recv, 'free', freemem()); -}, 2000); diff --git a/tests/stress/rpushblpop/pub.js b/tests/stress/rpushblpop/pub.js deleted file mode 100644 index 9caf1d0b823..00000000000 --- a/tests/stress/rpushblpop/pub.js +++ /dev/null @@ -1,49 +0,0 @@ -'use strict'; - -var freemem = require('os').freemem; -//var profiler = require('v8-profiler'); -var codec = require('../codec'); - -var sent = 0; - -var pub = require('redis').createClient(null, null, { - //command_queue_high_water: 5, - //command_queue_low_water: 1 -}) -.on('ready', function() { - this.del('timeline'); - this.emit('drain'); -}) -.on('drain', function() { - process.nextTick(exec); -}); - -var payload = '1'; for (var i = 0; i < 12; ++i) payload += payload; -console.log('Message payload length', payload.length); - -function exec() { - pub.rpush('timeline', codec.encode({ foo: payload })); - ++sent; - if (!pub.should_buffer) { - process.nextTick(exec); - } -} - -//profiler.takeSnapshot('s_0'); - -exec(); - -setInterval(function() { - //var ss = profiler.takeSnapshot('s_' + sent); - //console.error(ss.stringify()); - pub.llen('timeline', function(err, result) { - console.error('sent', sent, 'free', freemem(), - 'cmdqlen', pub.command_queue.length, 'offqlen', pub.offline_queue.length, - 'llen', result - ); - }); -}, 2000); - -/*setTimeout(function() { - process.exit(); -}, 30000);*/ diff --git a/tests/stress/rpushblpop/run b/tests/stress/rpushblpop/run deleted file mode 100755 index 8045ae80457..00000000000 --- a/tests/stress/rpushblpop/run +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/sh -node server.js & -#node server.js & -#node server.js & -#node server.js & -node --debug pub.js diff --git a/tests/stress/rpushblpop/server.js b/tests/stress/rpushblpop/server.js deleted file mode 100644 index 9cbcdd9ed75..00000000000 --- a/tests/stress/rpushblpop/server.js +++ /dev/null @@ -1,30 +0,0 @@ -'use strict'; - -var freemem = require('os').freemem; -var codec = require('../codec'); - -var id = Math.random(); -var recv = 
0; - -var cmd = require('redis').createClient(); -var sub = require('redis').createClient() - .on('ready', function() { - this.emit('timeline'); - }) - .on('timeline', function() { - var self = this; - this.blpop('timeline', 0, function(err, result) { - var message = result[1]; - if (message) { - message = codec.decode(message); - ++recv; - } - self.emit('timeline'); - }); - }); - -setInterval(function() { - cmd.llen('timeline', function(err, result) { - console.error('id', id, 'received', recv, 'free', freemem(), 'llen', result); - }); -}, 2000); diff --git a/tests/stress/speed/00 b/tests/stress/speed/00 deleted file mode 100644 index 29d7bf7c5dd..00000000000 --- a/tests/stress/speed/00 +++ /dev/null @@ -1,13 +0,0 @@ -# size JSON msgpack bison -26602 2151.0170848180414 -25542 ? 2842.589272665782 -24835 ? ? 7280.4538397469805 -6104 6985.234528557929 -5045 ? 7217.461392841478 -4341 ? ? 14261.406335354604 -4180 15864.633685636572 -4143 ? 12954.806235781925 -4141 ? ? 44650.70733912719 -75 114227.07313350472 -40 ? 30162.440062810834 -39 ? ? 
119815.66013519121 diff --git a/tests/stress/speed/plot b/tests/stress/speed/plot deleted file mode 100755 index 2563797cf54..00000000000 --- a/tests/stress/speed/plot +++ /dev/null @@ -1,13 +0,0 @@ -#!/bin/sh - -gnuplot >size-rate.jpg << _EOF_ - -set terminal png nocrop enhanced font verdana 12 size 640,480 -set logscale x -set logscale y -set grid -set xlabel 'Serialized object size, octets' -set ylabel 'decode(encode(obj)) rate, 1/sec' -plot '00' using 1:2 title 'json' smooth bezier, '00' using 1:3 title 'msgpack' smooth bezier, '00' using 1:4 title 'bison' smooth bezier - -_EOF_ diff --git a/tests/stress/speed/size-rate.png b/tests/stress/speed/size-rate.png deleted file mode 100644 index c9c2bee6b07..00000000000 Binary files a/tests/stress/speed/size-rate.png and /dev/null differ diff --git a/tests/stress/speed/speed.js b/tests/stress/speed/speed.js deleted file mode 100644 index 8e43cbc03b9..00000000000 --- a/tests/stress/speed/speed.js +++ /dev/null @@ -1,84 +0,0 @@ -var msgpack = require('node-msgpack'); -var bison = require('bison'); -var codec = { - JSON: { - encode: JSON.stringify, - decode: JSON.parse - }, - msgpack: { - encode: msgpack.pack, - decode: msgpack.unpack - }, - bison: bison -}; - -var obj, l; - -var s = '0'; -for (var i = 0; i < 12; ++i) s += s; - -obj = { - foo: s, - arrrrrr: [{a:1,b:false,c:null,d:1.0}, 1111, 2222, 33333333], - rand: [], - a: s, - ccc: s, - b: s + s + s -}; -for (i = 0; i < 100; ++i) obj.rand.push(Math.random()); -forObj(obj); - -obj = { - foo: s, - arrrrrr: [{a:1,b:false,c:null,d:1.0}, 1111, 2222, 33333333], - rand: [] -}; -for (i = 0; i < 100; ++i) obj.rand.push(Math.random()); -forObj(obj); - -obj = { - foo: s, - arrrrrr: [{a:1,b:false,c:null,d:1.0}, 1111, 2222, 33333333], - rand: [] -}; -forObj(obj); - -obj = { - arrrrrr: [{a:1,b:false,c:null,d:1.0}, 1111, 2222, 33333333], - rand: [] -}; -forObj(obj); - -function run(obj, codec) { - var t1 = Date.now(); - var n = 10000; - for (var i = 0; i < n; ++i) { - codec.decode(l 
= codec.encode(obj)); - } - var t2 = Date.now(); - //console.log('DONE', n*1000/(t2-t1), 'codecs/sec, length=', l.length); - return [n*1000/(t2-t1), l.length]; -} - -function series(obj, cname, n) { - var rate = 0; - var len = 0; - for (var i = 0; i < n; ++i) { - var r = run(obj, codec[cname]); - rate += r[0]; - len += r[1]; - } - rate /= n; - len /= n; - console.log(cname + ' ' + rate + ' ' + len); - return [rate, len]; -} - -function forObj(obj) { - var r = { - JSON: series(obj, 'JSON', 20), - msgpack: series(obj, 'msgpack', 20), - bison: series(obj, 'bison', 20) - }; - return r; -} diff --git a/tests/sub_quit_test.js b/tests/sub_quit_test.js deleted file mode 100644 index ad1f413228a..00000000000 --- a/tests/sub_quit_test.js +++ /dev/null @@ -1,18 +0,0 @@ -var client = require("redis").createClient(), - client2 = require("redis").createClient(); - -client.subscribe("something"); -client.on("subscribe", function (channel, count) { - console.log("Got sub: " + channel); - client.unsubscribe("something"); -}); - -client.on("unsubscribe", function (channel, count) { - console.log("Got unsub: " + channel + ", quitting"); - client.quit(); -}); - -// exercise unsub before sub -client2.unsubscribe("something"); -client2.subscribe("another thing"); -client2.quit(); diff --git a/tsconfig.base.json b/tsconfig.base.json new file mode 100644 index 00000000000..d4a631fc008 --- /dev/null +++ b/tsconfig.base.json @@ -0,0 +1,21 @@ +{ + "compilerOptions": { + "lib": ["ES2023"], + "module": "NodeNext", + "moduleResolution": "NodeNext", + "target": "ES2022", + + "strict": true, + "forceConsistentCasingInFileNames": true, + "noUnusedLocals": true, + "esModuleInterop": true, + "skipLibCheck": true, + + "composite": true, + "sourceMap": true, + "declaration": true, + "declarationMap": true, + "allowJs": true, + "resolveJsonModule": true + } +} diff --git a/tsconfig.json b/tsconfig.json new file mode 100644 index 00000000000..180b3fc2ba7 --- /dev/null +++ b/tsconfig.json @@ -0,0 +1,29 
@@ +{ + "files": [], + "references": [ + { + "path": "./packages/client" + }, + { + "path": "./packages/test-utils" + }, + { + "path": "./packages/bloom" + }, + { + "path": "./packages/json" + }, + { + "path": "./packages/search" + }, + { + "path": "./packages/time-series" + }, + { + "path": "./packages/entraid" + }, + { + "path": "./packages/redis" + } + ] +}